# -*- coding: utf-8 -*-
"""
COPYRIGHT (C) 2020-2023 NEW ENTITY OPERATIONS INC. ALL RIGHTS RESERVED
INSTANCE: core_architect
MODIFIED: 2023/05/30
OVERVIEW:

core_architect is a module to establish handlers for various digital
groupings and class structures of any desired context.

You start with a simple object example of a commonly accepted class grouping
pattern, and you're able to extend this and build into it as needed.

When you build out of it, you can make the system 'ingest_commands'
 based on types set and enforced at runtime here.

For those groupings that are run dynamically, use quantified/qualifying
 output formatters at data-output according to the base example methods
 provided.

VECTOR_BUCKETS can be imported as needed to store and handle in-transit
behavior as a BUCKET_*VECTOR_TYPE* instance

The first example is a basic online digital card system:

It operates according to a transaction/digital-origin format.
"""
__version__ = "0.0.8"
__author__ = "Ryan McKenna"
__copyright__ = "Copyright (C) 2020-2023 New Entity Operations Inc."
__credits__ = [
 "Ryan McKenna",
 "New Entity Operations Inc.", "New Entity Operations, LLC"]
__email__ = "Operator@NewEntityOperations.com"
__license__ = "New Entity License"
__maintainer__ = "Ryan McKenna"
__status__ = "Production"

## MODE-> facilities
from MODE.facilities import (BUCKET_AMOUNT, BUCKET_CARD, BUCKET_CODE,
 BUCKET_DATE, BUCKET_FLUSH, BUCKET_ID, BUCKET_SLUG, BUCKET_TO, BUCKET_TYPE,
 BUCKET_Z_INDEX_LOCATOR, Creader, CReal, Cure, oFo, PATH_INSTANCE, SDT,
 terms_architect, transactions, VDT)

## MODE-> debug_architect
from MODE.debug_architect import DEBUG_ARCHITECT

## Imports: Custom
from core_middlelayer import (DIRDATA, OUTPUT_ENTITY, OUTPUT_ENTITY_CHECK,
 OUTPUT_ENTITY_FRESH, OUTPUT_ENTITY_UPDATE, OUTPUT_FLUSH, OUTPUT_STRUCTURE,
 OUTPUT_UPDATE_SLUG, OUTPUT_VECTORIZED_DATA, OVD, SLUG_BLOB_OBJECT)

## Imports: OpenPackager->Internal
from OPENPACKAGER.internal.ingest_commands import (DigitalTransactionObject,
 Vectorize)

## OPENPACKAGER.internal.constructed_class
from OPENPACKAGER.internal.constructed_class import (blob_object)

## header_fields_now is dynamically set by the loaded constructor: Defaults to 0
header_fields_now = 0

## Construct the default header object to orient the reader
ConstructedClassDTO = DigitalTransactionObject(
 VDT['KEY_ID'],
 VDT['KEY_CODE'],
 VDT['KEY_PAYMENT_ORIGIN'],
 VDT['KEY_DESCRIPTION'],
 VDT['KEY_AMOUNT'],
 VDT['KEY_DATETIME_THEN'],
 VDT['KEY_TO'])

## Localize a default KeyConstructor for the output_structure.es phase
## The output_structure contains the Constructor, which can be applied to
## enforcing *Object headers
DEFAULT_KEY_CONSTRUCTOR = ConstructedClassDTO

## Write the default key constructor phase to the in-transit output_structure
def SetKeyConstructor(STRUCTURE=DEFAULT_KEY_CONSTRUCTOR):
 with open(PATH_INSTANCE+DIRDATA+OUTPUT_STRUCTURE, oFo.write) as file:
  file.write(str(STRUCTURE))
 ## Report the basic ConstructedClass via DEBUG_ARCHITECT: Default is the
 ## DTO (Digital Transaction Object)
 DEBUG_ARCHITECT.set_key_constructor(
  PREFACE=terms_architect.wrote_constructor_structure, STRUCTURE=STRUCTURE)

## Constructor runner
SetKeyConstructor()
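
## Illustrative (an assumption about intended use, not part of the original
## runtime flow): any *Object constructor can be written to the
## output_structure.es phase by passing it explicitly, e.g.
## SetKeyConstructor(STRUCTURE=ConstructedClassDTO)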

## Establish Dynamic Bucket Classes
class AddDynamicValue:
 """
 The base class that can be expanded on to follow the add, edit, delete
 format.

 Method names should follow the *DESCRIPTION*_*TYPE* convention: Conventions
 are set to strict input mode
 """
 def Transaction_Digital(code="", payment_origin="", description="",
  amount=CReal.no, datetime_then="", to="") -> None:
  """
  Common data setup methods should also have an associated VECTOR bucket
  imported, i.e. transactions.
  code-> A specific short-code for the transaction
  payment_origin-> A descriptive name of a digital-transaction origin silo
  description-> A type of digital-transaction
  amount-> A value amount that represents a unit of a commonly accepted
   end-currency system, such as USD
  datetime_then-> A date associated with this transaction. You can also get
   more specific if needed, and require time units down to a specific
   schedule. Overall, this is the machine or general time of the
   digital-transaction
  to-> The party that receives the outcome of this digital-transaction

  When you gain access to a NEW_DYNAMIC_VALUE, you also gain access to
  each meta-instance, such as NEW_DYNAMIC_VALUE.code and more.

  core_architect.AddDynamicValue.Transaction_Digital(
   code="A01", payment_origin="RJM", description="For services",
   amount=300000, datetime_then="05/31/2023", to="John")

  Then, access the BUCKET_SLUG:
  BUCKET_SLUG[-1]
  BUCKET_SLUG[-1].amount
  """
  if code==Cure.muted:
   code = input(terms_architect.enter_code)
  if payment_origin==Cure.muted:
   payment_origin = input(terms_architect.enter_payment_origin)
  if description==Cure.muted:
   description = input(terms_architect.enter_description)
  if amount==CReal.no:
   amount = input(terms_architect.enter_amount)
  if datetime_then==Cure.muted:
   datetime_then = input(terms_architect.enter_datetime_then)
  if to==Cure.muted:
   to = input(terms_architect.who_to)
  DEBUG_ARCHITECT.add_dynamic_value_transaction(TYPE="Digital",
   AMOUNT=amount, CODE=code, DATETIME_THEN=datetime_then,
   DESCRIPTION=description, PAYMENT_ORIGIN=payment_origin, TO=to)

  ## Create a new dynamic value
  NEW_DYNAMIC_VALUE = DigitalTransactionObject(id=0, code=code,
   payment_origin=payment_origin, description=description, amount=amount,
   datetime_then=datetime_then, to=to)

  ## Append the new dynamic values to the bucket of constructor slugs
  BUCKET_SLUG.append(NEW_DYNAMIC_VALUE)
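
## Illustrative usage sketch (commented out so it doesn't run on import).
## Assuming Cure.muted is the empty-string sentinel used to wipe phases
## elsewhere in this module, calling the handler with no arguments walks
## through the terms_architect prompts, while keyword arguments (as in the
## docstring example above) skip the prompts:
## AddDynamicValue.Transaction_Digital()
## BUCKET_SLUG[-1].amount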

## Retrieve vectorized_data by keys
def vectorize_data(by_key, header_fields=header_fields_now):
 """
 Open the output_vectorized_data.es instance and read the contents. Upon
 completion, dump them into the OUTPUT_ENTITY phase. Data should only be
 dumped to disk or long-term storage through OUTPUT_ENTITY.es

 OUTPUT_ENTITY.es can be stored through the interface
 Search for the 1st key, on the structured field type, with no headers:
 core_architect.vectorize_data(by_key=1)
 Search for the 1st key, on the structured field type, with 4 header field rows:
 core_architect.vectorize_data(by_key=1, header_fields=4)
 """
 Vectorize.dataDTO(by_key, header_fields)

## Id
class Id:
 """
 'BUCKET_ID' is empty on initialization

 In the event that there are no initializer fields, it still gets set up
 """
 def __init__(self):
  ## Mock: Setup code goes here.
  pass

 def zoom():
  """
  On the first run, ca.Id.zoom() checks for no value and writes the null
  value 0 to BUCKET_ID.

  Afterwards, whenever it runs, you're setting an ID of your choosing.

  The Id is then used by accessing BUCKET_ID to zoom into dynamic entity
   instances at a specified position

  A value of 0 or empty will setup the default key constructor.
  """
  if len(BUCKET_ID) < CReal.yes:
   BUCKET_ID.append(CReal.no)
  else:
   if BUCKET_ID[CReal.no] == CReal.no:
    BUCKET_ID.clear()
    FormIdInput = input(terms_architect.add_id)
    BUCKET_ID.append(FormIdInput)
    vectorize_data(by_key=int(FormIdInput))
   else:
    FormIdInput = input(terms_architect.add_id)
    BUCKET_ID.append(FormIdInput)
    vectorize_data(by_key=int(FormIdInput))
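
## Illustrative usage (assumes this module is imported as `ca`, matching the
## zoom docstring; commented out so it doesn't run on import):
## import core_architect as ca
## ca.Id.zoom() ## First call seeds BUCKET_ID with the null value 0
## ca.Id.zoom() ## Later calls prompt for an ID and vectorize_data by that key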

## RUNTIME-> WATERFALL
## Execute various default operations
def wipe_output_entities_fresh():
 """
 Clear the output_entity.es instance: This is the generic I/O bucket
 """
 with open(PATH_INSTANCE+DIRDATA+OUTPUT_ENTITY, oFo.write) as file:
  file.write(Cure.muted)
 print(terms_architect.wiped_records)

def wipe_output_flush_fresh():
 """
 Clear the output_flush.es instance: This is the generic I/O bucket
 """
 with open(PATH_INSTANCE+DIRDATA+OUTPUT_FLUSH, oFo.write) as file:
  file.write(Cure.muted)
 print(terms_architect.wiped_records)

def update_entity_output():
 """
 Write the current BUCKET_SLUG constructors to the output_entity.es instance
 """
 with open(PATH_INSTANCE+DIRDATA+OUTPUT_ENTITY, oFo.write) as file:
  for i in BUCKET_SLUG:
   file.write(str(i))

def entity():
 """
 Default entity task runner: Extend as necessary
 """
 update_entity_output()

def update_slug():
 """
 Take *Constructors that are in the BUCKET_SLUG and push them to the flush
 phase. If they aren't stored from the flush phase, they'll be dropped
 and not executed into the database schema
 """
 ## Write the temporary OUTPUT_SLUG.es phase
 with open(PATH_INSTANCE+DIRDATA+OUTPUT_UPDATE_SLUG, oFo.write) as file:
  for i in BUCKET_SLUG:
   file.write(str(i))

 for i in BUCKET_SLUG:
  BUCKET_FLUSH.append(i)
 ## Reset the BUCKET_SLUG phase and prepare it for new operations
 BUCKET_SLUG.clear()

def slug():
 update_slug()

## destroy the flush buckets and write to a flush phase
def flush():
 ## Clear the temporary output_slug phase
 with open(PATH_INSTANCE+DIRDATA+OUTPUT_UPDATE_SLUG, oFo.write) as file:
  file.write(Cure.muted)
  print(terms_architect.wiped_records)
 ## Commit the OUTPUT_FLUSH phase to storage
 with open(PATH_INSTANCE+DIRDATA+OUTPUT_FLUSH, oFo.append_text) as file:
  for i in BUCKET_FLUSH:
   file.write(str(i))
  BUCKET_FLUSH.clear()
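
## Illustrative slug->flush lifecycle sketch (commented out so it doesn't run
## on import; the example values mirror the Transaction_Digital docstring):
## AddDynamicValue.Transaction_Digital(
##  code="A01", payment_origin="RJM", description="For services",
##  amount=300000, datetime_then="05/31/2023", to="John")
## slug()  ## Push the BUCKET_SLUG constructors into BUCKET_FLUSH
## flush() ## Commit BUCKET_FLUSH to the OUTPUT_FLUSH phase and clear it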

###############################################################################
## Dynamic operations
################################################################################
## check_entity
def check_entity():
 """
 Check for various attributes. This could be how many active entities are loaded
 into the architect creator phase OUTPUT_ENTITY.es

 check_entity() provides you with the number of entity items by default
 """
 total = CReal.no
 with open(PATH_INSTANCE+DIRDATA+OUTPUT_ENTITY, oFo.read) as file:
  for line in file:
   try:
    total = total+1
   except ValueError:
    print(terms_architect.value_not_a_number.format(line))
  print(terms_architect.records_in_entity.format(total))

## Generate desired field-based outputs
class Generate:
 """
 Generate the *TYPE*_*KIND*
 """
 ## Additional KINDS here
 def transactions_write_ready():
  """
  Read from the injectsed blob_value.es instance
  """
  Z_KEYS = list(blob_object.keys())
  print("KEYS-> "+str(Z_KEYS))
  ## Also populate the transaction bucket
  with open(PATH_INSTANCE+DIRDATA+OVD, oFo.write) as OVD_INSTANCE:
   for i in enumerate(blob_object):
    KEY_NOW = blob_object[i[1]]
    print("KEY NOW-> "+str(KEY_NOW))
    OVD_INSTANCE.write(str(KEY_NOW))
  OVD_INSTANCE.close()

 def transactions_from_blob():
  """
  Read transaction from the blob, and populate the transaction bucket with raw
  inputs
  """
  ## Clear way for the updated transactions
  transactions.clear()
  with open(PATH_INSTANCE+SLUG_BLOB_OBJECT, oFo.read) as BLOB_INSTANCE:
   CREADER = Creader(BLOB_INSTANCE)
   for i in CREADER:
    transactions.append(i)
  BLOB_INSTANCE.close()

 def write_from_transactions_to_entity():
  with open(PATH_INSTANCE+DIRDATA+OUTPUT_ENTITY, oFo.write) as file:
   for line in transactions:
    file.write(str(line)+"\n")

 def Z_index(INDEX=0) -> None:
  """
  Generate a Z-index of all the related-blob values
  core_architect.Generate.Z_index(INDEX=4)
  """
  OUT_OF_RANGE = 0
  BUCKET_Z_INDEX_LOCATOR.clear()
  if INDEX==0:
   header='id'
  elif INDEX==1:
   header='code'
  elif INDEX==2:
   header='payment_origin'
  elif INDEX==3:
   header='description'
  elif INDEX==4:
   header='amount'
  elif INDEX==5:
   header='datetime_then'
  elif INDEX==6:
   header='to'
  else:
   header=""
  for i in transactions:
   try:
    zX = i[INDEX]
    ## Add each value to the Z-index locator bucket
    BUCKET_Z_INDEX_LOCATOR.append(zX)
   except IndexError:
    print("That Z_INDEX is out of range.")
    OUT_OF_RANGE = 1
    break
  if OUT_OF_RANGE==0:
   print("Z_INDEX-> Header '"+str(header)+"' Located")
   for i in BUCKET_Z_INDEX_LOCATOR:
    print(i)
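
## Illustrative usage (commented out): once transactions_from_blob() has
## populated the transactions bucket, INDEX=4 maps to the 'amount' column per
## Z_index above, and the located values land in BUCKET_Z_INDEX_LOCATOR:
## Generate.transactions_from_blob()
## Generate.Z_index(INDEX=4)
## for value in BUCKET_Z_INDEX_LOCATOR:
##  print(value)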

###############################################################################
## Automate routines
################################################################################
def automate():
 print(terms_architect.starting_automate)
 wipe_output_entities_fresh()
 wipe_output_flush_fresh()
 Generate.transactions_from_blob()
 Generate.transactions_write_ready()
 Generate.write_from_transactions_to_entity()
 check_entity()

## Runner: automate
automate()