CONCURRENCY! Aha — a PARALLELIZED moment of experience, sketched in imaginary pseudocode (*see note at bottom)
// Pseudocode sketch of one "moment of experience".
// NOTE(review): this is explicitly imaginary pseudocode — every UPPER_SNAKE
// identifier (ENVIRONMENT, SENSORY, GOALS, ...) and every helper function
// (receive, update, regulate, control, trigger, store, retrieve, select,
// exchange, execute) is notional and defined nowhere in this file.
// Per the note at the bottom of the file, each commented section below is
// meant to be independent and runnable concurrently; only the final
// execute(ACTIONS) depends on the results of the preceding sections.
// Sensory processing section.
// Each sense modality is modeled as an independent channel read from the
// environment — five reads with no cross-dependencies, hence parallelizable.
vector<float> eyes = receive(ENVIRONMENT, EYES);
vector<float> ears = receive(ENVIRONMENT, EARS);
vector<float> nose = receive(ENVIRONMENT, NOSE);
vector<float> tongue = receive(ENVIRONMENT, TONGUE);
vector<float> skin = receive(ENVIRONMENT, SKIN);
// Interpretation of sensory information section.
// Priors, expectations, and emotions are each updated from the (notional)
// aggregate SENSORY input — presumably the fusion of the five channels above,
// though no fusion step is shown here.
matrix<float> prior_knowledge = update(PRIOR_KNOWLEDGE, SENSORY);
matrix<float> expectations = update(EXPECTATIONS, SENSORY);
matrix<float> emotions = update(EMOTIONS, SENSORY);
// Emotion regulation section.
// Emotion is modeled on two scalar axes — intensity (how strong) and
// valence (positive/negative) — each regulated by the same three inputs.
float intensity = regulate(INTENSITY, COGNITIVE_STRATEGIES, SOCIAL_ENVIRONMENTAL_CUES, PHYSIOLOGICAL_AROUSAL);
float valence = regulate(VALENCE, COGNITIVE_STRATEGIES, SOCIAL_ENVIRONMENTAL_CUES, PHYSIOLOGICAL_AROUSAL);
// Attention section.
// Two mutually exclusive modes: voluntary top-down focus (selective
// attention) vs. stimulus-driven bottom-up salience. Only one of the two
// vectors is populated per moment; the other stays empty.
vector<float> focus;
vector<float> salience;
if (ATTENTION_TYPE == SELECTIVE) {
focus = control(FOCUS, VOLUNTARY);
} else {
salience = trigger(SALIENCE, SALIENT_STIMULI);
}
// Memory section.
// Storage and retrieval are modeled as two independent operations on the
// same INFORMATION — written and read in the same moment.
matrix<float> storage = store(STORAGE, INFORMATION);
matrix<float> retrieval = retrieve(RETRIEVAL, INFORMATION);
// Communication section.
// Choose a medium and a protocol, then perform the exchange itself.
matrix<float> media = select(MEDIA, INFORMATION);
matrix<float> protocols = select(PROTOCOLS, INFORMATION);
matrix<float> interactions = exchange(INTERACTIONS, INFORMATION);
// Decision making section.
// Goals, current state, and actions are mutually coupled: each is derived
// from the (notional) constants naming the others. As written there is no
// ordering dependency between the three lines — the lowercase results are
// never read, only the UPPER_SNAKE placeholders are.
matrix<float> goals = update(GOALS, CURRENT_STATE);
matrix<float> current_state = update(CURRENT_STATE, GOALS, ACTIONS);
matrix<float> actions = select(ACTIONS, GOALS, CURRENT_STATE);
// Execution section.
// The only step with a true dependency on everything above (see bottom note).
execute(ACTIONS);
* “The sensory processing, interpretation of sensory information, emotion regulation, attention, memory, communication, and decision making sections can all be executed concurrently, while the execution section would depend on the results of the previous sections.” — ChatGPT