From d91ffb4fad345f04c289e91a3978bbe5df1f9783 Mon Sep 17 00:00:00 2001
From: Dillon Smith
Date: Mon, 20 Apr 2020 10:20:14 -0400
Subject: [PATCH 01/24] RPC: DeliveryConditions and pipeline implemented

---
 psyneulink/core/components/component.py    |  15 +-
 psyneulink/core/globals/context.py         |  10 +-
 psyneulink/core/globals/graph_pb2.py       | 804 +++++++++++++++++++++
 psyneulink/core/globals/graph_pb2_grpc.py  | 165 +++++
 psyneulink/core/globals/log.py             |  73 +-
 psyneulink/core/globals/parameters.py      |  78 +-
 psyneulink/core/globals/protos/graph.proto |  88 +++
 7 files changed, 1225 insertions(+), 8 deletions(-)
 create mode 100644 psyneulink/core/globals/graph_pb2.py
 create mode 100644 psyneulink/core/globals/graph_pb2_grpc.py
 create mode 100644 psyneulink/core/globals/protos/graph.proto

diff --git a/psyneulink/core/globals/graph_pb2.py b/psyneulink/core/globals/graph_pb2.py
diff --git a/psyneulink/core/components/component.py b/psyneulink/core/components/component.py
index 159782e45f4..eac0e343d57 100644
--- a/psyneulink/core/components/component.py
+++ b/psyneulink/core/components/component.py
@@ -463,7 +463,7 @@
     MODEL_SPEC_ID_PARAMETER_VALUE, MODEL_SPEC_ID_INPUT_PORTS, MODEL_SPEC_ID_OUTPUT_PORTS, \
     MODULATORY_SPEC_KEYWORDS, NAME, OUTPUT_PORTS, PARAMS, PREFS_ARG, \
     REINITIALIZE_WHEN, SIZE, VALUE, VARIABLE
-from psyneulink.core.globals.log import LogCondition
+from psyneulink.core.globals.log import LogCondition, DeliveryCondition
 from psyneulink.core.scheduling.time import Time, TimeScale
 from psyneulink.core.globals.parameters import Defaults, Parameter, ParameterAlias, ParameterError, ParametersBase, copy_parameter_value
 from psyneulink.core.globals.preferences.basepreferenceset import BasePreferenceSet, VERBOSE_PREF
@@ -2838,6 +2838,19 @@ def set_log_conditions(self, items, log_condition=LogCondition.EXECUTION):
         """
         self.log.set_log_conditions(items=items, log_condition=log_condition)

+    def set_delivery_conditions(self, items, delivery_condition=DeliveryCondition.EXECUTION):
+        """
+        set_delivery_conditions(          \
+            items                         \
+            delivery_condition=EXECUTION  \
+        )
+
+        Specifies items to be delivered to an external application via gRPC; these must be `loggable_items `
+        of the Component's `log `. This is a convenience method that calls the `set_delivery_conditions `
+        method of the Component's `log `.
+        """
+        self.log.set_delivery_conditions(items=items, delivery_condition=delivery_condition)
+
     def log_values(self, entries):
         """
         log_values(   \
diff --git a/psyneulink/core/globals/context.py b/psyneulink/core/globals/context.py
index cc639a15c61..26e40cdf83a 100644
--- a/psyneulink/core/globals/context.py
+++ b/psyneulink/core/globals/context.py
@@ -88,6 +88,7 @@
 import warnings
 from collections import defaultdict, namedtuple
+from queue import Queue

 import typecheck as tc

@@ -326,6 +327,10 @@ class Context():
         references it, but it is possible that future uses will involve other messages. Note that this is
         *not* the same as the `flags_string ` attribute (see `note `).

+    rpc_pipeline : Queue
+        queue to populate with messages for the external environment in cases where execution was triggered via an RPC call
+        (e.g. through PsyNeuLinkView).
+ """ __name__ = 'Context' @@ -340,7 +345,9 @@ def __init__(self, source=ContextFlags.NONE, runmode=ContextFlags.DEFAULT_MODE, execution_id=None, - string:str='', time=None): + string:str='', + time=None, + rpc_pipeline:Queue=None): self.owner = owner self.composition = composition @@ -362,6 +369,7 @@ def __init__(self, self.execution_id = execution_id self.execution_time = None self.string = string + self.rpc_pipeline = rpc_pipeline __deepcopy__ = get_deepcopy_with_shared(_deepcopy_shared_keys) diff --git a/psyneulink/core/globals/graph_pb2.py b/psyneulink/core/globals/graph_pb2.py new file mode 100644 index 00000000000..0e48a6e6947 --- /dev/null +++ b/psyneulink/core/globals/graph_pb2.py @@ -0,0 +1,804 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: graph.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='graph.proto', + package='graph', + syntax='proto3', + serialized_options=None, + serialized_pb=_b('\n\x0bgraph.proto\x12\x05graph\"\x0e\n\x0cNullArgument\"\x1e\n\x0cHealthStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\"\x17\n\x07PNLPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"\x1a\n\nScriptPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"*\n\x12ScriptCompositions\x12\x14\n\x0c\x63ompositions\x18\x01 \x03(\t\"\x19\n\tGraphName\x12\x0c\n\x04name\x18\x01 \x01(\t\"3\n\tGraphJSON\x12\x13\n\x0bobjectsJSON\x18\x01 \x01(\t\x12\x11\n\tstyleJSON\x18\x02 \x01(\t\"\x1e\n\tStyleJSON\x12\x11\n\tstyleJSON\x18\x01 \x01(\t\"<\n\x0c\x44oubleMatrix\x12\x0c\n\x04rows\x18\x01 \x01(\r\x12\x0c\n\x04\x63ols\x18\x02 \x01(\r\x12\x10\n\x04\x64\x61ta\x18\x03 \x03(\x01\x42\x02\x10\x01\"x\n\x05\x45ntry\x12\x15\n\rcomponentName\x18\x01 \x01(\t\x12\x15\n\rparameterName\x18\x02 \x01(\t\x12\x0c\n\x04time\x18\x03 \x01(\t\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\t\x12\"\n\x05value\x18\x05 \x01(\x0b\x32\x13.graph.DoubleMatrix\"c\n\tServePref\x12\x15\n\rcomponentName\x18\x01 \x01(\t\x12\x15\n\rparameterName\x18\x02 \x01(\t\x12(\n\tcondition\x18\x03 \x01(\x0e\x32\x15.graph.serveCondition\"4\n\nServePrefs\x12&\n\x0cservePrefSet\x18\x01 \x03(\x0b\x32\x10.graph.ServePref\"\xac\x01\n\rRunTimeParams\x12\x30\n\x06inputs\x18\x01 \x03(\x0b\x32 .graph.RunTimeParams.InputsEntry\x12%\n\nservePrefs\x18\x02 \x01(\x0b\x32\x11.graph.ServePrefs\x1a\x42\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\"\n\x05value\x18\x02 
\x01(\x0b\x32\x13.graph.DoubleMatrix:\x02\x38\x01*\x92\x01\n\x0eserveCondition\x12\x12\n\x0eINITIALIZATION\x10\x00\x12\x0e\n\nVALIDATION\x10\x01\x12\r\n\tEXECUTION\x10\x02\x12\x0e\n\nPROCESSING\x10\x03\x12\x0c\n\x08LEARNING\x10\x04\x12\x0b\n\x07\x43ONTROL\x10\x05\x12\x0e\n\nSIMULATION\x10\x06\x12\t\n\x05TRIAL\x10\x07\x12\x07\n\x03RUN\x10\x08\x32\xe3\x03\n\nServeGraph\x12\x36\n\rLoadCustomPnl\x12\x0e.graph.PNLPath\x1a\x13.graph.NullArgument\"\x00\x12<\n\nLoadScript\x12\x11.graph.ScriptPath\x1a\x19.graph.ScriptCompositions\"\x00\x12\x35\n\x0cLoadGraphics\x12\x11.graph.ScriptPath\x1a\x10.graph.StyleJSON\"\x00\x12\x43\n\x0fGetCompositions\x12\x13.graph.NullArgument\x1a\x19.graph.ScriptCompositions\"\x00\x12/\n\x07GetJSON\x12\x10.graph.GraphName\x1a\x10.graph.GraphJSON\"\x00\x12\x39\n\x0bHealthCheck\x12\x13.graph.NullArgument\x1a\x13.graph.HealthStatus\"\x00\x12=\n\x10UpdateStylesheet\x12\x10.graph.StyleJSON\x1a\x13.graph.NullArgument\"\x00(\x01\x12\x38\n\x0eRunComposition\x12\x14.graph.RunTimeParams\x1a\x0c.graph.Entry\"\x00\x30\x01\x62\x06proto3') +) + +_SERVECONDITION = _descriptor.EnumDescriptor( + name='serveCondition', + full_name='graph.serveCondition', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='INITIALIZATION', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VALIDATION', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EXECUTION', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROCESSING', index=3, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LEARNING', index=4, number=4, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONTROL', index=5, number=5, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SIMULATION', index=6, number=6, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TRIAL', index=7, number=7, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RUN', index=8, number=8, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=794, + serialized_end=940, +) +_sym_db.RegisterEnumDescriptor(_SERVECONDITION) + +serveCondition = enum_type_wrapper.EnumTypeWrapper(_SERVECONDITION) +INITIALIZATION = 0 +VALIDATION = 1 +EXECUTION = 2 +PROCESSING = 3 +LEARNING = 4 +CONTROL = 5 +SIMULATION = 6 +TRIAL = 7 +RUN = 8 + + + +_NULLARGUMENT = _descriptor.Descriptor( + name='NullArgument', + full_name='graph.NullArgument', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=22, + serialized_end=36, +) + + +_HEALTHSTATUS = _descriptor.Descriptor( + name='HealthStatus', + full_name='graph.HealthStatus', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='status', full_name='graph.HealthStatus.status', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=38, + serialized_end=68, +) + + +_PNLPATH = _descriptor.Descriptor( + name='PNLPath', + full_name='graph.PNLPath', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='graph.PNLPath.path', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=70, + serialized_end=93, +) + + +_SCRIPTPATH = _descriptor.Descriptor( + name='ScriptPath', + full_name='graph.ScriptPath', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='graph.ScriptPath.path', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=95, + serialized_end=121, +) + + +_SCRIPTCOMPOSITIONS = _descriptor.Descriptor( + name='ScriptCompositions', + full_name='graph.ScriptCompositions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='compositions', full_name='graph.ScriptCompositions.compositions', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=123, + serialized_end=165, +) + + +_GRAPHNAME = _descriptor.Descriptor( + name='GraphName', + full_name='graph.GraphName', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='graph.GraphName.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=167, + serialized_end=192, +) + + +_GRAPHJSON = _descriptor.Descriptor( + name='GraphJSON', + full_name='graph.GraphJSON', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='objectsJSON', full_name='graph.GraphJSON.objectsJSON', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='styleJSON', full_name='graph.GraphJSON.styleJSON', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=194, + serialized_end=245, +) + + +_STYLEJSON = _descriptor.Descriptor( + name='StyleJSON', + full_name='graph.StyleJSON', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='styleJSON', full_name='graph.StyleJSON.styleJSON', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=247, + serialized_end=277, +) + + +_DOUBLEMATRIX = _descriptor.Descriptor( + name='DoubleMatrix', + full_name='graph.DoubleMatrix', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='rows', full_name='graph.DoubleMatrix.rows', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='cols', full_name='graph.DoubleMatrix.cols', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='data', full_name='graph.DoubleMatrix.data', index=2, + number=3, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=_b('\020\001'), file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=279, + serialized_end=339, +) + + +_ENTRY = _descriptor.Descriptor( + name='Entry', + full_name='graph.Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='componentName', full_name='graph.Entry.componentName', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='parameterName', full_name='graph.Entry.parameterName', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='time', full_name='graph.Entry.time', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='context', full_name='graph.Entry.context', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='graph.Entry.value', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=341, + serialized_end=461, +) + + +_SERVEPREF = _descriptor.Descriptor( + name='ServePref', + full_name='graph.ServePref', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='componentName', full_name='graph.ServePref.componentName', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='parameterName', full_name='graph.ServePref.parameterName', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='condition', full_name='graph.ServePref.condition', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=463, + serialized_end=562, +) + + +_SERVEPREFS = _descriptor.Descriptor( + name='ServePrefs', + full_name='graph.ServePrefs', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='servePrefSet', full_name='graph.ServePrefs.servePrefSet', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=564, + serialized_end=616, +) + + 
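# Illustrative sketch (not part of the generated file): the ServePref / ServePrefs / RunTimeParams
# descriptors defined in this module mirror the messages in graph.proto, included later in this
# patch. A client might build a RunComposition request roughly as follows; the component name
# 'my_mech' and the input values are hypothetical, and graph_pb2 is assumed to be importable:
#
#     from psyneulink.core.globals import graph_pb2
#
#     # Ask the server to stream 'my_mech.value' whenever it is set during execution.
#     pref = graph_pb2.ServePref(
#         componentName='my_mech',
#         parameterName='value',
#         condition=graph_pb2.EXECUTION,  # module-level serveCondition enum value
#     )
#
#     # Bundle run inputs (component name -> DoubleMatrix) with the delivery preferences.
#     params = graph_pb2.RunTimeParams(
#         inputs={'my_mech': graph_pb2.DoubleMatrix(rows=1, cols=2, data=[0.0, 1.0])},
#         servePrefs=graph_pb2.ServePrefs(servePrefSet=[pref]),
#     )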
+_RUNTIMEPARAMS_INPUTSENTRY = _descriptor.Descriptor( + name='InputsEntry', + full_name='graph.RunTimeParams.InputsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='graph.RunTimeParams.InputsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='graph.RunTimeParams.InputsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=_b('8\001'), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=725, + serialized_end=791, +) + +_RUNTIMEPARAMS = _descriptor.Descriptor( + name='RunTimeParams', + full_name='graph.RunTimeParams', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='inputs', full_name='graph.RunTimeParams.inputs', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='servePrefs', full_name='graph.RunTimeParams.servePrefs', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_RUNTIMEPARAMS_INPUTSENTRY, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=619, + serialized_end=791, +) + +_ENTRY.fields_by_name['value'].message_type = _DOUBLEMATRIX +_SERVEPREF.fields_by_name['condition'].enum_type = _SERVECONDITION +_SERVEPREFS.fields_by_name['servePrefSet'].message_type = _SERVEPREF +_RUNTIMEPARAMS_INPUTSENTRY.fields_by_name['value'].message_type = _DOUBLEMATRIX +_RUNTIMEPARAMS_INPUTSENTRY.containing_type = _RUNTIMEPARAMS +_RUNTIMEPARAMS.fields_by_name['inputs'].message_type = _RUNTIMEPARAMS_INPUTSENTRY +_RUNTIMEPARAMS.fields_by_name['servePrefs'].message_type = _SERVEPREFS +DESCRIPTOR.message_types_by_name['NullArgument'] = _NULLARGUMENT +DESCRIPTOR.message_types_by_name['HealthStatus'] = _HEALTHSTATUS +DESCRIPTOR.message_types_by_name['PNLPath'] = _PNLPATH +DESCRIPTOR.message_types_by_name['ScriptPath'] = _SCRIPTPATH +DESCRIPTOR.message_types_by_name['ScriptCompositions'] = _SCRIPTCOMPOSITIONS +DESCRIPTOR.message_types_by_name['GraphName'] = _GRAPHNAME +DESCRIPTOR.message_types_by_name['GraphJSON'] = _GRAPHJSON +DESCRIPTOR.message_types_by_name['StyleJSON'] = _STYLEJSON +DESCRIPTOR.message_types_by_name['DoubleMatrix'] = _DOUBLEMATRIX +DESCRIPTOR.message_types_by_name['Entry'] = _ENTRY +DESCRIPTOR.message_types_by_name['ServePref'] = _SERVEPREF +DESCRIPTOR.message_types_by_name['ServePrefs'] = _SERVEPREFS +DESCRIPTOR.message_types_by_name['RunTimeParams'] = _RUNTIMEPARAMS 
+DESCRIPTOR.enum_types_by_name['serveCondition'] = _SERVECONDITION +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +NullArgument = _reflection.GeneratedProtocolMessageType('NullArgument', (_message.Message,), { + 'DESCRIPTOR' : _NULLARGUMENT, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.NullArgument) + }) +_sym_db.RegisterMessage(NullArgument) + +HealthStatus = _reflection.GeneratedProtocolMessageType('HealthStatus', (_message.Message,), { + 'DESCRIPTOR' : _HEALTHSTATUS, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.HealthStatus) + }) +_sym_db.RegisterMessage(HealthStatus) + +PNLPath = _reflection.GeneratedProtocolMessageType('PNLPath', (_message.Message,), { + 'DESCRIPTOR' : _PNLPATH, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.PNLPath) + }) +_sym_db.RegisterMessage(PNLPath) + +ScriptPath = _reflection.GeneratedProtocolMessageType('ScriptPath', (_message.Message,), { + 'DESCRIPTOR' : _SCRIPTPATH, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.ScriptPath) + }) +_sym_db.RegisterMessage(ScriptPath) + +ScriptCompositions = _reflection.GeneratedProtocolMessageType('ScriptCompositions', (_message.Message,), { + 'DESCRIPTOR' : _SCRIPTCOMPOSITIONS, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.ScriptCompositions) + }) +_sym_db.RegisterMessage(ScriptCompositions) + +GraphName = _reflection.GeneratedProtocolMessageType('GraphName', (_message.Message,), { + 'DESCRIPTOR' : _GRAPHNAME, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.GraphName) + }) +_sym_db.RegisterMessage(GraphName) + +GraphJSON = _reflection.GeneratedProtocolMessageType('GraphJSON', (_message.Message,), { + 'DESCRIPTOR' : _GRAPHJSON, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.GraphJSON) + }) +_sym_db.RegisterMessage(GraphJSON) + +StyleJSON = _reflection.GeneratedProtocolMessageType('StyleJSON', (_message.Message,), { + 'DESCRIPTOR' : _STYLEJSON, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.StyleJSON) + }) +_sym_db.RegisterMessage(StyleJSON) + +DoubleMatrix = _reflection.GeneratedProtocolMessageType('DoubleMatrix', (_message.Message,), { + 'DESCRIPTOR' : _DOUBLEMATRIX, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.DoubleMatrix) + }) +_sym_db.RegisterMessage(DoubleMatrix) + +Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), { + 'DESCRIPTOR' : _ENTRY, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.Entry) + }) +_sym_db.RegisterMessage(Entry) + +ServePref = _reflection.GeneratedProtocolMessageType('ServePref', (_message.Message,), { + 'DESCRIPTOR' : _SERVEPREF, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.ServePref) + }) +_sym_db.RegisterMessage(ServePref) + +ServePrefs = _reflection.GeneratedProtocolMessageType('ServePrefs', (_message.Message,), { + 'DESCRIPTOR' : _SERVEPREFS, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.ServePrefs) + }) +_sym_db.RegisterMessage(ServePrefs) + +RunTimeParams = _reflection.GeneratedProtocolMessageType('RunTimeParams', (_message.Message,), { + + 'InputsEntry' : _reflection.GeneratedProtocolMessageType('InputsEntry', (_message.Message,), { + 'DESCRIPTOR' : _RUNTIMEPARAMS_INPUTSENTRY, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.RunTimeParams.InputsEntry) + }) + , + 'DESCRIPTOR' : 
_RUNTIMEPARAMS, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.RunTimeParams) + }) +_sym_db.RegisterMessage(RunTimeParams) +_sym_db.RegisterMessage(RunTimeParams.InputsEntry) + + +_DOUBLEMATRIX.fields_by_name['data']._options = None +_RUNTIMEPARAMS_INPUTSENTRY._options = None + +_SERVEGRAPH = _descriptor.ServiceDescriptor( + name='ServeGraph', + full_name='graph.ServeGraph', + file=DESCRIPTOR, + index=0, + serialized_options=None, + serialized_start=943, + serialized_end=1426, + methods=[ + _descriptor.MethodDescriptor( + name='LoadCustomPnl', + full_name='graph.ServeGraph.LoadCustomPnl', + index=0, + containing_service=None, + input_type=_PNLPATH, + output_type=_NULLARGUMENT, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='LoadScript', + full_name='graph.ServeGraph.LoadScript', + index=1, + containing_service=None, + input_type=_SCRIPTPATH, + output_type=_SCRIPTCOMPOSITIONS, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='LoadGraphics', + full_name='graph.ServeGraph.LoadGraphics', + index=2, + containing_service=None, + input_type=_SCRIPTPATH, + output_type=_STYLEJSON, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='GetCompositions', + full_name='graph.ServeGraph.GetCompositions', + index=3, + containing_service=None, + input_type=_NULLARGUMENT, + output_type=_SCRIPTCOMPOSITIONS, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='GetJSON', + full_name='graph.ServeGraph.GetJSON', + index=4, + containing_service=None, + input_type=_GRAPHNAME, + output_type=_GRAPHJSON, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='HealthCheck', + full_name='graph.ServeGraph.HealthCheck', + index=5, + containing_service=None, + input_type=_NULLARGUMENT, + output_type=_HEALTHSTATUS, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='UpdateStylesheet', + full_name='graph.ServeGraph.UpdateStylesheet', + index=6, + containing_service=None, + input_type=_STYLEJSON, + output_type=_NULLARGUMENT, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='RunComposition', + full_name='graph.ServeGraph.RunComposition', + index=7, + containing_service=None, + input_type=_RUNTIMEPARAMS, + output_type=_ENTRY, + serialized_options=None, + ), +]) +_sym_db.RegisterServiceDescriptor(_SERVEGRAPH) + +DESCRIPTOR.services_by_name['ServeGraph'] = _SERVEGRAPH + +# @@protoc_insertion_point(module_scope) diff --git a/psyneulink/core/globals/graph_pb2_grpc.py b/psyneulink/core/globals/graph_pb2_grpc.py new file mode 100644 index 00000000000..1ef894f1192 --- /dev/null +++ b/psyneulink/core/globals/graph_pb2_grpc.py @@ -0,0 +1,165 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +from . import graph_pb2 as graph__pb2 + + +class ServeGraphStub(object): + # missing associated documentation comment in .proto file + pass + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.LoadCustomPnl = channel.unary_unary( + '/graph.ServeGraph/LoadCustomPnl', + request_serializer=graph__pb2.PNLPath.SerializeToString, + response_deserializer=graph__pb2.NullArgument.FromString, + ) + self.LoadScript = channel.unary_unary( + '/graph.ServeGraph/LoadScript', + request_serializer=graph__pb2.ScriptPath.SerializeToString, + response_deserializer=graph__pb2.ScriptCompositions.FromString, + ) + self.LoadGraphics = channel.unary_unary( + '/graph.ServeGraph/LoadGraphics', + request_serializer=graph__pb2.ScriptPath.SerializeToString, + response_deserializer=graph__pb2.StyleJSON.FromString, + ) + self.GetCompositions = channel.unary_unary( + '/graph.ServeGraph/GetCompositions', + request_serializer=graph__pb2.NullArgument.SerializeToString, + response_deserializer=graph__pb2.ScriptCompositions.FromString, + ) + self.GetJSON = channel.unary_unary( + '/graph.ServeGraph/GetJSON', + request_serializer=graph__pb2.GraphName.SerializeToString, + response_deserializer=graph__pb2.GraphJSON.FromString, + ) + self.HealthCheck = channel.unary_unary( + '/graph.ServeGraph/HealthCheck', + request_serializer=graph__pb2.NullArgument.SerializeToString, + response_deserializer=graph__pb2.HealthStatus.FromString, + ) + self.UpdateStylesheet = channel.stream_unary( + '/graph.ServeGraph/UpdateStylesheet', + request_serializer=graph__pb2.StyleJSON.SerializeToString, + response_deserializer=graph__pb2.NullArgument.FromString, + ) + self.RunComposition = channel.unary_stream( + '/graph.ServeGraph/RunComposition', + request_serializer=graph__pb2.RunTimeParams.SerializeToString, + response_deserializer=graph__pb2.Entry.FromString, + ) + + +class ServeGraphServicer(object): + # missing associated documentation comment in .proto file + pass + + def LoadCustomPnl(self, request, context): + # missing associated documentation comment in .proto file + pass + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def LoadScript(self, request, context): + # missing associated documentation comment in .proto file + pass + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def LoadGraphics(self, request, context): + # missing associated documentation comment in .proto file + pass + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetCompositions(self, request, context): + # missing associated documentation comment in .proto file + pass + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetJSON(self, request, context): + # missing associated documentation comment in .proto file + pass + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def HealthCheck(self, request, context): + # missing associated documentation comment in .proto file + pass + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateStylesheet(self, request_iterator, context): + # missing associated documentation comment in .proto file + pass + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + 
context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RunComposition(self, request, context): + # missing associated documentation comment in .proto file + pass + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ServeGraphServicer_to_server(servicer, server): + rpc_method_handlers = { + 'LoadCustomPnl': grpc.unary_unary_rpc_method_handler( + servicer.LoadCustomPnl, + request_deserializer=graph__pb2.PNLPath.FromString, + response_serializer=graph__pb2.NullArgument.SerializeToString, + ), + 'LoadScript': grpc.unary_unary_rpc_method_handler( + servicer.LoadScript, + request_deserializer=graph__pb2.ScriptPath.FromString, + response_serializer=graph__pb2.ScriptCompositions.SerializeToString, + ), + 'LoadGraphics': grpc.unary_unary_rpc_method_handler( + servicer.LoadGraphics, + request_deserializer=graph__pb2.ScriptPath.FromString, + response_serializer=graph__pb2.StyleJSON.SerializeToString, + ), + 'GetCompositions': grpc.unary_unary_rpc_method_handler( + servicer.GetCompositions, + request_deserializer=graph__pb2.NullArgument.FromString, + response_serializer=graph__pb2.ScriptCompositions.SerializeToString, + ), + 'GetJSON': grpc.unary_unary_rpc_method_handler( + servicer.GetJSON, + request_deserializer=graph__pb2.GraphName.FromString, + response_serializer=graph__pb2.GraphJSON.SerializeToString, + ), + 'HealthCheck': grpc.unary_unary_rpc_method_handler( + servicer.HealthCheck, + request_deserializer=graph__pb2.NullArgument.FromString, + response_serializer=graph__pb2.HealthStatus.SerializeToString, + ), + 'UpdateStylesheet': grpc.stream_unary_rpc_method_handler( + servicer.UpdateStylesheet, + request_deserializer=graph__pb2.StyleJSON.FromString, + response_serializer=graph__pb2.NullArgument.SerializeToString, + ), + 'RunComposition': grpc.unary_stream_rpc_method_handler( + servicer.RunComposition, + request_deserializer=graph__pb2.RunTimeParams.FromString, + response_serializer=graph__pb2.Entry.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'graph.ServeGraph', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/psyneulink/core/globals/log.py b/psyneulink/core/globals/log.py index b38b660697a..ed69fbde5a8 100644 --- a/psyneulink/core/globals/log.py +++ b/psyneulink/core/globals/log.py @@ -400,7 +400,7 @@ LogEntry = namedtuple('LogEntry', 'time, context, value') - +DeliveryEntry = namedtuple('DeliveryEntry', 'time, context, value') class LogCondition(enum.IntFlag): """Used to specify the context in which a value of the Component or its attribute is `logged `. @@ -471,6 +471,9 @@ def from_string(string): except KeyError: raise LogError("\'{}\' is not a value of {}".format(string, LogCondition)) +# Alias LogCondition +DeliveryCondition = LogCondition +DeliveryCondition._get_delivery_condition_string = DeliveryCondition._get_log_condition_string TIME_NOT_SPECIFIED = 'Time Not Specified' EXECUTION_CONDITION_NAMES = {LogCondition.PROCESSING.name, @@ -833,6 +836,74 @@ def assign_log_condition(item, level): assign_log_condition(item, log_condition) else: assign_log_condition(item[0], item[1]) + + def set_delivery_conditions(self, items, delivery_condition=LogCondition.EXECUTION): + """Specifies items to be delivered via gRPC under the specified `DeliveryCondition`\\(s). 
+
+        Arguments
+        ---------
+
+        items : str, Component, tuple or List of these
+            specifies items to be logged; these must be `loggable_items ` of the Log.
+            Each item must be a:
+            * string that is the name of a `loggable_item ` of the Log's `owner `;
+            * a reference to a Component;
+            * tuple, the first item of which is one of the above, and the second a `ContextFlags` to use for the item.
+
+        delivery_condition : DeliveryCondition : default DeliveryCondition.EXECUTION
+            specifies `DeliveryCondition` to use as the default for items not specified in tuples (see above).
+            For convenience, the name of a DeliveryCondition can be used in place of its full specification
+            (e.g., *EXECUTION* instead of `DeliveryCondition.EXECUTION`).
+
+        params_set : list : default None
+            list of parameters to include as loggable items; these must be attributes of the `owner `
+            (for example, Mechanism).
+
+        """
+        from psyneulink.core.components.component import Component
+        from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel
+        from psyneulink.core.globals.keywords import ALL
+
+        def assign_delivery_condition(item, level):
+
+            # Handle multiple level assignments (as LogCondition or strings in a list)
+            if not isinstance(level, list):
+                level = [level]
+            levels = DeliveryCondition.OFF
+            for l in level:
+                if isinstance(l, str):
+                    l = DeliveryCondition.from_string(l)
+                levels |= l
+            level = levels
+
+            if not item in self.loggable_items:
+                # KDM 8/13/18: NOTE: add_entries is not defined anywhere
+                raise LogError("\'{0}\' is not a loggable item for {1} (try using \'{1}.log.add_entries()\')".
+                               format(item, self.owner.name))
+
+            self._get_parameter_from_item_string(item).delivery_condition = level
+
+        if items == ALL:
+            for component in self.loggable_components:
+                component.logPref = PreferenceEntry(delivery_condition, PreferenceLevel.INSTANCE)
+
+            for item in self.all_items:
+                self._get_parameter_from_item_string(item).delivery_condition = delivery_condition
+            # self.logPref = PreferenceEntry(log_condition, PreferenceLevel.INSTANCE)
+            return
+
+        if not isinstance(items, list):
+            items = [items]
+
+        # allow multiple sets of conditions to be set for multiple items with one call
+        for item in items:
+            if isinstance(item, (str, Component)):
+                if isinstance(item, Component):
+                    item = item.name
+                assign_delivery_condition(item, delivery_condition)
+            else:
+                assign_delivery_condition(item[0], item[1])
+
     @tc.typecheck
     def _log_value(
diff --git a/psyneulink/core/globals/parameters.py b/psyneulink/core/globals/parameters.py
index 124a6ddbd67..def3a6f9918 100644
--- a/psyneulink/core/globals/parameters.py
+++ b/psyneulink/core/globals/parameters.py
@@ -245,10 +245,11 @@ def _recurrent_transfer_mechanism_matrix_setter(value, owning_component=None, co
 import warnings
 import weakref

+from psyneulink.core.globals.graph_pb2 import Entry, DoubleMatrix
 from psyneulink.core.globals.keywords import MULTIPLICATIVE
 from psyneulink.core.globals.context import Context, ContextError, ContextFlags, _get_time, handle_external_context
 from psyneulink.core.globals.context import time as time_object
-from psyneulink.core.globals.log import LogCondition, LogEntry, LogError
+from psyneulink.core.globals.log import DeliveryCondition, LogCondition, LogEntry, LogError
 from psyneulink.core.globals.utilities import call_with_pruned_args, copy_iterable_with_shared, get_alias_property_getter, get_alias_property_setter, get_deepcopy_with_shared, unproxy_weakproxy

 __all__ = [
@@ -598,6 +599,12 @@ class Parameter(types.SimpleNamespace):
         :type: `LogCondition`
         :default: `OFF `

+    delivery_condition
+        the DeliveryCondition for which the parameter should be delivered.
+
+        :type: `DeliveryCondition`
+        :default: `OFF `
+
     history
         stores the history of the parameter (previous values). Also see `get_previous`.

@@ -667,6 +674,7 @@ class Parameter(types.SimpleNamespace):
         'default_value',
         'history_max_length',
         'log_condition',
+        'delivery_condition',
         'spec',
     }

@@ -690,6 +698,7 @@ def __init__(
         loggable=True,
         log=None,
         log_condition=LogCondition.OFF,
+        delivery_condition=DeliveryCondition.OFF,
         history=None,
         history_max_length=1,
         history_min_length=0,
@@ -740,6 +749,7 @@ def __init__(
             loggable=loggable,
             log=log,
             log_condition=log_condition,
+            delivery_condition=delivery_condition,
             history=history,
             history_max_length=history_max_length,
             history_min_length=history_min_length,
@@ -1085,7 +1095,7 @@ def _set(self, value, context=None, skip_history=False, skip_log=False, **kwargs
         self._set_value(value, execution_id=execution_id, context=context, skip_history=skip_history, skip_log=skip_log)

-    def _set_value(self, value, execution_id=None, context=None, skip_history=False, skip_log=False):
+    def _set_value(self, value, execution_id=None, context=None, skip_history=False, skip_log=False, skip_delivery=False):
         # store history
         if not skip_history:
             if execution_id in self.values:
@@ -1094,9 +1104,13 @@ def _set_value(self, value, execution_id=None, context=None, skip_history=False,
             except KeyError:
                 self.history[execution_id] = collections.deque([self.values[execution_id]], maxlen=self.history_max_length)

-        # log value
-        if not skip_log and self.loggable:
-            self._log_value(value, context)
+        if self.loggable:
+            # log value
+            if not skip_log:
+                self._log_value(value, context)
+            # Deliver value to external application
+            if not skip_delivery:
+                self._deliver_value(value, context)

         # set value
         self.values[execution_id] = value
@@ -1159,6 +1173,60 @@ def _log_value(self, value, context=None):
                 LogEntry(time, context_str, value)
             )

+    def _deliver_value(self, value, context=None):
+        # if a context is attached and a pipeline is attached to the context
+        if context and context.rpc_pipeline:
+            # manual delivery
+            if context is not None and context.source is ContextFlags.COMMAND_LINE:
+                try:
+                    time = _get_time(self._owner._owner, context)
+                except (AttributeError, ContextError):
+                    time = time_object(None, None, None, None)
+
+                # this branch only ran previously when context was ContextFlags.COMMAND_LINE
+                context_str = ContextFlags._get_context_string(ContextFlags.COMMAND_LINE)
+                delivery_condition_satisfied = True
+
+            # standard logging
+            else:
+                if self.delivery_condition is None or self.delivery_condition is LogCondition.OFF:
+                    return
+
+                if context is None:
+                    context = self._owner._owner.most_recent_context
+
+                time = _get_time(self._owner._owner, context)
+                context_str = ContextFlags._get_context_string(context.flags)
+                delivery_condition_satisfied = self.delivery_condition & context.flags
+
+            if (
+                not delivery_condition_satisfied
+                and self.delivery_condition & LogCondition.INITIALIZATION
+                and self._owner._owner.initialization_status is ContextFlags.INITIALIZING
+            ):
+                delivery_condition_satisfied = True
+
+            if delivery_condition_satisfied:
+                if not self.stateful:
+                    execution_id = None
+                else:
+                    execution_id = context.execution_id
+
+                ##### ADD TO PIPELINE HERE #####
+                context.rpc_pipeline.put(
+                    Entry(
+                        componentName=self._owner._owner.name,
+                        parameterName=self.name,
time=f'{time.run}:{time.trial}:{time.pass_}:{time.time_step}', + context=context.execution_id, + value=DoubleMatrix( + rows=value.shape[0], + cols=value.shape[1], + data=value.flatten().tolist() + ) + ) + ) + def clear_log(self, contexts=NotImplemented): """ Clears the log of this Parameter for every context in **contexts** diff --git a/psyneulink/core/globals/protos/graph.proto b/psyneulink/core/globals/protos/graph.proto new file mode 100644 index 00000000000..e7242ccb4fb --- /dev/null +++ b/psyneulink/core/globals/protos/graph.proto @@ -0,0 +1,88 @@ +syntax = "proto3"; + +package graph; + +service ServeGraph { + rpc LoadCustomPnl (PNLPath) returns (NullArgument) {} + rpc LoadScript (ScriptPath) returns (ScriptCompositions) {} + rpc LoadGraphics (ScriptPath) returns (StyleJSON) {} + rpc GetCompositions (NullArgument) returns (ScriptCompositions) {} + rpc GetJSON (GraphName) returns (GraphJSON) {} + rpc HealthCheck (NullArgument) returns (HealthStatus) {} + rpc UpdateStylesheet (stream StyleJSON) returns (NullArgument) {} + rpc RunComposition (RunTimeParams) returns (stream Entry) {} +} + +message NullArgument { + +} + +message HealthStatus { + string status = 1; +} + +message PNLPath { + string path = 1; +} + +message ScriptPath { + string path = 1; +} + +message ScriptCompositions { + repeated string compositions = 1; +} + +message GraphName { + string name = 1; +} + +message GraphJSON { + string objectsJSON = 1; + string styleJSON = 2; +} + +message StyleJSON { + string styleJSON = 1; +} + +message DoubleMatrix { + uint32 rows = 1; + uint32 cols = 2; + repeated double data = 3 [packed=true]; +} + +message Entry { + string componentName = 1; + string parameterName = 2; + string time = 3; + string context = 4; + DoubleMatrix value = 5; +} + +enum serveCondition { + INITIALIZATION = 0; + VALIDATION = 1; + EXECUTION = 2; + PROCESSING = 3; + LEARNING = 4; + CONTROL = 5; + SIMULATION = 6; + TRIAL = 7; + RUN = 8; +} + +message ServePref { + string componentName = 1; + string parameterName = 2; + serveCondition condition = 3; +} + +message ServePrefs { + repeated ServePref servePrefSet = 1; +} + +message RunTimeParams { + map inputs = 1; + ServePrefs servePrefs = 2; +} From c5d99cc9ce00fd5af0985c474e8cc32dfed42503 Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Tue, 21 Jul 2020 17:23:29 -0400 Subject: [PATCH 02/24] Remove inactive link from docs -Remove link at top of `optimizationfunctions` module docstring for `ParamEstimationFunction`, which is currently hidden because it is still in development --- psyneulink/core/components/functions/optimizationfunctions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/psyneulink/core/components/functions/optimizationfunctions.py b/psyneulink/core/components/functions/optimizationfunctions.py index ff4f200a322..86f9f2250f0 100644 --- a/psyneulink/core/components/functions/optimizationfunctions.py +++ b/psyneulink/core/components/functions/optimizationfunctions.py @@ -9,12 +9,13 @@ # # ****************************************** OPTIMIZATION FUNCTIONS ************************************************** """ +Contents +-------- * `OptimizationFunction` * `GradientOptimization` * `GridSearch` * `GaussianProcess` -* `ParamEstimationFunction` Overview -------- From 9d7e42cc7f8ece5442f381e511a21a37818c92d2 Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Tue, 28 Jul 2020 10:07:50 -0400 Subject: [PATCH 03/24] Remove DeliveryCondition class and just use LogCondition --- psyneulink/core/components/component.py | 4 ++-- 
psyneulink/core/globals/log.py | 19 +++++++------------ psyneulink/core/globals/parameters.py | 18 +++++++----------- 3 files changed, 16 insertions(+), 25 deletions(-) diff --git a/psyneulink/core/components/component.py b/psyneulink/core/components/component.py index aaaeac3ba02..57b7e8d4e0b 100644 --- a/psyneulink/core/components/component.py +++ b/psyneulink/core/components/component.py @@ -504,7 +504,7 @@ MODEL_SPEC_ID_PARAMETER_VALUE, MODEL_SPEC_ID_INPUT_PORTS, MODEL_SPEC_ID_OUTPUT_PORTS, \ MODULATORY_SPEC_KEYWORDS, NAME, OUTPUT_PORTS, OWNER, PARAMS, PREFS_ARG, \ RESET_STATEFUL_FUNCTION_WHEN, VALUE, VARIABLE -from psyneulink.core.globals.log import LogCondition, DeliveryCondition +from psyneulink.core.globals.log import LogCondition from psyneulink.core.scheduling.time import Time, TimeScale from psyneulink.core.globals.sampleiterator import SampleIterator from psyneulink.core.globals.parameters import \ @@ -3178,7 +3178,7 @@ def set_log_conditions(self, items, log_condition=LogCondition.EXECUTION): """ self.log.set_log_conditions(items=items, log_condition=log_condition) - def set_delivery_conditions(self, items, delivery_condition=DeliveryCondition.EXECUTION): + def set_delivery_conditions(self, items, delivery_condition=LogCondition.EXECUTION): """ set_delivery_conditions( \ items \ diff --git a/psyneulink/core/globals/log.py b/psyneulink/core/globals/log.py index 83f621887ed..8f34b2b7a48 100644 --- a/psyneulink/core/globals/log.py +++ b/psyneulink/core/globals/log.py @@ -400,7 +400,6 @@ LogEntry = namedtuple('LogEntry', 'time, context, value') -DeliveryEntry = namedtuple('DeliveryEntry', 'time, context, value') class LogCondition(enum.IntFlag): """Used to specify the context in which a value of the Component or its attribute is `logged `. @@ -471,10 +470,6 @@ def from_string(string): except KeyError: raise LogError("\'{}\' is not a value of {}".format(string, LogCondition)) -# Alias LogCondition -DeliveryCondition = LogCondition -DeliveryCondition._get_delivery_condition_string = DeliveryCondition._get_log_condition_string - TIME_NOT_SPECIFIED = 'Time Not Specified' EXECUTION_CONDITION_NAMES = {LogCondition.PROCESSING.name, LogCondition.LEARNING.name, @@ -837,7 +832,7 @@ def assign_log_condition(item, level): assign_log_condition(item[0], item[1]) def set_delivery_conditions(self, items, delivery_condition=LogCondition.EXECUTION): - """Specifies items to be delivered via gRPC under the specified `DeliveryCondition`\\(s). + """Specifies items to be delivered via gRPC under the specified `LogCondition`\\(s). Arguments --------- @@ -849,10 +844,10 @@ def set_delivery_conditions(self, items, delivery_condition=LogCondition.EXECUTI * a reference to a Component; * tuple, the first item of which is one of the above, and the second a `ContextFlags` to use for the item. - delivery_condition : DeliveryCondition : default DeliveryCondition.EXECUTION - specifies `DeliveryCondition` to use as the default for items not specified in tuples (see above). - For convenience, the name of a DeliveryCondition can be used in place of its full specification - (e.g., *EXECUTION* instead of `DeliveryCondition.EXECUTION`). + delivery_condition : LogCondition : default LogCondition.EXECUTION + specifies `LogCondition` to use as the default for items not specified in tuples (see above). + For convenience, the name of a LogCondition can be used in place of its full specification + (e.g., *EXECUTION* instead of `LogCondition.EXECUTION`). 
        params_set : list : default None
            list of parameters to include as loggable items; these must be attributes of the `owner `

@@ -868,10 +863,10 @@ def assign_delivery_condition(item, level):

             # Handle multiple level assignments (as LogCondition or strings in a list)
             if not isinstance(level, list):
                 level = [level]
-            levels = DeliveryCondition.OFF
+            levels = LogCondition.OFF
             for l in level:
                 if isinstance(l, str):
-                    l = DeliveryCondition.from_string(l)
+                    l = LogCondition.from_string(l)
                 levels |= l
             level = levels
diff --git a/psyneulink/core/globals/parameters.py b/psyneulink/core/globals/parameters.py
index 8a3b458d079..e889ab7cfcb 100644
--- a/psyneulink/core/globals/parameters.py
+++ b/psyneulink/core/globals/parameters.py
@@ -250,7 +250,7 @@ def _recurrent_transfer_mechanism_matrix_setter(value, owning_component=None, co
 from psyneulink.core.globals.keywords import MULTIPLICATIVE
 from psyneulink.core.globals.context import Context, ContextError, ContextFlags, _get_time, handle_external_context
 from psyneulink.core.globals.context import time as time_object
-from psyneulink.core.globals.log import DeliveryCondition, LogCondition, LogEntry, LogError
+from psyneulink.core.globals.log import LogCondition, LogEntry, LogError
 from psyneulink.core.globals.utilities import call_with_pruned_args, copy_iterable_with_shared, get_alias_property_getter, get_alias_property_setter, get_deepcopy_with_shared, unproxy_weakproxy

 __all__ = [
@@ -632,10 +632,10 @@ class Parameter(types.SimpleNamespace):
         :default: `OFF `

     delivery_condition
-        the DeliveryCondition for which the parameter should be delivered.
+        the LogCondition for which the parameter should be delivered.

-        :type: `DeliveryCondition`
-        :default: `OFF `
+        :type: `LogCondition`
+        :default: `OFF `

     history
         stores the history of the parameter (previous values). Also see `get_previous`.
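As a usage sketch of the delivery path as it stands after this commit (illustrative only: the
mechanism, composition, and inputs below are hypothetical, and the exact Context/run plumbing may
differ):

    from queue import Queue
    import psyneulink as pnl

    mech = pnl.TransferMechanism(name='mech')
    comp = pnl.Composition()
    comp.add_node(mech)

    # Mark mech's 'value' parameter for delivery whenever it is set during execution;
    # _deliver_value puts one graph_pb2.Entry on the queue per satisfied condition.
    mech.set_delivery_conditions('value', pnl.LogCondition.EXECUTION)

    pipeline = Queue()
    comp.run(inputs={mech: [[1.0]]}, context=pnl.Context(rpc_pipeline=pipeline))

    while not pipeline.empty():
        entry = pipeline.get()
        print(entry.componentName, entry.parameterName, entry.time)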
@@ -742,7 +742,7 @@ def __init__( loggable=True, log=None, log_condition=LogCondition.OFF, - delivery_condition=DeliveryCondition.OFF, + delivery_condition=LogCondition.OFF, history=None, history_max_length=1, history_min_length=0, @@ -1288,14 +1288,11 @@ def _deliver_value(self, value, context=None): # if a context is attached and a pipeline is attached to the context if context and context.rpc_pipeline: # manual delivery - if context is not None and context.source is ContextFlags.COMMAND_LINE: + if context.source is ContextFlags.COMMAND_LINE: try: time = _get_time(self._owner._owner, context) except (AttributeError, ContextError): time = time_object(None, None, None, None) - - # this branch only ran previously when context was ContextFlags.COMMAND_LINE - context_str = ContextFlags._get_context_string(ContextFlags.COMMAND_LINE) delivery_condition_satisfied = True # standard logging @@ -1307,7 +1304,6 @@ def _deliver_value(self, value, context=None): context = self._owner._owner.most_recent_context time = _get_time(self._owner._owner, context) - context_str = ContextFlags._get_context_string(context.flags) delivery_condition_satisfied = self.delivery_condition & context.flags if ( @@ -1329,7 +1325,7 @@ def _deliver_value(self, value, context=None): componentName=self._owner._owner.name, parameterName=self.name, time=f'{time.run}:{time.trial}:{time.pass_}:{time.time_step}', - context=context.execution_id, + context=execution_id, value=DoubleMatrix( rows=value.shape[0], cols=value.shape[1], From 487209bd4d060808d2ad235bd7fef2ad11324a59 Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Thu, 30 Jul 2020 09:29:42 -0400 Subject: [PATCH 04/24] Impose np.atleast2d on values to be sent via RPC --- psyneulink/core/globals/parameters.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/psyneulink/core/globals/parameters.py b/psyneulink/core/globals/parameters.py index e889ab7cfcb..8b46e63fd2d 100644 --- a/psyneulink/core/globals/parameters.py +++ b/psyneulink/core/globals/parameters.py @@ -246,6 +246,8 @@ def _recurrent_transfer_mechanism_matrix_setter(value, owning_component=None, co import typing import weakref +import numpy as np + from psyneulink.core.globals.graph_pb2 import Entry, DoubleMatrix from psyneulink.core.globals.keywords import MULTIPLICATIVE from psyneulink.core.globals.context import Context, ContextError, ContextFlags, _get_time, handle_external_context @@ -1319,17 +1321,19 @@ def _deliver_value(self, value, context=None): else: execution_id = context.execution_id + deliver_value = np.atleast_2d(value) + ##### ADD TO PIPELINE HERE ##### context.rpc_pipeline.put( Entry( - componentName=self._owner._owner.name, + componentName=self._owner._owner._owner.name, parameterName=self.name, time=f'{time.run}:{time.trial}:{time.pass_}:{time.time_step}', context=execution_id, value=DoubleMatrix( - rows=value.shape[0], - cols=value.shape[1], - data=value.flatten().tolist() + rows=deliver_value.shape[0], + cols=deliver_value.shape[1], + data=deliver_value.flatten().tolist() ) ) ) From 03b86107c664953395534f32a8263c60976fd39e Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Tue, 4 Aug 2020 11:01:10 -0400 Subject: [PATCH 05/24] Generalized RPC to ndArrays, added protobufs to repo --- psyneulink/core/globals/graph_pb2_grpc.py | 165 ---------- psyneulink/core/globals/parameters.py | 25 +- .../core/{globals/protos => rpc}/graph.proto | 21 +- psyneulink/core/{globals => rpc}/graph_pb2.py | 251 ++++++++++----- psyneulink/core/rpc/graph_pb2_grpc.py | 299 ++++++++++++++++++ 
5 files changed, 501 insertions(+), 260 deletions(-) delete mode 100644 psyneulink/core/globals/graph_pb2_grpc.py rename psyneulink/core/{globals/protos => rpc}/graph.proto (64%) rename psyneulink/core/{globals => rpc}/graph_pb2.py (65%) create mode 100644 psyneulink/core/rpc/graph_pb2_grpc.py diff --git a/psyneulink/core/globals/graph_pb2_grpc.py b/psyneulink/core/globals/graph_pb2_grpc.py deleted file mode 100644 index 1ef894f1192..00000000000 --- a/psyneulink/core/globals/graph_pb2_grpc.py +++ /dev/null @@ -1,165 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from . import graph_pb2 as graph__pb2 - - -class ServeGraphStub(object): - # missing associated documentation comment in .proto file - pass - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.LoadCustomPnl = channel.unary_unary( - '/graph.ServeGraph/LoadCustomPnl', - request_serializer=graph__pb2.PNLPath.SerializeToString, - response_deserializer=graph__pb2.NullArgument.FromString, - ) - self.LoadScript = channel.unary_unary( - '/graph.ServeGraph/LoadScript', - request_serializer=graph__pb2.ScriptPath.SerializeToString, - response_deserializer=graph__pb2.ScriptCompositions.FromString, - ) - self.LoadGraphics = channel.unary_unary( - '/graph.ServeGraph/LoadGraphics', - request_serializer=graph__pb2.ScriptPath.SerializeToString, - response_deserializer=graph__pb2.StyleJSON.FromString, - ) - self.GetCompositions = channel.unary_unary( - '/graph.ServeGraph/GetCompositions', - request_serializer=graph__pb2.NullArgument.SerializeToString, - response_deserializer=graph__pb2.ScriptCompositions.FromString, - ) - self.GetJSON = channel.unary_unary( - '/graph.ServeGraph/GetJSON', - request_serializer=graph__pb2.GraphName.SerializeToString, - response_deserializer=graph__pb2.GraphJSON.FromString, - ) - self.HealthCheck = channel.unary_unary( - '/graph.ServeGraph/HealthCheck', - request_serializer=graph__pb2.NullArgument.SerializeToString, - response_deserializer=graph__pb2.HealthStatus.FromString, - ) - self.UpdateStylesheet = channel.stream_unary( - '/graph.ServeGraph/UpdateStylesheet', - request_serializer=graph__pb2.StyleJSON.SerializeToString, - response_deserializer=graph__pb2.NullArgument.FromString, - ) - self.RunComposition = channel.unary_stream( - '/graph.ServeGraph/RunComposition', - request_serializer=graph__pb2.RunTimeParams.SerializeToString, - response_deserializer=graph__pb2.Entry.FromString, - ) - - -class ServeGraphServicer(object): - # missing associated documentation comment in .proto file - pass - - def LoadCustomPnl(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def LoadScript(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def LoadGraphics(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetCompositions(self, request, context): - # missing associated documentation comment in .proto file - pass - 
context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetJSON(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def HealthCheck(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def UpdateStylesheet(self, request_iterator, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def RunComposition(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_ServeGraphServicer_to_server(servicer, server): - rpc_method_handlers = { - 'LoadCustomPnl': grpc.unary_unary_rpc_method_handler( - servicer.LoadCustomPnl, - request_deserializer=graph__pb2.PNLPath.FromString, - response_serializer=graph__pb2.NullArgument.SerializeToString, - ), - 'LoadScript': grpc.unary_unary_rpc_method_handler( - servicer.LoadScript, - request_deserializer=graph__pb2.ScriptPath.FromString, - response_serializer=graph__pb2.ScriptCompositions.SerializeToString, - ), - 'LoadGraphics': grpc.unary_unary_rpc_method_handler( - servicer.LoadGraphics, - request_deserializer=graph__pb2.ScriptPath.FromString, - response_serializer=graph__pb2.StyleJSON.SerializeToString, - ), - 'GetCompositions': grpc.unary_unary_rpc_method_handler( - servicer.GetCompositions, - request_deserializer=graph__pb2.NullArgument.FromString, - response_serializer=graph__pb2.ScriptCompositions.SerializeToString, - ), - 'GetJSON': grpc.unary_unary_rpc_method_handler( - servicer.GetJSON, - request_deserializer=graph__pb2.GraphName.FromString, - response_serializer=graph__pb2.GraphJSON.SerializeToString, - ), - 'HealthCheck': grpc.unary_unary_rpc_method_handler( - servicer.HealthCheck, - request_deserializer=graph__pb2.NullArgument.FromString, - response_serializer=graph__pb2.HealthStatus.SerializeToString, - ), - 'UpdateStylesheet': grpc.stream_unary_rpc_method_handler( - servicer.UpdateStylesheet, - request_deserializer=graph__pb2.StyleJSON.FromString, - response_serializer=graph__pb2.NullArgument.SerializeToString, - ), - 'RunComposition': grpc.unary_stream_rpc_method_handler( - servicer.RunComposition, - request_deserializer=graph__pb2.RunTimeParams.FromString, - response_serializer=graph__pb2.Entry.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'graph.ServeGraph', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/psyneulink/core/globals/parameters.py b/psyneulink/core/globals/parameters.py index 8b46e63fd2d..25aad9d787a 100644 --- a/psyneulink/core/globals/parameters.py +++ b/psyneulink/core/globals/parameters.py @@ -248,8 +248,7 @@ def _recurrent_transfer_mechanism_matrix_setter(value, owning_component=None, co import numpy as np -from psyneulink.core.globals.graph_pb2 import Entry, DoubleMatrix -from 
psyneulink.core.globals.keywords import MULTIPLICATIVE +from psyneulink.core.rpc.graph_pb2 import Entry, ndArray from psyneulink.core.globals.context import Context, ContextError, ContextFlags, _get_time, handle_external_context from psyneulink.core.globals.context import time as time_object from psyneulink.core.globals.log import LogCondition, LogEntry, LogError @@ -1320,24 +1319,30 @@ def _deliver_value(self, value, context=None): execution_id = None else: execution_id = context.execution_id - - deliver_value = np.atleast_2d(value) - + print(self._get_root_owner()) + print(value) ##### ADD TO PIPELINE HERE ##### context.rpc_pipeline.put( Entry( - componentName=self._owner._owner._owner.name, + componentName=self._get_root_owner().name, parameterName=self.name, time=f'{time.run}:{time.trial}:{time.pass_}:{time.time_step}', context=execution_id, - value=DoubleMatrix( - rows=deliver_value.shape[0], - cols=deliver_value.shape[1], - data=deliver_value.flatten().tolist() + value=ndArray( + shape=list(value.shape), + data=list(value.flatten()) ) ) ) + def _get_root_owner(self): + owner = self + while True: + if hasattr(owner, '_owner'): + owner = owner._owner + else: + return owner + def clear_log(self, contexts=NotImplemented): """ Clears the log of this Parameter for every context in **contexts** diff --git a/psyneulink/core/globals/protos/graph.proto b/psyneulink/core/rpc/graph.proto similarity index 64% rename from psyneulink/core/globals/protos/graph.proto rename to psyneulink/core/rpc/graph.proto index e7242ccb4fb..0747e667992 100644 --- a/psyneulink/core/globals/protos/graph.proto +++ b/psyneulink/core/rpc/graph.proto @@ -1,3 +1,13 @@ +/* Princeton University licenses this file to You under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy of the License at: + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and limitations under the License. + +*********************************************** PNL ProtoBuffer Definitions ************************************************************** +*/ + syntax = "proto3"; package graph; @@ -46,7 +56,12 @@ message StyleJSON { string styleJSON = 1; } -message DoubleMatrix { +message ndArray { + repeated uint32 shape = 1; + repeated double data = 2; +} + +message Matrix { uint32 rows = 1; uint32 cols = 2; repeated double data = 3 [packed=true]; @@ -57,7 +72,7 @@ message Entry { string parameterName = 2; string time = 3; string context = 4; - DoubleMatrix value = 5; + ndArray value = 5; } enum serveCondition { @@ -83,6 +98,6 @@ message ServePrefs { } message RunTimeParams { - map inputs = 1; + map inputs = 1; ServePrefs servePrefs = 2; } diff --git a/psyneulink/core/globals/graph_pb2.py b/psyneulink/core/rpc/graph_pb2.py similarity index 65% rename from psyneulink/core/globals/graph_pb2.py rename to psyneulink/core/rpc/graph_pb2.py index 0e48a6e6947..00672f00307 100644 --- a/psyneulink/core/globals/graph_pb2.py +++ b/psyneulink/core/rpc/graph_pb2.py @@ -1,9 +1,16 @@ +# Princeton University licenses this file to You under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
You may obtain a copy of the License at: +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. + +# ********************************** PNL ProtoBuffer classes ***************************************************** + # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: graph.proto -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -15,13 +22,13 @@ - DESCRIPTOR = _descriptor.FileDescriptor( name='graph.proto', package='graph', syntax='proto3', serialized_options=None, - serialized_pb=_b('\n\x0bgraph.proto\x12\x05graph\"\x0e\n\x0cNullArgument\"\x1e\n\x0cHealthStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\"\x17\n\x07PNLPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"\x1a\n\nScriptPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"*\n\x12ScriptCompositions\x12\x14\n\x0c\x63ompositions\x18\x01 \x03(\t\"\x19\n\tGraphName\x12\x0c\n\x04name\x18\x01 \x01(\t\"3\n\tGraphJSON\x12\x13\n\x0bobjectsJSON\x18\x01 \x01(\t\x12\x11\n\tstyleJSON\x18\x02 \x01(\t\"\x1e\n\tStyleJSON\x12\x11\n\tstyleJSON\x18\x01 \x01(\t\"<\n\x0c\x44oubleMatrix\x12\x0c\n\x04rows\x18\x01 \x01(\r\x12\x0c\n\x04\x63ols\x18\x02 \x01(\r\x12\x10\n\x04\x64\x61ta\x18\x03 \x03(\x01\x42\x02\x10\x01\"x\n\x05\x45ntry\x12\x15\n\rcomponentName\x18\x01 \x01(\t\x12\x15\n\rparameterName\x18\x02 \x01(\t\x12\x0c\n\x04time\x18\x03 \x01(\t\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\t\x12\"\n\x05value\x18\x05 \x01(\x0b\x32\x13.graph.DoubleMatrix\"c\n\tServePref\x12\x15\n\rcomponentName\x18\x01 \x01(\t\x12\x15\n\rparameterName\x18\x02 \x01(\t\x12(\n\tcondition\x18\x03 \x01(\x0e\x32\x15.graph.serveCondition\"4\n\nServePrefs\x12&\n\x0cservePrefSet\x18\x01 \x03(\x0b\x32\x10.graph.ServePref\"\xac\x01\n\rRunTimeParams\x12\x30\n\x06inputs\x18\x01 \x03(\x0b\x32 .graph.RunTimeParams.InputsEntry\x12%\n\nservePrefs\x18\x02 \x01(\x0b\x32\x11.graph.ServePrefs\x1a\x42\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\"\n\x05value\x18\x02 \x01(\x0b\x32\x13.graph.DoubleMatrix:\x02\x38\x01*\x92\x01\n\x0eserveCondition\x12\x12\n\x0eINITIALIZATION\x10\x00\x12\x0e\n\nVALIDATION\x10\x01\x12\r\n\tEXECUTION\x10\x02\x12\x0e\n\nPROCESSING\x10\x03\x12\x0c\n\x08LEARNING\x10\x04\x12\x0b\n\x07\x43ONTROL\x10\x05\x12\x0e\n\nSIMULATION\x10\x06\x12\t\n\x05TRIAL\x10\x07\x12\x07\n\x03RUN\x10\x08\x32\xe3\x03\n\nServeGraph\x12\x36\n\rLoadCustomPnl\x12\x0e.graph.PNLPath\x1a\x13.graph.NullArgument\"\x00\x12<\n\nLoadScript\x12\x11.graph.ScriptPath\x1a\x19.graph.ScriptCompositions\"\x00\x12\x35\n\x0cLoadGraphics\x12\x11.graph.ScriptPath\x1a\x10.graph.StyleJSON\"\x00\x12\x43\n\x0fGetCompositions\x12\x13.graph.NullArgument\x1a\x19.graph.ScriptCompositions\"\x00\x12/\n\x07GetJSON\x12\x10.graph.GraphName\x1a\x10.graph.GraphJSON\"\x00\x12\x39\n\x0bHealthCheck\x12\x13.graph.NullArgument\x1a\x13.graph.HealthStatus\"\x00\x12=\n\x10UpdateStylesheet\x12\x10.graph.StyleJSON\x1a\x13.graph.NullArgument\"\x00(\x01\x12\x38\n\x0eRunComposition\x12\x14.graph.RunTimeParams\x1a\x0c.graph.Entry\"\x00\x30\x01\x62\x06proto3') + create_key=_descriptor._internal_create_key, + 
serialized_pb=b'\n\x0bgraph.proto\x12\x05graph\"\x0e\n\x0cNullArgument\"\x1e\n\x0cHealthStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\"\x17\n\x07PNLPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"\x1a\n\nScriptPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"*\n\x12ScriptCompositions\x12\x14\n\x0c\x63ompositions\x18\x01 \x03(\t\"\x19\n\tGraphName\x12\x0c\n\x04name\x18\x01 \x01(\t\"3\n\tGraphJSON\x12\x13\n\x0bobjectsJSON\x18\x01 \x01(\t\x12\x11\n\tstyleJSON\x18\x02 \x01(\t\"\x1e\n\tStyleJSON\x12\x11\n\tstyleJSON\x18\x01 \x01(\t\"&\n\x07ndArray\x12\r\n\x05shape\x18\x01 \x03(\r\x12\x0c\n\x04\x64\x61ta\x18\x02 \x03(\x01\"6\n\x06Matrix\x12\x0c\n\x04rows\x18\x01 \x01(\r\x12\x0c\n\x04\x63ols\x18\x02 \x01(\r\x12\x10\n\x04\x64\x61ta\x18\x03 \x03(\x01\x42\x02\x10\x01\"s\n\x05\x45ntry\x12\x15\n\rcomponentName\x18\x01 \x01(\t\x12\x15\n\rparameterName\x18\x02 \x01(\t\x12\x0c\n\x04time\x18\x03 \x01(\t\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\t\x12\x1d\n\x05value\x18\x05 \x01(\x0b\x32\x0e.graph.ndArray\"c\n\tServePref\x12\x15\n\rcomponentName\x18\x01 \x01(\t\x12\x15\n\rparameterName\x18\x02 \x01(\t\x12(\n\tcondition\x18\x03 \x01(\x0e\x32\x15.graph.serveCondition\"4\n\nServePrefs\x12&\n\x0cservePrefSet\x18\x01 \x03(\x0b\x32\x10.graph.ServePref\"\xa6\x01\n\rRunTimeParams\x12\x30\n\x06inputs\x18\x01 \x03(\x0b\x32 .graph.RunTimeParams.InputsEntry\x12%\n\nservePrefs\x18\x02 \x01(\x0b\x32\x11.graph.ServePrefs\x1a<\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.graph.Matrix:\x02\x38\x01*\x92\x01\n\x0eserveCondition\x12\x12\n\x0eINITIALIZATION\x10\x00\x12\x0e\n\nVALIDATION\x10\x01\x12\r\n\tEXECUTION\x10\x02\x12\x0e\n\nPROCESSING\x10\x03\x12\x0c\n\x08LEARNING\x10\x04\x12\x0b\n\x07\x43ONTROL\x10\x05\x12\x0e\n\nSIMULATION\x10\x06\x12\t\n\x05TRIAL\x10\x07\x12\x07\n\x03RUN\x10\x08\x32\xe3\x03\n\nServeGraph\x12\x36\n\rLoadCustomPnl\x12\x0e.graph.PNLPath\x1a\x13.graph.NullArgument\"\x00\x12<\n\nLoadScript\x12\x11.graph.ScriptPath\x1a\x19.graph.ScriptCompositions\"\x00\x12\x35\n\x0cLoadGraphics\x12\x11.graph.ScriptPath\x1a\x10.graph.StyleJSON\"\x00\x12\x43\n\x0fGetCompositions\x12\x13.graph.NullArgument\x1a\x19.graph.ScriptCompositions\"\x00\x12/\n\x07GetJSON\x12\x10.graph.GraphName\x1a\x10.graph.GraphJSON\"\x00\x12\x39\n\x0bHealthCheck\x12\x13.graph.NullArgument\x1a\x13.graph.HealthStatus\"\x00\x12=\n\x10UpdateStylesheet\x12\x10.graph.StyleJSON\x1a\x13.graph.NullArgument\"\x00(\x01\x12\x38\n\x0eRunComposition\x12\x14.graph.RunTimeParams\x1a\x0c.graph.Entry\"\x00\x30\x01\x62\x06proto3' ) _SERVECONDITION = _descriptor.EnumDescriptor( @@ -29,48 +36,58 @@ full_name='graph.serveCondition', filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='INITIALIZATION', index=0, number=0, serialized_options=None, - type=None), + type=None, + create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='VALIDATION', index=1, number=1, serialized_options=None, - type=None), + type=None, + create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXECUTION', index=2, number=2, serialized_options=None, - type=None), + type=None, + create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PROCESSING', index=3, number=3, serialized_options=None, - type=None), + type=None, + create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='LEARNING', index=4, number=4, serialized_options=None, - type=None), + type=None, + 
create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONTROL', index=5, number=5, serialized_options=None, - type=None), + type=None, + create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SIMULATION', index=6, number=6, serialized_options=None, - type=None), + type=None, + create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TRIAL', index=7, number=7, serialized_options=None, - type=None), + type=None, + create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='RUN', index=8, number=8, serialized_options=None, - type=None), + type=None, + create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, - serialized_start=794, - serialized_end=940, + serialized_start=817, + serialized_end=963, ) _sym_db.RegisterEnumDescriptor(_SERVECONDITION) @@ -93,6 +110,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ ], extensions=[ @@ -117,14 +135,15 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='status', full_name='graph.HealthStatus.status', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -148,14 +167,15 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='path', full_name='graph.PNLPath.path', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -179,14 +199,15 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='path', full_name='graph.ScriptPath.path', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -210,6 +231,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='compositions', full_name='graph.ScriptCompositions.compositions', index=0, @@ -217,7 +239,7 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -241,14 +263,15 @@ 
filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='name', full_name='graph.GraphName.name', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -272,21 +295,22 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='objectsJSON', full_name='graph.GraphJSON.objectsJSON', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='styleJSON', full_name='graph.GraphJSON.styleJSON', index=1, number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -310,14 +334,15 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='styleJSON', full_name='graph.StyleJSON.styleJSON', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -335,34 +360,74 @@ ) -_DOUBLEMATRIX = _descriptor.Descriptor( - name='DoubleMatrix', - full_name='graph.DoubleMatrix', +_NDARRAY = _descriptor.Descriptor( + name='ndArray', + full_name='graph.ndArray', filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( - name='rows', full_name='graph.DoubleMatrix.rows', index=0, + name='shape', full_name='graph.ndArray.shape', index=0, + number=1, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='data', full_name='graph.ndArray.data', index=1, + number=2, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=279, + serialized_end=317, +) + + +_MATRIX = _descriptor.Descriptor( + name='Matrix', + full_name='graph.Matrix', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='rows', full_name='graph.Matrix.rows', index=0, number=1, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( - name='cols', full_name='graph.DoubleMatrix.cols', index=1, + name='cols', full_name='graph.Matrix.cols', index=1, number=2, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( - name='data', full_name='graph.DoubleMatrix.data', index=2, + name='data', full_name='graph.Matrix.data', index=2, number=3, type=1, cpp_type=5, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b('\020\001'), file=DESCRIPTOR), + serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -375,8 +440,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=279, - serialized_end=339, + serialized_start=319, + serialized_end=373, ) @@ -386,42 +451,43 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='componentName', full_name='graph.Entry.componentName', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='parameterName', full_name='graph.Entry.parameterName', index=1, number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='time', full_name='graph.Entry.time', index=2, number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( 
name='context', full_name='graph.Entry.context', index=3, number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value', full_name='graph.Entry.value', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -434,8 +500,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=341, - serialized_end=461, + serialized_start=375, + serialized_end=490, ) @@ -445,28 +511,29 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='componentName', full_name='graph.ServePref.componentName', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='parameterName', full_name='graph.ServePref.parameterName', index=1, number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='condition', full_name='graph.ServePref.condition', index=2, number=3, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -479,8 +546,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=463, - serialized_end=562, + serialized_start=492, + serialized_end=591, ) @@ -490,6 +557,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='servePrefSet', full_name='graph.ServePrefs.servePrefSet', index=0, @@ -497,7 +565,7 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -510,8 +578,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=564, - serialized_end=616, + serialized_start=593, + serialized_end=645, ) @@ -521,35 +589,36 @@ filename=None, file=DESCRIPTOR, containing_type=None, + 
create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='key', full_name='graph.RunTimeParams.InputsEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value', full_name='graph.RunTimeParams.InputsEntry.value', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], - serialized_options=_b('8\001'), + serialized_options=b'8\001', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], - serialized_start=725, - serialized_end=791, + serialized_start=754, + serialized_end=814, ) _RUNTIMEPARAMS = _descriptor.Descriptor( @@ -558,6 +627,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='inputs', full_name='graph.RunTimeParams.inputs', index=0, @@ -565,14 +635,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='servePrefs', full_name='graph.RunTimeParams.servePrefs', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], @@ -585,14 +655,14 @@ extension_ranges=[], oneofs=[ ], - serialized_start=619, - serialized_end=791, + serialized_start=648, + serialized_end=814, ) -_ENTRY.fields_by_name['value'].message_type = _DOUBLEMATRIX +_ENTRY.fields_by_name['value'].message_type = _NDARRAY _SERVEPREF.fields_by_name['condition'].enum_type = _SERVECONDITION _SERVEPREFS.fields_by_name['servePrefSet'].message_type = _SERVEPREF -_RUNTIMEPARAMS_INPUTSENTRY.fields_by_name['value'].message_type = _DOUBLEMATRIX +_RUNTIMEPARAMS_INPUTSENTRY.fields_by_name['value'].message_type = _MATRIX _RUNTIMEPARAMS_INPUTSENTRY.containing_type = _RUNTIMEPARAMS _RUNTIMEPARAMS.fields_by_name['inputs'].message_type = _RUNTIMEPARAMS_INPUTSENTRY _RUNTIMEPARAMS.fields_by_name['servePrefs'].message_type = _SERVEPREFS @@ -604,7 +674,8 @@ DESCRIPTOR.message_types_by_name['GraphName'] = _GRAPHNAME DESCRIPTOR.message_types_by_name['GraphJSON'] = _GRAPHJSON DESCRIPTOR.message_types_by_name['StyleJSON'] = _STYLEJSON -DESCRIPTOR.message_types_by_name['DoubleMatrix'] = _DOUBLEMATRIX +DESCRIPTOR.message_types_by_name['ndArray'] = _NDARRAY +DESCRIPTOR.message_types_by_name['Matrix'] = _MATRIX DESCRIPTOR.message_types_by_name['Entry'] = _ENTRY DESCRIPTOR.message_types_by_name['ServePref'] = _SERVEPREF 
DESCRIPTOR.message_types_by_name['ServePrefs'] = _SERVEPREFS @@ -668,12 +739,19 @@ }) _sym_db.RegisterMessage(StyleJSON) -DoubleMatrix = _reflection.GeneratedProtocolMessageType('DoubleMatrix', (_message.Message,), { - 'DESCRIPTOR' : _DOUBLEMATRIX, +ndArray = _reflection.GeneratedProtocolMessageType('ndArray', (_message.Message,), { + 'DESCRIPTOR' : _NDARRAY, + '__module__' : 'graph_pb2' + # @@protoc_insertion_point(class_scope:graph.ndArray) + }) +_sym_db.RegisterMessage(ndArray) + +Matrix = _reflection.GeneratedProtocolMessageType('Matrix', (_message.Message,), { + 'DESCRIPTOR' : _MATRIX, '__module__' : 'graph_pb2' - # @@protoc_insertion_point(class_scope:graph.DoubleMatrix) + # @@protoc_insertion_point(class_scope:graph.Matrix) }) -_sym_db.RegisterMessage(DoubleMatrix) +_sym_db.RegisterMessage(Matrix) Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), { 'DESCRIPTOR' : _ENTRY, @@ -712,7 +790,7 @@ _sym_db.RegisterMessage(RunTimeParams.InputsEntry) -_DOUBLEMATRIX.fields_by_name['data']._options = None +_MATRIX.fields_by_name['data']._options = None _RUNTIMEPARAMS_INPUTSENTRY._options = None _SERVEGRAPH = _descriptor.ServiceDescriptor( @@ -721,8 +799,9 @@ file=DESCRIPTOR, index=0, serialized_options=None, - serialized_start=943, - serialized_end=1426, + create_key=_descriptor._internal_create_key, + serialized_start=966, + serialized_end=1449, methods=[ _descriptor.MethodDescriptor( name='LoadCustomPnl', @@ -732,6 +811,7 @@ input_type=_PNLPATH, output_type=_NULLARGUMENT, serialized_options=None, + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='LoadScript', @@ -741,6 +821,7 @@ input_type=_SCRIPTPATH, output_type=_SCRIPTCOMPOSITIONS, serialized_options=None, + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='LoadGraphics', @@ -750,6 +831,7 @@ input_type=_SCRIPTPATH, output_type=_STYLEJSON, serialized_options=None, + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='GetCompositions', @@ -759,6 +841,7 @@ input_type=_NULLARGUMENT, output_type=_SCRIPTCOMPOSITIONS, serialized_options=None, + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='GetJSON', @@ -768,6 +851,7 @@ input_type=_GRAPHNAME, output_type=_GRAPHJSON, serialized_options=None, + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='HealthCheck', @@ -777,6 +861,7 @@ input_type=_NULLARGUMENT, output_type=_HEALTHSTATUS, serialized_options=None, + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='UpdateStylesheet', @@ -786,6 +871,7 @@ input_type=_STYLEJSON, output_type=_NULLARGUMENT, serialized_options=None, + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='RunComposition', @@ -795,6 +881,7 @@ input_type=_RUNTIMEPARAMS, output_type=_ENTRY, serialized_options=None, + create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_SERVEGRAPH) diff --git a/psyneulink/core/rpc/graph_pb2_grpc.py b/psyneulink/core/rpc/graph_pb2_grpc.py new file mode 100644 index 00000000000..e223c74cd19 --- /dev/null +++ b/psyneulink/core/rpc/graph_pb2_grpc.py @@ -0,0 +1,299 @@ +# Princeton University licenses this file to You under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
You may obtain a copy of the License at: +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. + +# ********************************** PNL ProtoBuffer Service Definitions ***************************************** + + +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import graph_pb2 as graph__pb2 + + +class ServeGraphStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.LoadCustomPnl = channel.unary_unary( + '/graph.ServeGraph/LoadCustomPnl', + request_serializer=graph__pb2.PNLPath.SerializeToString, + response_deserializer=graph__pb2.NullArgument.FromString, + ) + self.LoadScript = channel.unary_unary( + '/graph.ServeGraph/LoadScript', + request_serializer=graph__pb2.ScriptPath.SerializeToString, + response_deserializer=graph__pb2.ScriptCompositions.FromString, + ) + self.LoadGraphics = channel.unary_unary( + '/graph.ServeGraph/LoadGraphics', + request_serializer=graph__pb2.ScriptPath.SerializeToString, + response_deserializer=graph__pb2.StyleJSON.FromString, + ) + self.GetCompositions = channel.unary_unary( + '/graph.ServeGraph/GetCompositions', + request_serializer=graph__pb2.NullArgument.SerializeToString, + response_deserializer=graph__pb2.ScriptCompositions.FromString, + ) + self.GetJSON = channel.unary_unary( + '/graph.ServeGraph/GetJSON', + request_serializer=graph__pb2.GraphName.SerializeToString, + response_deserializer=graph__pb2.GraphJSON.FromString, + ) + self.HealthCheck = channel.unary_unary( + '/graph.ServeGraph/HealthCheck', + request_serializer=graph__pb2.NullArgument.SerializeToString, + response_deserializer=graph__pb2.HealthStatus.FromString, + ) + self.UpdateStylesheet = channel.stream_unary( + '/graph.ServeGraph/UpdateStylesheet', + request_serializer=graph__pb2.StyleJSON.SerializeToString, + response_deserializer=graph__pb2.NullArgument.FromString, + ) + self.RunComposition = channel.unary_stream( + '/graph.ServeGraph/RunComposition', + request_serializer=graph__pb2.RunTimeParams.SerializeToString, + response_deserializer=graph__pb2.Entry.FromString, + ) + + +class ServeGraphServicer(object): + """Missing associated documentation comment in .proto file.""" + + def LoadCustomPnl(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def LoadScript(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def LoadGraphics(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetCompositions(self, request, context): + """Missing associated documentation comment 
in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetJSON(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def HealthCheck(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateStylesheet(self, request_iterator, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RunComposition(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ServeGraphServicer_to_server(servicer, server): + rpc_method_handlers = { + 'LoadCustomPnl': grpc.unary_unary_rpc_method_handler( + servicer.LoadCustomPnl, + request_deserializer=graph__pb2.PNLPath.FromString, + response_serializer=graph__pb2.NullArgument.SerializeToString, + ), + 'LoadScript': grpc.unary_unary_rpc_method_handler( + servicer.LoadScript, + request_deserializer=graph__pb2.ScriptPath.FromString, + response_serializer=graph__pb2.ScriptCompositions.SerializeToString, + ), + 'LoadGraphics': grpc.unary_unary_rpc_method_handler( + servicer.LoadGraphics, + request_deserializer=graph__pb2.ScriptPath.FromString, + response_serializer=graph__pb2.StyleJSON.SerializeToString, + ), + 'GetCompositions': grpc.unary_unary_rpc_method_handler( + servicer.GetCompositions, + request_deserializer=graph__pb2.NullArgument.FromString, + response_serializer=graph__pb2.ScriptCompositions.SerializeToString, + ), + 'GetJSON': grpc.unary_unary_rpc_method_handler( + servicer.GetJSON, + request_deserializer=graph__pb2.GraphName.FromString, + response_serializer=graph__pb2.GraphJSON.SerializeToString, + ), + 'HealthCheck': grpc.unary_unary_rpc_method_handler( + servicer.HealthCheck, + request_deserializer=graph__pb2.NullArgument.FromString, + response_serializer=graph__pb2.HealthStatus.SerializeToString, + ), + 'UpdateStylesheet': grpc.stream_unary_rpc_method_handler( + servicer.UpdateStylesheet, + request_deserializer=graph__pb2.StyleJSON.FromString, + response_serializer=graph__pb2.NullArgument.SerializeToString, + ), + 'RunComposition': grpc.unary_stream_rpc_method_handler( + servicer.RunComposition, + request_deserializer=graph__pb2.RunTimeParams.FromString, + response_serializer=graph__pb2.Entry.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'graph.ServeGraph', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class ServeGraph(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def LoadCustomPnl(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/graph.ServeGraph/LoadCustomPnl', + graph__pb2.PNLPath.SerializeToString, + graph__pb2.NullArgument.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def LoadScript(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/graph.ServeGraph/LoadScript', + graph__pb2.ScriptPath.SerializeToString, + graph__pb2.ScriptCompositions.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def LoadGraphics(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/graph.ServeGraph/LoadGraphics', + graph__pb2.ScriptPath.SerializeToString, + graph__pb2.StyleJSON.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetCompositions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/graph.ServeGraph/GetCompositions', + graph__pb2.NullArgument.SerializeToString, + graph__pb2.ScriptCompositions.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetJSON(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/graph.ServeGraph/GetJSON', + graph__pb2.GraphName.SerializeToString, + graph__pb2.GraphJSON.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def HealthCheck(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/graph.ServeGraph/HealthCheck', + graph__pb2.NullArgument.SerializeToString, + graph__pb2.HealthStatus.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def UpdateStylesheet(request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.stream_unary(request_iterator, target, '/graph.ServeGraph/UpdateStylesheet', + graph__pb2.StyleJSON.SerializeToString, + graph__pb2.NullArgument.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def RunComposition(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + 
compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream(request, target, '/graph.ServeGraph/RunComposition', + graph__pb2.RunTimeParams.SerializeToString, + graph__pb2.Entry.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) From 297e4452f8787e71ad3c9f092d86b848534b0e11 Mon Sep 17 00:00:00 2001 From: Katherine Mantel Date: Thu, 30 Jul 2020 22:22:29 -0400 Subject: [PATCH 06/24] Parameter: return the set value from set/_set --- psyneulink/core/globals/parameters.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/psyneulink/core/globals/parameters.py b/psyneulink/core/globals/parameters.py index 31584abb66f..5fd90fd994c 100644 --- a/psyneulink/core/globals/parameters.py +++ b/psyneulink/core/globals/parameters.py @@ -1175,7 +1175,7 @@ def set(self, value, context=None, override=False, skip_history=False, skip_log= if not override and self.read_only: raise ParameterError('Parameter \'{0}\' is read-only. Set at your own risk. Pass override=True to force set.'.format(self.name)) - self._set(self._parse(value), context, skip_history, skip_log, **kwargs) + return self._set(self._parse(value), context, skip_history, skip_log, **kwargs) def _set(self, value, context=None, skip_history=False, skip_log=False, **kwargs): if not self.stateful: @@ -1198,6 +1198,7 @@ def _set(self, value, context=None, skip_history=False, skip_log=False, **kwargs value = call_with_pruned_args(self.setter, value, context=context, **kwargs) self._set_value(value, execution_id=execution_id, context=context, skip_history=skip_history, skip_log=skip_log) + return value def _set_value(self, value, execution_id=None, context=None, skip_history=False, skip_log=False): # store history From 907ba6ec26d2efff53349aa07ce1e56d5cf7848d Mon Sep 17 00:00:00 2001 From: Katherine Mantel Date: Mon, 3 Aug 2020 22:04:39 -0400 Subject: [PATCH 07/24] Parameter: fix bug causing multi-level tuple for valid_types --- psyneulink/core/globals/parameters.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/psyneulink/core/globals/parameters.py b/psyneulink/core/globals/parameters.py index 5fd90fd994c..62ef3ef937b 100644 --- a/psyneulink/core/globals/parameters.py +++ b/psyneulink/core/globals/parameters.py @@ -768,7 +768,7 @@ def __init__( log = {} if valid_types is not None: - if isinstance(valid_types, list): + if isinstance(valid_types, (list, tuple)): valid_types = tuple(valid_types) else: valid_types = (valid_types, ) From eee20baaa6f7a94024127f2dd9aa913f97b8180d Mon Sep 17 00:00:00 2001 From: Katherine Mantel Date: Mon, 3 Aug 2020 21:20:09 -0400 Subject: [PATCH 08/24] LCAMechanism: handle specifying auto instead of self_excitation --- .../processing/transfer/lcamechanism.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/psyneulink/library/components/mechanisms/processing/transfer/lcamechanism.py b/psyneulink/library/components/mechanisms/processing/transfer/lcamechanism.py index ae65ac9b8e9..001a3e1a4f0 100644 --- a/psyneulink/library/components/mechanisms/processing/transfer/lcamechanism.py +++ b/psyneulink/library/components/mechanisms/processing/transfer/lcamechanism.py @@ -199,7 +199,7 @@ from psyneulink.core.components.mechanisms.processing.transfermechanism import _integrator_mode_setter from psyneulink.core.globals.keywords import \ CONVERGENCE, FUNCTION, GREATER_THAN_OR_EQUAL, INITIALIZER, LCA_MECHANISM, LEAK, LESS_THAN_OR_EQUAL, MATRIX, NAME, \ 
- NOISE, RATE, RESULT, TERMINATION_THRESHOLD, TERMINATION_MEASURE, TERMINATION_COMPARISION_OP, TIME_STEP_SIZE, VALUE, INVERSE_HOLLOW_MATRIX + NOISE, RATE, RESULT, TERMINATION_THRESHOLD, TERMINATION_MEASURE, TERMINATION_COMPARISION_OP, TIME_STEP_SIZE, VALUE, INVERSE_HOLLOW_MATRIX, AUTO from psyneulink.core.globals.parameters import Parameter from psyneulink.core.globals.context import ContextFlags from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set @@ -464,6 +464,23 @@ def __init__(self, matrix = None # MODIFIED 1/22/20 END + try: + if self_excitation is not None and kwargs[AUTO] is not None: + if kwargs[AUTO] != self_excitation: + raise LCAError( + 'Both self_excitation and auto are specified with' + ' different values. self_excitation is an alias of auto.' + ' Provided values: self_excitation = {0} , auto = {1}'.format( + self_excitation, + kwargs[AUTO] + ) + ) + elif kwargs[AUTO] is not None: + self_excitation = kwargs[AUTO] + del kwargs[AUTO] + except KeyError: + pass + if competition is not None and hetero is not None: if competition != -1.0 * hetero: raise LCAError( From e9d3eb395392702a03957bb435beb22fa1ddc2af Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Wed, 5 Aug 2020 14:40:44 -0400 Subject: [PATCH 09/24] Generalized RPC to ndArrays, added protobufs to repo --- psyneulink/core/globals/parameters.py | 10 +- .../globals/preferences/basepreferenceset.py | 21 +- .../core/globals/preferences/preferenceset.py | 6 + tests/log/test_rpc.py | 1046 +++++++++++++++++ 4 files changed, 1078 insertions(+), 5 deletions(-) create mode 100644 tests/log/test_rpc.py diff --git a/psyneulink/core/globals/parameters.py b/psyneulink/core/globals/parameters.py index 51a1ee47c11..8c3c55ead71 100644 --- a/psyneulink/core/globals/parameters.py +++ b/psyneulink/core/globals/parameters.py @@ -1256,7 +1256,7 @@ def _log_value(self, value, context=None): context_str = ContextFlags._get_context_string(ContextFlags.COMMAND_LINE) log_condition_satisfied = True - # standard logging + # standard loggingd else: if self.log_condition is None or self.log_condition is LogCondition.OFF: return @@ -1322,13 +1322,11 @@ def _deliver_value(self, value, context=None): execution_id = None else: execution_id = context.execution_id - print(self._get_root_owner()) - print(value) ##### ADD TO PIPELINE HERE ##### context.rpc_pipeline.put( Entry( componentName=self._get_root_owner().name, - parameterName=self.name, + parameterName=self._get_root_parameter().name, time=f'{time.run}:{time.trial}:{time.pass_}:{time.time_step}', context=execution_id, value=ndArray( @@ -1346,6 +1344,10 @@ def _get_root_owner(self): else: return owner + def _get_root_parameter(self): + root = self._get_root_owner() + return self._owner._owner if not self._owner._owner == root else self + def clear_log(self, contexts=NotImplemented): """ Clears the log of this Parameter for every context in **contexts** diff --git a/psyneulink/core/globals/preferences/basepreferenceset.py b/psyneulink/core/globals/preferences/basepreferenceset.py index 94d108dc193..64fa9b2535a 100644 --- a/psyneulink/core/globals/preferences/basepreferenceset.py +++ b/psyneulink/core/globals/preferences/basepreferenceset.py @@ -21,7 +21,7 @@ __all__ = [ 'CategoryDefaultPreferencesDict', 'ComponentDefaultPrefDicts', 'BasePreferenceSet', 'BasePreferenceSetPrefs', - 'CompositionDefaultPreferencesDict', + 'CompositionDefaultPreferencesDict', 'DELIVERY_PREF', 'InstanceDefaultPreferencesDict', 'is_pref', 'is_pref_set', 'CATEGORY_DEFAULT_PREFERENCES', 
'INSTANCE_DEFAULT_PREFERENCES', 'SUBTYPE_DEFAULT_PREFERENCES', 'TYPE_DEFAULT_PREFERENCES', 'LOG_PREF', 'PARAM_VALIDATION_PREF', @@ -32,6 +32,7 @@ # Keypaths for preferences: REPORT_OUTPUT_PREF = '_report_output_pref' LOG_PREF = '_log_pref' +DELIVERY_PREF = '_delivery_pref' PARAM_VALIDATION_PREF = '_param_validation_pref' VERBOSE_PREF = '_verbose_pref' RUNTIME_PARAM_MODULATION_PREF = '_runtime_param_modulation_pref' @@ -50,6 +51,7 @@ PARAM_VALIDATION_PREF, REPORT_OUTPUT_PREF, LOG_PREF, + DELIVERY_PREF, RUNTIME_PARAM_MODULATION_PREF } @@ -59,6 +61,7 @@ PARAM_VALIDATION_PREF: PreferenceEntry(True, PreferenceLevel.COMPOSITION), REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.COMPOSITION), LOG_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), + DELIVERY_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), RUNTIME_PARAM_MODULATION_PREF: PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.COMPOSITION)} CategoryDefaultPreferencesDict = { @@ -67,6 +70,7 @@ PARAM_VALIDATION_PREF: PreferenceEntry(True, PreferenceLevel.CATEGORY), REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.CATEGORY), LOG_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), + DELIVERY_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), RUNTIME_PARAM_MODULATION_PREF: PreferenceEntry(Modulation.MULTIPLY,PreferenceLevel.CATEGORY)} TypeDefaultPreferencesDict = { @@ -75,6 +79,7 @@ PARAM_VALIDATION_PREF: PreferenceEntry(True, PreferenceLevel.TYPE), REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.TYPE), LOG_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), # This gives control to Mechanisms + DELIVERY_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), RUNTIME_PARAM_MODULATION_PREF: PreferenceEntry(Modulation.ADD,PreferenceLevel.TYPE)} SubtypeDefaultPreferencesDict = { @@ -83,6 +88,7 @@ PARAM_VALIDATION_PREF: PreferenceEntry(True, PreferenceLevel.SUBTYPE), REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.SUBTYPE), LOG_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), # This gives control to Mechanisms + DELIVERY_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), RUNTIME_PARAM_MODULATION_PREF: PreferenceEntry(Modulation.ADD,PreferenceLevel.SUBTYPE)} InstanceDefaultPreferencesDict = { @@ -91,6 +97,7 @@ PARAM_VALIDATION_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE), REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE), LOG_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), # This gives control to Mechanisms + DELIVERY_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), RUNTIME_PARAM_MODULATION_PREF: PreferenceEntry(Modulation.OVERRIDE, PreferenceLevel.INSTANCE)} # Dict of default dicts @@ -169,6 +176,7 @@ class BasePreferenceSet(PreferenceSet): + PARAM_VALIDATION_PREF: validate parameters during execution + REPORT_OUTPUT_PREF: report object's ouptut during execution + LOG_PREF: record attribute data for the object during execution + + DELIVERY_PREF: add attribute data to context rpc pipeline for delivery to external applications + RUNTIME_PARAM_MODULATION_PREF: modulate parameters using runtime specification (in pathway) value that is either a PreferenceSet, valid setting for the preference, or a PreferenceLevel; defaults - level (PreferenceLevel): ?? 
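DELIVERY_PREF is consumed the same way as LOG_PREF: _assign_prefs (see the
preferenceset.py hunk below) copies its setting onto the value Parameter's
delivery_condition. A minimal sketch of opting a component in at construction,
mirroring test_delivery_initialization in tests/log/test_rpc.py below, and
assuming DELIVERY_PREF, PreferenceEntry, PreferenceLevel, and LogCondition are
exported on the pnl namespace as they are in that test:

    import psyneulink as pnl
    from queue import Queue

    # deliver this mechanism's value on every EXECUTION
    T = pnl.TransferMechanism(
        prefs={pnl.DELIVERY_PREF: pnl.PreferenceEntry(pnl.LogCondition.EXECUTION,
                                                      pnl.PreferenceLevel.INSTANCE)}
    )
    context = pnl.Context(rpc_pipeline=Queue())
    pnl.Composition(nodes=[T]).run([1], context=context)
    entry = context.rpc_pipeline.get()  # a graph_pb2.Entry message

Entries accumulate on the context's rpc_pipeline Queue and are drained with
get() until empty(), as the tests below do.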
@@ -220,6 +228,7 @@ class BasePreferenceSet(PreferenceSet): PARAM_VALIDATION_PREF: PreferenceEntry(True, PreferenceLevel.COMPOSITION), REPORT_OUTPUT_PREF: PreferenceEntry(True, PreferenceLevel.COMPOSITION), LOG_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), + DELIVERY_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY), RUNTIME_PARAM_MODULATION_PREF: PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.COMPOSITION) } @@ -376,6 +385,16 @@ def logPref(self): # recursively calls base (super) classes to get preference at specified level return self.get_pref_setting_for_level(LOG_PREF, self._log_pref.level)[0] + @property + def deliveryPref(self): + """Return setting of owner's deliveryPref at level specified in its PreferenceEntry.level + :param level: + :return: + """ + # If the level of the object is below the Preference level, + # recursively calls base (super) classes to get preference at specified level + return self.get_pref_setting_for_level(DELIVERY_PREF, self._delivery_pref.level)[0] + # # VERSION THAT USES OWNER'S logPref TO LIST ENTRIES TO BE RECORDED # @logPref.setter # def logPref(self, setting): diff --git a/psyneulink/core/globals/preferences/preferenceset.py b/psyneulink/core/globals/preferences/preferenceset.py index f5606a9d066..5cadb043533 100644 --- a/psyneulink/core/globals/preferences/preferenceset.py +++ b/psyneulink/core/globals/preferences/preferenceset.py @@ -848,3 +848,9 @@ def _assign_prefs(object, prefs, prefs_class:PreferenceSet): object.parameters.value.log_condition = object.prefs._log_pref.setting except AttributeError: pass + + try: + # assign delivery conditions from preferences + object.parameters.value.delivery_condition = object.prefs._delivery_pref.setting + except AttributeError: + pass \ No newline at end of file diff --git a/tests/log/test_rpc.py b/tests/log/test_rpc.py new file mode 100644 index 00000000000..af1247e4c12 --- /dev/null +++ b/tests/log/test_rpc.py @@ -0,0 +1,1046 @@ +import numpy as np +import psyneulink as pnl +import pytest + +from queue import Queue +from collections import OrderedDict + +import psyneulink.core.components.functions.transferfunctions +from psyneulink.core.globals.keywords import ALLOCATION_SAMPLES, PROJECTIONS + +class TestRPC: + + def test_transfer_mech(self): + + T_1 = pnl.TransferMechanism(name='log_test_T_1', size=2) + T_2 = pnl.TransferMechanism(name='log_test_T_2', size=2) + con_with_rpc_pipeline = pnl.Context(rpc_pipeline=Queue()) + PS = pnl.Composition(name='log_test_PS', pathways=[T_1, T_2]) + + T_1.set_log_conditions('mod_noise') + T_1.set_log_conditions(pnl.RESULT) + + T_1.set_delivery_conditions('mod_noise') + T_1.set_delivery_conditions(pnl.RESULT) + + PS.run(inputs={T_1: [0, 0]}, context=con_with_rpc_pipeline) + PS.run(inputs={T_1: [1, 2]}, context=con_with_rpc_pipeline) + PS.run(inputs={T_1: [3, 4]}, context=con_with_rpc_pipeline) + + # assert T_1.log.print_entries() == + # test_log.py::TestLog::test_log + # Log for log_test_T_1: + # Logged Item: Time Context Value + # 'RESULT' 0:0:0:0 'PROCESSING, COMPOSI... [0. 0.] + # 'RESULT' 1:0:0:0 'PROCESSING, COMPOSI... [1. 2.] + # 'RESULT' 2:0:0:0 'PROCESSING, COMPOSI... [3. 4.] + # 'mod_noise' 0:0:0:0 'PROCESSING, COMPOSI... [0.] + # 'mod_noise' 1:0:0:0 'PROCESSING, COMPOSI... [0.] + # 'mod_noise' 2:0:0:0 'PROCESSING, COMPOSI... [0.] 
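+        # Each item on the pipeline is a graph_pb2.Entry whose `value` field is
+        # an ndArray message (a shape list plus flattened data). A sketch of the
+        # round trip back to numpy, assuming `entry` is one Entry taken from the
+        # pipeline (the same pattern test_run_resets uses below):
+        #
+        #     arr = np.ndarray(shape=np.array(entry.value.shape),
+        #                      buffer=np.array(entry.value.data))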
+ + expected = [ + [[0], [1], [2]], + [[0.], [0.], [0.]], + [[0.], [0.], [0.]], + [[0.], [0.], [0.]], + [[0.0], [0.0], [0.0]], + [[0., 0.], [1., 2.], [3., 4.]], + ] + + actual = [] + pipeline = con_with_rpc_pipeline.rpc_pipeline + while not pipeline.empty(): actual.append(pipeline.get()) + t_1_entries = [i for i in actual if i.componentName == 'log_test_T_1'] + noise = [i for i in t_1_entries if i.parameterName == 'noise'] + results = [i for i in t_1_entries if i.parameterName == 'RESULT'] + assert all([ + noise[0].time == '0:0:0:0', noise[0].value.data == [0],results[0].value.data == [0.0, 0.0], + noise[1].time == '1:0:0:0', noise[1].value.data == [0],results[1].value.data == [1.0, 2.0], + noise[2].time == '2:0:0:0', noise[2].value.data == [0],results[2].value.data == [3.0, 4.0], + ]) + + def test_delivery_initialization(self): + con_with_rpc_pipeline = pnl.Context(rpc_pipeline=Queue()) + pipeline = con_with_rpc_pipeline.rpc_pipeline + T = pnl.TransferMechanism( + prefs={pnl.DELIVERY_PREF: pnl.PreferenceEntry(pnl.LogCondition.EXECUTION, pnl.PreferenceLevel.INSTANCE)} + ) + comp = pnl.Composition(name='comp', nodes=[T]) + comp.run([1], context=con_with_rpc_pipeline) + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) + assert all([ + len(actual) == 1, + actual[0].time == '0:0:0:0', + actual[0].value.shape == [1, 1], + actual[0].value.data == [1.0] + ]) + + def test_run_resets(self): + con_with_rpc_pipeline = pnl.Context(rpc_pipeline=Queue()) + pipeline = con_with_rpc_pipeline.rpc_pipeline + T1 = pnl.TransferMechanism(name='log_test_T1', + size=2) + T2 = pnl.TransferMechanism(name='log_test_T2', + size=2) + COMP = pnl.Composition(name='COMP', pathways=[T1, T2]) + T1.set_delivery_conditions('mod_slope') + T2.set_delivery_conditions('value') + COMP.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}, + context=con_with_rpc_pipeline) + pipeline = con_with_rpc_pipeline.rpc_pipeline + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) + assert all([i.context == 'COMP' for i in actual]) + assert np.allclose([ + np.ndarray(shape=np.array(actual[1].value.shape), buffer=np.array(actual[1].value.data)), + np.ndarray(shape=np.array(actual[3].value.shape), buffer=np.array(actual[3].value.data)), + np.ndarray(shape=np.array(actual[5].value.shape), buffer=np.array(actual[5].value.data)), + ], [[[1.0, 2.0]], [[3.0, 4.0]], [[5.0, 6.0]]]) + + COMP.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}, + context=con_with_rpc_pipeline) + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) + assert np.allclose([ + np.ndarray(shape=np.array(actual[1].value.shape), buffer=np.array(actual[1].value.data)), + np.ndarray(shape=np.array(actual[3].value.shape), buffer=np.array(actual[3].value.data)), + np.ndarray(shape=np.array(actual[5].value.shape), buffer=np.array(actual[5].value.data)), + ], [[[1.0, 2.0]], [[3.0, 4.0]], [[5.0, 6.0]]]) + + def test_log_dictionary_with_time(self): + + T1 = pnl.TransferMechanism(name='log_test_T1', + size=2) + T2 = pnl.TransferMechanism(name='log_test_T2', + function=psyneulink.core.components.functions.transferfunctions.Linear(slope=2.0), + size=2) + COMP = pnl.Composition(name='log_test_COMP', pathways=[T1, T2]) + + assert T1.loggable_items == { + 'InputPort-0': 'OFF', + 'RESULT': 'OFF', + 'clip': 'OFF', + 'termination_threshold': 'OFF', + 'execute_until_finished': 'OFF', + 'func_additive_param': 'OFF', + 'func_bounds': 'OFF', + 'func_execute_until_finished': 'OFF', + 'func_has_initializers': 'OFF', + 
'func_intercept': 'OFF', + 'func_max_executions_before_finished': 'OFF', + 'func_multiplicative_param': 'OFF', + 'func_num_executions_before_finished': 'OFF', + 'func_slope': 'OFF', + 'func_value': 'OFF', + 'func_variable': 'OFF', + 'has_initializers': 'OFF', + 'initial_value': 'OFF', + 'integration_rate': 'OFF', + 'integrator_function_value': 'OFF', + 'integrator_mode': 'OFF', + 'max_executions_before_finished': 'OFF', + 'mod_integration_rate': 'OFF', + 'mod_intercept': 'OFF', + 'mod_noise': 'OFF', + 'mod_slope': 'OFF', + 'noise': 'OFF', + 'num_executions_before_finished': 'OFF', + 'termination_measure_value': 'OFF', + 'value': 'OFF', + 'variable': 'OFF' + } + assert T2.loggable_items == { + 'InputPort-0': 'OFF', + 'RESULT': 'OFF', + 'clip': 'OFF', + 'termination_threshold': 'OFF', + 'execute_until_finished': 'OFF', + 'func_additive_param': 'OFF', + 'func_bounds': 'OFF', + 'func_execute_until_finished': 'OFF', + 'func_has_initializers': 'OFF', + 'func_intercept': 'OFF', + 'func_max_executions_before_finished': 'OFF', + 'func_multiplicative_param': 'OFF', + 'func_num_executions_before_finished': 'OFF', + 'func_slope': 'OFF', + 'func_value': 'OFF', + 'func_variable': 'OFF', + 'has_initializers': 'OFF', + 'initial_value': 'OFF', + 'integration_rate': 'OFF', + 'integrator_function_value': 'OFF', + 'integrator_mode': 'OFF', + 'max_executions_before_finished': 'OFF', + 'mod_integration_rate': 'OFF', + 'mod_intercept': 'OFF', + 'mod_noise': 'OFF', + 'mod_slope': 'OFF', + 'noise': 'OFF', + 'num_executions_before_finished': 'OFF', + 'termination_measure_value': 'OFF', + 'value': 'OFF', + 'variable': 'OFF' + } + + T1.set_log_conditions('mod_slope') + T1.set_log_conditions(pnl.RESULT) + T1.set_log_conditions(pnl.VALUE) + + assert T1.loggable_items == { + 'execute_until_finished': 'OFF', + 'InputPort-0': 'OFF', + 'RESULT': 'EXECUTION', + 'clip': 'OFF', + 'termination_threshold': 'OFF', + 'func_additive_param': 'OFF', + 'func_bounds': 'OFF', + 'func_execute_until_finished': 'OFF', + 'func_has_initializers': 'OFF', + 'func_intercept': 'OFF', + 'func_max_executions_before_finished': 'OFF', + 'func_multiplicative_param': 'OFF', + 'func_num_executions_before_finished': 'OFF', + 'func_slope': 'OFF', + 'func_value': 'OFF', + 'func_variable': 'OFF', + 'has_initializers': 'OFF', + 'initial_value': 'OFF', + 'integration_rate': 'OFF', + 'integrator_function_value': 'OFF', + 'integrator_mode': 'OFF', + 'max_executions_before_finished': 'OFF', + 'mod_integration_rate': 'OFF', + 'mod_intercept': 'OFF', + 'mod_noise': 'OFF', + 'mod_slope': 'EXECUTION', + 'noise': 'OFF', + 'num_executions_before_finished': 'OFF', + 'termination_measure_value': 'OFF', + 'value': 'EXECUTION', + 'variable': 'OFF' + } + + T2.set_log_conditions('mod_slope') + T2.set_log_conditions(pnl.RESULT) + T2.set_log_conditions(pnl.VALUE) + + assert T2.loggable_items == { + 'InputPort-0': 'OFF', + 'RESULT': 'EXECUTION', + 'clip': 'OFF', + 'termination_threshold': 'OFF', + 'execute_until_finished': 'OFF', + 'func_additive_param': 'OFF', + 'func_bounds': 'OFF', + 'func_execute_until_finished': 'OFF', + 'func_has_initializers': 'OFF', + 'func_intercept': 'OFF', + 'func_max_executions_before_finished': 'OFF', + 'func_multiplicative_param': 'OFF', + 'func_num_executions_before_finished': 'OFF', + 'func_slope': 'OFF', + 'func_value': 'OFF', + 'func_variable': 'OFF', + 'has_initializers': 'OFF', + 'initial_value': 'OFF', + 'integration_rate': 'OFF', + 'integrator_function_value': 'OFF', + 'integrator_mode': 'OFF', + 'max_executions_before_finished': 'OFF', 
+ 'mod_integration_rate': 'OFF', + 'mod_intercept': 'OFF', + 'mod_noise': 'OFF', + 'mod_slope': 'EXECUTION', + 'noise': 'OFF', + 'num_executions_before_finished': 'OFF', + 'termination_measure_value': 'OFF', + 'value': 'EXECUTION', + 'variable': 'OFF' + } + + # RUN ZERO | TRIALS ZERO, ONE, TWO ---------------------------------- + + COMP.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}) + + assert T1.logged_items == {'RESULT': 'EXECUTION', + 'mod_slope': 'EXECUTION', + 'value': 'EXECUTION'} + assert T2.logged_items == {'RESULT': 'EXECUTION', + 'mod_slope': 'EXECUTION', + 'value': 'EXECUTION'} + + # T1 log after zero-th run ------------------------------------------- + + log_dict_T1 = T1.log.nparray_dictionary(entries=['value', 'mod_slope', 'RESULT']) + + expected_run_T1 = [[0], [0], [0]] + expected_trial_T1 = [[0], [1], [2]] + expected_time_step_T1 = [[0], [0], [0]] + expected_values_T1 = [[[1.0, 2.0]], [[3.0, 4.0]], [[5.0, 6.0]]] + expected_slopes_T1 = [[1.0], [1.0], [1.0]] + expected_results_T1 = [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]] + + assert list(log_dict_T1.keys()) == [COMP.default_execution_id] + sys_log_dict = log_dict_T1[COMP.default_execution_id] + + assert np.allclose(expected_run_T1, sys_log_dict['Run']) + assert np.allclose(expected_trial_T1, sys_log_dict['Trial']) + assert np.allclose(expected_time_step_T1, sys_log_dict['Time_step']) + assert np.allclose(expected_values_T1, sys_log_dict['value']) + assert np.allclose(expected_slopes_T1, sys_log_dict['mod_slope']) + assert np.allclose(expected_results_T1, sys_log_dict['RESULT']) + + # T2 log after zero-th run -------------------------------------------- + + log_dict_T2 = T2.log.nparray_dictionary(entries=['value', 'mod_slope', 'RESULT']) + + expected_run_T2 = [[0], [0], [0]] + expected_trial_T2 = [[0], [1], [2]] + expected_time_step_T2 = [[1], [1], [1]] + expected_values_T2 = [[[2.0, 4.0]], [[6.0, 8.0]], [[10.0, 12.0]]] + expected_slopes_T2 = [[2.0], [2.0], [2.0]] + expected_results_T2 = [[2.0, 4.0], [6.0, 8.0], [10.0, 12.0]] + + assert list(log_dict_T2.keys()) == [COMP.default_execution_id] + sys_log_dict = log_dict_T2[COMP.default_execution_id] + + assert np.allclose(expected_run_T2, sys_log_dict['Run']) + assert np.allclose(expected_trial_T2, sys_log_dict['Trial']) + assert np.allclose(expected_time_step_T2, sys_log_dict['Time_step']) + assert np.allclose(expected_values_T2, sys_log_dict['value']) + assert np.allclose(expected_slopes_T2, sys_log_dict['mod_slope']) + assert np.allclose(expected_results_T2, sys_log_dict['RESULT']) + + # RUN ONE | TRIALS ZERO, ONE, TWO ------------------------------------- + + COMP.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}) + + # T1 log after first run ------------------------------------------- + + log_dict_T1 = T1.log.nparray_dictionary(entries=['value', 'mod_slope', 'RESULT']) + + assert list(log_dict_T1.keys()) == [COMP.default_execution_id] + sys_log_dict = log_dict_T1[COMP.default_execution_id] + + # expected_run_T1_2 = [[1], [1], [1]] + expected_run_T1_2 = [[0], [0], [0]] + expected_run_T1 + expected_trial_T1_2 = [[0], [1], [2]] + expected_trial_T1 + expected_time_step_T1_2 = [[0], [0], [0]] + expected_time_step_T1 + expected_values_T1_2 = expected_values_T1 + expected_values_T1 + expected_slopes_T1_2 = expected_slopes_T1 + expected_slopes_T1 + expected_results_T1_2 = expected_results_T1 + expected_results_T1 + + # assert np.allclose(expected_run_T1_2, sys_log_dict['Run']) + # assert np.allclose(expected_trial_T1_2, sys_log_dict['Trial']) + # assert 
np.allclose(expected_time_step_T1_2, sys_log_dict['Time_step']) + assert np.allclose(expected_values_T1_2, sys_log_dict['value']) + assert np.allclose(expected_slopes_T1_2, sys_log_dict['mod_slope']) + assert np.allclose(expected_results_T1_2, sys_log_dict['RESULT']) + + # T2 log after first run ------------------------------------------- + + log_dict_T2_2 = T2.log.nparray_dictionary(entries=['value', 'mod_slope', 'RESULT']) + + assert list(log_dict_T2_2.keys()) == [COMP.default_execution_id] + sys_log_dict = log_dict_T2_2[COMP.default_execution_id] + + expected_run_T2_2 = [[0], [0], [0]] + expected_run_T2 + expected_trial_T2_2 = [[0], [1], [2]] + expected_trial_T2 + expected_time_step_T2_2 = [[1], [1], [1]] + expected_time_step_T2 + expected_values_T2_2 = [[[2.0, 4.0]], [[6.0, 8.0]], [[10.0, 12.0]]] + expected_values_T2 + expected_slopes_T2_2 = [[2.0], [2.0], [2.0]] + expected_slopes_T2 + expected_results_T2_2 = [[2.0, 4.0], [6.0, 8.0], [10.0, 12.0]] + expected_results_T2 + + # assert np.allclose(expected_run_T2_2, sys_log_dict['Run']) + # assert np.allclose(expected_trial_T2_2, sys_log_dict['Trial']) + # assert np.allclose(expected_time_step_T2_2, sys_log_dict['Time_step']) + assert np.allclose(expected_values_T2_2, sys_log_dict['value']) + assert np.allclose(expected_slopes_T2_2, sys_log_dict['mod_slope']) + assert np.allclose(expected_results_T2_2, sys_log_dict['RESULT']) + + def test_log_dictionary_with_scheduler(self): + T1 = pnl.TransferMechanism(name='log_test_T1', + integrator_mode=True, + integration_rate=0.5) + T2 = pnl.TransferMechanism(name='log_test_T2', + function=psyneulink.core.components.functions.transferfunctions.Linear(slope=6.0)) + COMP = pnl.Composition(name='log_test_COMP', pathways=[T1, T2]) + + def pass_threshold(mech, thresh): + results = mech.output_ports[0].parameters.value.get(COMP) + for val in results: + if abs(val) >= thresh: + return True + return False + + terminate_trial = { + pnl.TimeScale.TRIAL: pnl.While(pass_threshold, T2, 5.0) + } + + T1.set_log_conditions(pnl.VALUE) + T1.set_log_conditions('mod_slope') + T1.set_log_conditions(pnl.RESULT) + T2.set_log_conditions(pnl.VALUE) + T2.set_log_conditions('mod_slope') + + COMP.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial) + + log_dict_T1 = T1.log.nparray_dictionary(entries=['RESULT', 'mod_slope', 'value']) + log_dict_T2 = T2.log.nparray_dictionary(entries=['value', 'mod_slope']) + + assert list(log_dict_T1.keys()) == [COMP.default_execution_id] + sys_log_dict = log_dict_T1[COMP.default_execution_id] + + # Check order of keys (must match order of specification) + assert list(sys_log_dict.keys()) == ['Run', 'Trial', 'Pass', 'Time_step', 'RESULT', 'mod_slope', 'value'] + assert list(log_dict_T2[COMP.default_execution_id].keys()) == ['Run', 'Trial', 'Pass', 'Time_step', 'value', + 'mod_slope'] + + # Check values T1 + assert np.allclose(sys_log_dict["Run"], [[0], [0], [0]]) + assert np.allclose(sys_log_dict["Trial"], [[0], [0], [0]]) + assert np.allclose(sys_log_dict["Time_step"], [[0], [0], [0]]) + assert np.allclose(sys_log_dict["RESULT"], [[0.5], [0.75], [0.875]]) + assert np.allclose(sys_log_dict["value"], [[[0.5]], [[0.75]], [[0.875]]]) + assert np.allclose(sys_log_dict["mod_slope"], [[1], [1], [1]]) + + # Check values T2 + assert np.allclose(log_dict_T2[COMP.default_execution_id]["Run"], [[0], [0], [0]]) + assert np.allclose(log_dict_T2[COMP.default_execution_id]["Trial"], [[0], [0], [0]]) + assert np.allclose(log_dict_T2[COMP.default_execution_id]["Time_step"], [[1], [1], [1]]) + assert 
np.allclose(log_dict_T2[COMP.default_execution_id]["value"], [[[3]], [[4.5]], [[5.25]]]) + assert np.allclose(log_dict_T2[COMP.default_execution_id]["mod_slope"], [[6], [6], [6]]) + + def test_log_array_with_scheduler(self): + T1 = pnl.TransferMechanism(name='log_test_T1', + integrator_mode=True, + integration_rate=0.5) + T2 = pnl.TransferMechanism(name='log_test_T2', + function=psyneulink.core.components.functions.transferfunctions.Linear(slope=6.0)) + COMP = pnl.Composition(name='log_test_COMP', pathways=[T1, T2]) + + def pass_threshold(mech, thresh): + results = mech.output_ports[0].parameters.value.get(COMP) + for val in results: + if abs(val) >= thresh: + return True + return False + + terminate_trial = { + pnl.TimeScale.TRIAL: pnl.While(pass_threshold, T2, 5.0) + } + + T1.set_log_conditions(pnl.VALUE) + T1.set_log_conditions('mod_slope') + T1.set_log_conditions(pnl.RESULT) + T2.set_log_conditions(pnl.VALUE) + T2.set_log_conditions('mod_slope') + + COMP.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial) + + log_array_T1 = T1.log.nparray(entries=['RESULT', 'mod_slope', 'value']) + log_array_T2 = T2.log.nparray(entries=['value', 'mod_slope']) + + context_results = [pnl.Log.context_header, COMP.default_execution_id] + # Check values + run_results = [["Run"], [0], [0], [0]] + trial_results = [["Trial"], [0], [0], [0]] + pass_results = [["Pass"], [0], [1], [2]] + time_step_results = [["Time_step"], [0], [0], [0]] + results_results = ["RESULT", [0.5], [0.75], [0.875]] + slope_results = ["mod_slope", [1], [1], [1]] + value_results = ["value", [[0.5]], [[0.75]], [[0.875]]] + + for i in range(2): + assert log_array_T1[0][i] == context_results[i] + + assert log_array_T1[1][0] == pnl.Log.data_header + data_array = log_array_T1[1][1] + j = 0 + for i in range(4): + assert data_array[j][i] == run_results[i] + assert data_array[j + 1][i] == trial_results[i] + assert data_array[j + 2][i] == pass_results[i] + assert data_array[j + 3][i] == time_step_results[i] + assert data_array[j + 4][i] == results_results[i] + assert data_array[j + 5][i] == slope_results[i] + assert data_array[j + 6][i] == value_results[i] + + # Check values + run_results = [["Run"], [0], [0], [0]] + trial_results = [["Trial"], [0], [0], [0]] + pass_results = [["Pass"], [0], [1], [2]] + time_step_results = [["Time_step"], [1], [1], [1]] + value_results = ["value", [[3]], [[4.5]], [[5.25]]] + slope_results = ["mod_slope", [6], [6], [6]] + + for i in range(2): + assert log_array_T1[0][i] == context_results[i] + + assert log_array_T2[1][0] == pnl.Log.data_header + data_array = log_array_T2[1][1] + j = 0 + for i in range(4): + assert data_array[j][i] == run_results[i] + assert data_array[j + 1][i] == trial_results[i] + assert data_array[j + 2][i] == pass_results[i] + assert data_array[j + 3][i] == time_step_results[i] + assert data_array[j + 4][i] == value_results[i] + assert data_array[j + 5][i] == slope_results[i] + + def test_log_dictionary_with_scheduler_many_time_step_increments(self): + T1 = pnl.TransferMechanism(name='log_test_T1', + integrator_mode=True, + integration_rate=0.05) + COMP = pnl.Composition(name='log_test_COMP', pathways=[T1]) + + def pass_threshold(mech, thresh): + results = mech.output_ports[0].parameters.value.get(COMP) + for val in results: + if abs(val) >= thresh: + return True + return False + + terminate_trial = { + pnl.TimeScale.TRIAL: pnl.While(pass_threshold, T1, 0.95) + } + + T1.set_log_conditions(pnl.VALUE) + + COMP.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial) + + 
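# With integration_rate=0.05 and a constant input of 1.0, the integrated value
+        # after n passes is 1 - 0.95 ** n, which first crosses the 0.95 threshold at
+        # n == 59; the entry counts asserted below follow from that.
+        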
log_dict_T1 = T1.log.nparray_dictionary(entries=['value']) + + assert list(log_dict_T1.keys()) == [COMP.default_execution_id] + sys_log_dict = log_dict_T1[COMP.default_execution_id] + + # Check order of keys (must match order of specification) + assert list(sys_log_dict.keys()) == ['Run', 'Trial', 'Pass', 'Time_step', 'value'] + + # # Check values T1 + assert len(sys_log_dict["Run"]) == 59 + assert np.allclose(sys_log_dict["Pass"][30], 30) + assert np.allclose(sys_log_dict["Time_step"][30], 0) + assert abs(sys_log_dict["value"][58]) >= 0.95 + assert abs(sys_log_dict["value"][57]) < 0.95 + + def test_log_csv_multiple_contexts(self): + A = pnl.TransferMechanism(name='A') + B = pnl.TransferMechanism(name='B') + C = pnl.TransferMechanism(name='C') + + C.set_log_conditions(pnl.VALUE) + + X = pnl.Composition(name='comp X') + Y = pnl.Composition(name='comp Y') + + X.add_linear_processing_pathway([A, C]) + Y.add_linear_processing_pathway([B, C]) + + # running with manual contexts for consistent output + # because output is sorted by context + X.run(inputs={A: 1}, context='comp X') + Y.run(inputs={B: 2}, context='comp Y') + + expected_str = "'Execution Context', 'Data'\n" \ + + "'comp X', 'Run', 'Trial', 'Pass', 'Time_step', 'value'\n" \ + + ", '0', '0', '0', '1', '1.0'\n" \ + + "'comp Y', 'Run', 'Trial', 'Pass', 'Time_step', 'value'\n" \ + + ", '0', '0', '0', '1', '2.0'\n" + assert C.log.csv() == expected_str + + print() + print() + + @pytest.mark.parametrize( + 'scheduler_conditions, multi_run', [ + (False, False), + (True, False), + (True, True) + ] + ) + def test_log_multi_calls_single_timestep(self, scheduler_conditions, multi_run): + lca = pnl.LCAMechanism( + size=2, + leak=0.5, + threshold=0.515, + reset_stateful_function_when=pnl.AtTrialStart() + ) + lca.set_log_conditions(pnl.VALUE) + m0 = pnl.ProcessingMechanism( + size=2 + ) + comp = pnl.Composition() + comp.add_linear_processing_pathway([m0, lca]) + if scheduler_conditions: + comp.scheduler.add_condition(lca, pnl.AfterNCalls(m0, 2)) + comp.run(inputs={m0: [[1, 0], [1, 0], [1, 0]]}) + log_dict = lca.log.nparray_dictionary()['Composition-0'] + assert log_dict['Run'] == [[0], [0], [0]] + assert log_dict['Trial'] == [[0], [1], [2]] + assert log_dict['Pass'] == [[1], [1], [1]] if scheduler_conditions else [[0], [0], [0]] + assert log_dict['Time_step'] == [[1], [1], [1]] + # floats in value, so use np.allclose + assert np.allclose(log_dict['value'], [[[0.52466739, 0.47533261]] * 3]) + if multi_run: + comp.run(inputs={m0: [[1, 0], [1, 0], [1, 0]]}) + log_dict = lca.log.nparray_dictionary()['Composition-0'] + assert log_dict['Run'] == [[0], [0], [0], [1], [1], [1]] + assert np.allclose(log_dict['value'], [[[0.52466739, 0.47533261]] * 6]) + + +class TestClearLog: + + def test_clear_log(self): + + # Create System + T_1 = pnl.TransferMechanism(name='log_test_T_1', size=2) + T_2 = pnl.TransferMechanism(name='log_test_T_2', size=2) + COMP = pnl.Composition(name="log_test_COMP", pathways=[T_1, T_2]) + PJ = T_2.path_afferents[0] + + # Set log conditions on each component + T_1.set_log_conditions('mod_noise') + T_1.set_log_conditions(pnl.RESULT) + T_2.set_log_conditions('mod_slope') + T_2.set_log_conditions(pnl.RESULT) + PJ.set_log_conditions('mod_matrix') + + # Run system + COMP.run(inputs={T_1: [1.0, 1.0]}) + + # Create log dict for each component + log_dict_T_1 = T_1.log.nparray_dictionary() + log_dict_T_2 = T_2.log.nparray_dictionary() + log_dict_PJ = PJ.log.nparray_dictionary() + + assert list(log_dict_T_1.keys()) == [COMP.default_execution_id] + 
assert list(log_dict_T_2.keys()) == [COMP.default_execution_id] + assert list(log_dict_PJ.keys()) == [COMP.default_execution_id] + + # Confirm that values were logged correctly + sys_log_dict = log_dict_T_1[COMP.default_execution_id] + assert np.allclose(sys_log_dict['RESULT'], np.array([[1.0, 1.0]])) + assert np.allclose(sys_log_dict['mod_noise'], np.array([[0.0]])) + + sys_log_dict = log_dict_T_2[COMP.default_execution_id] + assert np.allclose(sys_log_dict['RESULT'], np.array([[1.0, 1.0]])) + assert np.allclose(sys_log_dict['mod_slope'], np.array([[1.0]])) + + sys_log_dict = log_dict_PJ[COMP.default_execution_id] + assert np.allclose(sys_log_dict['mod_matrix'], np.array([[1.0, 0.0], [0.0, 1.0]])) + + # KDM 10/3/18: below was changed to delete_entry=True because it's not implemented in Parameter logs, + # and it's not clear this option results in much difference than just deleting the entries and + # is stated to be included only for future use + # Clear T_1s log and DO NOT delete entries + T_1.log.clear_entries(delete_entry=True) + + # Clear T_2s log and delete entries + T_2.log.clear_entries(delete_entry=True) + + # Create new log dict for each component + log_dict_T_1 = T_1.log.nparray_dictionary() + log_dict_T_2 = T_2.log.nparray_dictionary() + log_dict_PJ = PJ.log.nparray_dictionary() + + # Confirm that T_1 log values were removed + assert log_dict_T_1 == OrderedDict() + + # Confirm that T_2 log values were removed and dictionary entries were destroyed + assert log_dict_T_2 == OrderedDict() + + # Confirm that PJ log values were not affected by changes to T_1 and T_2's logs + assert np.allclose(log_dict_PJ[COMP.default_execution_id]['mod_matrix'], np.array([[1.0, 0.0], [0.0, 1.0]])) + + # Run system again + COMP.run(inputs={T_1: [2.0, 2.0]}) + + # Create new log dict for each component + log_dict_T_1 = T_1.log.nparray_dictionary() + log_dict_T_2 = T_2.log.nparray_dictionary() + log_dict_PJ = PJ.log.nparray_dictionary() + + # Confirm that T_1 log values only include most recent run + sys_log_dict = log_dict_T_1[COMP.default_execution_id] + assert np.allclose(sys_log_dict['RESULT'], np.array([[2.0, 2.0]])) + assert np.allclose(sys_log_dict['mod_noise'], np.array([[0.0]])) + # NOTE: "Run" value still incremented, but only the most recent one is returned (# runs does not reset to zero) + assert np.allclose(sys_log_dict['Run'], np.array([[1]])) + + # Confirm that T_2 log values only include most recent run + sys_log_dict = log_dict_T_2[COMP.default_execution_id] + assert np.allclose(sys_log_dict['RESULT'], np.array([[2.0, 2.0]])) + assert np.allclose(sys_log_dict['mod_slope'], np.array([[1.0]])) + assert np.allclose(sys_log_dict['Run'], np.array([[1]])) + + # Confirm that PJ log values include all runs + sys_log_dict = log_dict_PJ[COMP.default_execution_id] + assert np.allclose(sys_log_dict['mod_matrix'], np.array([[[1.0, 0.0], [0.0, 1.0]], [[1.0, 0.0], [0.0, 1.0]]])) + assert np.allclose(sys_log_dict['Run'], np.array([[0], [1]])) + + @pytest.mark.parametrize( + 'insertion_eids, deletion_eids, log_is_empty', + [ + (['context'], 'context', True), # fails if string handling not correct due to str being Iterable + (['context'], ['context'], True), + ] + ) + def test_clear_log_arguments(self, insertion_eids, deletion_eids, log_is_empty): + t = pnl.TransferMechanism() + c = pnl.Composition() + c.add_node(t) + + t.parameters.value.log_condition = True + + for eid in insertion_eids: + c.run({t: 0}, context=eid) + + t.parameters.value.clear_log(deletion_eids) + + if log_is_empty: + assert 
len(t.parameters.value.log) == 0 + else: + assert len(t.parameters.value.log) != 0 + + +class TestFiltering: + + @pytest.fixture(scope='module') + def node_logged_in_simulation(self): + Input = pnl.TransferMechanism(name='Input') + reward = pnl.TransferMechanism( + output_ports=[pnl.RESULT, pnl.MEAN, pnl.VARIANCE], name='reward') + Decision = pnl.DDM( + function=pnl.DriftDiffusionAnalytical( + drift_rate=(1.0, pnl.ControlProjection( + function=pnl.Linear, + control_signal_params={pnl.ALLOCATION_SAMPLES: np.arange(0.1, 1.01, 0.3)}) + ), + threshold=(1.0, pnl.ControlProjection( + function=pnl.Linear, + control_signal_params={pnl.ALLOCATION_SAMPLES: np.arange(0.1, 1.01, 0.3)}) + ), + noise=0.5, + starting_point=0, + t0=0.45 + ), + output_ports=[ + pnl.DECISION_VARIABLE, + pnl.RESPONSE_TIME, + pnl.PROBABILITY_UPPER_THRESHOLD], + name='Decision' + ) + + comp = pnl.Composition(name="evc", retain_old_simulation_data=True) + comp.add_node(reward, required_roles=[pnl.NodeRole.OUTPUT]) + comp.add_node(Decision, required_roles=[pnl.NodeRole.OUTPUT]) + task_execution_pathway = [Input, pnl.IDENTITY_MATRIX, Decision] + comp.add_linear_processing_pathway(task_execution_pathway) + + comp.add_controller( + controller=pnl.OptimizationControlMechanism( + agent_rep=comp, + features=[Input.input_port, reward.input_port], + feature_function=pnl.AdaptiveIntegrator(rate=0.5), + objective_mechanism=pnl.ObjectiveMechanism( + function=pnl.LinearCombination(operation=pnl.PRODUCT), + monitor=[ + reward, + Decision.output_ports[pnl.PROBABILITY_UPPER_THRESHOLD], + (Decision.output_ports[pnl.RESPONSE_TIME], -1, 1) + ] + ), + function=pnl.GridSearch(), + control_signals=[ + {PROJECTIONS: ("drift_rate", Decision), + ALLOCATION_SAMPLES: np.arange(0.1, 1.01, 0.3)}, + {PROJECTIONS: ("threshold", Decision), + ALLOCATION_SAMPLES: np.arange(0.1, 1.01, 0.3)} + ] + ) + ) + + comp.enable_controller = True + + stim_list_dict = { + Input: [0.5, 0.123], + reward: [20, 20] + } + + Input.parameters.value.log_condition = True + + comp.run(inputs=stim_list_dict) + + return Input + + def test_node_has_logged_sims(self, node_logged_in_simulation): + for logged_value, eid_dict in node_logged_in_simulation.log.logged_entries.items(): + for eid in eid_dict: + if pnl.EID_SIMULATION in str(eid): + return + else: + assert False, 'No simulation execution_id found in log' + + def test_nparray(self, node_logged_in_simulation): + for eid in node_logged_in_simulation.log.nparray(exclude_sims=True)[0]: + assert pnl.EID_SIMULATION not in str(eid) + + def test_nparray_dictionary(self, node_logged_in_simulation): + for eid in node_logged_in_simulation.log.nparray_dictionary(exclude_sims=True): + assert pnl.EID_SIMULATION not in str(eid) + + def test_csv(self, node_logged_in_simulation): + full_csv = node_logged_in_simulation.log.csv(exclude_sims=True) + + # get each row, excluding header + for row in full_csv.split('\n')[1:]: + # if present in a row, context will be in the first cell + assert pnl.EID_SIMULATION not in row.replace("'", '').split(',')[0] + + +class TestFullModels: + def test_multilayer(self): + + input_layer = pnl.TransferMechanism(name='input_layer', + function=pnl.Logistic, + size=2) + + hidden_layer_1 = pnl.TransferMechanism(name='hidden_layer_1', + function=pnl.Logistic, + size=5) + + hidden_layer_2 = pnl.TransferMechanism(name='hidden_layer_2', + function=pnl.Logistic, + size=4) + + output_layer = pnl.TransferMechanism(name='output_layer', + function=pnl.Logistic, + size=3) + + input_weights_matrix = (np.arange(2 * 5).reshape((2, 5)) + 
1) / (2 * 5) + middle_weights_matrix = (np.arange(5 * 4).reshape((5, 4)) + 1) / (5 * 4) + output_weights_matrix = (np.arange(4 * 3).reshape((4, 3)) + 1) / (4 * 3) + + # This projection will be used by the process below by referencing it in the process' pathway; + # note: sender and receiver args don't need to be specified + input_weights = pnl.MappingProjection( + name='Input Weights', + matrix=input_weights_matrix, + ) + + # This projection will be used by the process below by assigning its sender and receiver args + # to mechanisms in the pathway + middle_weights = pnl.MappingProjection( + name='Middle Weights', + sender=hidden_layer_1, + receiver=hidden_layer_2, + matrix=middle_weights_matrix, + ) + + # Commented lines in this projection illustrate a variety of ways in which matrix and learning signals can be specified + output_weights = pnl.MappingProjection( + name='Output Weights', + sender=hidden_layer_2, + receiver=output_layer, + matrix=output_weights_matrix, + ) + + comp = pnl.Composition(name='multilayer') + + p = [input_layer, input_weights, hidden_layer_1, middle_weights, hidden_layer_2, output_weights, output_layer] + backprop_pathway = comp.add_backpropagation_learning_pathway( + pathway=p, + loss_function='sse', + learning_rate=1. + ) + + input_dictionary = {backprop_pathway.target: [[0., 0., 1.]], + input_layer: [[-1., 30.]]} + + middle_weights.set_log_conditions(('mod_matrix', pnl.PROCESSING)) + + comp.learn(inputs=input_dictionary, + num_trials=10) + + expected_log_val = np.array( + [ + ['multilayer'], + [[ + [[0], [0], [0], [0], [0], [0], [0], [0], [0], [0]], + [[0], [1], [2], [3], [4], [5], [6], [7], [8], [9]], + [[0], [0], [0], [0], [0], [0], [0], [0], [0], [0]], + [[2], [2], [2], [2], [2], [2], [2], [2], [2], [2]], + [[[0.05, 0.1, 0.15, 0.2], + [0.25, 0.3, 0.35, 0.4], + [0.45, 0.5, 0.55, 0.6], + [0.65, 0.7, 0.75, 0.8], + [0.85, 0.9, 0.95, 1.]], + [[0.04789907, 0.09413833, 0.14134241, 0.18938924], + [0.24780811, 0.29388455, 0.34096758, 0.38892985], + [0.44772121, 0.49364209, 0.54060947, 0.58849095], + [0.64763875, 0.69341202, 0.74026967, 0.78807449], + [0.84756101, 0.89319513, 0.93994932, 0.98768187]], + [[0.04738148, 0.08891106, 0.13248753, 0.177898], + [0.24726841, 0.28843403, 0.33173452, 0.37694783], + [0.44716034, 0.48797777, 0.53101423, 0.57603893], + [0.64705774, 0.6875443, 0.73032986, 0.77517531], + [0.84696096, 0.88713512, 0.92968378, 0.97435998]], + [[0.04937771, 0.08530344, 0.12439361, 0.16640433], + [0.24934878, 0.28467436, 0.32329947, 0.36496974], + [0.44932147, 0.48407216, 0.52225175, 0.56359587], + [0.64929589, 0.68349948, 0.72125508, 0.76228876], + [0.84927212, 0.88295836, 0.92031297, 0.96105307]], + [[0.05440291, 0.08430585, 0.1183739, 0.15641064], + [0.25458348, 0.28363519, 0.3170288, 0.35455942], + [0.45475764, 0.48299299, 0.51573974, 0.55278488], + [0.65492462, 0.68238209, 0.7145124, 0.75109483], + [0.85508376, 0.88180465, 0.91335119, 0.94949538]], + [[0.06177218, 0.0860581, 0.11525064, 0.14926369], + [0.26225812, 0.28546004, 0.31377611, 0.34711631], + [0.46272625, 0.48488774, 0.51236246, 0.54505667], + [0.66317453, 0.68434373, 0.7110159, 0.74309381], + [0.86360121, 0.88382991, 0.9097413, 0.94123489]], + [[0.06989398, 0.08959148, 0.11465594, 0.14513241], + [0.27071639, 0.2891398, 0.31315677, 0.34281389], + [0.47150846, 0.48870843, 0.5117194, 0.54058946], + [0.67226675, 0.68829929, 0.71035014, 0.73846891], + [0.87298831, 0.88791376, 0.90905395, 0.93646]], + [[0.07750784, 0.09371987, 0.11555569, 0.143181], + [0.27864693, 0.29343991, 0.31409396, 
0.3407813], + [0.47974374, 0.49317377, 0.5126926, 0.53847878], + [0.68079346, 0.69292265, 0.71135777, 0.73628353], + [0.88179203, 0.89268732, 0.91009431, 0.93420362]], + [[0.0841765, 0.09776672, 0.11711835, 0.14249779], + [0.28559463, 0.29765609, 0.31572199, 0.34006951], + [0.48695967, 0.49755273, 0.51438349, 0.5377395], + [0.68826567, 0.69745713, 0.71310872, 0.735518], + [0.88950757, 0.89736946, 0.91190228, 0.93341316]], + [[0.08992499, 0.10150104, 0.11891032, 0.14250149], + [0.29158517, 0.30154765, 0.31758943, 0.34007336], + [0.49318268, 0.50159531, 0.51632339, 0.5377435], + [0.69471052, 0.70164382, 0.71511777, 0.73552215], + [0.8961628, 0.90169281, 0.91397691, 0.93341744]]] + ]] + ], + dtype=object + ) + log_val = middle_weights.log.nparray(entries='mod_matrix', header=False) + + assert log_val[0] == expected_log_val[0] + + for i in range(1, len(log_val)): + try: + np.testing.assert_allclose(log_val[i], expected_log_val[i]) + except TypeError: + for j in range(len(log_val[i])): + np.testing.assert_allclose( + np.array(log_val[i][j][0]), + np.array(expected_log_val[i][j][0]), + atol=1e-08, + err_msg='Failed on test item {0} of logged values'.format(i) + ) + + middle_weights.log.print_entries() + + # Test Programmatic logging + hidden_layer_2.log.log_values(pnl.VALUE, comp) + log_val = hidden_layer_2.log.nparray(header=False) + expected_log_val = np.array( + [ + ['multilayer'], + [[ + [[1]], + [[0]], + [[0]], + [[0]], + [[[0.8565238418942037, 0.8601053239957609, 0.8662098921116546, 0.8746933736954071]]] + ]] + ], + dtype=object + ) + assert log_val[0] == expected_log_val[0] + + for i in range(1, len(log_val)): + try: + np.testing.assert_allclose(log_val[i], expected_log_val[i]) + except TypeError: + for j in range(len(log_val[i])): + np.testing.assert_allclose( + np.array(log_val[i][j][0]), + np.array(expected_log_val[i][j][0]), + atol=1e-08, + err_msg='Failed on test item {0} of logged values'.format(i) + ) + hidden_layer_2.log.print_entries() + + # Clear log and test with logging of weights set to LEARNING for another 5 trials of learning + middle_weights.log.clear_entries(entries=None, confirm=False) + middle_weights.set_log_conditions(('mod_matrix', pnl.LEARNING)) + comp.learn( + num_trials=5, + inputs=input_dictionary, + ) + log_val = middle_weights.log.nparray(entries='mod_matrix', header=False) + expected_log_val = np.array( + [ + ['multilayer'], + [[ + [[1], [1], [1], [1], [1]], # RUN + # [[0], [1], [2], [3], [4]], # TRIAL + [[0], [1], [2], [3], [4]], # TRIAL + [[1], [1], [1], [1], [1]], # PASS + # [[0], [0], [0], [0], [0]], # PASS + # [[1], [1], [1], [1], [1]], # TIME_STEP + [[0], [0], [0], [0], [0]], # TIME_STEP + [[[0.09925812411381937, 0.1079522130303428, 0.12252820028789306, 0.14345816973727732], + [0.30131473371328343, 0.30827285172236585, 0.3213609999139731, 0.3410707131678078], + [0.5032924245149345, 0.5085833053183328, 0.5202423523987703, 0.5387798509126243], + [0.70518251216691, 0.7088822116145151, 0.7191771716324874, 0.7365956448426355], + [0.9069777724600303, 0.9091682860319945, 0.9181692763668221, 0.93452610920817]], + [[0.103113468050986, 0.11073719161508278, 0.12424368674464399, 0.14415219181047598], + [0.3053351724284921, 0.3111770895557729, 0.3231499474835138, 0.341794454877438], + [0.5074709829757806, 0.5116017638574931, 0.5221016574478528, 0.5395320566440044], + [0.7095115080472698, 0.7120093413898914, 0.7211034158081356, 0.7373749316571768], + [0.9114489813353512, 0.9123981459792809, 0.9201588001021687, 0.935330996581107]], + [[0.10656261740658036, 
0.11328192907953168, 0.12587702586370172, 0.14490737831188183], + [0.30893272045369513, 0.31383131362555394, 0.32485356055342113, 0.3425821330631872], + [0.5112105492674988, 0.5143607671543178, 0.5238725230390068, 0.5403508295336265], + [0.7133860755337162, 0.7148679468096026, 0.7229382109974996, 0.7382232628724675], + [0.9154510531345043, 0.9153508224199809, 0.9220539747533424, 0.936207244690072]], + [[0.10967776822419642, 0.11562091141141007, 0.12742795007904037, 0.14569308665620523], + [0.3121824816018084, 0.316271366885665, 0.3264715025259811, 0.34340179304134666], + [0.5145890402653069, 0.5168974760377518, 0.5255545550838675, 0.5412029579613059], + [0.7168868378231593, 0.7174964619674593, 0.7246811176253708, 0.7391062307617761], + [0.9190671994078436, 0.9180659725806082, 0.923854327015523, 0.9371193149131859]], + [[0.11251466428344682, 0.11778293740676549, 0.12890014813698167, 0.14649079441816393], + [0.31514245505635713, 0.3185271913574249, 0.328007571201157, 0.3442341089776976], + [0.5176666356203712, 0.5192429413004418, 0.5271516632648602, 0.5420683480396268], + [0.7200760707077265, 0.7199270072739019, 0.7263361597421493, 0.7400030122347587], + [0.922361699102421, 0.9205767427437028, 0.9255639970037588, 0.9380456963960624]]] + ]] + ], + dtype=object + ) + + assert log_val.shape == expected_log_val.shape + assert log_val[0] == expected_log_val[0] + assert len(log_val[1]) == len(expected_log_val[1]) == 1 + + for i in range(len(log_val[1][0])): + try: + np.testing.assert_allclose( + log_val[1][0][i], + expected_log_val[1][0][i], + err_msg='Failed on test item {0} of logged values'.format(i) + ) + except TypeError: + for j in range(len(log_val[1][0][i])): + np.testing.assert_allclose( + np.array(log_val[1][0][i][j]), + np.array(expected_log_val[1][0][i][j]), + atol=1e-08, + err_msg='Failed on test item {0} of logged values'.format(i) + ) From a15933b16813a34bddf1e7253a625665db720438 Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Wed, 5 Aug 2020 15:04:44 -0400 Subject: [PATCH 10/24] added grpcio and grpcio-tools to dev_requirements --- dev_requirements.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dev_requirements.txt b/dev_requirements.txt index a0352c6e221..0607049d5e2 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,3 +1,5 @@ +grpcio +grpcio-tools ipykernel ipython jupyter From 4c7eace319d52fe2dd7e83763bbfb4091ebdc57a Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Thu, 6 Aug 2020 12:57:04 -0400 Subject: [PATCH 11/24] adapting log tests --- tests/log/test_rpc.py | 665 +++++++++--------------------------------- 1 file changed, 142 insertions(+), 523 deletions(-) diff --git a/tests/log/test_rpc.py b/tests/log/test_rpc.py index af1247e4c12..b8edd10867a 100644 --- a/tests/log/test_rpc.py +++ b/tests/log/test_rpc.py @@ -109,263 +109,151 @@ def test_run_resets(self): ], [[[1.0, 2.0]], [[3.0, 4.0]], [[5.0, 6.0]]]) def test_log_dictionary_with_time(self): - + con_with_rpc_pipeline = pnl.Context(rpc_pipeline=Queue()) + pipeline = con_with_rpc_pipeline.rpc_pipeline T1 = pnl.TransferMechanism(name='log_test_T1', size=2) T2 = pnl.TransferMechanism(name='log_test_T2', - function=psyneulink.core.components.functions.transferfunctions.Linear(slope=2.0), + function=pnl.Linear(slope=2.0), size=2) COMP = pnl.Composition(name='log_test_COMP', pathways=[T1, T2]) - assert T1.loggable_items == { - 'InputPort-0': 'OFF', - 'RESULT': 'OFF', - 'clip': 'OFF', - 'termination_threshold': 'OFF', - 'execute_until_finished': 'OFF', - 'func_additive_param': 'OFF', - 'func_bounds': 
'OFF', - 'func_execute_until_finished': 'OFF', - 'func_has_initializers': 'OFF', - 'func_intercept': 'OFF', - 'func_max_executions_before_finished': 'OFF', - 'func_multiplicative_param': 'OFF', - 'func_num_executions_before_finished': 'OFF', - 'func_slope': 'OFF', - 'func_value': 'OFF', - 'func_variable': 'OFF', - 'has_initializers': 'OFF', - 'initial_value': 'OFF', - 'integration_rate': 'OFF', - 'integrator_function_value': 'OFF', - 'integrator_mode': 'OFF', - 'max_executions_before_finished': 'OFF', - 'mod_integration_rate': 'OFF', - 'mod_intercept': 'OFF', - 'mod_noise': 'OFF', - 'mod_slope': 'OFF', - 'noise': 'OFF', - 'num_executions_before_finished': 'OFF', - 'termination_measure_value': 'OFF', - 'value': 'OFF', - 'variable': 'OFF' - } - assert T2.loggable_items == { - 'InputPort-0': 'OFF', - 'RESULT': 'OFF', - 'clip': 'OFF', - 'termination_threshold': 'OFF', - 'execute_until_finished': 'OFF', - 'func_additive_param': 'OFF', - 'func_bounds': 'OFF', - 'func_execute_until_finished': 'OFF', - 'func_has_initializers': 'OFF', - 'func_intercept': 'OFF', - 'func_max_executions_before_finished': 'OFF', - 'func_multiplicative_param': 'OFF', - 'func_num_executions_before_finished': 'OFF', - 'func_slope': 'OFF', - 'func_value': 'OFF', - 'func_variable': 'OFF', - 'has_initializers': 'OFF', - 'initial_value': 'OFF', - 'integration_rate': 'OFF', - 'integrator_function_value': 'OFF', - 'integrator_mode': 'OFF', - 'max_executions_before_finished': 'OFF', - 'mod_integration_rate': 'OFF', - 'mod_intercept': 'OFF', - 'mod_noise': 'OFF', - 'mod_slope': 'OFF', - 'noise': 'OFF', - 'num_executions_before_finished': 'OFF', - 'termination_measure_value': 'OFF', - 'value': 'OFF', - 'variable': 'OFF' - } + T1.set_delivery_conditions('mod_slope') + T1.set_delivery_conditions(pnl.RESULT) + T1.set_delivery_conditions(pnl.VALUE) - T1.set_log_conditions('mod_slope') - T1.set_log_conditions(pnl.RESULT) - T1.set_log_conditions(pnl.VALUE) + T2.set_delivery_conditions('mod_slope') + T2.set_delivery_conditions(pnl.RESULT) + T2.set_delivery_conditions(pnl.VALUE) - assert T1.loggable_items == { - 'execute_until_finished': 'OFF', - 'InputPort-0': 'OFF', - 'RESULT': 'EXECUTION', - 'clip': 'OFF', - 'termination_threshold': 'OFF', - 'func_additive_param': 'OFF', - 'func_bounds': 'OFF', - 'func_execute_until_finished': 'OFF', - 'func_has_initializers': 'OFF', - 'func_intercept': 'OFF', - 'func_max_executions_before_finished': 'OFF', - 'func_multiplicative_param': 'OFF', - 'func_num_executions_before_finished': 'OFF', - 'func_slope': 'OFF', - 'func_value': 'OFF', - 'func_variable': 'OFF', - 'has_initializers': 'OFF', - 'initial_value': 'OFF', - 'integration_rate': 'OFF', - 'integrator_function_value': 'OFF', - 'integrator_mode': 'OFF', - 'max_executions_before_finished': 'OFF', - 'mod_integration_rate': 'OFF', - 'mod_intercept': 'OFF', - 'mod_noise': 'OFF', - 'mod_slope': 'EXECUTION', - 'noise': 'OFF', - 'num_executions_before_finished': 'OFF', - 'termination_measure_value': 'OFF', - 'value': 'EXECUTION', - 'variable': 'OFF' - } + # RUN ZERO | TRIALS ZERO, ONE, TWO ---------------------------------- - T2.set_log_conditions('mod_slope') - T2.set_log_conditions(pnl.RESULT) - T2.set_log_conditions(pnl.VALUE) - - assert T2.loggable_items == { - 'InputPort-0': 'OFF', - 'RESULT': 'EXECUTION', - 'clip': 'OFF', - 'termination_threshold': 'OFF', - 'execute_until_finished': 'OFF', - 'func_additive_param': 'OFF', - 'func_bounds': 'OFF', - 'func_execute_until_finished': 'OFF', - 'func_has_initializers': 'OFF', - 'func_intercept': 'OFF', - 
'func_max_executions_before_finished': 'OFF', - 'func_multiplicative_param': 'OFF', - 'func_num_executions_before_finished': 'OFF', - 'func_slope': 'OFF', - 'func_value': 'OFF', - 'func_variable': 'OFF', - 'has_initializers': 'OFF', - 'initial_value': 'OFF', - 'integration_rate': 'OFF', - 'integrator_function_value': 'OFF', - 'integrator_mode': 'OFF', - 'max_executions_before_finished': 'OFF', - 'mod_integration_rate': 'OFF', - 'mod_intercept': 'OFF', - 'mod_noise': 'OFF', - 'mod_slope': 'EXECUTION', - 'noise': 'OFF', - 'num_executions_before_finished': 'OFF', - 'termination_measure_value': 'OFF', - 'value': 'EXECUTION', - 'variable': 'OFF' - } + COMP.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}, + context=con_with_rpc_pipeline) - # RUN ZERO | TRIALS ZERO, ONE, TWO ---------------------------------- + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) - COMP.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}) + t1_slope_entries = [i for i in actual if i.parameterName == pnl.SLOPE and i.componentName == 'log_test_T1'] + t1_slope_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_slope_entries] + t1_slope_times = [i.time for i in t1_slope_entries] - assert T1.logged_items == {'RESULT': 'EXECUTION', - 'mod_slope': 'EXECUTION', - 'value': 'EXECUTION'} - assert T2.logged_items == {'RESULT': 'EXECUTION', - 'mod_slope': 'EXECUTION', - 'value': 'EXECUTION'} + t2_slope_entries = [i for i in actual if i.parameterName == pnl.SLOPE and i.componentName == 'log_test_T2'] + t2_slope_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t2_slope_entries] + t2_slope_times = [i.time for i in t2_slope_entries] - # T1 log after zero-th run ------------------------------------------- + t1_result_entries = [i for i in actual if i.parameterName == pnl.RESULT and i.componentName == 'log_test_T1'] + t1_result_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_result_entries] + t1_result_times = [i.time for i in t1_result_entries] + + t2_result_entries = [i for i in actual if i.parameterName == pnl.RESULT and i.componentName == 'log_test_T2'] + t2_result_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t2_result_entries] + t2_result_times = [i.time for i in t2_result_entries] + + t1_value_entries = [i for i in actual if i.parameterName == pnl.VALUE and i.componentName == 'log_test_T1'] + t1_value_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_value_entries] + t1_value_times = [i.time for i in t1_value_entries] + + t2_value_entries = [i for i in actual if i.parameterName == pnl.VALUE and i.componentName == 'log_test_T2'] + t2_value_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t2_value_entries] + t2_value_times = [i.time for i in t2_value_entries] + + # Test execution contexts for all entries + + assert all([True if i.context == COMP.default_execution_id else False for i in actual]) - log_dict_T1 = T1.log.nparray_dictionary(entries=['value', 'mod_slope', 'RESULT']) + # T1 log after zero-th run ------------------------------------------- - expected_run_T1 = [[0], [0], [0]] - expected_trial_T1 = [[0], [1], [2]] - expected_time_step_T1 = [[0], [0], [0]] + expected_times_T1 = ['0:0:0:0', '0:1:0:0', '0:2:0:0'] expected_values_T1 = [[[1.0, 2.0]], [[3.0, 4.0]], [[5.0, 6.0]]] expected_slopes_T1 = [[1.0], [1.0], [1.0]] 
expected_results_T1 = [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]] - assert list(log_dict_T1.keys()) == [COMP.default_execution_id] - sys_log_dict = log_dict_T1[COMP.default_execution_id] - - assert np.allclose(expected_run_T1, sys_log_dict['Run']) - assert np.allclose(expected_trial_T1, sys_log_dict['Trial']) - assert np.allclose(expected_time_step_T1, sys_log_dict['Time_step']) - assert np.allclose(expected_values_T1, sys_log_dict['value']) - assert np.allclose(expected_slopes_T1, sys_log_dict['mod_slope']) - assert np.allclose(expected_results_T1, sys_log_dict['RESULT']) + assert expected_times_T1 == t1_result_times == t1_slope_times == t1_value_times + assert np.allclose(expected_values_T1, t1_value_values) + assert np.allclose(expected_results_T1, t1_result_values) + assert np.allclose(expected_slopes_T1, t1_slope_values) # T2 log after zero-th run -------------------------------------------- - log_dict_T2 = T2.log.nparray_dictionary(entries=['value', 'mod_slope', 'RESULT']) - - expected_run_T2 = [[0], [0], [0]] - expected_trial_T2 = [[0], [1], [2]] - expected_time_step_T2 = [[1], [1], [1]] + expected_times_T2 = ['0:0:0:1', '0:1:0:1', '0:2:0:1'] expected_values_T2 = [[[2.0, 4.0]], [[6.0, 8.0]], [[10.0, 12.0]]] expected_slopes_T2 = [[2.0], [2.0], [2.0]] expected_results_T2 = [[2.0, 4.0], [6.0, 8.0], [10.0, 12.0]] - assert list(log_dict_T2.keys()) == [COMP.default_execution_id] - sys_log_dict = log_dict_T2[COMP.default_execution_id] - - assert np.allclose(expected_run_T2, sys_log_dict['Run']) - assert np.allclose(expected_trial_T2, sys_log_dict['Trial']) - assert np.allclose(expected_time_step_T2, sys_log_dict['Time_step']) - assert np.allclose(expected_values_T2, sys_log_dict['value']) - assert np.allclose(expected_slopes_T2, sys_log_dict['mod_slope']) - assert np.allclose(expected_results_T2, sys_log_dict['RESULT']) + assert expected_times_T2 == t2_result_times == t2_slope_times == t2_value_times + assert np.allclose(expected_values_T2, t2_value_values) + assert np.allclose(expected_results_T2, t2_result_values) + assert np.allclose(expected_slopes_T2, t2_slope_values) # RUN ONE | TRIALS ZERO, ONE, TWO ------------------------------------- - COMP.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}) + COMP.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}, + context=con_with_rpc_pipeline) - # T1 log after first run ------------------------------------------- + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) - log_dict_T1 = T1.log.nparray_dictionary(entries=['value', 'mod_slope', 'RESULT']) + t1_slope_entries = [i for i in actual if i.parameterName == pnl.SLOPE and i.componentName == 'log_test_T1'] + t1_slope_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_slope_entries] + t1_slope_times = [i.time for i in t1_slope_entries] - assert list(log_dict_T1.keys()) == [COMP.default_execution_id] - sys_log_dict = log_dict_T1[COMP.default_execution_id] + t2_slope_entries = [i for i in actual if i.parameterName == pnl.SLOPE and i.componentName == 'log_test_T2'] + t2_slope_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t2_slope_entries] + t2_slope_times = [i.time for i in t2_slope_entries] - # expected_run_T1_2 = [[1], [1], [1]] - expected_run_T1_2 = [[0], [0], [0]] + expected_run_T1 - expected_trial_T1_2 = [[0], [1], [2]] + expected_trial_T1 - expected_time_step_T1_2 = [[0], [0], [0]] + expected_time_step_T1 - expected_values_T1_2 = expected_values_T1 + expected_values_T1 - 
expected_slopes_T1_2 = expected_slopes_T1 + expected_slopes_T1 - expected_results_T1_2 = expected_results_T1 + expected_results_T1 - - # assert np.allclose(expected_run_T1_2, sys_log_dict['Run']) - # assert np.allclose(expected_trial_T1_2, sys_log_dict['Trial']) - # assert np.allclose(expected_time_step_T1_2, sys_log_dict['Time_step']) - assert np.allclose(expected_values_T1_2, sys_log_dict['value']) - assert np.allclose(expected_slopes_T1_2, sys_log_dict['mod_slope']) - assert np.allclose(expected_results_T1_2, sys_log_dict['RESULT']) + t1_result_entries = [i for i in actual if i.parameterName == pnl.RESULT and i.componentName == 'log_test_T1'] + t1_result_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_result_entries] + t1_result_times = [i.time for i in t1_result_entries] - # T2 log after first run ------------------------------------------- + t2_result_entries = [i for i in actual if i.parameterName == pnl.RESULT and i.componentName == 'log_test_T2'] + t2_result_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t2_result_entries] + t2_result_times = [i.time for i in t2_result_entries] - log_dict_T2_2 = T2.log.nparray_dictionary(entries=['value', 'mod_slope', 'RESULT']) + t1_value_entries = [i for i in actual if i.parameterName == pnl.VALUE and i.componentName == 'log_test_T1'] + t1_value_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_value_entries] + t1_value_times = [i.time for i in t1_value_entries] - assert list(log_dict_T2_2.keys()) == [COMP.default_execution_id] - sys_log_dict = log_dict_T2_2[COMP.default_execution_id] + t2_value_entries = [i for i in actual if i.parameterName == pnl.VALUE and i.componentName == 'log_test_T2'] + t2_value_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t2_value_entries] + t2_value_times = [i.time for i in t2_value_entries] - expected_run_T2_2 = [[0], [0], [0]] + expected_run_T2 - expected_trial_T2_2 = [[0], [1], [2]] + expected_trial_T2 - expected_time_step_T2_2 = [[1], [1], [1]] + expected_time_step_T2 - expected_values_T2_2 = [[[2.0, 4.0]], [[6.0, 8.0]], [[10.0, 12.0]]] + expected_values_T2 - expected_slopes_T2_2 = [[2.0], [2.0], [2.0]] + expected_slopes_T2 - expected_results_T2_2 = [[2.0, 4.0], [6.0, 8.0], [10.0, 12.0]] + expected_results_T2 + # T1 log after first run ------------------------------------------- + + # Test execution contexts for all entries + + assert all([True if i.context == COMP.default_execution_id else False for i in actual]) + + expected_times_T1 = ['1:0:0:0', '1:1:0:0', '1:2:0:0'] + expected_values_T1 = [[[1.0, 2.0]], [[3.0, 4.0]], [[5.0, 6.0]]] + expected_slopes_T1 = [[1.0], [1.0], [1.0]] + expected_results_T1 = [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]] - # assert np.allclose(expected_run_T2_2, sys_log_dict['Run']) - # assert np.allclose(expected_trial_T2_2, sys_log_dict['Trial']) - # assert np.allclose(expected_time_step_T2_2, sys_log_dict['Time_step']) - assert np.allclose(expected_values_T2_2, sys_log_dict['value']) - assert np.allclose(expected_slopes_T2_2, sys_log_dict['mod_slope']) - assert np.allclose(expected_results_T2_2, sys_log_dict['RESULT']) + assert expected_times_T1 == t1_result_times == t1_slope_times == t1_value_times + assert np.allclose(expected_values_T1, t1_value_values) + assert np.allclose(expected_results_T1, t1_result_values) + assert np.allclose(expected_slopes_T1, t1_slope_values) + + # T2 log after first run 
------------------------------------------- + + expected_times_T2 = ['1:0:0:1', '1:1:0:1', '1:2:0:1'] + expected_values_T2 = [[[2.0, 4.0]], [[6.0, 8.0]], [[10.0, 12.0]]] + expected_slopes_T2 = [[2.0], [2.0], [2.0]] + expected_results_T2 = [[2.0, 4.0], [6.0, 8.0], [10.0, 12.0]] + + assert expected_times_T2 == t2_result_times == t2_slope_times == t2_value_times + assert np.allclose(expected_values_T2, t2_value_values) + assert np.allclose(expected_results_T2, t2_result_values) + assert np.allclose(expected_slopes_T2, t2_slope_values) def test_log_dictionary_with_scheduler(self): + con_with_rpc_pipeline = pnl.Context(rpc_pipeline=Queue()) + pipeline = con_with_rpc_pipeline.rpc_pipeline T1 = pnl.TransferMechanism(name='log_test_T1', integrator_mode=True, integration_rate=0.5) T2 = pnl.TransferMechanism(name='log_test_T2', - function=psyneulink.core.components.functions.transferfunctions.Linear(slope=6.0)) + function=pnl.Linear(slope=6.0)) COMP = pnl.Composition(name='log_test_COMP', pathways=[T1, T2]) def pass_threshold(mech, thresh): @@ -379,116 +267,58 @@ def pass_threshold(mech, thresh): pnl.TimeScale.TRIAL: pnl.While(pass_threshold, T2, 5.0) } - T1.set_log_conditions(pnl.VALUE) - T1.set_log_conditions('mod_slope') - T1.set_log_conditions(pnl.RESULT) - T2.set_log_conditions(pnl.VALUE) - T2.set_log_conditions('mod_slope') - - COMP.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial) + T1.set_delivery_conditions(pnl.VALUE) + T1.set_delivery_conditions('mod_slope') + T1.set_delivery_conditions(pnl.RESULT) + T2.set_delivery_conditions(pnl.VALUE) + T2.set_delivery_conditions('mod_slope') - log_dict_T1 = T1.log.nparray_dictionary(entries=['RESULT', 'mod_slope', 'value']) - log_dict_T2 = T2.log.nparray_dictionary(entries=['value', 'mod_slope']) + COMP.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial, + context=con_with_rpc_pipeline) - assert list(log_dict_T1.keys()) == [COMP.default_execution_id] - sys_log_dict = log_dict_T1[COMP.default_execution_id] + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) + assert all([True if i.context == COMP.default_execution_id else False for i in actual]) - # Check order of keys (must match order of specification) - assert list(sys_log_dict.keys()) == ['Run', 'Trial', 'Pass', 'Time_step', 'RESULT', 'mod_slope', 'value'] - assert list(log_dict_T2[COMP.default_execution_id].keys()) == ['Run', 'Trial', 'Pass', 'Time_step', 'value', - 'mod_slope'] + t1_slope_entries = [i for i in actual if i.parameterName == pnl.SLOPE and i.componentName == 'log_test_T1'] + t1_slope_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_slope_entries] + t1_slope_times = [i.time for i in t1_slope_entries] - # Check values T1 - assert np.allclose(sys_log_dict["Run"], [[0], [0], [0]]) - assert np.allclose(sys_log_dict["Trial"], [[0], [0], [0]]) - assert np.allclose(sys_log_dict["Time_step"], [[0], [0], [0]]) - assert np.allclose(sys_log_dict["RESULT"], [[0.5], [0.75], [0.875]]) - assert np.allclose(sys_log_dict["value"], [[[0.5]], [[0.75]], [[0.875]]]) - assert np.allclose(sys_log_dict["mod_slope"], [[1], [1], [1]]) + t2_slope_entries = [i for i in actual if i.parameterName == pnl.SLOPE and i.componentName == 'log_test_T2'] + t2_slope_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t2_slope_entries] + t2_slope_times = [i.time for i in t2_slope_entries] - # Check values T2 - assert np.allclose(log_dict_T2[COMP.default_execution_id]["Run"], [[0], [0], 
[0]]) - assert np.allclose(log_dict_T2[COMP.default_execution_id]["Trial"], [[0], [0], [0]]) - assert np.allclose(log_dict_T2[COMP.default_execution_id]["Time_step"], [[1], [1], [1]]) - assert np.allclose(log_dict_T2[COMP.default_execution_id]["value"], [[[3]], [[4.5]], [[5.25]]]) - assert np.allclose(log_dict_T2[COMP.default_execution_id]["mod_slope"], [[6], [6], [6]]) + t1_result_entries = [i for i in actual if i.parameterName == pnl.RESULT and i.componentName == 'log_test_T1'] + t1_result_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_result_entries] + t1_result_times = [i.time for i in t1_result_entries] - def test_log_array_with_scheduler(self): - T1 = pnl.TransferMechanism(name='log_test_T1', - integrator_mode=True, - integration_rate=0.5) - T2 = pnl.TransferMechanism(name='log_test_T2', - function=psyneulink.core.components.functions.transferfunctions.Linear(slope=6.0)) - COMP = pnl.Composition(name='log_test_COMP', pathways=[T1, T2]) + t1_value_entries = [i for i in actual if i.parameterName == pnl.VALUE and i.componentName == 'log_test_T1'] + t1_value_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_value_entries] + t1_value_times = [i.time for i in t1_value_entries] - def pass_threshold(mech, thresh): - results = mech.output_ports[0].parameters.value.get(COMP) - for val in results: - if abs(val) >= thresh: - return True - return False + t2_value_entries = [i for i in actual if i.parameterName == pnl.VALUE and i.componentName == 'log_test_T2'] + t2_value_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t2_value_entries] + t2_value_times = [i.time for i in t2_value_entries] - terminate_trial = { - pnl.TimeScale.TRIAL: pnl.While(pass_threshold, T2, 5.0) - } + # Check values T1 - T1.set_log_conditions(pnl.VALUE) - T1.set_log_conditions('mod_slope') - T1.set_log_conditions(pnl.RESULT) - T2.set_log_conditions(pnl.VALUE) - T2.set_log_conditions('mod_slope') + expected_times_T1 = ['0:0:0:0', '0:0:1:0', '0:0:2:0'] + expected_results_T1 = [[0.5], [0.75], [0.875]] + expected_values_T1 = [[[0.5]], [[0.75]], [[0.875]]] + expected_slopes_T1 = [[1], [1], [1]] + assert expected_times_T1 == t1_result_times == t1_slope_times == t1_value_times + assert np.allclose(expected_values_T1, t1_value_values) + assert np.allclose(expected_results_T1, t1_result_values) + assert np.allclose(expected_slopes_T1, t1_slope_values) - COMP.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial) + # Check values T2 - log_array_T1 = T1.log.nparray(entries=['RESULT', 'mod_slope', 'value']) - log_array_T2 = T2.log.nparray(entries=['value', 'mod_slope']) - - context_results = [pnl.Log.context_header, COMP.default_execution_id] - # Check values - run_results = [["Run"], [0], [0], [0]] - trial_results = [["Trial"], [0], [0], [0]] - pass_results = [["Pass"], [0], [1], [2]] - time_step_results = [["Time_step"], [0], [0], [0]] - results_results = ["RESULT", [0.5], [0.75], [0.875]] - slope_results = ["mod_slope", [1], [1], [1]] - value_results = ["value", [[0.5]], [[0.75]], [[0.875]]] - - for i in range(2): - assert log_array_T1[0][i] == context_results[i] - - assert log_array_T1[1][0] == pnl.Log.data_header - data_array = log_array_T1[1][1] - j = 0 - for i in range(4): - assert data_array[j][i] == run_results[i] - assert data_array[j + 1][i] == trial_results[i] - assert data_array[j + 2][i] == pass_results[i] - assert data_array[j + 3][i] == time_step_results[i] - assert 
data_array[j + 4][i] == results_results[i] - assert data_array[j + 5][i] == slope_results[i] - assert data_array[j + 6][i] == value_results[i] - - # Check values - run_results = [["Run"], [0], [0], [0]] - trial_results = [["Trial"], [0], [0], [0]] - pass_results = [["Pass"], [0], [1], [2]] - time_step_results = [["Time_step"], [1], [1], [1]] - value_results = ["value", [[3]], [[4.5]], [[5.25]]] - slope_results = ["mod_slope", [6], [6], [6]] - - for i in range(2): - assert log_array_T1[0][i] == context_results[i] - - assert log_array_T2[1][0] == pnl.Log.data_header - data_array = log_array_T2[1][1] - j = 0 - for i in range(4): - assert data_array[j][i] == run_results[i] - assert data_array[j + 1][i] == trial_results[i] - assert data_array[j + 2][i] == pass_results[i] - assert data_array[j + 3][i] == time_step_results[i] - assert data_array[j + 4][i] == value_results[i] - assert data_array[j + 5][i] == slope_results[i] + expected_times_T2 = ['0:0:0:1', '0:0:1:1', '0:0:2:1'] + expected_values_T2 = [[[3]], [[4.5]], [[5.25]]] + expected_slopes_T2 = [[6], [6], [6]] + assert expected_times_T2 == t2_slope_times == t2_value_times + assert np.allclose(expected_values_T2, t2_value_values) + assert np.allclose(expected_slopes_T2, t2_slope_values) def test_log_dictionary_with_scheduler_many_time_step_increments(self): T1 = pnl.TransferMechanism(name='log_test_T1', @@ -590,217 +420,6 @@ def test_log_multi_calls_single_timestep(self, scheduler_conditions, multi_run): assert log_dict['Run'] == [[0], [0], [0], [1], [1], [1]] assert np.allclose(log_dict['value'], [[[0.52466739, 0.47533261]] * 6]) - -class TestClearLog: - - def test_clear_log(self): - - # Create System - T_1 = pnl.TransferMechanism(name='log_test_T_1', size=2) - T_2 = pnl.TransferMechanism(name='log_test_T_2', size=2) - COMP = pnl.Composition(name="log_test_COMP", pathways=[T_1, T_2]) - PJ = T_2.path_afferents[0] - - # Set log conditions on each component - T_1.set_log_conditions('mod_noise') - T_1.set_log_conditions(pnl.RESULT) - T_2.set_log_conditions('mod_slope') - T_2.set_log_conditions(pnl.RESULT) - PJ.set_log_conditions('mod_matrix') - - # Run system - COMP.run(inputs={T_1: [1.0, 1.0]}) - - # Create log dict for each component - log_dict_T_1 = T_1.log.nparray_dictionary() - log_dict_T_2 = T_2.log.nparray_dictionary() - log_dict_PJ = PJ.log.nparray_dictionary() - - assert list(log_dict_T_1.keys()) == [COMP.default_execution_id] - assert list(log_dict_T_2.keys()) == [COMP.default_execution_id] - assert list(log_dict_PJ.keys()) == [COMP.default_execution_id] - - # Confirm that values were logged correctly - sys_log_dict = log_dict_T_1[COMP.default_execution_id] - assert np.allclose(sys_log_dict['RESULT'], np.array([[1.0, 1.0]])) - assert np.allclose(sys_log_dict['mod_noise'], np.array([[0.0]])) - - sys_log_dict = log_dict_T_2[COMP.default_execution_id] - assert np.allclose(sys_log_dict['RESULT'], np.array([[1.0, 1.0]])) - assert np.allclose(sys_log_dict['mod_slope'], np.array([[1.0]])) - - sys_log_dict = log_dict_PJ[COMP.default_execution_id] - assert np.allclose(sys_log_dict['mod_matrix'], np.array([[1.0, 0.0], [0.0, 1.0]])) - - # KDM 10/3/18: below was changed to delete_entry=True because it's not implemented in Parameter logs, - # and it's not clear this option results in much difference than just deleting the entries and - # is stated to be included only for future use - # Clear T_1s log and DO NOT delete entries - T_1.log.clear_entries(delete_entry=True) - - # Clear T_2s log and delete entries - 
T_2.log.clear_entries(delete_entry=True) - - # Create new log dict for each component - log_dict_T_1 = T_1.log.nparray_dictionary() - log_dict_T_2 = T_2.log.nparray_dictionary() - log_dict_PJ = PJ.log.nparray_dictionary() - - # Confirm that T_1 log values were removed - assert log_dict_T_1 == OrderedDict() - - # Confirm that T_2 log values were removed and dictionary entries were destroyed - assert log_dict_T_2 == OrderedDict() - - # Confirm that PJ log values were not affected by changes to T_1 and T_2's logs - assert np.allclose(log_dict_PJ[COMP.default_execution_id]['mod_matrix'], np.array([[1.0, 0.0], [0.0, 1.0]])) - - # Run system again - COMP.run(inputs={T_1: [2.0, 2.0]}) - - # Create new log dict for each component - log_dict_T_1 = T_1.log.nparray_dictionary() - log_dict_T_2 = T_2.log.nparray_dictionary() - log_dict_PJ = PJ.log.nparray_dictionary() - - # Confirm that T_1 log values only include most recent run - sys_log_dict = log_dict_T_1[COMP.default_execution_id] - assert np.allclose(sys_log_dict['RESULT'], np.array([[2.0, 2.0]])) - assert np.allclose(sys_log_dict['mod_noise'], np.array([[0.0]])) - # NOTE: "Run" value still incremented, but only the most recent one is returned (# runs does not reset to zero) - assert np.allclose(sys_log_dict['Run'], np.array([[1]])) - - # Confirm that T_2 log values only include most recent run - sys_log_dict = log_dict_T_2[COMP.default_execution_id] - assert np.allclose(sys_log_dict['RESULT'], np.array([[2.0, 2.0]])) - assert np.allclose(sys_log_dict['mod_slope'], np.array([[1.0]])) - assert np.allclose(sys_log_dict['Run'], np.array([[1]])) - - # Confirm that PJ log values include all runs - sys_log_dict = log_dict_PJ[COMP.default_execution_id] - assert np.allclose(sys_log_dict['mod_matrix'], np.array([[[1.0, 0.0], [0.0, 1.0]], [[1.0, 0.0], [0.0, 1.0]]])) - assert np.allclose(sys_log_dict['Run'], np.array([[0], [1]])) - - @pytest.mark.parametrize( - 'insertion_eids, deletion_eids, log_is_empty', - [ - (['context'], 'context', True), # fails if string handling not correct due to str being Iterable - (['context'], ['context'], True), - ] - ) - def test_clear_log_arguments(self, insertion_eids, deletion_eids, log_is_empty): - t = pnl.TransferMechanism() - c = pnl.Composition() - c.add_node(t) - - t.parameters.value.log_condition = True - - for eid in insertion_eids: - c.run({t: 0}, context=eid) - - t.parameters.value.clear_log(deletion_eids) - - if log_is_empty: - assert len(t.parameters.value.log) == 0 - else: - assert len(t.parameters.value.log) != 0 - - -class TestFiltering: - - @pytest.fixture(scope='module') - def node_logged_in_simulation(self): - Input = pnl.TransferMechanism(name='Input') - reward = pnl.TransferMechanism( - output_ports=[pnl.RESULT, pnl.MEAN, pnl.VARIANCE], name='reward') - Decision = pnl.DDM( - function=pnl.DriftDiffusionAnalytical( - drift_rate=(1.0, pnl.ControlProjection( - function=pnl.Linear, - control_signal_params={pnl.ALLOCATION_SAMPLES: np.arange(0.1, 1.01, 0.3)}) - ), - threshold=(1.0, pnl.ControlProjection( - function=pnl.Linear, - control_signal_params={pnl.ALLOCATION_SAMPLES: np.arange(0.1, 1.01, 0.3)}) - ), - noise=0.5, - starting_point=0, - t0=0.45 - ), - output_ports=[ - pnl.DECISION_VARIABLE, - pnl.RESPONSE_TIME, - pnl.PROBABILITY_UPPER_THRESHOLD], - name='Decision' - ) - - comp = pnl.Composition(name="evc", retain_old_simulation_data=True) - comp.add_node(reward, required_roles=[pnl.NodeRole.OUTPUT]) - comp.add_node(Decision, required_roles=[pnl.NodeRole.OUTPUT]) - task_execution_pathway = [Input, 
pnl.IDENTITY_MATRIX, Decision]
-        comp.add_linear_processing_pathway(task_execution_pathway)
-
-        comp.add_controller(
-            controller=pnl.OptimizationControlMechanism(
-                agent_rep=comp,
-                features=[Input.input_port, reward.input_port],
-                feature_function=pnl.AdaptiveIntegrator(rate=0.5),
-                objective_mechanism=pnl.ObjectiveMechanism(
-                    function=pnl.LinearCombination(operation=pnl.PRODUCT),
-                    monitor=[
-                        reward,
-                        Decision.output_ports[pnl.PROBABILITY_UPPER_THRESHOLD],
-                        (Decision.output_ports[pnl.RESPONSE_TIME], -1, 1)
-                    ]
-                ),
-                function=pnl.GridSearch(),
-                control_signals=[
-                    {PROJECTIONS: ("drift_rate", Decision),
-                     ALLOCATION_SAMPLES: np.arange(0.1, 1.01, 0.3)},
-                    {PROJECTIONS: ("threshold", Decision),
-                     ALLOCATION_SAMPLES: np.arange(0.1, 1.01, 0.3)}
-                ]
-            )
-        )
-
-        comp.enable_controller = True
-
-        stim_list_dict = {
-            Input: [0.5, 0.123],
-            reward: [20, 20]
-        }
-
-        Input.parameters.value.log_condition = True
-
-        comp.run(inputs=stim_list_dict)
-
-        return Input
-
-    def test_node_has_logged_sims(self, node_logged_in_simulation):
-        for logged_value, eid_dict in node_logged_in_simulation.log.logged_entries.items():
-            for eid in eid_dict:
-                if pnl.EID_SIMULATION in str(eid):
-                    return
-            else:
-                assert False, 'No simulation execution_id found in log'
-
-    def test_nparray(self, node_logged_in_simulation):
-        for eid in node_logged_in_simulation.log.nparray(exclude_sims=True)[0]:
-            assert pnl.EID_SIMULATION not in str(eid)
-
-    def test_nparray_dictionary(self, node_logged_in_simulation):
-        for eid in node_logged_in_simulation.log.nparray_dictionary(exclude_sims=True):
-            assert pnl.EID_SIMULATION not in str(eid)
-
-    def test_csv(self, node_logged_in_simulation):
-        full_csv = node_logged_in_simulation.log.csv(exclude_sims=True)
-
-        # get each row, excluding header
-        for row in full_csv.split('\n')[1:]:
-            # if present in a row, context will be in the first cell
-            assert pnl.EID_SIMULATION not in row.replace("'", '').split(',')[0]
-
-
 class TestFullModels:

     def test_multilayer(self):

From b055adb3cf9f863ad6449216340f272983e7fb39 Mon Sep 17 00:00:00 2001
From: Dillon Smith
Date: Thu, 6 Aug 2020 15:10:52 -0400
Subject: [PATCH 12/24] added programmatic delivery of parameter values via RPC

---
 psyneulink/core/globals/log.py | 34 ++++++++++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)

diff --git a/psyneulink/core/globals/log.py b/psyneulink/core/globals/log.py
index 8f34b2b7a48..4937e03a016 100644
--- a/psyneulink/core/globals/log.py
+++ b/psyneulink/core/globals/log.py
@@ -898,6 +898,40 @@ def assign_delivery_condition(item, level):
         else:
             assign_delivery_condition(item[0], item[1])

+    @tc.typecheck
+    @handle_external_context()
+    def _deliver_values(self, entries, context=None):
+        """Deliver the value of one or more Components programmatically.
+
+        This can be used to "manually" prepare the `value ` of any of a Component's `loggable_items
+        ` (including its own `value `) for delivery to an external application via gRPC.
+        The context item of its `LogEntry` is assigned *COMMAND_LINE*. If the call to _deliver_values is made while a
+        Composition to which the Component belongs is being run (e.g., in a **call_before..** or **call_after...** argument
+        of its `run ` method), then the time of the LogEntry is assigned the value of the `Clock` of
+        the Composition's `scheduler` or `scheduler_learning`, whichever is currently executing
+        (see `System_Scheduler`). 
+
+        Arguments
+        ---------
+
+        entries : string, Component or list containing either : default ALL
+            specifies the Components, the current `value `\\s of which should be prepared for
+            transmission to an external application via gRPC.
+            They must be `loggable_items ` of the owner's Log. If **entries** is *ALL* or is not
+            specified, then the `value `\\s of all `loggable_items ` are delivered.
+        """
+        from psyneulink.core.globals.parameters import parse_context
+        entries = self._validate_entries_arg(entries)
+        original_source = context.source
+        context.source = ContextFlags.COMMAND_LINE
+
+        # Deliver the current value of each specified entry
+        for entry in entries:
+            param = self._get_parameter_from_item_string(entry)
+            context = parse_context(context)
+            param._deliver_value(param._get(context), context)
+
+        context.source = original_source

     @tc.typecheck
     def _log_value(

From 9cc79d451f7b1fe1e6465b6264b44350987dcd8c Mon Sep 17 00:00:00 2001
From: Dillon Smith
Date: Thu, 6 Aug 2020 15:11:05 -0400
Subject: [PATCH 13/24] Adapted all relevant logging tests for RPC

---
 tests/log/test_rpc.py | 175 +++++++++++++++++++-----------------------
 1 file changed, 81 insertions(+), 94 deletions(-)

diff --git a/tests/log/test_rpc.py b/tests/log/test_rpc.py
index b8edd10867a..a462ed02795 100644
--- a/tests/log/test_rpc.py
+++ b/tests/log/test_rpc.py
@@ -321,6 +321,8 @@ def pass_threshold(mech, thresh):
         assert np.allclose(expected_slopes_T2, t2_slope_values)

     def test_log_dictionary_with_scheduler_many_time_step_increments(self):
+        con_with_rpc_pipeline = pnl.Context(rpc_pipeline=Queue())
+        pipeline = con_with_rpc_pipeline.rpc_pipeline
         T1 = pnl.TransferMechanism(name='log_test_T1',
                                    integrator_mode=True,
                                    integration_rate=0.05)
@@ -337,31 +339,34 @@ def pass_threshold(mech, thresh):
             pnl.TimeScale.TRIAL: pnl.While(pass_threshold, T1, 0.95)
         }

-        T1.set_log_conditions(pnl.VALUE)
-
-        COMP.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial)
+        T1.set_delivery_conditions(pnl.VALUE)

-        log_dict_T1 = T1.log.nparray_dictionary(entries=['value'])
+        COMP.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial, context=con_with_rpc_pipeline)

-        assert list(log_dict_T1.keys()) == [COMP.default_execution_id]
-        sys_log_dict = log_dict_T1[COMP.default_execution_id]
+        actual = []
+        while not pipeline.empty(): actual.append(pipeline.get())
+        assert all([True if i.context == COMP.default_execution_id else False for i in actual])

-        # Check order of keys (must match order of specification)
-        assert list(sys_log_dict.keys()) == ['Run', 'Trial', 'Pass', 'Time_step', 'value']
+        t1_value_entries = [i for i in actual if i.parameterName == pnl.VALUE and i.componentName == 'log_test_T1']
+        t1_value_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in
+                           t1_value_entries]

-        # # Check values T1
-        assert len(sys_log_dict["Run"]) == 59
-        assert np.allclose(sys_log_dict["Pass"][30], 30)
-        assert np.allclose(sys_log_dict["Time_step"][30], 0)
-        assert abs(sys_log_dict["value"][58]) >= 0.95
-        assert abs(sys_log_dict["value"][57]) < 0.95
+        # Check values T1
+        assert len(actual) == 59
+        assert actual[30].time == '0:0:30:0'
+        assert t1_value_values[58] >= 0.95
+        assert t1_value_values[57] < 0.95

     def test_log_csv_multiple_contexts(self):
+        pipeline = Queue()
+        con_X = pnl.Context(execution_id='comp X', rpc_pipeline=pipeline)
+        con_Y = pnl.Context(execution_id='comp Y', rpc_pipeline=pipeline)
+
         A = pnl.TransferMechanism(name='A')
         B = pnl.TransferMechanism(name='B')
         C = pnl.TransferMechanism(name='C')

-        C.set_log_conditions(pnl.VALUE)
+        
C.set_delivery_conditions(pnl.VALUE) X = pnl.Composition(name='comp X') Y = pnl.Composition(name='comp Y') @@ -371,18 +376,18 @@ def test_log_csv_multiple_contexts(self): # running with manual contexts for consistent output # because output is sorted by context - X.run(inputs={A: 1}, context='comp X') - Y.run(inputs={B: 2}, context='comp Y') + X.run(inputs={A: 1}, context=con_X) + Y.run(inputs={B: 2}, context=con_Y) - expected_str = "'Execution Context', 'Data'\n" \ - + "'comp X', 'Run', 'Trial', 'Pass', 'Time_step', 'value'\n" \ - + ", '0', '0', '0', '1', '1.0'\n" \ - + "'comp Y', 'Run', 'Trial', 'Pass', 'Time_step', 'value'\n" \ - + ", '0', '0', '0', '1', '2.0'\n" - assert C.log.csv() == expected_str + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) - print() - print() + assert actual[0].context == 'comp X' + assert actual[0].time == '0:0:0:1' + assert actual[0].value.data == [1] + assert actual[1].context == 'comp Y' + assert actual[1].time == '0:0:0:1' + assert actual[1].value.data == [2] @pytest.mark.parametrize( 'scheduler_conditions, multi_run', [ @@ -392,13 +397,15 @@ def test_log_csv_multiple_contexts(self): ] ) def test_log_multi_calls_single_timestep(self, scheduler_conditions, multi_run): + con_with_rpc_pipeline = pnl.Context(rpc_pipeline=Queue()) + pipeline = con_with_rpc_pipeline.rpc_pipeline lca = pnl.LCAMechanism( size=2, leak=0.5, threshold=0.515, reset_stateful_function_when=pnl.AtTrialStart() ) - lca.set_log_conditions(pnl.VALUE) + lca.set_delivery_conditions(pnl.VALUE) m0 = pnl.ProcessingMechanism( size=2 ) @@ -406,23 +413,30 @@ def test_log_multi_calls_single_timestep(self, scheduler_conditions, multi_run): comp.add_linear_processing_pathway([m0, lca]) if scheduler_conditions: comp.scheduler.add_condition(lca, pnl.AfterNCalls(m0, 2)) - comp.run(inputs={m0: [[1, 0], [1, 0], [1, 0]]}) - log_dict = lca.log.nparray_dictionary()['Composition-0'] - assert log_dict['Run'] == [[0], [0], [0]] - assert log_dict['Trial'] == [[0], [1], [2]] - assert log_dict['Pass'] == [[1], [1], [1]] if scheduler_conditions else [[0], [0], [0]] - assert log_dict['Time_step'] == [[1], [1], [1]] + comp.run(inputs={m0: [[1, 0], [1, 0], [1, 0]]}, context=con_with_rpc_pipeline) + + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) + integration_end_dict = {i.time: i for i in actual} + assert list(integration_end_dict.keys()) == ['0:0:0:1', '0:1:0:1', '0:2:0:1'] + vals = [i.value.data for i in integration_end_dict.values()] # floats in value, so use np.allclose - assert np.allclose(log_dict['value'], [[[0.52466739, 0.47533261]] * 3]) + assert np.allclose(vals, [[[0.52466739, 0.47533261]] * 3]) if multi_run: comp.run(inputs={m0: [[1, 0], [1, 0], [1, 0]]}) - log_dict = lca.log.nparray_dictionary()['Composition-0'] - assert log_dict['Run'] == [[0], [0], [0], [1], [1], [1]] - assert np.allclose(log_dict['value'], [[[0.52466739, 0.47533261]] * 6]) + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) + integration_end_dict.update({i.time: i for i in actual}) + assert list(integration_end_dict.keys()) == ['0:0:0:1', '0:1:0:1', '0:2:0:1', '1:0:0:1', '1:1:0:1', + '1:2:0:1'] + vals = [i.value.data for i in integration_end_dict.values()] + # floats in value, so use np.allclose + assert np.allclose(vals, [[[0.52466739, 0.47533261]] * 6]) class TestFullModels: def test_multilayer(self): - + con_with_rpc_pipeline = pnl.Context(rpc_pipeline=Queue()) + pipeline = con_with_rpc_pipeline.rpc_pipeline input_layer = pnl.TransferMechanism(name='input_layer', 
function=pnl.Logistic, size=2) @@ -479,10 +493,11 @@ def test_multilayer(self): input_dictionary = {backprop_pathway.target: [[0., 0., 1.]], input_layer: [[-1., 30.]]} - middle_weights.set_log_conditions(('mod_matrix', pnl.PROCESSING)) + middle_weights.set_delivery_conditions(('mod_matrix', pnl.PROCESSING)) comp.learn(inputs=input_dictionary, - num_trials=10) + num_trials=10, + context=con_with_rpc_pipeline) expected_log_val = np.array( [ @@ -546,27 +561,17 @@ def test_multilayer(self): ], dtype=object ) - log_val = middle_weights.log.nparray(entries='mod_matrix', header=False) - - assert log_val[0] == expected_log_val[0] - - for i in range(1, len(log_val)): - try: - np.testing.assert_allclose(log_val[i], expected_log_val[i]) - except TypeError: - for j in range(len(log_val[i])): - np.testing.assert_allclose( - np.array(log_val[i][j][0]), - np.array(expected_log_val[i][j][0]), - atol=1e-08, - err_msg='Failed on test item {0} of logged values'.format(i) - ) - - middle_weights.log.print_entries() + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) + log_val = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in actual] + assert all([True if i.context == 'multilayer' else False for i in actual]) + assert np.allclose(log_val, expected_log_val[1][0][4]) # Test Programatic logging - hidden_layer_2.log.log_values(pnl.VALUE, comp) - log_val = hidden_layer_2.log.nparray(header=False) + hidden_layer_2.log._deliver_values(pnl.VALUE, con_with_rpc_pipeline) + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) + log_val = np.ndarray(shape=np.array(actual[0].value.shape), buffer=np.array(actual[0].value.data)) expected_log_val = np.array( [ ['multilayer'], @@ -580,29 +585,25 @@ def test_multilayer(self): ], dtype=object ) - assert log_val[0] == expected_log_val[0] - - for i in range(1, len(log_val)): - try: - np.testing.assert_allclose(log_val[i], expected_log_val[i]) - except TypeError: - for j in range(len(log_val[i])): - np.testing.assert_allclose( - np.array(log_val[i][j][0]), - np.array(expected_log_val[i][j][0]), - atol=1e-08, - err_msg='Failed on test item {0} of logged values'.format(i) - ) - hidden_layer_2.log.print_entries() + assert actual[0].context == 'multilayer' + assert actual[0].time == '1:0:0:0' + assert np.allclose( + expected_log_val[1][0][4], + log_val + ) # Clear log and test with logging of weights set to LEARNING for another 5 trials of learning - middle_weights.log.clear_entries(entries=None, confirm=False) - middle_weights.set_log_conditions(('mod_matrix', pnl.LEARNING)) + middle_weights.set_delivery_conditions(('mod_matrix', pnl.LEARNING)) comp.learn( num_trials=5, inputs=input_dictionary, + context=con_with_rpc_pipeline ) - log_val = middle_weights.log.nparray(entries='mod_matrix', header=False) + actual = [] + while not pipeline.empty(): actual.append(pipeline.get()) + assert all([True if i.context == 'multilayer' else False for i in actual]) + matrices = [i for i in actual if i.parameterName == 'matrix'] + log_val = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in matrices] expected_log_val = np.array( [ ['multilayer'], @@ -644,22 +645,8 @@ def test_multilayer(self): dtype=object ) - assert log_val.shape == expected_log_val.shape - assert log_val[0] == expected_log_val[0] - assert len(log_val[1]) == len(expected_log_val[1]) == 1 - - for i in range(len(log_val[1][0])): - try: - np.testing.assert_allclose( - log_val[1][0][i], - expected_log_val[1][0][i], - err_msg='Failed 
on test item {0} of logged values'.format(i) - ) - except TypeError: - for j in range(len(log_val[1][0][i])): - np.testing.assert_allclose( - np.array(log_val[1][0][i][j]), - np.array(expected_log_val[1][0][i][j]), - atol=1e-08, - err_msg='Failed on test item {0} of logged values'.format(i) - ) + assert [i.time for i in matrices] == ['1:0:1:0', '1:1:1:0', '1:2:1:0', '1:3:1:0', '1:4:1:0'] + assert np.allclose( + expected_log_val[1][0][4], + log_val + ) From c460001b9624e1be8cb1f383032231c63fb334f6 Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Thu, 6 Aug 2020 15:15:55 -0400 Subject: [PATCH 14/24] fixed docstring for _deliver_values method --- psyneulink/core/globals/log.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/psyneulink/core/globals/log.py b/psyneulink/core/globals/log.py index 4937e03a016..08188c33a87 100644 --- a/psyneulink/core/globals/log.py +++ b/psyneulink/core/globals/log.py @@ -906,11 +906,10 @@ def _deliver_values(self, entries, context=None): This can be used to "manually" prepare the `value ` of any of a Component's `loggable_items ` (including its own `value `) for delivery to an external application via gRPC. - The context item of its `LogEntry` is assigned *COMMAND_LINE*. If the call to _deliver_values is made while a - Composition to which the Component belongs is being run (e.g., in a **call_before..** or **call_after...** argument - of its `run ` method), then the time of the LogEntry is assigned the value of the `Clock` of - the Composition's `scheduler` or `scheduler_learning`, whichever is currently executing - (see `System_Scheduler`). + If the call to _deliver_values is made while a Composition to which the Component belongs is being run (e.g., + in a **call_before..** or **call_after...** argument of its `run ` method), then the time of + the LogEntry is assigned the value of the `Clock` of the Composition's `scheduler` or `scheduler_learning`, + whichever is currently executing (see `Composition_Scheduler`). Arguments --------- From 9d00eee6138b5cb739e0bcb2b43e339c27f3dd9e Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Thu, 6 Aug 2020 15:18:21 -0400 Subject: [PATCH 15/24] changed public RPC-related methods to private --- psyneulink/core/components/component.py | 6 +++--- psyneulink/core/globals/log.py | 2 +- psyneulink/core/globals/preferences/basepreferenceset.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/psyneulink/core/components/component.py b/psyneulink/core/components/component.py index 0bf202417f6..421433dcb14 100644 --- a/psyneulink/core/components/component.py +++ b/psyneulink/core/components/component.py @@ -3189,16 +3189,16 @@ def set_log_conditions(self, items, log_condition=LogCondition.EXECUTION): def set_delivery_conditions(self, items, delivery_condition=LogCondition.EXECUTION): """ - set_delivery_conditions( \ + _set_delivery_conditions( \ items \ delivery_condition=EXECUTION \ ) Specifies items to be delivered to external application via gRPC; these must be be `loggable_items ` - of the Component's `log `. This is a convenience method that calls the `set_delivery_conditions ` + of the Component's `log `. This is a convenience method that calls the `_set_delivery_conditions ` method of the Component's `log `. 
""" - self.log.set_delivery_conditions(items=items, delivery_condition=delivery_condition) + self.log._set_delivery_conditions(items=items, delivery_condition=delivery_condition) def log_values(self, entries): """ diff --git a/psyneulink/core/globals/log.py b/psyneulink/core/globals/log.py index 08188c33a87..9b0ba9bb462 100644 --- a/psyneulink/core/globals/log.py +++ b/psyneulink/core/globals/log.py @@ -831,7 +831,7 @@ def assign_log_condition(item, level): else: assign_log_condition(item[0], item[1]) - def set_delivery_conditions(self, items, delivery_condition=LogCondition.EXECUTION): + def _set_delivery_conditions(self, items, delivery_condition=LogCondition.EXECUTION): """Specifies items to be delivered via gRPC under the specified `LogCondition`\\(s). Arguments diff --git a/psyneulink/core/globals/preferences/basepreferenceset.py b/psyneulink/core/globals/preferences/basepreferenceset.py index 64fa9b2535a..9243531eafe 100644 --- a/psyneulink/core/globals/preferences/basepreferenceset.py +++ b/psyneulink/core/globals/preferences/basepreferenceset.py @@ -386,8 +386,8 @@ def logPref(self): return self.get_pref_setting_for_level(LOG_PREF, self._log_pref.level)[0] @property - def deliveryPref(self): - """Return setting of owner's deliveryPref at level specified in its PreferenceEntry.level + def _deliveryPref(self): + """Return setting of owner's _deliveryPref at level specified in its PreferenceEntry.level :param level: :return: """ From bc3d27357e33193c8e954c81629bd77e1485256a Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Thu, 6 Aug 2020 15:29:25 -0400 Subject: [PATCH 16/24] fixed incorrect expected times in one RPC test --- tests/log/test_rpc.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tests/log/test_rpc.py b/tests/log/test_rpc.py index a462ed02795..294f5f7e565 100644 --- a/tests/log/test_rpc.py +++ b/tests/log/test_rpc.py @@ -418,17 +418,24 @@ def test_log_multi_calls_single_timestep(self, scheduler_conditions, multi_run): actual = [] while not pipeline.empty(): actual.append(pipeline.get()) integration_end_dict = {i.time: i for i in actual} - assert list(integration_end_dict.keys()) == ['0:0:0:1', '0:1:0:1', '0:2:0:1'] + if scheduler_conditions: + expected_times = ['0:0:1:1', '0:1:1:1', '0:2:1:1'] + else: + expected_times = ['0:0:0:1', '0:1:0:1', '0:2:0:1'] + assert list(integration_end_dict.keys()) == expected_times vals = [i.value.data for i in integration_end_dict.values()] # floats in value, so use np.allclose assert np.allclose(vals, [[[0.52466739, 0.47533261]] * 3]) if multi_run: - comp.run(inputs={m0: [[1, 0], [1, 0], [1, 0]]}) + comp.run(inputs={m0: [[1, 0], [1, 0], [1, 0]]}, context=con_with_rpc_pipeline) actual = [] while not pipeline.empty(): actual.append(pipeline.get()) integration_end_dict.update({i.time: i for i in actual}) - assert list(integration_end_dict.keys()) == ['0:0:0:1', '0:1:0:1', '0:2:0:1', '1:0:0:1', '1:1:0:1', - '1:2:0:1'] + if scheduler_conditions: + expected_times = ['0:0:1:1', '0:1:1:1', '0:2:1:1', '1:0:1:1', '1:1:1:1', '1:2:1:1'] + else: + expected_times = ['0:0:0:1', '0:1:0:1', '0:2:0:1', '1:0:0:1', '1:1:0:1', '1:2:0:1'] + assert list(integration_end_dict.keys()) == expected_times vals = [i.value.data for i in integration_end_dict.values()] # floats in value, so use np.allclose assert np.allclose(vals, [[[0.52466739, 0.47533261]] * 6]) From c6f50ae96ecca38e88cd14de2a3bb17191c030bf Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Thu, 6 Aug 2020 16:19:58 -0400 Subject: [PATCH 17/24] fixed pycodestyle 
problems
---
 psyneulink/core/globals/context.py            |  4 +-
 psyneulink/core/globals/log.py                | 12 ++---
 psyneulink/core/globals/parameters.py         |  2 +-
 .../core/globals/preferences/preferenceset.py |  2 +-
 tests/log/test_rpc.py                         | 46 ++++++++++++-------
 5 files changed, 40 insertions(+), 26 deletions(-)

diff --git a/psyneulink/core/globals/context.py b/psyneulink/core/globals/context.py
index 186c029d8ab..2103d454d05 100644
--- a/psyneulink/core/globals/context.py
+++ b/psyneulink/core/globals/context.py
@@ -332,7 +332,7 @@ class Context():
     rpc_pipeline : Queue
         queue to populate with messages for external environment in cases where execution was triggered via RPC call
         (e.g. through PsyNeuLinkView).
-    
+
     """

     __name__ = 'Context'
@@ -347,7 +347,7 @@ def __init__(self,
                  source=ContextFlags.NONE,
                  runmode=ContextFlags.DEFAULT_MODE,
                  execution_id=None,
-                 string:str='', 
+                 string:str='',
                  time=None,
                  rpc_pipeline:Queue=None):

diff --git a/psyneulink/core/globals/log.py b/psyneulink/core/globals/log.py
index 9b0ba9bb462..e4bc87760f9 100644
--- a/psyneulink/core/globals/log.py
+++ b/psyneulink/core/globals/log.py
@@ -830,7 +830,7 @@ def assign_log_condition(item, level):
             assign_log_condition(item, log_condition)
         else:
             assign_log_condition(item[0], item[1])
-    
+
     def _set_delivery_conditions(self, items, delivery_condition=LogCondition.EXECUTION):
         """Specifies items to be delivered via gRPC under the specified `LogCondition`\\(s).

         Arguments
@@ -870,7 +870,7 @@ def assign_delivery_condition(item, level):
                     levels |= l
                 level = levels

-            if not item in self.loggable_items:
+            if item not in self.loggable_items:
                 # KDM 8/13/18: NOTE: add_entries is not defined anywhere
                 raise LogError("\'{0}\' is not a loggable item for {1} (try using \'{1}.log.add_entries()\')".
                                format(item, self.owner.name))
@@ -906,16 +906,16 @@ def _deliver_values(self, entries, context=None):

         This can be used to "manually" prepare the `value ` of any of a Component's `loggable_items
         ` (including its own `value `) for delivery to an external application via gRPC.
-        If the call to _deliver_values is made while a Composition to which the Component belongs is being run (e.g., 
-        in a **call_before..** or **call_after...** argument of its `run ` method), then the time of 
-        the LogEntry is assigned the value of the `Clock` of the Composition's `scheduler` or `scheduler_learning`, 
+        If the call to _deliver_values is made while a Composition to which the Component belongs is being run (e.g.,
+        in a **call_before..** or **call_after...** argument of its `run ` method), then the time of
+        the LogEntry is assigned the value of the `Clock` of the Composition's `scheduler` or `scheduler_learning`,
         whichever is currently executing (see `Composition_Scheduler`).

         Arguments
         ---------

         entries : string, Component or list containing either : default ALL
-            specifies the Components, the current `value `\\s of which should be prepared for 
+            specifies the Components, the current `value `\\s of which should be prepared for
             transmission to an external application via gRPC.
             They must be `loggable_items ` of the owner's Log. If **entries** is *ALL* or is not
             specified, then the `value `\\s of all `loggable_items ` are delivered.
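The log.py changes above and the test_rpc.py hunks later in this patch lean on two idioms: draining a `Context.rpc_pipeline` Queue into a list of `Entry` messages, and rebuilding a numpy array from each message's `DoubleMatrix` payload. A minimal sketch of both, factored into helpers (the helper names are illustrative and are not part of this patch series):

import numpy as np

def drain_pipeline(pipeline):
    # Empty a Context.rpc_pipeline (a queue.Queue) into a list of
    # graph_pb2.Entry messages, preserving delivery order
    entries = []
    while not pipeline.empty():
        entries.append(pipeline.get())
    return entries

def entry_value(entry):
    # Rebuild the delivered parameter value from the DoubleMatrix payload,
    # mirroring np.ndarray(shape=..., buffer=...) as used throughout
    # tests/log/test_rpc.py
    return np.ndarray(shape=np.array(entry.value.shape),
                      buffer=np.array(entry.value.data))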
diff --git a/psyneulink/core/globals/parameters.py b/psyneulink/core/globals/parameters.py index 3230c3a4bdf..5278b8a703f 100644 --- a/psyneulink/core/globals/parameters.py +++ b/psyneulink/core/globals/parameters.py @@ -1323,7 +1323,7 @@ def _deliver_value(self, value, context=None): execution_id = None else: execution_id = context.execution_id - ##### ADD TO PIPELINE HERE ##### + # ADD TO PIPELINE HERE context.rpc_pipeline.put( Entry( componentName=self._get_root_owner().name, diff --git a/psyneulink/core/globals/preferences/preferenceset.py b/psyneulink/core/globals/preferences/preferenceset.py index 5cadb043533..ceb86910f2e 100644 --- a/psyneulink/core/globals/preferences/preferenceset.py +++ b/psyneulink/core/globals/preferences/preferenceset.py @@ -853,4 +853,4 @@ def _assign_prefs(object, prefs, prefs_class:PreferenceSet): # assign delivery conditions from preferences object.parameters.value.delivery_condition = object.prefs._delivery_pref.setting except AttributeError: - pass \ No newline at end of file + pass diff --git a/tests/log/test_rpc.py b/tests/log/test_rpc.py index 294f5f7e565..aa72cbe1f1b 100644 --- a/tests/log/test_rpc.py +++ b/tests/log/test_rpc.py @@ -49,7 +49,8 @@ def test_transfer_mech(self): actual = [] pipeline = con_with_rpc_pipeline.rpc_pipeline - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) t_1_entries = [i for i in actual if i.componentName == 'log_test_T_1'] noise = [i for i in t_1_entries if i.parameterName == 'noise'] results = [i for i in t_1_entries if i.parameterName == 'RESULT'] @@ -68,7 +69,8 @@ def test_delivery_initialization(self): comp = pnl.Composition(name='comp', nodes=[T]) comp.run([1], context=con_with_rpc_pipeline) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) assert all([ len(actual) == 1, actual[0].time == '0:0:0:0', @@ -90,7 +92,8 @@ def test_run_resets(self): context=con_with_rpc_pipeline) pipeline = con_with_rpc_pipeline.rpc_pipeline actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) assert all([i.context == 'COMP' for i in actual]) assert np.allclose([ np.ndarray(shape=np.array(actual[1].value.shape), buffer=np.array(actual[1].value.data)), @@ -101,7 +104,8 @@ def test_run_resets(self): COMP.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}, context=con_with_rpc_pipeline) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) assert np.allclose([ np.ndarray(shape=np.array(actual[1].value.shape), buffer=np.array(actual[1].value.data)), np.ndarray(shape=np.array(actual[3].value.shape), buffer=np.array(actual[3].value.data)), @@ -132,7 +136,8 @@ def test_log_dictionary_with_time(self): context=con_with_rpc_pipeline) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) t1_slope_entries = [i for i in actual if i.parameterName == pnl.SLOPE and i.componentName == 'log_test_T1'] t1_slope_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_slope_entries] @@ -192,7 +197,8 @@ def test_log_dictionary_with_time(self): context=con_with_rpc_pipeline) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) t1_slope_entries = [i for 
i in actual if i.parameterName == pnl.SLOPE and i.componentName == 'log_test_T1'] t1_slope_values = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in t1_slope_entries] @@ -277,7 +283,8 @@ def pass_threshold(mech, thresh): context=con_with_rpc_pipeline) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) assert all([True if i.context == COMP.default_execution_id else False for i in actual]) t1_slope_entries = [i for i in actual if i.parameterName == pnl.SLOPE and i.componentName == 'log_test_T1'] @@ -305,7 +312,7 @@ def pass_threshold(mech, thresh): expected_times_T1 = ['0:0:0:0', '0:0:1:0', '0:0:2:0'] expected_results_T1 = [[0.5], [0.75], [0.875]] expected_values_T1 = [[[0.5]], [[0.75]], [[0.875]]] - expected_slopes_T1 = [[1], [1], [1]] + expected_slopes_T1 = [[1], [1], [1]] assert expected_times_T1 == t1_result_times == t1_slope_times == t1_value_times assert np.allclose(expected_values_T1, t1_value_values) assert np.allclose(expected_results_T1, t1_result_values) @@ -315,7 +322,7 @@ def pass_threshold(mech, thresh): expected_times_T2 = ['0:0:0:1', '0:0:1:1', '0:0:2:1'] expected_values_T2 = [[[3]], [[4.5]], [[5.25]]] - expected_slopes_T2 = [[6], [6], [6]] + expected_slopes_T2 = [[6], [6], [6]] assert expected_times_T2 == t2_slope_times == t2_value_times assert np.allclose(expected_values_T2, t2_value_values) assert np.allclose(expected_slopes_T2, t2_slope_values) @@ -344,7 +351,8 @@ def pass_threshold(mech, thresh): COMP.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial, context=con_with_rpc_pipeline) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) assert all([True if i.context == COMP.default_execution_id else False for i in actual]) t1_value_entries = [i for i in actual if i.parameterName == pnl.VALUE and i.componentName == 'log_test_T1'] @@ -380,7 +388,8 @@ def test_log_csv_multiple_contexts(self): Y.run(inputs={B: 2}, context=con_Y) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) assert actual[0].context == 'comp X' assert actual[0].time == '0:0:0:1' @@ -416,7 +425,8 @@ def test_log_multi_calls_single_timestep(self, scheduler_conditions, multi_run): comp.run(inputs={m0: [[1, 0], [1, 0], [1, 0]]}, context=con_with_rpc_pipeline) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) integration_end_dict = {i.time: i for i in actual} if scheduler_conditions: expected_times = ['0:0:1:1', '0:1:1:1', '0:2:1:1'] @@ -429,7 +439,8 @@ def test_log_multi_calls_single_timestep(self, scheduler_conditions, multi_run): if multi_run: comp.run(inputs={m0: [[1, 0], [1, 0], [1, 0]]}, context=con_with_rpc_pipeline) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) integration_end_dict.update({i.time: i for i in actual}) if scheduler_conditions: expected_times = ['0:0:1:1', '0:1:1:1', '0:2:1:1', '1:0:1:1', '1:1:1:1', '1:2:1:1'] @@ -569,7 +580,8 @@ def test_multilayer(self): dtype=object ) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) log_val = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in actual] assert all([True if i.context 
== 'multilayer' else False for i in actual]) assert np.allclose(log_val, expected_log_val[1][0][4]) @@ -577,7 +589,8 @@ def test_multilayer(self): # Test Programatic logging hidden_layer_2.log._deliver_values(pnl.VALUE, con_with_rpc_pipeline) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) log_val = np.ndarray(shape=np.array(actual[0].value.shape), buffer=np.array(actual[0].value.data)) expected_log_val = np.array( [ @@ -607,7 +620,8 @@ def test_multilayer(self): context=con_with_rpc_pipeline ) actual = [] - while not pipeline.empty(): actual.append(pipeline.get()) + while not pipeline.empty(): + actual.append(pipeline.get()) assert all([True if i.context == 'multilayer' else False for i in actual]) matrices = [i for i in actual if i.parameterName == 'matrix'] log_val = [np.ndarray(shape=np.array(i.value.shape), buffer=np.array(i.value.data)) for i in matrices] From 0252d2aa2abe39ab8994882574eeb5c52ab355c6 Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Thu, 6 Aug 2020 16:45:21 -0400 Subject: [PATCH 18/24] fixed titles for protobuf files --- psyneulink/core/rpc/graph.proto | 2 +- psyneulink/core/rpc/graph_pb2.py | 2 +- psyneulink/core/rpc/graph_pb2_grpc.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/psyneulink/core/rpc/graph.proto b/psyneulink/core/rpc/graph.proto index 0747e667992..535084afeac 100644 --- a/psyneulink/core/rpc/graph.proto +++ b/psyneulink/core/rpc/graph.proto @@ -5,7 +5,7 @@ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. -*********************************************** PNL ProtoBuffer Definitions ************************************************************** + ***************************************** PNL ProtoBuf Definitions *********************************************** */ syntax = "proto3"; diff --git a/psyneulink/core/rpc/graph_pb2.py b/psyneulink/core/rpc/graph_pb2.py index 00672f00307..1bd3267ea9d 100644 --- a/psyneulink/core/rpc/graph_pb2.py +++ b/psyneulink/core/rpc/graph_pb2.py @@ -5,7 +5,7 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and limitations under the License. -# ********************************** PNL ProtoBuffer classes ***************************************************** +# ********************************** PNL ProtoBuf Service Definitions ******************************************** # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! diff --git a/psyneulink/core/rpc/graph_pb2_grpc.py b/psyneulink/core/rpc/graph_pb2_grpc.py index e223c74cd19..867472ff7ba 100644 --- a/psyneulink/core/rpc/graph_pb2_grpc.py +++ b/psyneulink/core/rpc/graph_pb2_grpc.py @@ -5,7 +5,7 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and limitations under the License. -# ********************************** PNL ProtoBuffer Service Definitions ***************************************** +# ********************************** PNL ProtoBuf Classes ******************************************************** # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
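With the rpc package in place, the delivery path built up over the preceding patches can be exercised end to end. The sketch below is distilled from tests/log/test_rpc.py; the Queue stands in for the gRPC stream that an external client such as PsyNeuLinkView would consume, and the assumed field layout of each queued `Entry` follows graph.proto:

from queue import Queue

import numpy as np
import psyneulink as pnl

# Attach a pipeline to the execution context; matching parameter values are
# queued for the external application as the Composition executes
context = pnl.Context(rpc_pipeline=Queue())

T = pnl.TransferMechanism(name='T')
comp = pnl.Composition(name='comp', nodes=[T])

# Delivery analogue of set_log_conditions: deliver T's value on every execution
T.set_delivery_conditions(pnl.VALUE)

comp.run(inputs={T: [[1.0]]}, context=context)

# Each queued item is a graph_pb2.Entry carrying componentName, parameterName,
# a 'run:trial:pass:time_step' time string, the execution context, and a
# DoubleMatrix value
while not context.rpc_pipeline.empty():
    entry = context.rpc_pipeline.get()
    value = np.ndarray(shape=np.array(entry.value.shape),
                       buffer=np.array(entry.value.data))
    print(entry.componentName, entry.parameterName, entry.time, value)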
From 4e7a2f83fbf8635553f35915980868746eb13bad Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Fri, 7 Aug 2020 09:54:32 -0400 Subject: [PATCH 19/24] prepare RPC files for merge into devel --- psyneulink/core/rpc/__init__.py | 3 +++ psyneulink/core/rpc/graph_pb2.py | 2 +- psyneulink/core/rpc/graph_pb2_grpc.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 psyneulink/core/rpc/__init__.py diff --git a/psyneulink/core/rpc/__init__.py b/psyneulink/core/rpc/__init__.py new file mode 100644 index 00000000000..eb465aed547 --- /dev/null +++ b/psyneulink/core/rpc/__init__.py @@ -0,0 +1,3 @@ +from graph_pb2_grpc import ServeGraph + +__all__ = ['ServeGraph'] \ No newline at end of file diff --git a/psyneulink/core/rpc/graph_pb2.py b/psyneulink/core/rpc/graph_pb2.py index 1bd3267ea9d..f98898c8692 100644 --- a/psyneulink/core/rpc/graph_pb2.py +++ b/psyneulink/core/rpc/graph_pb2.py @@ -5,7 +5,7 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and limitations under the License. -# ********************************** PNL ProtoBuf Service Definitions ******************************************** +# ********************************** PNL ProtoBuf Server/Client Definitions ************************************** # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! diff --git a/psyneulink/core/rpc/graph_pb2_grpc.py b/psyneulink/core/rpc/graph_pb2_grpc.py index 867472ff7ba..c28b23a9b34 100644 --- a/psyneulink/core/rpc/graph_pb2_grpc.py +++ b/psyneulink/core/rpc/graph_pb2_grpc.py @@ -5,7 +5,7 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and limitations under the License. -# ********************************** PNL ProtoBuf Classes ******************************************************** +# ********************************** PNL ProtoBuf Python Classes ************************************************* # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! From a0adcf5aa985f3f4c5888fa38d7d1a932c178685 Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Fri, 7 Aug 2020 10:03:20 -0400 Subject: [PATCH 20/24] prepare RPC files for merge into devel --- psyneulink/core/__init__.py | 3 +++ psyneulink/core/rpc/__init__.py | 2 +- psyneulink/core/rpc/graph_pb2_grpc.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/psyneulink/core/__init__.py b/psyneulink/core/__init__.py index 9dedb7ee610..7e53794be1b 100644 --- a/psyneulink/core/__init__.py +++ b/psyneulink/core/__init__.py @@ -3,15 +3,18 @@ from . import globals from . import llvm from . import scheduling +from . 
import rpc from .components import * from .compositions import * from .globals import * from .llvm import * from .scheduling import * +from .rpc import * __all__ = list(components.__all__) __all__.extend(llvm.__all__) __all__.extend(compositions.__all__) __all__.extend(globals.__all__) __all__.extend(scheduling.__all__) +__all__.extend(rpc.__all__) \ No newline at end of file diff --git a/psyneulink/core/rpc/__init__.py b/psyneulink/core/rpc/__init__.py index eb465aed547..8c14b198043 100644 --- a/psyneulink/core/rpc/__init__.py +++ b/psyneulink/core/rpc/__init__.py @@ -1,3 +1,3 @@ -from graph_pb2_grpc import ServeGraph +from .graph_pb2_grpc import ServeGraph __all__ = ['ServeGraph'] \ No newline at end of file diff --git a/psyneulink/core/rpc/graph_pb2_grpc.py b/psyneulink/core/rpc/graph_pb2_grpc.py index c28b23a9b34..784ff03ab1a 100644 --- a/psyneulink/core/rpc/graph_pb2_grpc.py +++ b/psyneulink/core/rpc/graph_pb2_grpc.py @@ -12,7 +12,7 @@ """Client and server classes corresponding to protobuf-defined services.""" import grpc -import graph_pb2 as graph__pb2 +from . import graph_pb2 as graph__pb2 class ServeGraphStub(object): From 20ada8a6520f1c0e70bc9e5d99762cda9f5f1415 Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Fri, 7 Aug 2020 10:40:27 -0400 Subject: [PATCH 21/24] moved gRPC dependencies from `dev_requirements` to `requirements` --- dev_requirements.txt | 2 -- psyneulink/core/__init__.py | 2 +- psyneulink/core/rpc/__init__.py | 2 +- requirements.txt | 2 ++ 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dev_requirements.txt b/dev_requirements.txt index 0607049d5e2..a0352c6e221 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,5 +1,3 @@ -grpcio -grpcio-tools ipykernel ipython jupyter diff --git a/psyneulink/core/__init__.py b/psyneulink/core/__init__.py index 7e53794be1b..ca39a2fd88f 100644 --- a/psyneulink/core/__init__.py +++ b/psyneulink/core/__init__.py @@ -17,4 +17,4 @@ __all__.extend(compositions.__all__) __all__.extend(globals.__all__) __all__.extend(scheduling.__all__) -__all__.extend(rpc.__all__) \ No newline at end of file +__all__.extend(rpc.__all__) diff --git a/psyneulink/core/rpc/__init__.py b/psyneulink/core/rpc/__init__.py index 8c14b198043..e33091a4cf7 100644 --- a/psyneulink/core/rpc/__init__.py +++ b/psyneulink/core/rpc/__init__.py @@ -1,3 +1,3 @@ from .graph_pb2_grpc import ServeGraph -__all__ = ['ServeGraph'] \ No newline at end of file +__all__ = ['ServeGraph'] diff --git a/requirements.txt b/requirements.txt index 966792fe67f..24ac4941397 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,6 +2,8 @@ autograd dill elfi graphviz +grpcio +grpcio-tools llvmlite matplotlib networkx==1.11 From 367cd020f9c95eaf5aecef940a01150a3a754b23 Mon Sep 17 00:00:00 2001 From: Dillon Smith Date: Fri, 7 Aug 2020 11:23:46 -0400 Subject: [PATCH 22/24] removed unreachable statement --- psyneulink/core/globals/parameters.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/psyneulink/core/globals/parameters.py b/psyneulink/core/globals/parameters.py index 5278b8a703f..6de3b241ab0 100644 --- a/psyneulink/core/globals/parameters.py +++ b/psyneulink/core/globals/parameters.py @@ -1305,9 +1305,6 @@ def _deliver_value(self, value, context=None): if self.delivery_condition is None or self.delivery_condition is LogCondition.OFF: return - if context is None: - context = self._owner._owner.most_recent_context - time = _get_time(self._owner._owner, context) delivery_condition_satisfied = self.delivery_condition & context.flags From 
447061e0bdc17ef5650f01a426ac897afb36a009 Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Sat, 8 Aug 2020 20:33:28 -0400 Subject: [PATCH 23/24] travis: Drop all arches other than arm from 'precache' stage Instances of ppc64le and s390x are fast enough to build wheels and run tests in one go. Signed-off-by: Jan Vesely --- .travis.yml | 30 ------------------------------ 1 file changed, 30 deletions(-) diff --git a/.travis.yml b/.travis.yml index 909a3698072..d35a09b18d4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -37,36 +37,6 @@ jobs: stage: precache env: PYTHON=3.6 arch: arm64 - - script: true - after_script: true - stage: precache - env: PYTHON=3.8 - arch: ppc64le - - script: true - after_script: true - stage: precache - env: PYTHON=3.7 - arch: ppc64le - - script: true - after_script: true - stage: precache - env: PYTHON=3.6 - arch: ppc64le - - script: true - after_script: true - stage: precache - env: PYTHON=3.8 - arch: s390x - - script: true - after_script: true - stage: precache - env: PYTHON=3.7 - arch: s390x - - script: true - after_script: true - stage: precache - env: PYTHON=3.6 - arch: s390x env: jobs: From 59d8ef1e293193f70289e11fc8b4e157b59d9103 Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Sat, 8 Aug 2020 20:35:40 -0400 Subject: [PATCH 24/24] travis: Disable s390x jobs. https://travis-ci.community/t/s390x-build-fails-to-get-queued/9533 Signed-off-by: Jan Vesely --- .travis.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index d35a09b18d4..5cb7d8a2680 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,7 +14,9 @@ arch: - amd64 - arm64 - ppc64le - - s390x +# Disabled until the scheduling issues are fixed: +# https://travis-ci.community/t/s390x-build-fails-to-get-queued/9533 +# - s390x stages: - precache
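Because every delivered `Entry` records its execution context and a 'run:trial:pass:time_step' time string, a consumer can demultiplex a pipeline shared across executions (as in test_log_csv_multiple_contexts) and restore temporal order on the receiving side. A sketch of that consumer-side bookkeeping (the helper name and grouping strategy are illustrative, not part of the patches):

from collections import defaultdict

def group_entries(entries):
    # Group Entry messages by execution context (e.g. 'comp X' vs 'comp Y'),
    # then sort each group by (run, trial, pass, time_step)
    by_context = defaultdict(list)
    for entry in entries:
        by_context[entry.context].append(entry)
    for group in by_context.values():
        group.sort(key=lambda e: tuple(int(t) for t in e.time.split(':')))
    return by_context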