diff --git a/.appveyor.yml b/.appveyor.yml
new file mode 100644
index 0000000000..495db08624
--- /dev/null
+++ b/.appveyor.yml
@@ -0,0 +1,54 @@
+version: '{build}'
+branches:
+  except:
+    - gh-pages
+skip_tags: true
+skip_branch_with_pr: true
+clone_depth: 15
+image: Ubuntu2004
+environment:
+  PIP_PROGRESS_BAR: off
+  APPVEYOR_CONSOLE_DISABLE_PTY: true
+  OMP_NUM_THREADS: 1
+  OPENBLAS_NUM_THREADS: 1
+  matrix:
+    - PYTHON: python3.8
+
+init:
+- ps: |
+    sudo apt-get update
+    sudo apt-get install -y --no-install-recommends build-essential graphviz git curl $Env:PYTHON-dev $Env:PYTHON-distutils $Env:PYTHON-venv
+    sudo update-alternatives --install /usr/bin/python python /usr/bin/$Env:PYTHON 10
+    sudo update-alternatives --install /usr/bin/python3 python3 /usr/bin/$Env:PYTHON 10
+    sudo update-alternatives --set python /usr/bin/$Env:PYTHON
+    sudo update-alternatives --set python3 /usr/bin/$Env:PYTHON
+
+install:
+- ps: |
+    python -m venv venv
+    venv/bin/Activate.ps1
+    echo "#!/bin/sh" > venv/bin/xdg-open
+    chmod +x venv/bin/xdg-open
+
+    pip --version
+    pip install -U pip wheel cython
+    pip install -U coveralls
+    pip install -U -e .[dev]
+
+build: off
+
+test_script:
+- ps: |
+    if (Test-Path Env:\COVERALLS_REPO_TOKEN ) {
+      $Env:COV="--cov=psyneulink"
+    }
+    python -m pytest --junit-xml=tests_out.xml $Env:COV --verbosity=0 --capture=sys -o console_output_style=count
+
+on_success:
+- ps: |
+    if (Test-Path Env:\COVERALLS_REPO_TOKEN ) {
+      coveralls
+    }
+
+on_finish:
+- ps: curl -X POST -F "file=@tests_out.xml" https://ci.appveyor.com/api/testresults/junit/$Env:APPVEYOR_JOB_ID
diff --git a/.github/workflows/compare-comment.yml b/.github/workflows/compare-comment.yml
index 4120632494..7c8cb9cd0c 100644
--- a/.github/workflows/compare-comment.yml
+++ b/.github/workflows/compare-comment.yml
@@ -16,7 +16,7 @@ jobs:
 
       - name: 'Download docs artifacts'
         id: docs-artifacts
-        uses: actions/github-script@v4.0.2
+        uses: actions/github-script@v4.1
         with:
           script: |
             var artifacts = await github.actions.listWorkflowRunArtifacts({
@@ -32,11 +32,14 @@
             core.setOutput('DOCS_GEN_ENV', docsSuffix);
 
             var docsArtifacts = artifacts.data.artifacts.filter((artifact) => {
-              return artifact.name.endsWith(docsSuffix) && artifact.name.startsWith('Documentation-')
+              return (
+                (artifact.name.endsWith(docsSuffix) && artifact.name.startsWith('Documentation-'))
+                || artifact.name == 'pr_number'
+              )
             });
 
-            // check that we got exactly 2 artifacts
-            console.assert(docsArtifacts.length == 2, docsSuffix, docsArtifacts, artifacts.data.artifacts);
+            // check that we got exactly 3 artifacts
+            console.assert(docsArtifacts.length == 3, docsSuffix, docsArtifacts, artifacts.data.artifacts);
 
             var fs = require('fs');
             for (artifact of docsArtifacts) {
@@ -55,6 +58,7 @@ jobs:
         run: |
           unzip Documentation-base-*.zip -d docs-base/
           unzip Documentation-head-*.zip -d docs-head/
+          unzip pr_number.zip
 
       - name: Compare
         shell: bash
@@ -64,16 +68,17 @@
           (diff -r docs-base docs-head && echo 'No differences!' || true) | tee ./result.diff
 
       - name: Post comment with docs diff
-        uses: actions/github-script@v4.0.2
+        uses: actions/github-script@v4.1
         with:
          script: |
            var fs = require('fs');
            var text = fs.readFileSync("./result.diff").slice(0,16384);
+           var issue_number = Number(fs.readFileSync('./pr_number.txt'));
 
-           console.log('Posting diff to PR: ${{ github.event.workflow_run.pull_requests[0].number }}')
+           console.log('Posting diff to PR: ' + issue_number);
            github.issues.createComment({
-             issue_number: ${{ github.event.workflow_run.pull_requests[0].number }},
+             issue_number: issue_number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: 'This PR causes the following changes to the html docs (${{ steps.docs-artifacts.outputs.DOCS_GEN_ENV }}):\n```\n' + text + '\n...\n```\nSee CI logs for the full diff.'
diff --git a/.github/workflows/pnl-ci-docs.yml b/.github/workflows/pnl-ci-docs.yml
index 257a44c55a..adbaf310d8 100644
--- a/.github/workflows/pnl-ci-docs.yml
+++ b/.github/workflows/pnl-ci-docs.yml
@@ -94,14 +94,26 @@ jobs:
         run: git tag 'v999.999.999.999'
 
       - name: Build Documentation
-        run: sphinx-build -b html -aE -j auto docs/source pnl-html
+        run: make -C docs/ html -e SPHINXOPTS="-aE -j auto"
 
       - name: Upload Documentation
         uses: actions/upload-artifact@v2.2.4
         with:
          name: Documentation-${{matrix.pnl-version}}-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.python-architecture }}
          retention-days: 1
-         path: pnl-html
+         path: docs/build/html
+
+      - name: Store PR number
+        if: ${{ github.event_name == 'pull_request' }}
+        run: echo ${{ github.event.pull_request.number }} > ./pr_number.txt
+
+      - name: Upload PR number for other workflows
+        if: ${{ github.event_name == 'pull_request' }}
+        uses: actions/upload-artifact@v2.2.4
+        with:
+          name: pr_number
+          path: ./pr_number.txt
+
 
   docs-deploy:
     strategy:
diff --git a/.github/workflows/pnl-docs-compare.yml b/.github/workflows/pnl-docs-compare.yml
deleted file mode 100644
index 42d475d2f3..0000000000
--- a/.github/workflows/pnl-docs-compare.yml
+++ /dev/null
@@ -1,120 +0,0 @@
-name: PsyNeuLink Docs Compare
-
-on: pull_request
-
-jobs:
-  docs-build:
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [3.7]
-        os: [ubuntu-latest]
-        pnl-version: [ 'base', 'merge']
-
-    runs-on: ${{ matrix.os }}
-
-    defaults:
-      run:
-        shell: bash
-
-    steps:
-    - name: Checkout merge commit
-      uses: actions/checkout@v2.3.4
-      if: ${{ matrix.pnl-version == 'merge' }}
-      with:
-        fetch-depth: 10
-        ref: ${{ github.ref }}
-
-    - name: Checkout pull base
-      uses: actions/checkout@v2.3.4
-      if: ${{ matrix.pnl-version == 'base' }}
-      with:
-        fetch-depth: 10
-        ref: ${{ github.base_ref }}
-
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2.2.2
-      with:
-        python-version: ${{ matrix.python-version }}
-        architecture: ${{ matrix.python-architecture }}
-
-    - name: Get pip cache location
-      shell: bash
-      id: pip_cache
-      run: |
-        python -m pip install -U pip
-        python -m pip --version
-        echo ::set-output name=pip_cache_dir::$(python -m pip cache dir)
-
-    - name: Wheels cache
-      uses: actions/cache@v2.1.6
-      with:
-        path: ${{ steps.pip_cache.outputs.pip_cache_dir }}/wheels
-        key: ${{ runner.os }}-python-${{ matrix.python-version }}-x64-pip-wheels-doc-compare-${{ github.sha }}
-        restore-keys: |
-          ${{ runner.os }}-python-${{ matrix.python-version }}-x64-pip-wheels-doc-compare
-          ${{ runner.os }}-python-${{ matrix.python-version }}-x64-pip-wheels-v2
-
-    # We need to install all PNL deps since docs config imports psyneulink module
-    - name: Install local, editable PNL package
-      uses: ./.github/actions/install-pnl
-      with:
-        features: 'doc'
-
-    - name: Add git tag
-      # The generated docs include PNL version,
-      # set it to a fixed value to prevent polluting the diff
-      run: git tag 'v999.999.999.999'
-
-    - name: Build docs
-      run: sphinx-build -b html -aE -j auto docs/source pnl-html
-
-    - name: Upload generated docs
-      uses: actions/upload-artifact@v2.2.4
-      with:
-        name: docs-${{ matrix.pnl-version }}-${{ matrix.os }}-${{ matrix.python-version }}
-        path: pnl-html
-        retention-days: 1
-
-  docs-compare:
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [3.7]
-        os: [ubuntu-latest]
-
-    runs-on: ${{ matrix.os }}
-    needs: [docs-build]
-
-    steps:
-
-    - name: Download generated base docs
-      uses: actions/download-artifact@v2
-      with:
-        name: docs-base-${{ matrix.os }}-${{ matrix.python-version }}
-        path: docs-base
-
-    - name: Download generated merge docs
-      uses: actions/download-artifact@v2
-      with:
-        name: docs-merge-${{ matrix.os }}-${{ matrix.python-version }}
-        path: docs-merge
-
-    - name: Compare
-      shell: bash
-      run: |
-        mkdir -p compare
-        # Store the resulting diff, or 'No differences!' to and output file
-        # The 'or true' part is needed to workaround 'pipefail' flag used by github-actions
-        (diff -r docs-base docs-merge && echo 'No differences!' || true) | tee ./compare/result.diff
-        # save PR number
-        echo ${{ github.event.number }} > ./compare/PR_NR
-        echo ${{ matrix.os }} > ./compare/PR_OS
-        echo ${{ matrix.python-version }} > ./compare/PR_PYTHON_VERSION
-
-    - name: Upload comparison results
-      uses: actions/upload-artifact@v2.2.4
-      with:
-        name: compare-${{ matrix.os }}-${{ matrix.python-version }}
-        path: compare
-        retention-days: 1
diff --git a/.github/workflows/prepare-release.yml b/.github/workflows/prepare-release.yml
index f548eb61da..2d9e5c569a 100644
--- a/.github/workflows/prepare-release.yml
+++ b/.github/workflows/prepare-release.yml
@@ -22,7 +22,7 @@ jobs:
           branch: master
 
       - name: Check for existing release with the reference tag
-        uses: actions/github-script@v4
+        uses: actions/github-script@v4.1
         id: exist_check
         with:
          script: |
@@ -47,7 +47,7 @@
            }
 
      - name: Create Release
-        uses: actions/github-script@v4
+        uses: actions/github-script@v4.1
        if: steps.on_master.outputs.on-branch == 'master' && steps.exist_check.outputs.exists == 'no'
        with:
          # We need custom token since the default one doesn't trigger actions
diff --git a/.github/workflows/test-release.yml b/.github/workflows/test-release.yml
index 5b2c986fca..c3e675219c 100644
--- a/.github/workflows/test-release.yml
+++ b/.github/workflows/test-release.yml
@@ -180,7 +180,7 @@ jobs:
          TWINE_REPOSITORY: ${{ secrets.TWINE_REPOSITORY }}
 
      - name: Upload dist files to release
-        uses: actions/github-script@v4
+        uses: actions/github-script@v4.1
        with:
          script: |
            const fs = require('fs')
diff --git a/conftest.py b/conftest.py
index c20a1f7ed3..c80f45d3e5 100644
--- a/conftest.py
+++ b/conftest.py
@@ -118,7 +118,7 @@ def get_func_execution(func, func_mode):
     elif func_mode == 'Python':
         return func.function
     else:
-        assert False, "Unknown function mode: {}".format(mode)
+        assert False, "Unknown function mode: {}".format(func_mode)
 
 @pytest.helpers.register
 def get_mech_execution(mech, mech_mode):
@@ -132,7 +132,7 @@ def mech_wrapper(x):
             return mech.output_values
         return mech_wrapper
     else:
-        assert False, "Unknown mechanism mode: {}".format(mode)
+        assert False, "Unknown mechanism mode: {}".format(mech_mode)
 
 @pytest.helpers.register
 def expand_np_ndarray(arr):
diff --git a/dev_requirements.txt b/dev_requirements.txt
index 871633b764..de4c63f74c 100644
--- a/dev_requirements.txt
+++ b/dev_requirements.txt
@@ -1,5 +1,5 @@
 jupyter<=1.0.0
-pytest<6.2.5
+pytest<6.2.6
 pytest-benchmark<3.4.2
 pytest-cov<2.12.2
 pytest-helpers-namespace<2021.4.30
diff --git a/docs/Makefile b/docs/Makefile
index 119d9c626c..99feb08f21 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -51,44 +51,44 @@ clean:
 	rm -rf $(BUILDDIR)/*
 
 .PHONY: html
-html:
+html: generated
 	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
 	@echo
 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
 
 .PHONY: dirhtml
-dirhtml:
+dirhtml: generated
 	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
 	@echo
 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
 
 .PHONY: singlehtml
-singlehtml:
+singlehtml: generated
 	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
 	@echo
 	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
 
 .PHONY: pickle
-pickle:
+pickle: generated
 	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
 	@echo
 	@echo "Build finished; now you can process the pickle files."
 
 .PHONY: json
-json:
+json: generated
 	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
 	@echo
 	@echo "Build finished; now you can process the JSON files."
 
 .PHONY: htmlhelp
-htmlhelp:
+htmlhelp: generated
 	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
 	@echo
 	@echo "Build finished; now you can run HTML Help Workshop with the" \
 	      ".hhp project file in $(BUILDDIR)/htmlhelp."
 
 .PHONY: qthelp
-qthelp:
+qthelp: generated
 	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
 	@echo
 	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
@@ -98,7 +98,7 @@ qthelp:
 	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PsyNeuLink.qhc"
 
 .PHONY: applehelp
-applehelp:
+applehelp: generated
 	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
 	@echo
 	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@@ -107,7 +107,7 @@ applehelp:
 	      "bundle."
 
 .PHONY: devhelp
-devhelp:
+devhelp: generated
 	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
 	@echo
 	@echo "Build finished."
@@ -117,19 +117,19 @@ devhelp:
 	@echo "# devhelp"
 
 .PHONY: epub
-epub:
+epub: generated
 	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
 	@echo
 	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
 
 .PHONY: epub3
-epub3:
+epub3: generated
 	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
 	@echo
 	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
 
 .PHONY: latex
-latex:
+latex: generated
 	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
 	@echo
 	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@@ -137,33 +137,33 @@ latex:
 	      "(use \`make latexpdf' here to do that automatically)."
 
 .PHONY: latexpdf
-latexpdf:
+latexpdf: generated
 	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
 	@echo "Running LaTeX files through pdflatex..."
 	$(MAKE) -C $(BUILDDIR)/latex all-pdf
 	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
 
 .PHONY: latexpdfja
-latexpdfja:
+latexpdfja: generated
 	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
 	@echo "Running LaTeX files through platex and dvipdfmx..."
 	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
 	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
 
 .PHONY: text
-text:
+text: generated
 	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
 	@echo
 	@echo "Build finished. The text files are in $(BUILDDIR)/text."
 
 .PHONY: man
-man:
+man: generated
 	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
 	@echo
 	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
 
 .PHONY: texinfo
-texinfo:
+texinfo: generated
 	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
 	@echo
 	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@@ -171,26 +171,26 @@ texinfo:
 	      "(use \`make info' here to do that automatically)."
 
 .PHONY: info
-info:
+info: generated
 	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
 	@echo "Running Texinfo files through makeinfo..."
 	make -C $(BUILDDIR)/texinfo info
 	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
 
 .PHONY: gettext
-gettext:
+gettext: generated
 	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
 	@echo
 	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
 
 .PHONY: changes
-changes:
+changes: generated
 	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
 	@echo
 	@echo "The overview file is in $(BUILDDIR)/changes."
 
 .PHONY: linkcheck
-linkcheck:
+linkcheck: generated
 	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
 	@echo
 	@echo "Link check complete; look for any errors in the above output " \
@@ -209,13 +209,13 @@ coverage:
 	      "results in $(BUILDDIR)/coverage/python.txt."
 
 .PHONY: xml
-xml:
+xml: generated
 	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
 	@echo
 	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
 
 .PHONY: pseudoxml
-pseudoxml:
+pseudoxml: generated
 	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
 	@echo
 	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
@@ -239,3 +239,7 @@ gh-pages:
 	git add -A
 	git ci -m "Generated gh-pages for `git log devel -1 --pretty=short --abbrev-commit`" && git push origin gh-pages ; git checkout devel
 	make clean
+
+.PHONY: generated
+generated:
+	find source/generator_scripts -name "*.py" -exec python {} \;
diff --git a/docs/source/_static/Composition_XOR_animation.gif b/docs/source/_static/Composition_XOR_animation.gif
deleted file mode 100644
index 08e3f6abab..0000000000
Binary files a/docs/source/_static/Composition_XOR_animation.gif and /dev/null differ
diff --git a/docs/source/generator_scripts/generate_animations.py b/docs/source/generator_scripts/generate_animations.py
new file mode 100644
index 0000000000..b11080c56b
--- /dev/null
+++ b/docs/source/generator_scripts/generate_animations.py
@@ -0,0 +1,64 @@
+import argparse
+import os
+
+import numpy as np
+import psyneulink as pnl
+
+parser = argparse.ArgumentParser()
+parser.add_argument(
+    "-d",
+    "--directory",
+    type=str,
+    default=os.path.join(os.path.dirname(__file__), "..", "_images"),
+    help="Path to store generated animations",
+)
+args = parser.parse_args()
+
+
+# Based on tests/composition/test_learning.py::TestLearningPathwayMethods::test_run_no_targets
+def composition_xor_animation():
+    in_to_hidden_matrix = np.random.rand(2, 10)
+    hidden_to_out_matrix = np.random.rand(10, 1)
+
+    inp = pnl.TransferMechanism(name="Input", default_variable=np.zeros(2))
+
+    hidden = pnl.TransferMechanism(
+        name="Hidden", default_variable=np.zeros(10), function=pnl.Logistic()
+    )
+
+    output = pnl.TransferMechanism(
+        name="Output", default_variable=np.zeros(1), function=pnl.Logistic()
+    )
+
+    in_to_hidden = pnl.MappingProjection(
+        name="Input Weights",
+        matrix=in_to_hidden_matrix.copy(),
+        sender=inp,
+        receiver=hidden,
+    )
+
+    hidden_to_out = pnl.MappingProjection(
+        name="Output Weights",
+        matrix=hidden_to_out_matrix.copy(),
+        sender=hidden,
+        receiver=output,
+    )
+
+    xor_comp = pnl.Composition()
+
+    xor_comp.add_backpropagation_learning_pathway(
+        [inp, in_to_hidden, hidden, hidden_to_out, output],
+        learning_rate=10,
+    )
+    xor_inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
+    xor_comp.learn(
+        inputs={inp: xor_inputs},
+        animate={
+            pnl.SHOW_LEARNING: True,
+            pnl.MOVIE_DIR: args.directory,
+            pnl.MOVIE_NAME: "Composition_XOR_animation",
+        },
+    )
+
+
+composition_xor_animation()
diff --git a/psyneulink/core/components/component.py b/psyneulink/core/components/component.py
index b04ab4dc23..44214c1e4d 100644
--- a/psyneulink/core/components/component.py
+++ b/psyneulink/core/components/component.py
@@ -423,14 +423,14 @@
 
    * **num_executions_before_finished** -- contains the number of times the Component has executed prior to finishing
      (and since it last finished); depending upon the class, these may all be within a single call to the Component's
-      `execute ` method, or extend over several calls. It is set to 0 each time `is_finished` evalutes
+      `execute ` method, or extend over several calls. It is set to 0 each time `is_finished` evaluates
      to True. Note that this is distinct from the `execution_count ` and
      `num_executions ` attributes.
 
    .. _Component_Max_Executions_Before_Finished:
 
    * **max_executions_before_finished** -- determines the maximum number of executions allowed before finishing
-      (i.e., the maxmium allowable value of `num_executions_before_finished `).
+      (i.e., the maximum allowable value of `num_executions_before_finished `).
      If it is exceeded, a warning message is generated. Note that this only pertains to
      `num_executions_before_finished `, and not its `execution_count `, which can be unlimited.
 
@@ -444,7 +444,7 @@
 
    * **execution_count** -- maintains a record of the number of times a Component has executed since it was constructed,
      *excluding* executions carried out during initialization and validation, but including all others whether they are
-      of the Component on its own are as part of a `Composition`, and irresective of the `context ` in which
+      of the Component on its own are as part of a `Composition`, and irrespective of the `context ` in which
      they are occur. The value can be changed "manually" or programmatically by assigning an integer value directly
      to the attribute. Note that this is the distinct from the `num_executions ` and
      `num_executions_before_finished ` attributes.
diff --git a/psyneulink/core/compositions/composition.py b/psyneulink/core/compositions/composition.py
index 551217d16b..74d588a3d6 100644
--- a/psyneulink/core/compositions/composition.py
+++ b/psyneulink/core/compositions/composition.py
@@ -775,7 +775,7 @@
 
    **Composition with Learning**
 
-    .. figure:: _static/Composition_XOR_animation.gif
+    .. figure:: _images/Composition_XOR_animation.gif
       :alt: Animation of Composition with learning
       :scale: 50 %
 
@@ -9914,7 +9914,10 @@ def _delete_contexts(self, *contexts, check_simulation_storage=False, visited=No
         super()._delete_contexts(*contexts, check_simulation_storage=check_simulation_storage, visited=visited)
 
         for c in contexts:
-            self.scheduler._delete_counts(c.execution_id)
+            try:
+                self.scheduler._delete_counts(c.execution_id)
+            except AttributeError:
+                self.scheduler._delete_counts(c)
 
    # ******************************************************************************************************************
    # LLVM
diff --git a/psyneulink/core/globals/utilities.py b/psyneulink/core/globals/utilities.py
index daecf10d29..1200048873 100644
--- a/psyneulink/core/globals/utilities.py
+++ b/psyneulink/core/globals/utilities.py
@@ -1739,20 +1739,32 @@ def parse_string_to_psyneulink_object_string(string):
 
     The output of this function will cause getattr(psyneulink, ) to return a psyneulink object
     """
-    try:
-        eval(f'psyneulink.{string}')
+    def is_pnl_obj(string):
+        try:
+            # remove parens to get rid of class instantiations
+            string = re.sub(r'\(.*?\)', '', string)
+            attr_sequence = string.split('.')
+            obj = getattr(psyneulink, attr_sequence[0])
+
+            for item in attr_sequence[1:]:
+                obj = getattr(obj, item)
+
+            return True
+        except (AttributeError, TypeError):
+            return False
+
+    if is_pnl_obj(string):
         return string
-    except (AttributeError, SyntaxError, TypeError):
-        pass
 
     # handle potential psyneulink keyword
     try:
         # insert space between camel case words
         keyword = re.sub('([a-z])([A-Z])', r'\1 \2', string)
         keyword = keyword.upper().replace(' ', '_')
-        eval(f'psyneulink.{keyword}')
-        return keyword
-    except (AttributeError, SyntaxError, TypeError):
+
+        if is_pnl_obj(keyword):
+            return keyword
+    except TypeError:
         pass
 
     return None
diff --git a/requirements.txt b/requirements.txt
index 661823bdfd..2a3be343b5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,18 +1,18 @@
 autograd<=1.3
-graph-scheduler>=0.2.0, <=1.0.0rc1
+graph-scheduler>=0.2.0, <=1.0.0rc2
 dill<=0.32
 elfi<0.8.1
-graphviz<0.17.0
+graphviz<0.18.0
 grpcio<1.35.0
 grpcio-tools<1.35.0
-llvmlite<0.37
-matplotlib<3.3.4
+llvmlite<0.38
+matplotlib<3.4.4
 networkx<2.6
-numpy<1.21.2
-pillow<8.1.0
+numpy<1.21.3
+pillow<8.4.0
 pint<0.18
 toposort<1.7
-torch>=1.8.0, <1.9.0; (platform_machine == 'AMD64' or platform_machine == 'x86_64') and platform_python_implementation == 'CPython' and implementation_name == 'cpython'
+torch>=1.8.0, <2.0.0; (platform_machine == 'AMD64' or platform_machine == 'x86_64') and platform_python_implementation == 'CPython' and implementation_name == 'cpython'
 typecheck-decorator<=1.2
 leabra-psyneulink<=0.3.2
-rich>=10.1, <10.7
+rich>=10.1, <10.10
diff --git a/tutorial_requirements.txt b/tutorial_requirements.txt
index fc0bb1f1ac..4f354bda2e 100644
--- a/tutorial_requirements.txt
+++ b/tutorial_requirements.txt
@@ -1,3 +1,3 @@
-graphviz<0.17.0
+graphviz<0.18.0
 jupyter<=1.0.0
-matplotlib<3.3.4
+matplotlib<3.4.4
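For reference, the lookup pattern introduced in the utilities.py hunk above (walking attributes with getattr instead of calling eval on the input string) can be sketched in isolation. The snippet below is a minimal, self-contained illustration, not part of the patch: it resolves names against the standard-library os module rather than psyneulink, and the names resolves_to_attribute and root_module are illustrative only.

import re
import os


def resolves_to_attribute(root_module, string):
    """Return True if `string` names an attribute chain reachable from root_module."""
    try:
        # drop call parentheses, e.g. 'path.join(a, b)' -> 'path.join'
        string = re.sub(r'\(.*?\)', '', string)
        obj = root_module
        for name in string.split('.'):
            obj = getattr(obj, name)
        return True
    except (AttributeError, TypeError):
        return False


print(resolves_to_attribute(os, 'path.join'))     # True
print(resolves_to_attribute(os, 'path.missing'))  # False
print(resolves_to_attribute(os, 'getcwd()'))      # True -- parentheses stripped first

Compared with the eval-based check it replaces, resolving dotted names this way never executes code contained in the input string; it only tests whether each attribute in the chain exists.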