From 3490214e296eb7449a2e97e6ecea61a155b4ca8c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Oct 2023 01:45:24 +0000 Subject: [PATCH 01/17] requirements: update fastkde requirement (#2813) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 9cf9b3581d2..59305935688 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ autograd<1.7 beartype<0.16.0 dill<0.3.8 -fastkde>=1.0.24, <1.0.27 +fastkde>=1.0.24, <1.0.31 graph-scheduler>=0.2.0, <1.1.3 graphviz<0.21.0 grpcio<1.60.0 From c00173eff9831862fdd268618e6ec9fd3102f185 Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Tue, 17 Oct 2023 08:51:33 -0400 Subject: [PATCH 02/17] ci/codeql: Reduce disk space usage(#2817) Clean up pip cache after installing PNL. Restrict the fetch depth to 100 entries to save more space. Signed-off-by: Jan Vesely --- .github/workflows/codeql.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index e2def4ea4fa..29c740dde40 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -2,7 +2,7 @@ name: "CodeQL" on: push: - branches: [ "master", "devel" ] + branches: [ "master", "devel", "codeql" ] paths-ignore: - 'docs/**' pull_request: @@ -34,6 +34,8 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 + with: + fetch-depth: 100 - name: Initialize CodeQL uses: github/codeql-action/init@v2 @@ -44,6 +46,12 @@ jobs: - name: Autobuild uses: github/codeql-action/autobuild@v2 + - name: Cache cleanup + shell: bash + run: | + $CODEQL_PYTHON -m pip cache info + $CODEQL_PYTHON -m pip cache purge + - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v2 with: From 204ac0d4ec818114bc0d5b1b270c33bac0b552ec Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Tue, 17 Oct 2023 11:12:28 -0400 Subject: [PATCH 03/17] llvm: Add support for fp32 to printf helper (#2816) libc printf supports only fp64 and expects other float types to be extended to fp64 Signed-off-by: Jan Vesely --- psyneulink/core/llvm/helpers.py | 3 ++- tests/llvm/test_helpers.py | 9 +++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/psyneulink/core/llvm/helpers.py b/psyneulink/core/llvm/helpers.py index 7c3b0414d00..d99980d9bfb 100644 --- a/psyneulink/core/llvm/helpers.py +++ b/psyneulink/core/llvm/helpers.py @@ -436,7 +436,8 @@ def printf(builder, fmt, *args, override_debug=False): global_fmt.initializer = fmt_ty(fmt_data) fmt_ptr = builder.gep(global_fmt, [ir.IntType(32)(0), ir.IntType(32)(0)]) - builder.call(printf, [fmt_ptr] + list(args)) + conv_args = [builder.fpext(a, ir.DoubleType()) if is_floating_point(a) else a for a in args] + builder.call(printf, [fmt_ptr] + conv_args) def printf_float_array(builder, array, prefix="", suffix="\n", override_debug=False): diff --git a/tests/llvm/test_helpers.py b/tests/llvm/test_helpers.py index 0965c4fdac4..f2e2cb141e6 100644 --- a/tests/llvm/test_helpers.py +++ b/tests/llvm/test_helpers.py @@ -224,10 +224,11 @@ def test_helper_all_close(mode, var1, var2, atol, rtol): @pytest.mark.llvm @pytest.mark.parametrize("ir_argtype,format_spec,values_to_check", [ - (pnlvm.ir.IntType(32), "%u", range(0, 20)), - (pnlvm.ir.IntType(64), "%lld", [int(-4E10), int(-3E10), int(-2E10)]), - (pnlvm.ir.DoubleType(), "%lf", [x *.5 for x in range(0, 5)]), - ], ids=["i32", "i64", "double"]) + pytest.param(pnlvm.ir.IntType(32), "%u", range(0, 20), id="i32"), 
+ pytest.param(pnlvm.ir.IntType(64), "%lld", [int(-4E10), int(-3E10), int(-2E10)], id="i64"), + pytest.param(pnlvm.ir.DoubleType(), "%lf", [x *.5 for x in range(0, 5)], id="double"), + pytest.param(pnlvm.ir.FloatType(), "%lf", [x *.5 for x in range(0, 5)], id="float"), + ]) def test_helper_printf(capfd, ir_argtype, format_spec, values_to_check): format_str = f"Hello {(format_spec + ' ') * len(values_to_check)}\n" From c283c584568f4520bd2b3493d54ef230f3b5952c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Oct 2023 17:10:37 +0000 Subject: [PATCH 04/17] requirements: update pillow requirement from <10.1.0 to <10.2.0 (#2815) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 59305935688..f6e775a7b60 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,7 +14,7 @@ numpy>=1.19.0, <1.24.5 optuna<3.4.0 packaging<24.0 pandas<2.1.1 -pillow<10.1.0 +pillow<10.2.0 pint<0.22.0 protobuf<3.20.4 rich>=10.1, <10.13 From 1c74859dd70205fa53afd69f740e6cc37a10c2ed Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Wed, 18 Oct 2023 22:24:31 -0400 Subject: [PATCH 05/17] LogEntries: Do not store references to owner's owner (#2819) Not used anywhere Signed-off-by: Jan Vesely --- psyneulink/core/globals/log.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/psyneulink/core/globals/log.py b/psyneulink/core/globals/log.py index 7275a2a5cc5..213ce751dde 100644 --- a/psyneulink/core/globals/log.py +++ b/psyneulink/core/globals/log.py @@ -527,8 +527,6 @@ def __init__(self, owner): # Log to which this dict belongs self._ownerLog = owner - # Object to which the log belongs - self._owner = owner.owner # # VERSION THAT USES OWNER'S logPref TO LIST ENTRIES TO BE RECORDED # # List of entries (in owner's logPrefs) of entries to record From aa710551bff627c0ef934df8860f30a2f92c1264 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 20 Oct 2023 02:09:53 +0000 Subject: [PATCH 06/17] requirements: update networkx requirement from <3.2 to <3.3 (#2820) --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f6e775a7b60..9e5c3c89dda 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ leabra-psyneulink<0.3.3 llvmlite<0.42 matplotlib<3.7.3 modeci_mdf<0.5, >=0.3.4; (platform_machine == 'AMD64' or platform_machine == 'x86_64') and platform_python_implementation == 'CPython' and implementation_name == 'cpython' -networkx<3.2 +networkx<3.3 numpy>=1.19.0, <1.24.5 optuna<3.4.0 packaging<24.0 From 16b8ff149e732ab6de9982b30983f27430fbc9c6 Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Wed, 18 Oct 2023 15:55:36 -0400 Subject: [PATCH 07/17] deps: Bump minimum version of modeci_mdf to 0.4.3 modeci_mdf version 0.3.4 does not exist. modeci_mdf versions <0.4.3 fail PNL test: test_mdf_equivalence_individual_functions[IntegratorMechanism-FitzHughNagumoIntegrator-None-pnl.AfterNCalls(A, 10)] needs the new implementation of runge_kutta function Bump graph-scheduler >=1.1.1 as needed by modeci_mdf==0.4.3 graph-scheduler == 0.2.0 requires psyneulink<0.9.1.0. 
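A quick local sanity check (not part of this change; assumes Python >= 3.8 for
importlib.metadata, and packaging, which is already a PNL requirement) to
confirm that an installed environment satisfies the new lower bounds:

    from importlib.metadata import version
    from packaging.version import Version

    # Both distributions must meet the minimums pinned in requirements.txt
    assert Version(version("modeci_mdf")) >= Version("0.4.3")
    assert Version(version("graph-scheduler")) >= Version("1.1.1")
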
Signed-off-by: Jan Vesely --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 9e5c3c89dda..747dfe8a98b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,13 +2,13 @@ autograd<1.7 beartype<0.16.0 dill<0.3.8 fastkde>=1.0.24, <1.0.31 -graph-scheduler>=0.2.0, <1.1.3 +graph-scheduler>=1.1.1, <1.1.3 graphviz<0.21.0 grpcio<1.60.0 leabra-psyneulink<0.3.3 llvmlite<0.42 matplotlib<3.7.3 -modeci_mdf<0.5, >=0.3.4; (platform_machine == 'AMD64' or platform_machine == 'x86_64') and platform_python_implementation == 'CPython' and implementation_name == 'cpython' +modeci_mdf<0.5, >=0.4.3; (platform_machine == 'AMD64' or platform_machine == 'x86_64') and platform_python_implementation == 'CPython' and implementation_name == 'cpython' networkx<3.3 numpy>=1.19.0, <1.24.5 optuna<3.4.0 From 4f59709c9adef0447a684796c77b452d0ce389ab Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Thu, 19 Oct 2023 12:32:58 -0400 Subject: [PATCH 08/17] deps: Bump minimum version of pytorch to 1.10.0 pytorch < 1.10.0 doesn't support 2d tensors in CrossEntropyLoss. Signed-off-by: Jan Vesely --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 747dfe8a98b..ad156790305 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,4 +19,4 @@ pint<0.22.0 protobuf<3.20.4 rich>=10.1, <10.13 toposort<1.11 -torch>=1.8.0, <2.1.0; (platform_machine == 'AMD64' or platform_machine == 'x86_64') and platform_python_implementation == 'CPython' and implementation_name == 'cpython' +torch>=1.10.0, <2.1.0; (platform_machine == 'AMD64' or platform_machine == 'x86_64') and platform_python_implementation == 'CPython' and implementation_name == 'cpython' From 8160d4dae0354a5c3337094e5e62486a751b95d6 Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Thu, 19 Oct 2023 10:47:26 -0400 Subject: [PATCH 09/17] deps: Bump minimum version of numpy to 1.21.0 EMStorageMechanism passes 'dtype' to numpy concatenate which is only available starting from numpy 1.20.0 torch-1.10.0 requires numpy >=1.21.0 Signed-off-by: Jan Vesely --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index ad156790305..c45850bc178 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ llvmlite<0.42 matplotlib<3.7.3 modeci_mdf<0.5, >=0.4.3; (platform_machine == 'AMD64' or platform_machine == 'x86_64') and platform_python_implementation == 'CPython' and implementation_name == 'cpython' networkx<3.3 -numpy>=1.19.0, <1.24.5 +numpy>=1.21.0, <1.24.5 optuna<3.4.0 packaging<24.0 pandas<2.1.1 From 76eeef504e5f758dabc79e09be61cb24149a3754 Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Wed, 18 Oct 2023 17:41:40 -0400 Subject: [PATCH 10/17] ci/ga: Add a CI run with version restricted dependencies Restrict dependencies to the lowest supported version. 
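The restriction itself is done with sed in the workflow step below; a rough
Python sketch of the same rewrite (assumed to run from the repository root)
would be:

    # Pin the first '>=' on every non-comment line of each requirements
    # file to '==' so pip resolves the lowest supported versions.
    from pathlib import Path

    for req_file in Path(".").glob("*requirements.txt"):
        lines = req_file.read_text().splitlines()
        pinned = [l if l.startswith("#") else l.replace(">=", "==", 1) for l in lines]
        req_file.write_text("\n".join(pinned) + "\n")
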
Signed-off-by: Jan Vesely --- .github/workflows/pnl-ci.yml | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pnl-ci.yml b/.github/workflows/pnl-ci.yml index 8235041211d..4c9658d67e7 100644 --- a/.github/workflows/pnl-ci.yml +++ b/.github/workflows/pnl-ci.yml @@ -52,6 +52,7 @@ jobs: python-architecture: ['x64'] extra-args: [''] os: [ubuntu, macos, windows] + version-restrict: [''] include: # code-coverage build on macos python 3.9 - python-version: '3.9' @@ -78,11 +79,18 @@ jobs: - python-version: '3.10' os: macos # pytest needs both '--benchmark-only' and '-m benchmark' - # The former fails the test if benchamrks cannot be enabled + # The former fails the test if benchmarks cannot be enabled + # (e.g. due to --dist setting) # The latter works around a crash in pytest when collecting tests: # https://github.com/ionelmc/pytest-benchmark/issues/243 extra-args: '-m benchmark --benchmark-enable --benchmark-only --benchmark-min-rounds=2 --benchmark-max-time=0.001 --benchmark-warmup=off -n0 --dist=no' + # add python 3.7 with deps restricted to min supported version + - python-version: '3.7' + python-architecture: 'x64' + os: ubuntu + version-restrict: 'min' + # add python 3.8 build on macos since 3.7 is broken # https://github.com/actions/virtual-environments/issues/4230 - python-version: '3.8' @@ -115,6 +123,15 @@ jobs: python-version: ${{ matrix.python-version }} architecture: ${{ matrix.python-architecture }} + - name: Restrict version of direct dependencies + if: ${{ matrix.version-restrict == 'min' }} + shell: bash + run: | + sed -i '/^[^#]/s/>=/==/' *requirements.txt + git config user.name "github actions" + git config user.email "none" + git commit -a -m "Restrict version of direct dependencies to min" + - name: Get pip cache location shell: bash id: pip_cache From 9c827ff5e1c20297d450b5bb14e2dfbdab7cbda1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Oct 2023 03:49:48 +0000 Subject: [PATCH 11/17] requirements: update pytest requirement from <7.4.3 to <7.4.4 (#2822) --- dev_requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev_requirements.txt b/dev_requirements.txt index 0d20ada4c09..866efa000a8 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,6 +1,6 @@ jupyter<1.0.1 packaging<24.0 -pytest<7.4.3 +pytest<7.4.4 pytest-benchmark<4.0.1 pytest-cov<4.1.1 pytest-forked<1.7.0 From 6bd555fb20ae5bf05f343487abec1241b57e20ef Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Sat, 21 Oct 2023 20:20:59 -0400 Subject: [PATCH 12/17] tests/MemoryFunctions: Use more accurate and descriptive expected results Use different size for MT and Philox case. Drop atol/rtol parameters in result check. 
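Dropping the explicit tolerances makes the comparison stricter:
np.testing.assert_allclose defaults to rtol=1e-7 and atol=0, versus the
rtol=1e-5 / atol=1e-8 used before. A standalone illustration (not part of the
test suite):

    import numpy as np

    expected = np.array([0.5, 0.25])
    result = expected * (1 + 1e-6)   # 1e-6 relative error

    # Passes with the old, looser tolerances ...
    np.testing.assert_allclose(result, expected, rtol=1e-5, atol=1e-8)

    # ... but is rejected by the stricter defaults (rtol=1e-7, atol=0).
    try:
        np.testing.assert_allclose(result, expected)
    except AssertionError:
        print("default tolerance rejects a 1e-6 relative error")
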
Signed-off-by: Jan Vesely --- tests/functions/test_memory.py | 77 +++++++++++++++++----------------- 1 file changed, 38 insertions(+), 39 deletions(-) diff --git a/tests/functions/test_memory.py b/tests/functions/test_memory.py index fd69756d255..c47b284399b 100644 --- a/tests/functions/test_memory.py +++ b/tests/functions/test_memory.py @@ -24,35 +24,36 @@ RAND1 = np.random.random(1) RAND2 = np.random.random() -philox_var = np.random.rand(2, SIZE) +# Use different size for Philox case, +# to easily detect mixups +philox_var = np.random.rand(2, SIZE - 1) +philox_initializer = np.array([[philox_var[0] * 5, philox_var[1] * 4]]) test_data = [ # Default initializer does not work # (Functions.Buffer, test_var, {'rate':RAND1}, [[0.0],[0.0]]), pytest.param(Functions.Buffer, test_var[0], {'history':512, 'rate':RAND1, 'initializer':[test_var[0]]}, - [[0.03841128, 0.05005587, 0.04218721, 0.0381362 , 0.02965146, 0.04520592, 0.03062659, 0.0624149 , 0.06744644, 0.02683695], - [0.14519169, 0.18920736, 0.15946443, 0.1441519 , 0.11208025, 0.17087491, 0.11576615, 0.23592355, 0.25494239, 0.10144161]], id="Buffer"), + # TODO: Why is the first result using rate^2 ? + [test_var[0] * RAND1 * RAND1, test_var[0] * RAND1], id="Buffer"), + + # Tests using Mersenne-Twister as function PRNG pytest.param(Functions.DictionaryMemory, test_var, {'seed': module_seed}, - [[0.5488135039273248, 0.7151893663724195, 0.6027633760716439, 0.5448831829968969, 0.4236547993389047, 0.6458941130666561, 0.4375872112626925, 0.8917730007820798, 0.9636627605010293, 0.3834415188257777], - [0.7917250380826646, 0.5288949197529045, 0.5680445610939323, 0.925596638292661, 0.07103605819788694, 0.08712929970154071, 0.02021839744032572, 0.832619845547938, 0.7781567509498505, 0.8700121482468192 ]], + [test_var[0], test_var[1]], id="DictionaryMemory"), pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'seed': module_seed}, - [[0.5488135039273248, 0.7151893663724195, 0.6027633760716439, 0.5448831829968969, 0.4236547993389047, 0.6458941130666561, 0.4375872112626925, 0.8917730007820798, 0.9636627605010293, 0.3834415188257777], - [0.7917250380826646, 0.5288949197529045, 0.5680445610939323, 0.925596638292661, 0.07103605819788694, 0.08712929970154071, 0.02021839744032572, 0.832619845547938, 0.7781567509498505, 0.8700121482468192]], + [test_var[0], test_var[1]], id="DictionaryMemory Rate"), pytest.param(Functions.DictionaryMemory, test_var, {'initializer':test_initializer, 'rate':RAND1, 'seed': module_seed}, - [[0.5488135039273248, 0.7151893663724195, 0.6027633760716439, 0.5448831829968969, 0.4236547993389047, 0.6458941130666561, 0.4375872112626925, 0.8917730007820798, 0.9636627605010293, 0.3834415188257777], - [0.7917250380826646, 0.5288949197529045, 0.5680445610939323, 0.925596638292661, 0.07103605819788694, 0.08712929970154071, 0.02021839744032572, 0.832619845547938, 0.7781567509498505, 0.8700121482468192]], + [test_var[0], test_var[1]], id="DictionaryMemory Initializer"), pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.5, 'seed': module_seed}, - [[ 0. for i in range(SIZE) ],[ 0. for i in range(SIZE) ]], + np.zeros_like(test_var), id="DictionaryMemory Low Retrieval"), pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'storage_prob':0.1, 'seed': module_seed}, - [[ 0. for i in range(SIZE) ],[ 0. 
for i in range(SIZE) ]], + np.zeros_like(test_var), id="DictionaryMemory Low Storage"), pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.9, 'storage_prob':0.9, 'seed': module_seed}, - [[0.5488135039273248, 0.7151893663724195, 0.6027633760716439, 0.5448831829968969, 0.4236547993389047, 0.6458941130666561, 0.4375872112626925, 0.8917730007820798, 0.9636627605010293, 0.3834415188257777], - [0.7917250380826646, 0.5288949197529045, 0.5680445610939323, 0.925596638292661, 0.07103605819788694, 0.08712929970154071, 0.02021839744032572, 0.832619845547938, 0.7781567509498505, 0.8700121482468192]], + [test_var[0], test_var[1]], id="DictionaryMemory High Storage/Retrieve"), # Disable noise tests for now as they trigger failure in DictionaryMemory lookup # (Functions.DictionaryMemory, test_var, {'rate':RAND1, 'noise':RAND2}, [[ @@ -68,40 +69,36 @@ # 1.3230471933615413, 1.4894230558066361, 1.3769970655058605, 1.3191168724311135, 1.1978884887731214, 1.4201278025008728, 1.2118209006969092, 1.6660066902162964, 1.737896449935246, 1.1576752082599944 #]]), pytest.param(Functions.ContentAddressableMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.5, 'seed': module_seed}, - [[ 0. for i in range(SIZE) ],[ 0. for i in range(SIZE) ]], + np.zeros_like(test_var), id="ContentAddressableMemory Low Retrieval"), pytest.param(Functions.ContentAddressableMemory, test_var, {'rate':RAND1, 'storage_prob':0.1, 'seed': module_seed}, - [[ 0. for i in range(SIZE) ],[ 0. for i in range(SIZE) ]], + np.zeros_like(test_var), id="ContentAddressableMemory Low Storage"), pytest.param(Functions.ContentAddressableMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.9, 'storage_prob':0.9, 'seed': module_seed}, - [[0.5488135039273248, 0.7151893663724195, 0.6027633760716439, 0.5448831829968969, 0.4236547993389047, 0.6458941130666561, 0.4375872112626925, 0.8917730007820798, 0.9636627605010293, 0.3834415188257777], - [0.7917250380826646, 0.5288949197529045, 0.5680445610939323, 0.925596638292661, 0.07103605819788694, 0.08712929970154071, 0.02021839744032572, 0.832619845547938, 0.7781567509498505, 0.8700121482468192]], + [test_var[0], test_var[1]], id="ContentAddressableMemory High Storage/Retrieval"), pytest.param(Functions.ContentAddressableMemory, test_var, {'initializer':test_initializer, 'rate':RAND1, 'seed': module_seed}, - [[0.5488135039273248, 0.7151893663724195, 0.6027633760716439, 0.5448831829968969, 0.4236547993389047, 0.6458941130666561, 0.4375872112626925, 0.8917730007820798, 0.9636627605010293, 0.3834415188257777], - [0.7917250380826646, 0.5288949197529045, 0.5680445610939323, 0.925596638292661, 0.07103605819788694, 0.08712929970154071, 0.02021839744032572, 0.832619845547938, 0.7781567509498505, 0.8700121482468192]], + [test_var[0], test_var[1]], id="ContentAddressableMemory Initializer"), + + # Tests using philox var pytest.param(Functions.DictionaryMemory, philox_var, {'seed': module_seed}, - [[0.45615033221654855, 0.5684339488686485, 0.018789800436355142, 0.6176354970758771, 0.6120957227224214, 0.6169339968747569, 0.9437480785146242, 0.6818202991034834, 0.359507900573786, 0.43703195379934145], - [0.6976311959272649, 0.06022547162926983, 0.6667667154456677, 0.6706378696181594, 0.2103825610738409, 0.1289262976548533, 0.31542835092418386, 0.3637107709426226, 0.5701967704178796, 0.43860151346232035]], + [philox_var[0], philox_var[1]], id="DictionaryMemory Philox"), pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'seed': module_seed}, - [[0.45615033221654855, 0.5684339488686485, 
0.018789800436355142, 0.6176354970758771, 0.6120957227224214, 0.6169339968747569, 0.9437480785146242, 0.6818202991034834, 0.359507900573786, 0.43703195379934145], - [0.6976311959272649, 0.06022547162926983, 0.6667667154456677, 0.6706378696181594, 0.2103825610738409, 0.1289262976548533, 0.31542835092418386, 0.3637107709426226, 0.5701967704178796, 0.43860151346232035]], + [philox_var[0], philox_var[1]], id="DictionaryMemory Rate Philox"), - pytest.param(Functions.DictionaryMemory, philox_var, {'initializer':test_initializer, 'rate':RAND1, 'seed': module_seed}, - [[0.45615033221654855, 0.5684339488686485, 0.018789800436355142, 0.6176354970758771, 0.6120957227224214, 0.6169339968747569, 0.9437480785146242, 0.6818202991034834, 0.359507900573786, 0.43703195379934145], - [0.6976311959272649, 0.06022547162926983, 0.6667667154456677, 0.6706378696181594, 0.2103825610738409, 0.1289262976548533, 0.31542835092418386, 0.3637107709426226, 0.5701967704178796, 0.43860151346232035]], + pytest.param(Functions.DictionaryMemory, philox_var, {'initializer':philox_initializer, 'rate':RAND1, 'seed': module_seed}, + [philox_var[0], philox_var[1]], id="DictionaryMemory Initializer Philox"), pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.01, 'seed': module_seed}, - [[ 0. for i in range(SIZE) ],[ 0. for i in range(SIZE) ]], + np.zeros_like(philox_var), id="DictionaryMemory Low Retrieval Philox"), pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'storage_prob':0.01, 'seed': module_seed}, - [[ 0. for i in range(SIZE) ],[ 0. for i in range(SIZE) ]], + np.zeros_like(philox_var), id="DictionaryMemory Low Storage Philox"), pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.95, 'storage_prob':0.95, 'seed': module_seed}, - [[0.45615033221654855, 0.5684339488686485, 0.018789800436355142, 0.6176354970758771, 0.6120957227224214, 0.6169339968747569, 0.9437480785146242, 0.6818202991034834, 0.359507900573786, 0.43703195379934145], - [0.6976311959272649, 0.06022547162926983, 0.6667667154456677, 0.6706378696181594, 0.2103825610738409, 0.1289262976548533, 0.31542835092418386, 0.3637107709426226, 0.5701967704178796, 0.43860151346232035]], + [philox_var[0], philox_var[1]], id="DictionaryMemory High Storage/Retrieve Philox"), # Disable noise tests for now as they trigger failure in DictionaryMemory lookup # (Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'noise':RAND2}, [[ @@ -117,18 +114,16 @@ # 1.3230471933615413, 1.4894230558066361, 1.3769970655058605, 1.3191168724311135, 1.1978884887731214, 1.4201278025008728, 1.2118209006969092, 1.6660066902162964, 1.737896449935246, 1.1576752082599944 #]]), pytest.param(Functions.ContentAddressableMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.1, 'seed': module_seed}, - [[ 0. for i in range(SIZE) ],[ 0. for i in range(SIZE) ]], + np.zeros_like(philox_var), id="ContentAddressableMemory Low Retrieval Philox"), pytest.param(Functions.ContentAddressableMemory, philox_var, {'rate':RAND1, 'storage_prob':0.01, 'seed': module_seed}, - [[ 0. for i in range(SIZE) ],[ 0. 
for i in range(SIZE) ]], + np.zeros_like(philox_var), id="ContentAddressableMemory Low Storage Philox"), pytest.param(Functions.ContentAddressableMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.9, 'storage_prob':0.9, 'seed': module_seed}, - [[0.45615033221654855, 0.5684339488686485, 0.018789800436355142, 0.6176354970758771, 0.6120957227224214, 0.6169339968747569, 0.9437480785146242, 0.6818202991034834, 0.359507900573786, 0.43703195379934145], - [0.6976311959272649, 0.06022547162926983, 0.6667667154456677, 0.6706378696181594, 0.2103825610738409, 0.1289262976548533, 0.31542835092418386, 0.3637107709426226, 0.5701967704178796, 0.43860151346232035]], + [philox_var[0], philox_var[1]], id="ContentAddressableMemory High Storage/Retrieval Philox"), - pytest.param(Functions.ContentAddressableMemory, philox_var, {'initializer':test_initializer, 'rate':RAND1, 'seed': module_seed}, - [[0.45615033221654855, 0.5684339488686485, 0.018789800436355142, 0.6176354970758771, 0.6120957227224214, 0.6169339968747569, 0.9437480785146242, 0.6818202991034834, 0.359507900573786, 0.43703195379934145], - [0.6976311959272649, 0.06022547162926983, 0.6667667154456677, 0.6706378696181594, 0.2103825610738409, 0.1289262976548533, 0.31542835092418386, 0.3637107709426226, 0.5701967704178796, 0.43860151346232035]], + pytest.param(Functions.ContentAddressableMemory, philox_var, {'initializer':philox_initializer, 'rate':RAND1, 'seed': module_seed}, + [philox_var[0], philox_var[1]], id="ContentAddressableMemory Initializer Philox"), ] @@ -153,8 +148,12 @@ def test_basic(func, variable, params, expected, benchmark, func_mode): EX(variable) res = benchmark(EX, variable) - np.testing.assert_allclose(res[0], expected[0], rtol=1e-5, atol=1e-8) - np.testing.assert_allclose(res[1], expected[1], rtol=1e-5, atol=1e-8) + + # This still needs to use "allclose" as the key gets manipulated before + # storage in some subtests. The rounding in that calculation might not + # match the one done for expected values above. + np.testing.assert_allclose(res[0], expected[0]) + np.testing.assert_allclose(res[1], expected[1]) #endregion From 2e42c2425de77555d2c4fe6d80507c9d3745b463 Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Sat, 21 Oct 2023 22:33:39 -0400 Subject: [PATCH 13/17] DictionaryMemory: Store key after applying noise Use numpy casting rules to apply noise value. Signed-off-by: Jan Vesely --- .../components/functions/stateful/memoryfunctions.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/psyneulink/core/components/functions/stateful/memoryfunctions.py b/psyneulink/core/components/functions/stateful/memoryfunctions.py index bb16c12e8b8..72b1701c9f0 100644 --- a/psyneulink/core/components/functions/stateful/memoryfunctions.py +++ b/psyneulink/core/components/functions/stateful/memoryfunctions.py @@ -2676,16 +2676,12 @@ def _function(self, memory = [[0]* self.parameters.key_size._get(context), [0]* self.parameters.val_size._get(context)] # Store variable to dict: if noise is not None: - key = np.asarray(key, dtype=float) - if isinstance(noise, numbers.Number): - key += noise - else: - # assume array with same shape as variable - # TODO: does val need noise? - key += noise[KEYS] + # TODO: does val need noise? + key = np.asfarray(key) + np.asfarray(noise)[KEYS] + assert len(key) == len(variable[KEYS]), "{} vs. 
{}".format(key, variable[KEYS]) if storage_prob == 1.0 or (storage_prob > 0.0 and storage_prob > random_state.uniform()): - self._store_memory(variable, context) + self._store_memory([key, val], context) # Return 3d array with keys and vals as lists # IMPLEMENTATION NOTE: if try to create np.ndarray directly, and keys and vals have same length From 3f3c531af38ed7a30162ae9cbd0a18694e5072ed Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Sat, 21 Oct 2023 23:23:40 -0400 Subject: [PATCH 14/17] llvm, DictionaryMemory: Implement noise application to key before storage Enable tests. Signed-off-by: Jan Vesely --- .../functions/stateful/memoryfunctions.py | 25 +++++++++-- tests/functions/test_memory.py | 44 ++++++++----------- 2 files changed, 39 insertions(+), 30 deletions(-) diff --git a/psyneulink/core/components/functions/stateful/memoryfunctions.py b/psyneulink/core/components/functions/stateful/memoryfunctions.py index 72b1701c9f0..920af04b7f7 100644 --- a/psyneulink/core/components/functions/stateful/memoryfunctions.py +++ b/psyneulink/core/components/functions/stateful/memoryfunctions.py @@ -2427,8 +2427,8 @@ def _gen_llvm_function_body(self, ctx, builder, params, state, arg_in, arg_out, store_prob = pnlvm.helpers.load_extract_scalar_array_one(builder, store_prob_ptr) store_rand = builder.fcmp_ordered('<', store_prob, store_prob.type(1.0)) - # The call to random function needs to be behind jump to match python - # code + # The call to random function needs to be behind the check of 'store_rand' + # to match python code semantics with builder.if_then(store_rand): rand_ptr = builder.alloca(ctx.float_ty) builder.call(uniform_f, [rand_struct, rand_ptr]) @@ -2439,6 +2439,23 @@ def _gen_llvm_function_body(self, ctx, builder, params, state, arg_in, arg_out, # Store store = builder.load(store_ptr) with builder.if_then(store, likely=True): + modified_key_ptr = builder.alloca(var_key_ptr.type.pointee) + + # Apply noise to key. 
+ # There are 3 types of noise: scalar, vector1, and vector matching variable + noise_ptr = pnlvm.helpers.get_param_ptr(builder, self, params, "noise") + with pnlvm.helpers.array_ptr_loop(b, var_key_ptr, "key_apply_noise") as (b, idx): + if pnlvm.helpers.is_2d_matrix(noise_ptr): + noise_elem_ptr = b.gep(noise_ptr, [ctx.int32_ty(0), ctx.int32_ty(0), idx]) + noise_val = b.load(noise_elem_ptr) + else: + noise_val = pnlvm.helpers.load_extract_scalar_array_one(b, noise_ptr) + + modified_key_elem_ptr = b.gep(modified_key_ptr, [ctx.int32_ty(0), idx]) + key_elem_ptr = b.gep(var_key_ptr, [ctx.int32_ty(0), idx]) + key_elem = b.load(key_elem_ptr) + key_elem = b.fadd(key_elem, noise_val) + b.store(key_elem, modified_key_elem_ptr) # Check if such key already exists is_new_key_ptr = builder.alloca(ctx.bool_ty) @@ -2451,7 +2468,7 @@ def _gen_llvm_function_body(self, ctx, builder, params, state, arg_in, arg_out, key_differs_ptr = b.alloca(ctx.bool_ty) b.store(key_differs_ptr.type.pointee(0), key_differs_ptr) with pnlvm.helpers.array_ptr_loop(b, cmp_key_ptr, "key_compare") as (b2, idx2): - var_key_element = b2.gep(var_key_ptr, [ctx.int32_ty(0), idx2]) + var_key_element = b2.gep(modified_key_ptr, [ctx.int32_ty(0), idx2]) cmp_key_element = b2.gep(cmp_key_ptr, [ctx.int32_ty(0), idx2]) element_differs = b.fcmp_unordered('!=', b.load(var_key_element), @@ -2473,7 +2490,7 @@ def _gen_llvm_function_body(self, ctx, builder, params, state, arg_in, arg_out, store_key_ptr = builder.gep(keys_ptr, [ctx.int32_ty(0), write_idx]) store_val_ptr = builder.gep(vals_ptr, [ctx.int32_ty(0), write_idx]) - builder.store(builder.load(var_key_ptr), store_key_ptr) + builder.store(builder.load(modified_key_ptr), store_key_ptr) builder.store(builder.load(var_val_ptr), store_val_ptr) # Update counters diff --git a/tests/functions/test_memory.py b/tests/functions/test_memory.py index c47b284399b..1fdbe13c41c 100644 --- a/tests/functions/test_memory.py +++ b/tests/functions/test_memory.py @@ -55,19 +55,15 @@ pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.9, 'storage_prob':0.9, 'seed': module_seed}, [test_var[0], test_var[1]], id="DictionaryMemory High Storage/Retrieve"), -# Disable noise tests for now as they trigger failure in DictionaryMemory lookup -# (Functions.DictionaryMemory, test_var, {'rate':RAND1, 'noise':RAND2}, [[ -# 0.79172504, 0.52889492, 0.56804456, 0.92559664, 0.07103606, 0.0871293 , 0.0202184 , 0.83261985, 0.77815675, 0.87001215 ],[ -# 1.3230471933615413, 1.4894230558066361, 1.3769970655058605, 1.3191168724311135, 1.1978884887731214, 1.4201278025008728, 1.2118209006969092, 1.6660066902162964, 1.737896449935246, 1.1576752082599944 -#]]), -# (Functions.DictionaryMemory, test_var, {'rate':RAND1, 'noise':[RAND2], 'retrieval_prob':0.5}, -# [[ 0. for i in range(SIZE) ],[ 0. for i in range(SIZE) ]]), -# (Functions.DictionaryMemory, test_var, {'rate':RAND1, 'noise':RAND2, 'storage_prob':0.5}, -# [[ 0. for i in range(SIZE) ],[ 0. 
for i in range(SIZE) ]]), -# (Functions.DictionaryMemory, test_var, {'initializer':test_initializer, 'rate':RAND1, 'noise':RAND2}, [[ -# 0.79172504, 0.52889492, 0.56804456, 0.92559664, 0.07103606, 0.0871293 , 0.0202184 , 0.83261985, 0.77815675, 0.87001215 ],[ -# 1.3230471933615413, 1.4894230558066361, 1.3769970655058605, 1.3191168724311135, 1.1978884887731214, 1.4201278025008728, 1.2118209006969092, 1.6660066902162964, 1.737896449935246, 1.1576752082599944 -#]]), + pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'noise':RAND2}, + [test_var[0] + RAND2, test_var[1]], + id="DictionaryMemory NoiseScalar"), + pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'noise':[RAND2]}, + [test_var[0] + RAND2, test_var[1]], + id="DictionaryMemory NoiseVec1"), + pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'noise':test_var / 2}, + [test_var[0] + test_var[0] / 2, test_var[1]], + id="DictionaryMemory NoiseVecN"), pytest.param(Functions.ContentAddressableMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.5, 'seed': module_seed}, np.zeros_like(test_var), id="ContentAddressableMemory Low Retrieval"), @@ -100,19 +96,15 @@ pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.95, 'storage_prob':0.95, 'seed': module_seed}, [philox_var[0], philox_var[1]], id="DictionaryMemory High Storage/Retrieve Philox"), -# Disable noise tests for now as they trigger failure in DictionaryMemory lookup -# (Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'noise':RAND2}, [[ -# 0.79172504, 0.52889492, 0.56804456, 0.92559664, 0.07103606, 0.0871293 , 0.0202184 , 0.83261985, 0.77815675, 0.87001215 ],[ -# 1.3230471933615413, 1.4894230558066361, 1.3769970655058605, 1.3191168724311135, 1.1978884887731214, 1.4201278025008728, 1.2118209006969092, 1.6660066902162964, 1.737896449935246, 1.1576752082599944 -#]]), -# (Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'noise':[RAND2], 'retrieval_prob':0.5}, -# [[ 0. for i in range(SIZE) ],[ 0. for i in range(SIZE) ]]), -# (Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'noise':RAND2, 'storage_prob':0.5}, -# [[ 0. for i in range(SIZE) ],[ 0. 
for i in range(SIZE) ]]), -# (Functions.DictionaryMemory, philox_var, {'initializer':test_initializer, 'rate':RAND1, 'noise':RAND2}, [[ -# 0.79172504, 0.52889492, 0.56804456, 0.92559664, 0.07103606, 0.0871293 , 0.0202184 , 0.83261985, 0.77815675, 0.87001215 ],[ -# 1.3230471933615413, 1.4894230558066361, 1.3769970655058605, 1.3191168724311135, 1.1978884887731214, 1.4201278025008728, 1.2118209006969092, 1.6660066902162964, 1.737896449935246, 1.1576752082599944 -#]]), + pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'noise':RAND2}, + [philox_var[0] + RAND2, philox_var[1]], + id="DictionaryMemory NoiseScalar Philox"), + pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'noise':[RAND2]}, + [philox_var[0] + RAND2, philox_var[1]], + id="DictionaryMemory NoiseVec1 Philox"), + pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'noise':philox_var / 2}, + [philox_var[0] + philox_var[0] / 2, philox_var[1]], + id="DictionaryMemory NoiseVecN Philox"), pytest.param(Functions.ContentAddressableMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.1, 'seed': module_seed}, np.zeros_like(philox_var), id="ContentAddressableMemory Low Retrieval Philox"), From d863b306d4e5644fa243ac3e7fa5d42054bd59e9 Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Sat, 21 Oct 2023 23:35:17 -0400 Subject: [PATCH 15/17] llvm, DictionaryMemory: Apply 'rate' value before storing 'key' Update expected test results. Signed-off-by: Jan Vesely --- .../functions/stateful/memoryfunctions.py | 12 +++++- tests/functions/test_memory.py | 38 +++++++++++-------- 2 files changed, 33 insertions(+), 17 deletions(-) diff --git a/psyneulink/core/components/functions/stateful/memoryfunctions.py b/psyneulink/core/components/functions/stateful/memoryfunctions.py index 920af04b7f7..ab854cb0a0a 100644 --- a/psyneulink/core/components/functions/stateful/memoryfunctions.py +++ b/psyneulink/core/components/functions/stateful/memoryfunctions.py @@ -2444,16 +2444,20 @@ def _gen_llvm_function_body(self, ctx, builder, params, state, arg_in, arg_out, # Apply noise to key. 
# There are 3 types of noise: scalar, vector1, and vector matching variable noise_ptr = pnlvm.helpers.get_param_ptr(builder, self, params, "noise") - with pnlvm.helpers.array_ptr_loop(b, var_key_ptr, "key_apply_noise") as (b, idx): + rate_ptr = pnlvm.helpers.get_param_ptr(builder, self, params, "rate") + with pnlvm.helpers.array_ptr_loop(b, var_key_ptr, "key_apply_rate_noise") as (b, idx): if pnlvm.helpers.is_2d_matrix(noise_ptr): noise_elem_ptr = b.gep(noise_ptr, [ctx.int32_ty(0), ctx.int32_ty(0), idx]) noise_val = b.load(noise_elem_ptr) else: noise_val = pnlvm.helpers.load_extract_scalar_array_one(b, noise_ptr) + rate_val = pnlvm.helpers.load_extract_scalar_array_one(b, rate_ptr) + modified_key_elem_ptr = b.gep(modified_key_ptr, [ctx.int32_ty(0), idx]) key_elem_ptr = b.gep(var_key_ptr, [ctx.int32_ty(0), idx]) key_elem = b.load(key_elem_ptr) + key_elem = b.fmul(key_elem, rate_val) key_elem = b.fadd(key_elem, noise_val) b.store(key_elem, modified_key_elem_ptr) @@ -2691,7 +2695,13 @@ def _function(self, # CURRENT PROBLEM WITH LATTER IS THAT IT CAUSES CRASH ON INIT, SINCE NOT OUTPUT_PORT # SO, WOULD HAVE TO RETURN ZEROS ON INIT AND THEN SUPPRESS AFTERWARDS, AS MOCKED UP BELOW memory = [[0]* self.parameters.key_size._get(context), [0]* self.parameters.val_size._get(context)] + # Store variable to dict: + rate = self._get_current_parameter_value(RATE, context) + if rate is not None: + key = np.asfarray(key) * np.asfarray(rate) + assert len(key) == len(variable[KEYS]), "{} vs. {}".format(key, variable[KEYS]) + if noise is not None: # TODO: does val need noise? key = np.asfarray(key) + np.asfarray(noise)[KEYS] diff --git a/tests/functions/test_memory.py b/tests/functions/test_memory.py index 1fdbe13c41c..abfeb7d2239 100644 --- a/tests/functions/test_memory.py +++ b/tests/functions/test_memory.py @@ -41,27 +41,30 @@ [test_var[0], test_var[1]], id="DictionaryMemory"), pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'seed': module_seed}, - [test_var[0], test_var[1]], + [test_var[0] * RAND1, test_var[1]], id="DictionaryMemory Rate"), - pytest.param(Functions.DictionaryMemory, test_var, {'initializer':test_initializer, 'rate':RAND1, 'seed': module_seed}, + pytest.param(Functions.DictionaryMemory, test_var, {'initializer':test_initializer, 'seed': module_seed}, [test_var[0], test_var[1]], id="DictionaryMemory Initializer"), - pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.5, 'seed': module_seed}, + pytest.param(Functions.DictionaryMemory, test_var, {'retrieval_prob':0.5, 'seed': module_seed}, np.zeros_like(test_var), id="DictionaryMemory Low Retrieval"), - pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'storage_prob':0.1, 'seed': module_seed}, + pytest.param(Functions.DictionaryMemory, test_var, {'storage_prob':0.1, 'seed': module_seed}, np.zeros_like(test_var), id="DictionaryMemory Low Storage"), - pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.9, 'storage_prob':0.9, 'seed': module_seed}, + pytest.param(Functions.DictionaryMemory, test_var, {'retrieval_prob':0.9, 'storage_prob':0.9, 'seed': module_seed}, [test_var[0], test_var[1]], id="DictionaryMemory High Storage/Retrieve"), - pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'noise':RAND2}, + pytest.param(Functions.DictionaryMemory, test_var, {'noise':RAND2}, [test_var[0] + RAND2, test_var[1]], id="DictionaryMemory NoiseScalar"), - pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'noise':[RAND2]}, + 
pytest.param(Functions.DictionaryMemory, test_var, {'noise':RAND2, 'rate':RAND1}, + [test_var[0] * RAND1 + RAND2, test_var[1]], + id="DictionaryMemory Rate NoiseScalar"), + pytest.param(Functions.DictionaryMemory, test_var, {'noise':[RAND2]}, [test_var[0] + RAND2, test_var[1]], id="DictionaryMemory NoiseVec1"), - pytest.param(Functions.DictionaryMemory, test_var, {'rate':RAND1, 'noise':test_var / 2}, + pytest.param(Functions.DictionaryMemory, test_var, {'noise':test_var / 2}, [test_var[0] + test_var[0] / 2, test_var[1]], id="DictionaryMemory NoiseVecN"), pytest.param(Functions.ContentAddressableMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.5, 'seed': module_seed}, @@ -82,27 +85,30 @@ [philox_var[0], philox_var[1]], id="DictionaryMemory Philox"), pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'seed': module_seed}, - [philox_var[0], philox_var[1]], + [philox_var[0] * RAND1, philox_var[1]], id="DictionaryMemory Rate Philox"), - pytest.param(Functions.DictionaryMemory, philox_var, {'initializer':philox_initializer, 'rate':RAND1, 'seed': module_seed}, + pytest.param(Functions.DictionaryMemory, philox_var, {'initializer':philox_initializer, 'seed': module_seed}, [philox_var[0], philox_var[1]], id="DictionaryMemory Initializer Philox"), - pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.01, 'seed': module_seed}, + pytest.param(Functions.DictionaryMemory, philox_var, {'retrieval_prob':0.01, 'seed': module_seed}, np.zeros_like(philox_var), id="DictionaryMemory Low Retrieval Philox"), - pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'storage_prob':0.01, 'seed': module_seed}, + pytest.param(Functions.DictionaryMemory, philox_var, {'storage_prob':0.01, 'seed': module_seed}, np.zeros_like(philox_var), id="DictionaryMemory Low Storage Philox"), - pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.95, 'storage_prob':0.95, 'seed': module_seed}, + pytest.param(Functions.DictionaryMemory, philox_var, {'retrieval_prob':0.95, 'storage_prob':0.95, 'seed': module_seed}, [philox_var[0], philox_var[1]], id="DictionaryMemory High Storage/Retrieve Philox"), - pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'noise':RAND2}, + pytest.param(Functions.DictionaryMemory, philox_var, {'noise':RAND2}, [philox_var[0] + RAND2, philox_var[1]], id="DictionaryMemory NoiseScalar Philox"), - pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'noise':[RAND2]}, + pytest.param(Functions.DictionaryMemory, philox_var, {'noise':RAND2, 'rate':RAND1}, + [philox_var[0] * RAND1 + RAND2, philox_var[1]], + id="DictionaryMemory Rate NoiseScalar Philox"), + pytest.param(Functions.DictionaryMemory, philox_var, {'noise':[RAND2]}, [philox_var[0] + RAND2, philox_var[1]], id="DictionaryMemory NoiseVec1 Philox"), - pytest.param(Functions.DictionaryMemory, philox_var, {'rate':RAND1, 'noise':philox_var / 2}, + pytest.param(Functions.DictionaryMemory, philox_var, {'noise':philox_var / 2}, [philox_var[0] + philox_var[0] / 2, philox_var[1]], id="DictionaryMemory NoiseVecN Philox"), pytest.param(Functions.ContentAddressableMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.1, 'seed': module_seed}, From c46c7b86458a0894e05584dd49b54c7a72ca2365 Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Thu, 26 Oct 2023 22:49:59 -0400 Subject: [PATCH 16/17] tests/MemoryFunctions: Add extra insertion to test 'duplicate_keys' The test are not added yet, so the second call should not insert data, but it uses up 
draws from the PRNG so the probabilities need to be adjusted. Signed-off-by: Jan Vesely --- tests/functions/test_memory.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/tests/functions/test_memory.py b/tests/functions/test_memory.py index abfeb7d2239..2ccb11e032a 100644 --- a/tests/functions/test_memory.py +++ b/tests/functions/test_memory.py @@ -46,7 +46,7 @@ pytest.param(Functions.DictionaryMemory, test_var, {'initializer':test_initializer, 'seed': module_seed}, [test_var[0], test_var[1]], id="DictionaryMemory Initializer"), - pytest.param(Functions.DictionaryMemory, test_var, {'retrieval_prob':0.5, 'seed': module_seed}, + pytest.param(Functions.DictionaryMemory, test_var, {'retrieval_prob':0.1, 'seed': module_seed}, np.zeros_like(test_var), id="DictionaryMemory Low Retrieval"), pytest.param(Functions.DictionaryMemory, test_var, {'storage_prob':0.1, 'seed': module_seed}, @@ -67,7 +67,9 @@ pytest.param(Functions.DictionaryMemory, test_var, {'noise':test_var / 2}, [test_var[0] + test_var[0] / 2, test_var[1]], id="DictionaryMemory NoiseVecN"), - pytest.param(Functions.ContentAddressableMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.5, 'seed': module_seed}, + + # ContentAddressableMemory + pytest.param(Functions.ContentAddressableMemory, test_var, {'rate':RAND1, 'retrieval_prob':0.1, 'seed': module_seed}, np.zeros_like(test_var), id="ContentAddressableMemory Low Retrieval"), pytest.param(Functions.ContentAddressableMemory, test_var, {'rate':RAND1, 'storage_prob':0.1, 'seed': module_seed}, @@ -96,7 +98,7 @@ pytest.param(Functions.DictionaryMemory, philox_var, {'storage_prob':0.01, 'seed': module_seed}, np.zeros_like(philox_var), id="DictionaryMemory Low Storage Philox"), - pytest.param(Functions.DictionaryMemory, philox_var, {'retrieval_prob':0.95, 'storage_prob':0.95, 'seed': module_seed}, + pytest.param(Functions.DictionaryMemory, philox_var, {'retrieval_prob':0.98, 'storage_prob':0.98, 'seed': module_seed}, [philox_var[0], philox_var[1]], id="DictionaryMemory High Storage/Retrieve Philox"), pytest.param(Functions.DictionaryMemory, philox_var, {'noise':RAND2}, @@ -111,13 +113,15 @@ pytest.param(Functions.DictionaryMemory, philox_var, {'noise':philox_var / 2}, [philox_var[0] + philox_var[0] / 2, philox_var[1]], id="DictionaryMemory NoiseVecN Philox"), + + # ContentAddressableMemory pytest.param(Functions.ContentAddressableMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.1, 'seed': module_seed}, np.zeros_like(philox_var), id="ContentAddressableMemory Low Retrieval Philox"), pytest.param(Functions.ContentAddressableMemory, philox_var, {'rate':RAND1, 'storage_prob':0.01, 'seed': module_seed}, np.zeros_like(philox_var), id="ContentAddressableMemory Low Storage Philox"), - pytest.param(Functions.ContentAddressableMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.9, 'storage_prob':0.9, 'seed': module_seed}, + pytest.param(Functions.ContentAddressableMemory, philox_var, {'rate':RAND1, 'retrieval_prob':0.98, 'storage_prob':0.98, 'seed': module_seed}, [philox_var[0], philox_var[1]], id="ContentAddressableMemory High Storage/Retrieval Philox"), pytest.param(Functions.ContentAddressableMemory, philox_var, {'initializer':philox_initializer, 'rate':RAND1, 'seed': module_seed}, @@ -145,6 +149,12 @@ def test_basic(func, variable, params, expected, benchmark, func_mode): EX = pytest.helpers.get_func_execution(f, func_mode, writeback=False) EX(variable) + + # Store value * 4 with a duplicate key + # This insertion should be ignored unless the function 
allows + # "duplicate_keys" + if len(variable) == 2: + EX([variable[0], variable[1] * 4]) res = benchmark(EX, variable) # This still needs to use "allclose" as the key gets manipulated before From bc409f2313348404dfae08315a0ee93ea94e2431 Mon Sep 17 00:00:00 2001 From: Jan Vesely Date: Fri, 27 Oct 2023 00:10:40 -0400 Subject: [PATCH 17/17] llvm, MemoryFunctions: Drop 'duplicate_keys' and 'previous_value' from compiled structures Currently unused in compiled code. Signed-off-by: Jan Vesely --- psyneulink/core/components/component.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/psyneulink/core/components/component.py b/psyneulink/core/components/component.py index ead7cba1376..59c81bf3f0a 100644 --- a/psyneulink/core/components/component.py +++ b/psyneulink/core/components/component.py @@ -1317,6 +1317,10 @@ def _get_compilation_state(self): if getattr(self.parameters, 'has_recurrent_input_port', False): blacklist.update(['combination_function']) + # Drop previous_value from MemoryFunctions + if hasattr(self.parameters, 'duplicate_keys'): + blacklist.add("previous_value") + def _is_compilation_state(p): # FIXME: This should use defaults instead of 'p.get' return p.name not in blacklist and \ @@ -1400,7 +1404,7 @@ def _get_compilation_params(self): "random_variables", "smoothing_factor", "per_item", "key_size", "val_size", "max_entries", "random_draw", "randomization_dimension", "save_values", "save_samples", - "max_iterations", + "max_iterations", "duplicate_keys", # not used in compiled learning "learning_results", "learning_signal", "learning_signals", "error_matrix", "error_signal", "activation_input",