diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..3f4f856 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,24 @@ +# EditorConfig is awesome: https://EditorConfig.org + +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +[*.py] +indent_style = space +indent_size = 4 + +[*.{yml,yaml}] +indent_style = space +indent_size = 2 + +[*.{json,toml}] +indent_style = space +indent_size = 2 + +[Makefile] +indent_style = tab diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..01eaacb --- /dev/null +++ b/.flake8 @@ -0,0 +1,3 @@ +[flake8] +max-line-length = 120 +extend-ignore = E203, W503 diff --git a/.tool-versions b/.tool-versions index d69635c..193cb42 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,4 +1,4 @@ actionlint 1.6.26 poetry 2.1.3 -python 3.12.12 +python 3.10.12 shellcheck 0.9.0 diff --git a/poetry.lock b/poetry.lock index 00548be..a8ed12a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "attrs" @@ -56,6 +56,8 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" pytokens = ">=0.3.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -63,6 +65,156 @@ d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "boto3" +version = "1.26.165" +description = "The AWS SDK for Python" +optional = false +python-versions = ">= 3.7" +groups = ["main", "dev"] +files = [ + {file = "boto3-1.26.165-py3-none-any.whl", hash = "sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c"}, + {file = "boto3-1.26.165.tar.gz", hash = "sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30"}, +] + +[package.dependencies] +botocore = ">=1.29.165,<1.30.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.6.0,<0.7.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.29.165" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">= 3.7" +groups = ["main", "dev"] +files = [ + {file = "botocore-1.29.165-py3-none-any.whl", hash = "sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df"}, + {file = "botocore-1.29.165.tar.gz", hash = "sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.16.9)"] + +[[package]] +name = "certifi" +version = "2025.11.12" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"}, + {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"}, +] + +[[package]] +name = "cffi" +version = "2.0.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"}, + {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"}, + {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"}, + 
{file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"}, + {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"}, + {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}, + {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}, + {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}, + {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}, + {file = 
"cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}, + {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}, + {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}, + {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}, + {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}, + {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}, + {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}, + {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}, + {file = 
"cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}, + {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"}, + {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"}, + {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, + {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, +] + +[package.dependencies] +pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} + [[package]] name = "cfgv" version = "3.5.0" @@ -75,6 +227,129 @@ files = [ {file = "cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132"}, ] +[[package]] +name = "charset-normalizer" +version = "3.4.4" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, +] + [[package]] name = "click" version = "8.3.1" @@ -208,6 +483,84 @@ files = [ [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +[[package]] +name = "cryptography" +version = "46.0.3" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.8" +groups = ["dev"] +files = [ + {file = "cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e"}, + {file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926"}, + {file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71"}, + {file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac"}, + {file = "cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018"}, + {file = "cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb"}, + {file = "cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c"}, + {file = "cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = 
"sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665"}, + {file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3"}, + {file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20"}, + {file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de"}, + {file = "cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914"}, + {file = "cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db"}, + {file = "cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21"}, + {file = "cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04"}, + {file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506"}, + {file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963"}, + {file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4"}, + {file = "cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df"}, + {file = "cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f"}, + {file = "cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = 
"sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372"}, + {file = "cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32"}, + {file = "cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9"}, + {file = "cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c"}, + {file = "cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1"}, +] + +[package.dependencies] +cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""} +typing-extensions = {version = ">=4.13.2", markers = "python_full_version < \"3.11.0\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox[uv] (>=2024.4.15)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==46.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "distlib" version = "0.4.0" @@ -220,6 +573,24 @@ files = [ {file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"}, ] +[[package]] +name = "exceptiongroup" +version = "1.3.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "filelock" version = "3.20.1" @@ -268,6 +639,21 @@ flake8 = ">=7.2.0" [package.extras] dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] +[[package]] +name = "freezegun" +version = "1.5.5" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "freezegun-1.5.5-py3-none-any.whl", hash = 
"sha256:cd557f4a75cf074e84bc374249b9dd491eaeacd61376b9eb3c423282211619d2"}, + {file = "freezegun-1.5.5.tar.gz", hash = "sha256:ac7742a6cc6c25a2c35e9292dfd554b897b517d2dec26891a2e8debf205cb94a"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + [[package]] name = "identify" version = "2.6.15" @@ -283,6 +669,21 @@ files = [ [package.extras] license = ["ukkonen"] +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" version = "2.3.0" @@ -311,6 +712,135 @@ files = [ colors = ["colorama"] plugins = ["setuptools"] +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = 
"markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = 
"markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"}, + {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, +] + [[package]] name = "mccabe" version = "0.7.0" @@ -323,6 +853,52 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = 
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "moto" +version = "5.1.18" +description = "A library that allows you to easily mock out tests based on AWS infrastructure" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "moto-5.1.18-py3-none-any.whl", hash = "sha256:b65aa8fc9032c5c574415451e14fd7da4e43fd50b8bdcb5f10289ad382c25bcf"}, + {file = "moto-5.1.18.tar.gz", hash = "sha256:45298ef7b88561b839f6fe3e9da2a6e2ecd10283c7bf3daf43a07a97465885f9"}, +] + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.20.88,<1.35.45 || >1.35.45,<1.35.46 || >1.35.46" +cryptography = ">=35.0.0" +Jinja2 = ">=2.10.1" +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.15.0,<0.25.5 || >0.25.5" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "jsonschema", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] +apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.3)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.6.3)"] +events = ["jsonpath_ng"] +glue = ["pyparsing (>=3.0.7)"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pyparsing (>=3.0.7)", "setuptools"] +quicksight = ["jsonschema"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.6.3)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.6.3)"] +server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsonpath_ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.6.3)", "pyparsing (>=3.0.7)", "setuptools"] +ssm = ["PyYAML (>=5.1)"] +stepfunctions = ["antlr4-python3-runtime", "jsonpath_ng"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + [[package]] name = "mypy-extensions" version = "1.1.0" @@ -359,6 +935,21 @@ files = [ {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] +[[package]] +name = "parameterized" +version = "0.9.0" +description = "Parameterized testing with any Python test framework" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = 
"sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"}, + {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"}, +] + +[package.extras] +dev = ["jinja2"] + [[package]] name = "pathspec" version = "0.12.1" @@ -385,6 +976,7 @@ files = [ [package.dependencies] prettytable = ">=3.12.0" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] dev = ["autopep8", "black", "docutils", "isort", "mypy", "pip-tools", "pypandoc", "pytest-cov", "pytest-pycodestyle", "pytest-runner", "tomli-w", "twine", "wheel"] @@ -471,6 +1063,19 @@ files = [ {file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"}, ] +[[package]] +name = "pycparser" +version = "2.23" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" +files = [ + {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, + {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, +] + [[package]] name = "pyflakes" version = "3.4.0" @@ -512,14 +1117,31 @@ files = [ [package.dependencies] colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} iniconfig = ">=1.0.1" packaging = ">=22" pluggy = ">=1.5,<2" pygments = ">=2.7.2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "pytokens" version = "0.3.0" @@ -618,6 +1240,279 @@ files = [ {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, ] +[[package]] +name = "requests" +version = "2.32.5" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "responses" +version = "0.25.8" +description = "A utility library for mocking out the `requests` Python library." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "responses-0.25.8-py3-none-any.whl", hash = "sha256:0c710af92def29c8352ceadff0c3fe340ace27cf5af1bbe46fb71275bcd2831c"}, + {file = "responses-0.25.8.tar.gz", hash = "sha256:9374d047a575c8f781b94454db5cab590b6029505f488d12899ddb10a4af1cf4"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] + +[[package]] +name = "s3transfer" +version = "0.6.2" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.7" +groups = ["main", "dev"] +files = [ + {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"}, + {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"}, +] + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + +[[package]] +name = "simplejson" +version = "3.20.2" +description = "Simple, fast, extensible JSON encoder/decoder for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.5" +groups = ["main"] +files = [ + {file = "simplejson-3.20.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:11847093fd36e3f5a4f595ff0506286c54885f8ad2d921dfb64a85bce67f72c4"}, + {file = "simplejson-3.20.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4d291911d23b1ab8eb3241204dd54e3ec60ddcd74dfcb576939d3df327205865"}, + {file = "simplejson-3.20.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:da6d16d7108d366bbbf1c1f3274662294859c03266e80dd899fc432598115ea4"}, + {file = "simplejson-3.20.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9ddf9a07694c5bbb4856271cbc4247cc6cf48f224a7d128a280482a2f78bae3d"}, + {file = "simplejson-3.20.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:3a0d2337e490e6ab42d65a082e69473717f5cc75c3c3fb530504d3681c4cb40c"}, + {file = "simplejson-3.20.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8ba88696351ed26a8648f8378a1431223f02438f8036f006d23b4f5b572778fa"}, + {file = "simplejson-3.20.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:00bcd408a4430af99d1f8b2b103bb2f5133bb688596a511fcfa7db865fbb845e"}, + {file = "simplejson-3.20.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4fc62feb76f590ccaff6f903f52a01c58ba6423171aa117b96508afda9c210f0"}, + {file = "simplejson-3.20.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6d7286dc11af60a2f76eafb0c2acde2d997e87890e37e24590bb513bec9f1bc5"}, + {file = "simplejson-3.20.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c01379b4861c3b0aa40cba8d44f2b448f5743999aa68aaa5d3ef7049d4a28a2d"}, + {file = "simplejson-3.20.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a16b029ca25645b3bc44e84a4f941efa51bf93c180b31bd704ce6349d1fc77c1"}, + {file = "simplejson-3.20.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e22a5fb7b1437ffb057e02e1936a3bfb19084ae9d221ec5e9f4cf85f69946b6"}, + {file = "simplejson-3.20.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8b6ff02fc7b8555c906c24735908854819b0d0dc85883d453e23ca4c0445d01"}, + {file = "simplejson-3.20.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:2bfc1c396ad972ba4431130b42307b2321dba14d988580c1ac421ec6a6b7cee3"}, + {file = "simplejson-3.20.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a97249ee1aee005d891b5a211faf58092a309f3d9d440bc269043b08f662eda"}, + {file = "simplejson-3.20.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f1036be00b5edaddbddbb89c0f80ed229714a941cfd21e51386dc69c237201c2"}, + {file = "simplejson-3.20.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5d6f5bacb8cdee64946b45f2680afa3f54cd38e62471ceda89f777693aeca4e4"}, + {file = "simplejson-3.20.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8db6841fb796ec5af632f677abf21c6425a1ebea0d9ac3ef1a340b8dc69f52b8"}, + {file = "simplejson-3.20.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0a341f7cc2aae82ee2b31f8a827fd2e51d09626f8b3accc441a6907c88aedb7"}, + {file = "simplejson-3.20.2-cp310-cp310-win32.whl", hash = "sha256:27f9c01a6bc581d32ab026f515226864576da05ef322d7fc141cd8a15a95ce53"}, + {file = "simplejson-3.20.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0a63ec98a4547ff366871bf832a7367ee43d047bcec0b07b66c794e2137b476"}, + {file = "simplejson-3.20.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:06190b33cd7849efc413a5738d3da00b90e4a5382fd3d584c841ac20fb828c6f"}, + {file = "simplejson-3.20.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4ad4eac7d858947a30d2c404e61f16b84d16be79eb6fb316341885bdde864fa8"}, + {file = "simplejson-3.20.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b392e11c6165d4a0fde41754a0e13e1d88a5ad782b245a973dd4b2bdb4e5076a"}, + {file = "simplejson-3.20.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51eccc4e353eed3c50e0ea2326173acdc05e58f0c110405920b989d481287e51"}, + {file = "simplejson-3.20.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:306e83d7c331ad833d2d43c76a67f476c4b80c4a13334f6e34bb110e6105b3bd"}, + {file = "simplejson-3.20.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f820a6ac2ef0bc338ae4963f4f82ccebdb0824fe9caf6d660670c578abe01013"}, + {file = "simplejson-3.20.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e7a066528a5451433eb3418184f05682ea0493d14e9aae690499b7e1eb6b81"}, + {file = "simplejson-3.20.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:438680ddde57ea87161a4824e8de04387b328ad51cfdf1eaf723623a3014b7aa"}, + {file = "simplejson-3.20.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cac78470ae68b8d8c41b6fca97f5bf8e024ca80d5878c7724e024540f5cdaadb"}, + {file = "simplejson-3.20.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7524e19c2da5ef281860a3d74668050c6986be15c9dd99966034ba47c68828c2"}, + {file = "simplejson-3.20.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e9b6d845a603b2eef3394eb5e21edb8626cd9ae9a8361d14e267eb969dbe413"}, + {file = "simplejson-3.20.2-cp311-cp311-win32.whl", hash = "sha256:47d8927e5ac927fdd34c99cc617938abb3624b06ff86e8e219740a86507eb961"}, + {file = "simplejson-3.20.2-cp311-cp311-win_amd64.whl", hash = "sha256:ba4edf3be8e97e4713d06c3d302cba1ff5c49d16e9d24c209884ac1b8455520c"}, + {file = "simplejson-3.20.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4376d5acae0d1e91e78baeba4ee3cf22fbf6509d81539d01b94e0951d28ec2b6"}, + {file = "simplejson-3.20.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:f8fe6de652fcddae6dec8f281cc1e77e4e8f3575249e1800090aab48f73b4259"}, + {file = "simplejson-3.20.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25ca2663d99328d51e5a138f22018e54c9162438d831e26cfc3458688616eca8"}, + {file = "simplejson-3.20.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12a6b2816b6cab6c3fd273d43b1948bc9acf708272074c8858f579c394f4cbc9"}, + {file = "simplejson-3.20.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac20dc3fcdfc7b8415bfc3d7d51beccd8695c3f4acb7f74e3a3b538e76672868"}, + {file = "simplejson-3.20.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db0804d04564e70862ef807f3e1ace2cc212ef0e22deb1b3d6f80c45e5882c6b"}, + {file = "simplejson-3.20.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:979ce23ea663895ae39106946ef3d78527822d918a136dbc77b9e2b7f006237e"}, + {file = "simplejson-3.20.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a2ba921b047bb029805726800819675249ef25d2f65fd0edb90639c5b1c3033c"}, + {file = "simplejson-3.20.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:12d3d4dc33770069b780cc8f5abef909fe4a3f071f18f55f6d896a370fd0f970"}, + {file = "simplejson-3.20.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:aff032a59a201b3683a34be1169e71ddda683d9c3b43b261599c12055349251e"}, + {file = "simplejson-3.20.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30e590e133b06773f0dc9c3f82e567463df40598b660b5adf53eb1c488202544"}, + {file = "simplejson-3.20.2-cp312-cp312-win32.whl", hash = "sha256:8d7be7c99939cc58e7c5bcf6bb52a842a58e6c65e1e9cdd2a94b697b24cddb54"}, + {file = "simplejson-3.20.2-cp312-cp312-win_amd64.whl", hash = "sha256:2c0b4a67e75b945489052af6590e7dca0ed473ead5d0f3aad61fa584afe814ab"}, + {file = "simplejson-3.20.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90d311ba8fcd733a3677e0be21804827226a57144130ba01c3c6a325e887dd86"}, + {file = "simplejson-3.20.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:feed6806f614bdf7f5cb6d0123cb0c1c5f40407ef103aa935cffaa694e2e0c74"}, + {file = "simplejson-3.20.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6b1d8d7c3e1a205c49e1aee6ba907dcb8ccea83651e6c3e2cb2062f1e52b0726"}, + {file = "simplejson-3.20.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:552f55745044a24c3cb7ec67e54234be56d5d6d0e054f2e4cf4fb3e297429be5"}, + {file = "simplejson-3.20.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2da97ac65165d66b0570c9e545786f0ac7b5de5854d3711a16cacbcaa8c472d"}, + {file = "simplejson-3.20.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f59a12966daa356bf68927fca5a67bebac0033cd18b96de9c2d426cd11756cd0"}, + {file = "simplejson-3.20.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:133ae2098a8e162c71da97cdab1f383afdd91373b7ff5fe65169b04167da976b"}, + {file = "simplejson-3.20.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7977640af7b7d5e6a852d26622057d428706a550f7f5083e7c4dd010a84d941f"}, + {file = "simplejson-3.20.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b530ad6d55e71fa9e93e1109cf8182f427a6355848a4ffa09f69cc44e1512522"}, + {file = "simplejson-3.20.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bd96a7d981bf64f0e42345584768da4435c05b24fd3c364663f5fbc8fabf82e3"}, + {file = 
"simplejson-3.20.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f28ee755fadb426ba2e464d6fcf25d3f152a05eb6b38e0b4f790352f5540c769"}, + {file = "simplejson-3.20.2-cp313-cp313-win32.whl", hash = "sha256:472785b52e48e3eed9b78b95e26a256f59bb1ee38339be3075dad799e2e1e661"}, + {file = "simplejson-3.20.2-cp313-cp313-win_amd64.whl", hash = "sha256:a1a85013eb33e4820286139540accbe2c98d2da894b2dcefd280209db508e608"}, + {file = "simplejson-3.20.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a135941a50795c934bdc9acc74e172b126e3694fe26de3c0c1bc0b33ea17e6ce"}, + {file = "simplejson-3.20.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25ba488decb18738f5d6bd082018409689ed8e74bc6c4d33a0b81af6edf1c9f4"}, + {file = "simplejson-3.20.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d81f8e982923d5e9841622ff6568be89756428f98a82c16e4158ac32b92a3787"}, + {file = "simplejson-3.20.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdad497ccb1edc5020bef209e9c3e062a923e8e6fca5b8a39f0fb34380c8a66c"}, + {file = "simplejson-3.20.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a3f1db97bcd9fb592928159af7a405b18df7e847cbcc5682a209c5b2ad5d6b1"}, + {file = "simplejson-3.20.2-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:215b65b0dc2c432ab79c430aa4f1e595f37b07a83c1e4c4928d7e22e6b49a748"}, + {file = "simplejson-3.20.2-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:ece4863171ba53f086a3bfd87f02ec3d6abc586f413babfc6cf4de4d84894620"}, + {file = "simplejson-3.20.2-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:4a76d7c47d959afe6c41c88005f3041f583a4b9a1783cf341887a3628a77baa0"}, + {file = "simplejson-3.20.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:e9b0523582a57d9ea74f83ecefdffe18b2b0a907df1a9cef06955883341930d8"}, + {file = "simplejson-3.20.2-cp36-cp36m-win32.whl", hash = "sha256:16366591c8e08a4ac76b81d76a3fc97bf2bcc234c9c097b48d32ea6bfe2be2fe"}, + {file = "simplejson-3.20.2-cp36-cp36m-win_amd64.whl", hash = "sha256:732cf4c4ac1a258b4e9334e1e40a38303689f432497d3caeb491428b7547e782"}, + {file = "simplejson-3.20.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6c3a98e21e5f098e4f982ef302ebb1e681ff16a5d530cfce36296bea58fe2396"}, + {file = "simplejson-3.20.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10cf9ca1363dc3711c72f4ec7c1caed2bbd9aaa29a8d9122e31106022dc175c6"}, + {file = "simplejson-3.20.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:106762f8aedf3fc3364649bfe8dc9a40bf5104f872a4d2d86bae001b1af30d30"}, + {file = "simplejson-3.20.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b21659898b7496322e99674739193f81052e588afa8b31b6a1c7733d8829b925"}, + {file = "simplejson-3.20.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fa1db6a02bca88829f2b2057c76a1d2dc2fccb8c5ff1199e352f213e9ec719"}, + {file = "simplejson-3.20.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:156139d94b660448ec8a4ea89f77ec476597f752c2ff66432d3656704c66b40e"}, + {file = "simplejson-3.20.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:b2620ac40be04dff08854baf6f4df10272f67079f61ed1b6274c0e840f2e2ae1"}, + {file = "simplejson-3.20.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = 
"sha256:9ccef5b5d3e3ac5d9da0a0ca1d2de8cf2b0fb56b06aa0ab79325fa4bcc5a1d60"}, + {file = "simplejson-3.20.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:f526304c2cc9fd8b8d18afacb75bc171650f83a7097b2c92ad6a431b5d7c1b72"}, + {file = "simplejson-3.20.2-cp37-cp37m-win32.whl", hash = "sha256:e0f661105398121dd48d9987a2a8f7825b8297b3b2a7fe5b0d247370396119d5"}, + {file = "simplejson-3.20.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dab98625b3d6821e77ea59c4d0e71059f8063825a0885b50ed410e5c8bd5cb66"}, + {file = "simplejson-3.20.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b8205f113082e7d8f667d6cd37d019a7ee5ef30b48463f9de48e1853726c6127"}, + {file = "simplejson-3.20.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fc8da64929ef0ff16448b602394a76fd9968a39afff0692e5ab53669df1f047f"}, + {file = "simplejson-3.20.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfe704864b5fead4f21c8d448a89ee101c9b0fc92a5f40b674111da9272b3a90"}, + {file = "simplejson-3.20.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40ca7cbe7d2f423b97ed4e70989ef357f027a7e487606628c11b79667639dc84"}, + {file = "simplejson-3.20.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cec1868b237fe9fb2d466d6ce0c7b772e005aadeeda582d867f6f1ec9710cad"}, + {file = "simplejson-3.20.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:792debfba68d8dd61085ffb332d72b9f5b38269cda0c99f92c7a054382f55246"}, + {file = "simplejson-3.20.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e022b2c4c54cb4855e555f64aa3377e3e5ca912c372fa9e3edcc90ebbad93dce"}, + {file = "simplejson-3.20.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5de26f11d5aca575d3825dddc65f69fdcba18f6ca2b4db5cef16f41f969cef15"}, + {file = "simplejson-3.20.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:e2162b2a43614727ec3df75baeda8881ab129824aa1b49410d4b6c64f55a45b4"}, + {file = "simplejson-3.20.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e11a1d6b2f7e72ca546bdb4e6374b237ebae9220e764051b867111df83acbd13"}, + {file = "simplejson-3.20.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:daf7cd18fe99eb427fa6ddb6b437cfde65125a96dc27b93a8969b6fe90a1dbea"}, + {file = "simplejson-3.20.2-cp38-cp38-win32.whl", hash = "sha256:da795ea5f440052f4f497b496010e2c4e05940d449ea7b5c417794ec1be55d01"}, + {file = "simplejson-3.20.2-cp38-cp38-win_amd64.whl", hash = "sha256:6a4b5e7864f952fcce4244a70166797d7b8fd6069b4286d3e8403c14b88656b6"}, + {file = "simplejson-3.20.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b3bf76512ccb07d47944ebdca44c65b781612d38b9098566b4bb40f713fc4047"}, + {file = "simplejson-3.20.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:214e26acf2dfb9ff3314e65c4e168a6b125bced0e2d99a65ea7b0f169db1e562"}, + {file = "simplejson-3.20.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2fb1259ca9c385b0395bad59cdbf79535a5a84fb1988f339a49bfbc57455a35a"}, + {file = "simplejson-3.20.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c34e028a2ba8553a208ded1da5fa8501833875078c4c00a50dffc33622057881"}, + {file = "simplejson-3.20.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b538f9d9e503b0dd43af60496780cb50755e4d8e5b34e5647b887675c1ae9fee"}, + {file = "simplejson-3.20.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ab998e416ded6c58f549a22b6a8847e75a9e1ef98eb9fbb2863e1f9e61a4105b"}, + {file = "simplejson-3.20.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a8f1c307edf5fbf0c6db3396c5d3471409c4a40c7a2a466fbc762f20d46601a"}, + {file = "simplejson-3.20.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5a7bbac80bdb82a44303f5630baee140aee208e5a4618e8b9fde3fc400a42671"}, + {file = "simplejson-3.20.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5ef70ec8fe1569872e5a3e4720c1e1dcb823879a3c78bc02589eb88fab920b1f"}, + {file = "simplejson-3.20.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:cb11c09c99253a74c36925d461c86ea25f0140f3b98ff678322734ddc0f038d7"}, + {file = "simplejson-3.20.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:66f7c78c6ef776f8bd9afaad455e88b8197a51e95617bcc44b50dd974a7825ba"}, + {file = "simplejson-3.20.2-cp39-cp39-win32.whl", hash = "sha256:619ada86bfe3a5aa02b8222ca6bfc5aa3e1075c1fb5b3263d24ba579382df472"}, + {file = "simplejson-3.20.2-cp39-cp39-win_amd64.whl", hash = "sha256:44a6235e09ca5cc41aa5870a952489c06aa4aee3361ae46daa947d8398e57502"}, + {file = "simplejson-3.20.2-py3-none-any.whl", hash = "sha256:3b6bb7fb96efd673eac2e4235200bfffdc2353ad12c54117e1e4e2fc485ac017"}, + {file = "simplejson-3.20.2.tar.gz", hash = "sha256:5fe7a6ce14d1c300d80d08695b7f7e633de6cd72c80644021874d985b3393649"}, +] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "tomli" +version = "2.3.0" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = 
"tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = 
"tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "urllib3" +version = "1.26.20" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + [[package]] name = "virtualenv" version = "20.35.4" @@ -634,6 +1529,7 @@ files = [ distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" +typing-extensions = {version = ">=4.13.2", markers = "python_version < \"3.11\""} [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] @@ -651,7 +1547,40 @@ files = [ {file = "wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605"}, ] +[[package]] +name = "werkzeug" +version = "3.1.4" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "werkzeug-3.1.4-py3-none-any.whl", hash = "sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905"}, + {file = "werkzeug-3.1.4.tar.gz", hash = "sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e"}, +] + +[package.dependencies] +markupsafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "xmltodict" +version = "1.0.2" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d"}, + {file = "xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649"}, +] + +[package.extras] +test = ["pytest", "pytest-cov"] + [metadata] lock-version = "2.1" -python-versions = "==3.12.12" -content-hash = "4f445f633176b1ad8280cef34170a2a39b47faf4493682cd86dab1671378571f" +python-versions = "==3.10.12" +content-hash = "eed83f4afb5a8ec608852863089568d79a5721872e413420d5ef147a759bc5d1" diff --git a/pyproject.toml b/pyproject.toml index 73993bc..9ef4923 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,21 +10,24 @@ authors = [ {name = "Jack Spagnoli", email = "jack.spagnoli1@nhs.net"}, ] readme = "README.md" -requires-python = "==3.12.12" -dependencies = [] +requires-python = "==3.10.12" +dependencies = ["boto3 (>=1.26.159,<2.0.0)", "botocore (>=1.29.159,<1.30.0)", "simplejson (>=3.17.2,<4.0.0)", "python-dateutil (>=2.7.0.post0,<3.0.0)", "six (>=1.5,<2.0.0)"] [tool.poetry] packages = [{include = "eps_spine_shared", from = "src"}] [tool.poetry.group.dev.dependencies] -pytest = "^9.0" +black = "^25.12" coverage = 
"^7.13.1" flake8 = "^7.0" flake8-bugbear = "^25.11" -black = "^25.12" +freezegun = "^1.5.5" isort = "^7.0" -pre-commit = "^4.5" +moto = "^5.1.18" +parameterized = "^0.9.0" pip-licenses = "^5.0" +pre-commit = "^4.5" +pytest = "^9.0" [build-system] requires = ["poetry-core>=2.0.0"] @@ -32,7 +35,7 @@ build-backend = "poetry.core.masonry.api" [tool.pytest.ini_options] testpaths = ["tests"] -python_files = ["test_*.py"] +python_files = ["test_*.py", "*_test.py"] python_classes = ["Test*"] python_functions = ["test_*"] @@ -44,8 +47,3 @@ target-version = ["py312"] profile = "black" line_length = 100 known_first_party = ["eps_spine_shared"] - -[tool.flake8] -max-line-length = 100 -extend-ignore = ["E203", "W503"] -max-complexity = 10 diff --git a/sonar-project.properties b/sonar-project.properties index 6a46b7d..beeb291 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -3,4 +3,4 @@ sonar.host.url=https://sonarcloud.io sonar.organization=nhsdigital sonar.projectKey=NHSDigital_eps-spine-shared sonar.python.coverage.reportPaths=.coverage/info.xml -sonar.python.version=3.12.12 +sonar.python.version=3.10.12 diff --git a/src/eps_spine_shared/common/dynamodb_client.py b/src/eps_spine_shared/common/dynamodb_client.py new file mode 100644 index 0000000..5c173fa --- /dev/null +++ b/src/eps_spine_shared/common/dynamodb_client.py @@ -0,0 +1,519 @@ +import copy +import json +import random +import sys +from datetime import datetime, timezone +from decimal import Decimal + +from boto3.dynamodb.conditions import Attr, ConditionExpressionBuilder +from boto3.dynamodb.types import TypeDeserializer, TypeSerializer +from boto3.session import Session +from botocore.config import Config +from botocore.credentials import DeferredRefreshableCredentials +from botocore.exceptions import ClientError, NoCredentialsError + +from eps_spine_shared.common.dynamodb_common import ( + CONDITION_EXPRESSION, + REGION_NAME, + SERVICE_NAME, + Attribute, + Key, + ProjectedAttribute, + SortKey, +) +from eps_spine_shared.errors import EpsNoCredentialsErrorWithRetry +from eps_spine_shared.logger import EpsLogger + + +class EpsDynamoDbClient: + """ + The DynamoDB client specific to the prescriptions message store. + """ + + def __init__( + self, + log_object, + aws_endpoint_url, + table_name, + role_arn=None, + role_session_name=None, + sts_endpoint_url=None, + ): + """ + Instantiate the DynamoDB client. 
+ """ + self.log_object = EpsLogger(log_object) + self.table_name = table_name + self.aws_endpoint_url = aws_endpoint_url + self.role_arn = role_arn + self.role_session_name = role_session_name + self.sts_endpoint_url = sts_endpoint_url + + try: + session = Session() + + if role_arn and role_session_name and sts_endpoint_url: + credentials = DeferredRefreshableCredentials( + refresh_using=self._refreshed_credentials_with_retry, method="sts-assume-role" + ) + session._session._credentials = credentials # noqa: SLF001 + else: + self.log_object.write_log( + "DDB0006", + None, + { + "role": role_arn, + "sessionName": role_session_name, + "endpoint": sts_endpoint_url, + }, + ) + + resource_args = {"service_name": SERVICE_NAME, "region_name": REGION_NAME} + if aws_endpoint_url: + log_object.write_log("DDB0003", None, {"awsEndpointUrl": aws_endpoint_url}) + resource_args["endpoint_url"] = aws_endpoint_url + else: + log_object.write_log("DDB0004", None) + + self.resource = session.resource(**resource_args) + self.table = self.resource.Table(table_name) + + self.client = session.client(**resource_args) + self.deserialiser = TypeDeserializer() + self.serialiser = TypeSerializer() + except Exception as ex: + log_object.write_log("DDB0000", sys.exc_info(), {"error": str(ex)}) + raise ex + + self.log_object.write_log("DDB0001", None, {"tableName": table_name}) + + def _refreshed_credentials_with_retry(self, attempts=2) -> dict: + """ + Retry _refreshed_credentials for a maximum number of attempts until credentials are returned or raise + EpsNoCredentialsErrorWithRetry including the number of attempts. + """ + for _ in range(attempts): + try: + return self._refreshed_credentials() + except NoCredentialsError as e: + latest_exception = e + + raise EpsNoCredentialsErrorWithRetry(attempts=attempts) from latest_exception + + def _refreshed_credentials(self) -> dict: + """ + Refreshes the IAM credentials provided to us by STS for the duration of our session. + This callback is invoked automatically by boto when we are past the lifetime of our + session (uses boto3 default of refreshing 15 mins before expiry). + DurationSeconds - duration of the role session + RoleSessionName - becomes the User Name for subsequent api calls made with the credentials returned + Returns: + dict -> A dictionary containing our new set of credentials from STS as well as the + expiration timestamp for the session. + Adapted from nhs-aws-helpers + """ + sts_session = Session() + + config = Config( + connect_timeout=1, + read_timeout=3, + max_pool_connections=10, + retries={"mode": "standard", "total_max_attempts": 4}, + ) + + sts_client = sts_session.client( + "sts", region_name=REGION_NAME, endpoint_url=self.sts_endpoint_url, config=config + ) + + params = { + "RoleArn": self.role_arn, + "RoleSessionName": self.role_session_name, + "DurationSeconds": 3600, + } + + # Any exceptions raised here must be caught and logged by the code using the client + response = sts_client.assume_role(**params).get("Credentials") + + self.log_object.write_log( + "DDB0005", + None, + { + "role": self.role_arn, + "sessionName": self.role_session_name, + "endpoint": self.sts_endpoint_url, + }, + ) + + return { + "access_key": response.get("AccessKeyId"), + "secret_key": response.get("SecretAccessKey"), + "token": response.get("SessionToken"), + "expiry_time": response.get("Expiration").isoformat(), + } + + def _log_item_size(self, internal_id, serialised_item): + """ + Writes a log message including the item type and post-serialisation size in bytes. 
+ Bespoke sizing functions for items with compressed contents, as bytes won't serialise. + """ + + def default_size(item): + return sys.getsizeof(json.dumps(item)) + + def work_list_size(item): + try: + return sys.getsizeof(item["body"]["M"]["responseDetails"]["M"]["XML"]["B"]) + except KeyError: + return default_size(item) + + def claim_size(claim): + batch_xml = claim["body"]["M"]["Batch XML"] + if "S" in batch_xml: + return default_size(claim) + batch_xml_size = sys.getsizeof(claim["body"]["M"]["Batch XML"]["B"]) + claim_deep_copy = copy.deepcopy(claim) + del claim_deep_copy["body"]["M"]["Batch XML"]["B"] + claim_without_batch_xml_size = sys.getsizeof(json.dumps(claim_deep_copy)) + return batch_xml_size + claim_without_batch_xml_size + + def record_size(record): + body_size = sys.getsizeof(record["body"]["B"]) + record_deep_copy = copy.deepcopy(record) + del record_deep_copy["body"]["B"] + record_without_body_size = sys.getsizeof(json.dumps(record_deep_copy)) + return body_size + record_without_body_size + + def document_size(document): + if document["pk"]["S"].startswith("Notification_"): + return ppd_notification_size(document) + + if document["body"]["M"].get("content"): + content_size = sys.getsizeof(document["body"]["M"]["content"]["B"]) + document_deep_copy = copy.deepcopy(document) + del document_deep_copy["body"]["M"]["content"]["B"] + document_without_body_size = sys.getsizeof(json.dumps(document_deep_copy)) + return content_size + document_without_body_size + else: + return default_size(document) + + def ppd_notification_size(document): + document_deep_copy = copy.deepcopy(document) + + payload = None + if document["body"]["M"]["payload"].get("B"): + payload = document["body"]["M"]["payload"]["B"] + del document_deep_copy["body"]["M"]["payload"]["B"] + elif document["body"]["M"]["payload"].get("S"): + payload = document["body"]["M"]["payload"]["S"] + del document_deep_copy["body"]["M"]["payload"]["S"] + + payload_size = sys.getsizeof(payload) + document_without_payload_size = sys.getsizeof(json.dumps(document_deep_copy)) + return payload_size + document_without_payload_size + + size_funcs = { + SortKey.CLAIM.value: claim_size, + SortKey.WORK_LIST.value: work_list_size, + SortKey.RECORD.value: record_size, + SortKey.DOCUMENT.value: document_size, + } + + item_key = serialised_item["pk"]["S"] + item_type = serialised_item["sk"]["S"] + + try: + size = size_funcs.get(item_type, default_size)(serialised_item) + self.log_object.write_log( + "DDB0011", + None, + { + "itemType": item_type, + "table": self.table_name, + "key": item_key, + "size": size, + "internalID": internal_id, + }, + ) + except Exception: # noqa: BLE001 + self.log_object.write_log( + "DDB0012", + sys.exc_info(), + { + "table": self.table_name, + "itemType": item_type, + "key": item_key, + "internalID": internal_id, + }, + ) + + def serialise_for_dynamodb(self, item): + """ + Convert item into DynamoDB format. + """ + return {k: self.serialiser.serialize(v) for k, v in item.items()} + + def deserialise_from_dynamodb(self, item): + """ + Convert item from DynamoDB format. + """ + return {k: self.deserialiser.deserialize(v) for k, v in item.items()} + + def add_condition_expression(self, put_kwargs, is_update, item): + """ + Adds a condition expression to the put kwargs based on whether the item is being updated. 
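+
+ For reference, the conditions applied below are: a plain insert (is_update False) requires
+ attribute_not_exists(pk) AND attribute_not_exists(sk); an update to a RECORD item requires the
+ incoming scn to be greater than the stored scn; an update to a SEQUENCE_NUMBER item requires the
+ incoming sequence number to be greater than the stored one, except that no condition is added when
+ the incoming sequence number is 1. Items with any other sort key are updated unconditionally.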
+ """ + if not is_update: + put_kwargs["ConditionExpression"] = CONDITION_EXPRESSION + elif item[Key.SK.name] == SortKey.RECORD.value: + put_kwargs["ExpressionAttributeNames"] = {"#currentScn": ProjectedAttribute.SCN.name} + put_kwargs["ExpressionAttributeValues"] = self.serialise_for_dynamodb( + {":newScn": item.get(ProjectedAttribute.SCN.name)} + ) + put_kwargs["ConditionExpression"] = "#currentScn < :newScn" + elif item[Key.SK.name] == SortKey.SEQUENCE_NUMBER.value: + sequence_number = item.get(Attribute.SEQUENCE_NUMBER.name) + if sequence_number == 1: + return + put_kwargs["ExpressionAttributeNames"] = {"#currentSqn": Attribute.SEQUENCE_NUMBER.name} + put_kwargs["ExpressionAttributeValues"] = self.serialise_for_dynamodb( + {":newSqn": sequence_number} + ) + put_kwargs["ConditionExpression"] = "#currentSqn < :newSqn" + + def put_item(self, internal_id, item, is_update=False, log_item_size=True): + """ + Insert an item into the configured DynamoDB table as a single put, after serialising and logging its size. + """ + serialised_item = self.serialise_for_dynamodb(item) + if log_item_size: + self._log_item_size(internal_id, serialised_item) + + put_kwargs = {"TableName": self.table_name, "Item": serialised_item} + self.add_condition_expression(put_kwargs, is_update, item) + return self.client.put_item(**put_kwargs) + + def transact_write_items(self, internal_id, items, is_update=False, log_item_size=True): + """ + Insert items into the configured DynamoDB table as a single transaction, after serialising and logging its size. + """ + transact_items = [] + for item in items: + serialised_item = self.serialise_for_dynamodb(item) + if log_item_size: + self._log_item_size(internal_id, serialised_item) + transact_item = { + "Put": {"TableName": self.table_name, "Item": self.serialise_for_dynamodb(item)} + } + self.add_condition_expression(transact_item["Put"], is_update, item) + transact_items.append(transact_item) + + return self.client.transact_write_items(TransactItems=transact_items) + + def add_last_modified_to_item(self, item): + """ + Add last modified timestamp and day to items. + """ + dt_now = datetime.now(timezone.utc) + last_modified_timestamp = Decimal(str(dt_now.timestamp())) + last_modified_day = dt_now.strftime("%Y%m%d") + partition_suffix = str(random.randint(0, 11)) + item.update( + { + "_lm_day": f"{last_modified_day}.{partition_suffix}", + "_riak_lm": last_modified_timestamp, + } + ) + + def insert_items(self, internal_id, items, is_update=False, log_item_size=True): + """ + Perform a put_item or a transact_write_items depending on the number of items. 
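+
+ Illustrative call (the key and attribute values are placeholders, not real data):
+
+     client.insert_items(
+         "internal-id",
+         [{"pk": "ExamplePrescriptionId", "sk": SortKey.RECORD.value, "scn": 2}],
+         is_update=True,
+     )
+
+ Last-modified attributes are added to every item before it is written.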
+ """ + for item in items: + self.add_last_modified_to_item(item) + try: + if len(items) == 1: + return self.put_item(internal_id, items[0], is_update, log_item_size) + else: + return self.transact_write_items(internal_id, items, is_update, log_item_size) + except ClientError as e: + if e.response["Error"]["Code"] == "ConditionalCheckFailedException": + pk = items[0].get(Key.PK.name) + log_dict = { + "internalID": internal_id, + "incomingScn": items[0].get("scn", "None"), + "pk": pk, + "sk": items[0].get(Key.SK.name), + "table": self.table_name, + } + if is_update: + self.log_object.write_log("DDB0022", None, log_dict) + raise EpsDataStoreError( + self, pk, EpsDataStoreError.CONDITIONAL_UPDATE_FAILURE + ) from e + else: + self.log_object.write_log("DDB0021", None, log_dict) + raise EpsDataStoreError(self, pk, EpsDataStoreError.DUPLICATE_ERROR) from e + else: + raise e + + def get_item(self, internal_id, pk, sk, expect_exists=True, expect_none=False): + """ + Return an item from the DynamoDB table. + + expect_exists=False will not raise an error if the item does not exist. + expect_none=True will not raise an error if the item exists but has no data. + """ + if not pk: + self.log_object.write_log( + "DDB0041", None, {"key": pk, "table": self.table_name, "internalID": internal_id} + ) + raise EpsDataStoreError(self, pk, EpsDataStoreError.ACCESS_ERROR) + + item = self.table.get_item(Key={Key.PK.name: pk, Key.SK.name: sk}).get("Item") + + self._item_checks(item, pk, expect_exists, expect_none) + + return item + + def _item_checks(self, item, key, expect_exists, expect_none): + """ + Run standard checks on a returned item + - Does it exist + - Does it have data + """ + if not expect_exists: + return + + if item is None: + raise EpsDataStoreError(self, key, EpsDataStoreError.MISSING_RECORD) + + if expect_none: + return + + if item.get(ProjectedAttribute.BODY.name) is None: + raise EpsDataStoreError(self, key, EpsDataStoreError.EMPTY_RECORD) + + def query_index(self, index_name, key_condition_expression, filter_expression): + """ + Return the items that match the supplied expressions, for the given index. + """ + query_args = {"KeyConditionExpression": key_condition_expression} + + if index_name: + query_args["IndexName"] = index_name + + if filter_expression: + query_args["FilterExpression"] = filter_expression + + items = [] + while True: + response = self.table.query(**query_args) + items.extend(response["Items"]) + if "LastEvaluatedKey" not in response: + return items + query_args["ExclusiveStartKey"] = response["LastEvaluatedKey"] + + def query_index_with_limit( + self, index_name, key_condition_expression, filter_expression, limit + ): + """ + Return the items that match the supplied expressions, for the given index. + Will return item count up to the given limit. 
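+
+ Illustrative call (the index and attribute names follow the GSIs defined in dynamodb_common, but
+ the values are placeholders; the key condition is a boto3.dynamodb.conditions expression built by
+ the caller):
+
+     from boto3.dynamodb.conditions import Key as KeyCondition
+
+     items = client.query_index_with_limit(
+         index_name="nhsNumberDate",
+         key_condition_expression=KeyCondition("nhsNumber").eq("9999999999"),
+         filter_expression=None,
+         limit=25,
+     )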
+ """ + condition_builder = ConditionExpressionBuilder() + key_condition_expression, condition_attributes, condition_values = ( + condition_builder.build_expression(key_condition_expression, True) + ) + query_args = { + "TableName": self.table_name, + "IndexName": index_name, + "KeyConditionExpression": key_condition_expression, + "ExpressionAttributeNames": condition_attributes, + "ExpressionAttributeValues": self.serialise_for_dynamodb(condition_values), + } + if filter_expression: + query_args["FilterExpression"] = filter_expression + if limit: + query_args["Limit"] = limit + + response_iterator = self.client.get_paginator("query").paginate(**query_args) + + items = [] + for response in response_iterator: + items.extend([self.deserialise_from_dynamodb(item) for item in response["Items"]]) + if limit and len(items) >= limit: + items = items[:limit] + break + return items + + def query_index_yield(self, index_name, key_condition_expression, filter_expression=None): + """ + Return the items that match the supplied expressions, for the given index. + Uses yield to allow retrieval of a large number of items. + """ + query_args = {"IndexName": index_name, "KeyConditionExpression": key_condition_expression} + if filter_expression: + query_args["FilterExpression"] = filter_expression + + found = True + while found: + response = self.table.query(**query_args) + yield [item[Key.PK.name] for item in response["Items"]] + if "LastEvaluatedKey" not in response: + found = False + else: + query_args["ExclusiveStartKey"] = response["LastEvaluatedKey"] + + def build_filter_expression(self, filter_dict): + """ + Build a filter expression for use in the index query. + """ + filters = [] + for key, value in filter_dict.items(): + filters.append(Attr(key).eq(value)) + + filter_expression = None + for _filter in filters: + filter_expression = ( + _filter if filter_expression is None else filter_expression & _filter + ) + + return filter_expression + + def delete_item(self, pk, sk): + """ + Delete an item from the table. + """ + key = self.serialise_for_dynamodb({Key.PK.name: pk, Key.SK.name: sk}) + self.client.delete_item(TableName=self.table_name, Key=key) + + +class EpsDataStoreError(Exception): + """ + Exception to be raised when encountering issues with the DynamoDB datastore. + """ + + ACCESS_ERROR = "accessError" + CONDITIONAL_UPDATE_FAILURE = "conditionalUpdateFailure" + DUPLICATE_ERROR = "duplicateError" + EMPTY_RECORD = "recordRemoved" + MISSING_RECORD = "missingRecord" + + def __init__(self, client: EpsDynamoDbClient, key: str, error_topic: str): # noqa: B042 + """ + The error_topic must match a topic defined as an attribute of this class (above). + The client should have a log_object, a table_name and an aws_endpoint_url. 
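+
+ For example, raised from inside a client method when an expected item is absent:
+
+     raise EpsDataStoreError(self, key, EpsDataStoreError.MISSING_RECORD)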
+ """ + super(EpsDataStoreError, self).__init__() + self.error_topic = error_topic + + log_values = { + "awsEndpointUrl": client.aws_endpoint_url, + "errorTopic": self.error_topic, + "key": key, + "tableName": client.table_name, + } + + log_ref = "UTI0213a" if self.error_topic == self.MISSING_RECORD else "UTI0213" + + client.log_object.write_log(log_ref, None, log_values) diff --git a/src/eps_spine_shared/common/dynamodb_common.py b/src/eps_spine_shared/common/dynamodb_common.py new file mode 100644 index 0000000..19442c5 --- /dev/null +++ b/src/eps_spine_shared/common/dynamodb_common.py @@ -0,0 +1,226 @@ +import random +from decimal import Decimal +from enum import Enum +from typing import Optional + + +class ReleaseVersion(Enum): + """ + Enum of release versions to be used in the DynamoDB table. + """ + + R1 = "R1" + R2 = "R2" + UNKNOWN = "UNKNOWN" + + +class DefinedAttributeType(Enum): + """ + S/N type for a defined attribute + """ + + STRING = "S" + NUMBER = "N" + + +class DefinedAttribute: + """ + Definition of an attribute in the DynamoDB table. + """ + + def __init__(self, name: str, arg_type: DefinedAttributeType) -> None: + self.name = name + self.type = arg_type + + +class Key(Enum): + """ + Enum of table Keys + """ + + PK = DefinedAttribute("pk", DefinedAttributeType.STRING) + SK = DefinedAttribute("sk", DefinedAttributeType.STRING) + + @property + def name(self) -> str: + return self.value.name + + @property + def attribute_type(self) -> DefinedAttributeType: + return self.value.type + + +class Attribute(Enum): + """ + Enum of Defined Attributes to be used in the DynamoDB table. + """ + + NHS_NUMBER = DefinedAttribute("nhsNumber", DefinedAttributeType.STRING) + CREATION_DATETIME = DefinedAttribute("creationDatetime", DefinedAttributeType.STRING) + PRESCRIBER_ORG = DefinedAttribute("prescriberOrg", DefinedAttributeType.STRING) + DISPENSER_ORG = DefinedAttribute("dispenserOrg", DefinedAttributeType.STRING) + NOMINATED_PHARMACY = DefinedAttribute("nominatedPharmacy", DefinedAttributeType.STRING) + IS_READY = DefinedAttribute("isReady", DefinedAttributeType.NUMBER) + NEXT_ACTIVITY = DefinedAttribute("nextActivity", DefinedAttributeType.STRING) + NEXT_ACTIVITY_DATE = DefinedAttribute("nextActivityDate", DefinedAttributeType.STRING) + DOC_REF_TITLE = DefinedAttribute("docRefTitle", DefinedAttributeType.STRING) + CLAIM_NOTIFICATION_STORE_DATE = DefinedAttribute( + "claimNotificationStoreDate", DefinedAttributeType.STRING + ) + STORE_TIME = DefinedAttribute("storeTime", DefinedAttributeType.STRING) + BACKSTOP_DELETE_DATE = DefinedAttribute("backstopDeleteDate", DefinedAttributeType.STRING) + SEQUENCE_NUMBER = DefinedAttribute("sequenceNumber", DefinedAttributeType.NUMBER) + SEQUENCE_NUMBER_NWSSP = DefinedAttribute("sequenceNumberNwssp", DefinedAttributeType.NUMBER) + LM_DAY = DefinedAttribute("_lm_day", DefinedAttributeType.STRING) + RIAK_LM = DefinedAttribute("_riak_lm", DefinedAttributeType.NUMBER) + BATCH_CLAIM_ID = DefinedAttribute("batchClaimId", DefinedAttributeType.STRING) + + @property + def name(self) -> str: + return self.value.name + + @property + def attribute_type(self) -> DefinedAttributeType: + return self.value.type + + +class ProjectedAttribute(Enum): + """ + Enum of Projected Attributes to be used in the DynamoDB table. 
+ """ + + CLAIM_IDS = "claimIds" + SCN = "scn" + STATUS = "status" + BODY = "body" + INDEXES = "indexes" + EXPIRE_AT = "expireAt" + + @property + def name(self) -> str: + return self.value + + +class SortKey(Enum): + """ + Enum of SortKeys to be used in the DynamoDB table. + """ + + DOCUMENT = "DOC" + RECORD = "REC" + WORK_LIST = "WRK" + CLAIM = "CLM" + SEQUENCE_NUMBER = "SQN" + + +class Index: + """ + Information on a GSI + """ + + def __init__(self, name: str, pk: Attribute, sk: Optional[Attribute]) -> None: + self.name = name + self.pk = pk + self.sk = sk if sk else None + + +class GSI(Enum): + """ + Enum of global secondary indexes of the DynamoDB table. + """ + + NHS_NUMBER_DATE = Index("nhsNumberDate", Attribute.NHS_NUMBER, Attribute.CREATION_DATETIME) + PRESCRIBER_DATE = Index("prescriberDate", Attribute.PRESCRIBER_ORG, Attribute.CREATION_DATETIME) + DISPENSER_DATE = Index("dispenserDate", Attribute.DISPENSER_ORG, Attribute.CREATION_DATETIME) + NOMINATED_PHARMACY_STATUS = Index( + "nominatedPharmacyStatus", Attribute.NOMINATED_PHARMACY, Attribute.IS_READY + ) + CLAIM_ID = Index("claimId", Key.SK, Attribute.BATCH_CLAIM_ID) + NEXT_ACTIVITY_DATE = Index( + "nextActivityDate", Attribute.NEXT_ACTIVITY, Attribute.NEXT_ACTIVITY_DATE + ) + STORE_TIME_DOC_REF_TITLE = Index( + "storeTimeDocRefTitle", Attribute.DOC_REF_TITLE, Attribute.STORE_TIME + ) + CLAIM_NOTIFICATION_STORE_TIME = Index( + "claimNotificationStoreTime", Attribute.CLAIM_NOTIFICATION_STORE_DATE, Attribute.STORE_TIME + ) + BACKSTOP_DELETE_DATE = Index("backstopDeleteDate", Key.SK, Attribute.BACKSTOP_DELETE_DATE) + CLAIM_ID_SEQUENCE_NUMBER = Index("claimIdSequenceNumber", Attribute.SEQUENCE_NUMBER, None) + CLAIM_ID_SEQUENCE_NUMBER_NWSSP = Index( + "claimIdSequenceNumberNwssp", Attribute.SEQUENCE_NUMBER_NWSSP, None + ) + LAST_MODIFIED = Index("lastModified", Attribute.LM_DAY, Attribute.RIAK_LM) + + @property + def name(self) -> str: + return self.value.name + + @property + def pk(self) -> Attribute: + return self.value.pk + + @property + def sk(self) -> Optional[Attribute]: + return self.value.sk + + +REGION_NAME = "eu-west-2" +SERVICE_NAME = "dynamodb" +CONDITION_EXPRESSION = ( + f"attribute_not_exists({Key.PK.name}) AND attribute_not_exists({Key.SK.name})" +) +LAST_MODIFIED_DAILY_PARTITIONS = 12 +NEXT_ACTIVITY_DATE_PARTITIONS = 12 +RELEASE_VERSION_PARTITIONS = 12 + + +def replace_decimals(obj): + """ + Utility function to replace any instances of Decimal type with int/float. + """ + + def handle_decimal(obj): + return int(obj) if obj % 1 == 0 else float(obj) + + def handle_dict(obj): + for k in obj: + obj[k] = replace_decimals(obj[k]) + return obj + + def handle_list(obj): + for i in range(len(obj)): + obj[i] = replace_decimals(obj[i]) + return obj + + handlers = {Decimal: handle_decimal, dict: handle_dict, list: handle_list} + + return handlers.get(type(obj), lambda obj: obj)(obj) + + +def prescription_id_without_check_digit(prescription_id) -> str: + """ + If length is > 36 then long prescription id with checksum so truncate to 36 characters. + If length is > 19 and < 36 then short prescription id with checksum so truncate to 19 characters. + """ + if len(prescription_id) > 36: + return prescription_id[:36] + elif len(prescription_id) > 19 and len(prescription_id) < 36: + return prescription_id[:19] + else: + return prescription_id + + +def determine_release_version(prescription_id) -> str: + """ + Determines the release version of a prescription based on the length of its id. Includes shard for indexing. 
+ """ + id_length = len(prescription_id_without_check_digit(prescription_id)) + shard = random.randint(1, RELEASE_VERSION_PARTITIONS) + match id_length: + case 36: + return f"{ReleaseVersion.R1.value}.{shard}" + case 19: + return f"{ReleaseVersion.R2.value}.{shard}" + case _: + return ReleaseVersion.UNKNOWN.value diff --git a/src/eps_spine_shared/common/dynamodb_datastore.py b/src/eps_spine_shared/common/dynamodb_datastore.py new file mode 100644 index 0000000..f9b8574 --- /dev/null +++ b/src/eps_spine_shared/common/dynamodb_datastore.py @@ -0,0 +1,766 @@ +import base64 +import copy +import functools +import sys +import time +import zlib +from datetime import datetime, timedelta, timezone +from random import randint + +import simplejson +from boto3.dynamodb.types import Binary +from dateutil.relativedelta import relativedelta + +from eps_spine_shared.common import indexes +from eps_spine_shared.common.dynamodb_client import EpsDataStoreError, EpsDynamoDbClient +from eps_spine_shared.common.dynamodb_common import ( + NEXT_ACTIVITY_DATE_PARTITIONS, + Attribute, + Key, + ProjectedAttribute, + SortKey, + determine_release_version, + prescription_id_without_check_digit, + replace_decimals, +) +from eps_spine_shared.common.dynamodb_index import EpsDynamoDbIndex, PrescriptionStatus +from eps_spine_shared.logger import EpsLogger +from eps_spine_shared.nhsfundamentals.timeutilities import ( + TimeFormats, + convertSpineDate, + timeNowAsString, +) + + +def timer(func): + """ + Decorator to be used to time methods. + """ + + @functools.wraps(func) + def wrapper_timer(*args, **kwargs): + self = args[0] + internal_id = args[1] + start_time = time.perf_counter() + value = func(*args, **kwargs) + end_time = time.perf_counter() + run_time_ms = (end_time - start_time) * 1000 + run_time_ms = float(f"{run_time_ms:.2f}") + self.log_object.write_log( + "DDB0002", + None, + { + "cls": type(self).__name__, + "func": func.__name__, + "duration": run_time_ms, + "internalID": internal_id, + }, + ) + return value + + return wrapper_timer + + +class EpsDynamoDbDataStore: + """ + The prescriptions message store specific DynamoDB client. + """ + + SEPARATOR = "#" + CLAIM_SEQUENCE_NUMBER_KEY = "claimSequenceNumber" + NWSSP_CLAIM_SEQUENCE_NUMBER_KEY = "claimSequenceNumberNwssp" + INDEX_CLAIMID = "claimid_bin" + INDEX_CLAIMHANDLETIME = "claimhandletime_bin" + INDEX_CLAIM_SEQNUMBER = "seqnum_bin" + INDEX_CLAIM_SEQNUMBER_NWSSP = "nwsspseqnum_bin" + INDEX_SCN = "delta_bin" + INDEX_WORKLISTDATE = "workListDate_bin" + NOTIFICATION_PREFIX = "Notification_" + STORE_TIME_DOC_REF_TITLE_PREFIX = "NominatedReleaseRequestMsgRef" + DEFAULT_EXPIRY_DAYS = 56 + + def __init__( + self, + log_object, + aws_endpoint_url, + table_name, + role_arn=None, + role_session_name=None, + sts_endpoint_url=None, + ): + """ + Instantiate the DynamoDB client. + """ + self.log_object = EpsLogger(log_object) + self.client = EpsDynamoDbClient( + log_object, + aws_endpoint_url, + table_name, + role_arn, + role_session_name, + sts_endpoint_url, + ) + self.indexes = EpsDynamoDbIndex(log_object, self.client) + + def base64_decode_document_content(self, internal_id, document): + """ + base64 decode document content in order to store as binary type in DynamoDB. 
+ """ + if content := document.get("content"): + try: + decoded = base64.b64decode(document["content"].encode("utf-8")) + if base64.b64encode(decoded).decode("utf-8") == content: + document["content"] = decoded + else: + raise ValueError("Document content not b64 encoded") + except Exception as e: # noqa: BLE001 + self.log_object.write_log( + "DDB0031", sys.exc_info(), {"error": str(e), "internalID": internal_id} + ) + raise e + + def get_expire_at(self, delta, from_datetime=None): + """ + Returns an int timestamp to be used as an expireAt attribute. + This will determine when the item is deleted from the table. + """ + if not from_datetime: + from_datetime = datetime.now(timezone.utc) + + if not from_datetime.tzinfo: + from_datetime = datetime.combine( + from_datetime.date(), from_datetime.time(), timezone.utc + ) + + return int((from_datetime + delta).timestamp()) + + def build_document(self, internal_id, document, index): + """ + Build EPS Document object to be inserted into DynamoDB. + """ + document_copy = copy.deepcopy(document) + self.base64_decode_document_content(internal_id, document_copy) + + default_expire_at = self.get_expire_at(relativedelta(months=18)) + + item = { + Key.SK.name: SortKey.DOCUMENT.value, + ProjectedAttribute.INDEXES.name: self.convert_index_keys_to_lower_case(index), + ProjectedAttribute.BODY.name: document_copy, + ProjectedAttribute.EXPIRE_AT.name: default_expire_at, + } + + if index: + doc_ref_title, store_time = index[indexes.INDEX_STORE_TIME_DOC_REF_TITLE][0].split("_") + item[Attribute.DOC_REF_TITLE.name] = doc_ref_title + + if doc_ref_title == "ClaimNotification": + item[Attribute.CLAIM_NOTIFICATION_STORE_DATE.name] = store_time[:8] + + item[Attribute.STORE_TIME.name] = store_time + + delete_date = index[indexes.INDEX_DELETE_DATE][0] + delete_date_time = datetime.strptime(delete_date, TimeFormats.STANDARD_DATE_FORMAT) + item[ProjectedAttribute.EXPIRE_AT.name] = int(delete_date_time.timestamp()) + + return item + + @timer + def insert_eps_document_object(self, internal_id, document_key, document, index=None): + """ + Insert EPS Document object into the configured table. + """ + item = self.build_document(internal_id, document, index) + item[Key.PK.name] = document_key + return self.client.insert_items(internal_id, [item], True) + + def convert_index_keys_to_lower_case(self, index): + """ + Convert all keys in an index dict to lower case. + """ + if not isinstance(index, dict): + return index + return {key.lower(): index[key] for key in index} + + def build_record(self, prescription_id, record, record_type, indexes): + """ + Build EPS Record object to be inserted into DynamoDB. 
+ """ + record_key = prescription_id_without_check_digit(prescription_id) + + if not indexes: + indexes = record["indexes"] + instances = record["instances"].values() + + next_activity_nad = indexes["nextActivityNAD_bin"][0] + next_activity_nad_split = next_activity_nad.split("_") + next_activity = next_activity_nad_split[0] + next_activity_is_purge = next_activity.lower() == "purge" + + next_activity_shard = randint(1, NEXT_ACTIVITY_DATE_PARTITIONS) + sharded_next_activity = f"{next_activity}.{next_activity_shard}" + + scn = record["SCN"] + + compressed_record = zlib.compress(simplejson.dumps(record).encode("utf-8")) + + item = { + Key.PK.name: record_key, + Key.SK.name: SortKey.RECORD.value, + ProjectedAttribute.BODY.name: compressed_record, + Attribute.NEXT_ACTIVITY.name: sharded_next_activity, + ProjectedAttribute.SCN.name: scn, + ProjectedAttribute.INDEXES.name: self.convert_index_keys_to_lower_case(indexes), + } + if len(next_activity_nad_split) == 2: + item[Attribute.NEXT_ACTIVITY_DATE.name] = next_activity_nad_split[1] + + if next_activity_is_purge: + return item + + # POC - Leverage methods in PrescriptionRecord to get some/all of these. + creation_datetime_string = record["prescription"]["prescriptionTime"] + nhs_number = record["patient"]["nhsNumber"] + + prescriber_org = record["prescription"]["prescribingOrganization"] + + statuses = list(set([instance["prescriptionStatus"] for instance in instances])) + is_ready = PrescriptionStatus.TO_BE_DISPENSED in statuses + if PrescriptionStatus.TO_BE_DISPENSED in statuses: + statuses.remove(PrescriptionStatus.TO_BE_DISPENSED) + statuses.insert(0, PrescriptionStatus.TO_BE_DISPENSED) + status = self.SEPARATOR.join(statuses) + + dispenser_orgs = [] + for instance in instances: + org = instance.get("dispense", {}).get("dispensingOrganization") + if org: + dispenser_orgs.append(org) + dispenser_org = self.SEPARATOR.join(set(dispenser_orgs)) + + nominated_pharmacy = record.get("nomination", {}).get("nominatedPerformer") + + creation_datetime = convertSpineDate( + creation_datetime_string, TimeFormats.STANDARD_DATE_TIME_FORMAT + ) + creation_datetime_utc = datetime.combine( + creation_datetime.date(), creation_datetime.time(), timezone.utc + ) + expire_at = self.get_expire_at(relativedelta(months=18), creation_datetime_utc) + + item_update = { + Attribute.CREATION_DATETIME.name: creation_datetime_string, + Attribute.NHS_NUMBER.name: nhs_number, + Attribute.PRESCRIBER_ORG.name: prescriber_org, + ProjectedAttribute.STATUS.name: status, + Attribute.IS_READY.name: int(is_ready), + ProjectedAttribute.EXPIRE_AT.name: expire_at, + } + if dispenser_org: + item[Attribute.DISPENSER_ORG.name] = dispenser_org + if nominated_pharmacy: + item[Attribute.NOMINATED_PHARMACY.name] = nominated_pharmacy + if not dispenser_org: + item[Attribute.DISPENSER_ORG.name] = nominated_pharmacy + if record_type: + item["recordType"] = record_type + item["releaseVersion"] = determine_release_version(prescription_id) + + item.update(item_update) + return item + + @timer + def insert_eps_record_object( + self, internal_id, prescription_id, record, index=None, record_type=None, is_update=False + ): + """ + Insert EPS Record object into the configured table. + """ + item = self.build_record(prescription_id, record, record_type, index) + + return self.client.insert_items(internal_id, [item], is_update) + + @timer + def insert_eps_work_list(self, internal_id, message_id, work_list, index=None): + """ + Insert EPS WorkList object into the configured table. 
+ """ + work_list_indexes = {self.INDEX_WORKLISTDATE: [timeNowAsString()]} + if index: + work_list_indexes = index + + expire_at = self.get_expire_at(timedelta(days=self.DEFAULT_EXPIRY_DAYS)) + item = { + Key.PK.name: message_id, + Key.SK.name: SortKey.WORK_LIST.value, + ProjectedAttribute.EXPIRE_AT.name: expire_at, + ProjectedAttribute.BODY.name: self.compress_work_list_xml(internal_id, work_list), + ProjectedAttribute.INDEXES.name: self.convert_index_keys_to_lower_case( + work_list_indexes + ), + } + return self.client.insert_items(internal_id, [item], True) + + @timer + def is_record_present(self, internal_id, prescription_id) -> bool: + """ + Returns a boolean indicating the presence of a record. + """ + record_key = prescription_id_without_check_digit(prescription_id) + record = self.client.get_item( + internal_id, record_key, SortKey.RECORD.value, expect_exists=False + ) + return True if record else False + + @timer + def return_terms_by_nhs_number_date(self, internal_id, range_start, range_end, term_regex=None): + """ + Return the epsRecord terms which match the supplied range and regex for the nhsNumberDate index. + """ + return self.return_terms_by_index_date( + internal_id, indexes.INDEX_NHSNUMBER_DATE, range_start, range_end, term_regex + ) + + @timer + def return_terms_by_index_date( + self, _internal_id, index, range_start, range_end=None, term_regex=None + ): + """ + Return the epsRecord terms which match the supplied range and regex for the supplied index. + """ + index_map = { + indexes.INDEX_NHSNUMBER_PRDSDATE: self.indexes.nhs_number_presc_disp_date, + indexes.INDEX_NHSNUMBER_PRDATE: self.indexes.nhs_number_presc_date, + indexes.INDEX_NHSNUMBER_DSDATE: self.indexes.nhs_number_disp_date, + indexes.INDEX_NHSNUMBER_DATE: self.indexes.nhs_number_date, + indexes.INDEX_PRESCRIBER_DSDATE: self.indexes.presc_disp_date, + indexes.INDEX_PRESCRIBER_DATE: self.indexes.presc_date, + indexes.INDEX_DISPENSER_DATE: self.indexes.disp_date, + indexes.INDEX_NOMPHARM: self.indexes.nom_pharm_status, + } + return index_map[index](range_start, range_end, term_regex) + + @timer + def return_terms_by_nhs_number(self, _internal_id, nhs_number): + """ + Return the epsRecord terms which match the supplied NHS number. + """ + return self.indexes.query_nhs_number_date(indexes.INDEX_NHSNUMBER, nhs_number) + + @timer + def return_pids_for_nomination_change(self, internal_id, nhs_number): + """ + Return the epsRecord list which match the supplied NHS number. + """ + pid_list = self.return_terms_by_nhs_number(internal_id, nhs_number) + + prescriptions = [] + + for pid in pid_list: + prescriptions.append(pid[1]) + + return prescriptions + + def get_nominated_pharmacy_records(self, nominated_pharmacy, batch_size, internal_id): + """ + Run an index query to get the to-be-dispensed prescriptions for this nominated pharmacy. + """ + key_list = self.get_nom_pharm_records_unfiltered(internal_id, nominated_pharmacy) + discarded_key_count = max((len(key_list) - int(batch_size)), 0) + key_list = key_list[:batch_size] + return [key_list, discarded_key_count] + + @timer + def get_nom_pharm_records_unfiltered(self, _internal_id, nominated_pharmacy, limit=None): + """ + Query the nomPharmStatus index to get the unfiltered, to-be-dispensed prescriptions for the given pharmacy. + """ + return self.indexes.query_nom_pharm_status(nominated_pharmacy, limit=limit) + + @timer + def return_record_for_process(self, internal_id, prescription_id, expect_exists=True): + """ + Look for and return an epsRecord object. 
+ """ + record_key = prescription_id_without_check_digit(prescription_id) + item = self.client.get_item( + internal_id, record_key, SortKey.RECORD.value, expect_exists=expect_exists + ) + if not item: + return {} + body = item.get(ProjectedAttribute.BODY.name) + if body and not isinstance(body, dict): + body = simplejson.loads(zlib.decompress(bytes(body))) + + return self._build_record_to_return(item, body) + + def _build_record_to_return(self, item, body): + """ + Create the record in the format expected by the calling code. + """ + replace_decimals(body) + + record = {"value": body, "vectorClock": "vc"} + + if record_type := item.get("recordType"): + record["recordType"] = record_type + + sharded_release_version = item.get( + "releaseVersion", determine_release_version(item.get(Key.PK.name)) + ) + record["releaseVersion"] = sharded_release_version.split(".")[0] + + return record + + def base64_encode_document_content(self, internal_id, document_body): + """ + base64 encode document content and convert to string, to align with return type of original datastore. + """ + if document_body and not isinstance(document_body.get("content"), str): + try: + document_body["content"] = base64.b64encode(bytes(document_body["content"])).decode( + "utf-8" + ) + except Exception as e: # noqa: BLE001 + self.log_object.write_log( + "DDB0032", sys.exc_info(), {"error": str(e), "internalID": internal_id} + ) + raise e + + @timer + def return_document_for_process(self, internal_id, document_key, expect_exists=True): + """ + Look for and return an epsDocument object. + """ + item = self.client.get_item( + internal_id, + document_key, + SortKey.DOCUMENT.value, + expect_none=True, + expect_exists=expect_exists, + ) + if not item: + return {} + + body = item.get(ProjectedAttribute.BODY.name) + replace_decimals(body) + + if item.get(Attribute.DOC_REF_TITLE.name, "").lower() != "claimnotification": + self.base64_encode_document_content(internal_id, body) + elif isinstance(body.get("payload"), Binary): + body["payload"] = body["payload"].value.decode("utf-8") + + return body + + @timer + def return_record_for_update(self, internal_id, prescription_id): + """ + Look for and return an epsRecord object, + but with dataObject on self so that an update can be applied. + """ + record_key = prescription_id_without_check_digit(prescription_id) + item = self.client.get_item(internal_id, record_key, SortKey.RECORD.value) + body = item.get(ProjectedAttribute.BODY.name) + if body and not isinstance(body, dict): + body = simplejson.loads(zlib.decompress(bytes(body))) + + self.dataObject = body + return self._build_record_to_return(item, body) + + def get_prescription_record_data(self, internal_id, prescription_id, expect_exists=True): + """ + Gets the prescription record from the data store and return just the data. + :expect_exists defaulted to True. Thus we expect the key should already exist, if + no matches are found DDB will throw a EpsDataStoreError (Missing Record). + """ + record_key = prescription_id_without_check_digit(prescription_id) + data_object = self.client.get_item( + internal_id, record_key, SortKey.RECORD.value, expect_exists=expect_exists + ) + + if data_object is None: + return None + + return data_object + + @timer + def get_work_list(self, internal_id, message_id): + """ + Look for and return a workList object. 
+ """ + item = self.client.get_item( + internal_id, message_id, SortKey.WORK_LIST.value, expect_exists=False, expect_none=True + ) + if item is None: + return None + + if body := item.get(ProjectedAttribute.BODY.name): + replace_decimals(body) + self.decompress_work_list_xml(internal_id, body) + return body + + @timer + def compress_work_list_xml(self, _internal_id, work_list): + """ + Compresses the XML contained in the work list, if present. Maintains original responseDetails on context. + """ + work_list_deep_copy = copy.deepcopy(work_list) + xml_bytes = work_list_deep_copy.get("responseDetails", {}).get("XML") + + if xml_bytes: + if isinstance(xml_bytes, str): + xml_bytes = xml_bytes.encode("utf-8") + compressed_xml = zlib.compress(xml_bytes) + work_list_deep_copy["responseDetails"]["XML"] = compressed_xml + return work_list_deep_copy + + @timer + def decompress_work_list_xml(self, _internal_id, body): + """ + Decompresses the XML contained in the work list, if present. + """ + compressed_xml = body.get("responseDetails", {}).get("XML") + + if compressed_xml: + decompressed_xml = zlib.decompress(bytes(compressed_xml)) + body["responseDetails"]["XML"] = decompressed_xml + + def _fetch_next_sequence_number(self, internal_id, key, max_sequence_number, read_only=False): + """ + Fetch the next sequence number from a given key. + """ + item = self.client.get_item( + internal_id, key, SortKey.SEQUENCE_NUMBER.value, expect_exists=False + ) + is_update = True + if not item: + item = { + Key.PK.name: key, + Key.SK.name: SortKey.SEQUENCE_NUMBER.value, + Attribute.SEQUENCE_NUMBER.name: 1, + } + is_update = False + else: + replace_decimals(item) + sequence_number = item[Attribute.SEQUENCE_NUMBER.name] + item[Attribute.SEQUENCE_NUMBER.name] = ( + sequence_number + 1 if sequence_number < max_sequence_number else 1 + ) + + if not read_only: + tries = 0 + while True: + try: + self.client.insert_items(internal_id, [item], is_update, False) + break + except EpsDataStoreError as e: + if e.errorTopic == EpsDataStoreError.CONDITIONAL_UPDATE_FAILURE and tries < 25: + sequence_number = item[Attribute.SEQUENCE_NUMBER.name] + item[Attribute.SEQUENCE_NUMBER.name] = ( + sequence_number + 1 if sequence_number < max_sequence_number else 1 + ) + tries += 1 + else: + raise + + return item[Attribute.SEQUENCE_NUMBER.name] + + @timer + def fetch_next_sequence_number(self, internal_id, max_sequence_number, read_only=False): + """ + Fetch the next sequence number for a batch claim message. + ONLY SINGLETON WORKER PROCESSES SHOULD CALL THIS - IT IS NOT AN ATOMIC ACTION. + """ + return self._fetch_next_sequence_number( + internal_id, self.CLAIM_SEQUENCE_NUMBER_KEY, max_sequence_number, read_only + ) + + @timer + def fetch_next_sequence_number_nwssp(self, internal_id, max_sequence_number, read_only=False): + """ + Fetch the next sequence number for a welsh batch claim message + + ONLY SINGLETON WORKER PROCESSES SHOULD CALL THIS - IT IS NOT AN ATOMIC ACTION + """ + return self._fetch_next_sequence_number( + internal_id, self.NWSSP_CLAIM_SEQUENCE_NUMBER_KEY, max_sequence_number, read_only + ) + + @timer + def store_batch_claim(self, internal_id, batch_claim_original): + """ + batchClaims need to be stored by their GUIDs with a claims sort key. + They also require an index value for each claimID in the batch. + A further index value is added with sequence number, for batch resend functionality. 
+ """ + batch_claim = copy.deepcopy(batch_claim_original) + key = batch_claim["Batch GUID"] + + claim_id_index_terms = batch_claim["Claim ID List"] + handle_time_index_term = batch_claim["Handle Time"] + sequence_number = batch_claim["Sequence Number"] + index_scn_value = f"{timeNowAsString()}|{sequence_number}" + + nwssp = "Nwssp Sequence Number" in batch_claim + nwssp_sequence_number = batch_claim.get("Nwssp Sequence Number") + expire_at = self.get_expire_at(timedelta(days=self.DEFAULT_EXPIRY_DAYS)) + + indexes = { + self.INDEX_CLAIMID: claim_id_index_terms, + self.INDEX_CLAIMHANDLETIME: [handle_time_index_term], + self.INDEX_CLAIM_SEQNUMBER: [sequence_number], + self.INDEX_SCN: [index_scn_value], + } + if nwssp: + indexes[self.INDEX_CLAIM_SEQNUMBER_NWSSP] = [nwssp_sequence_number] + + if batch_claim.get("Claim Metadata") and not batch_claim.get("Backward Incompatible"): + batch_claim["Batch XML"] = "" + + item = { + Key.PK.name: key, + Key.SK.name: SortKey.CLAIM.value, + ProjectedAttribute.BODY.name: batch_claim, + ProjectedAttribute.EXPIRE_AT.name: expire_at, + ProjectedAttribute.CLAIM_IDS.name: claim_id_index_terms, + ProjectedAttribute.INDEXES.name: self.convert_index_keys_to_lower_case(indexes), + Attribute.BATCH_CLAIM_ID.name: key, + } + if nwssp: + item[Attribute.SEQUENCE_NUMBER_NWSSP.name] = nwssp_sequence_number + else: + item[Attribute.SEQUENCE_NUMBER.name] = sequence_number + + try: + self.client.insert_items(internal_id, [item], True) + except Exception: # noqa: BLE001 + self.log_object.write_log("EPS0279", sys.exc_info(), {"internalID": key}) + return False + return True + + def fetch_batch_claim(self, internal_id, batch_claim_id): + """ + Retrieves the batch claim and returns the batch message for the calling application to handle. + """ + item = self.client.get_item( + internal_id, batch_claim_id, SortKey.CLAIM.value, expect_exists=False + ) + if not item: + return {} + + body = item.get(ProjectedAttribute.BODY.name) + replace_decimals(body) + batch_xml = body["Batch XML"] + + if not isinstance(batch_xml, str): + try: + body["Batch XML"] = bytes(batch_xml).decode("utf-8") + except Exception as e: # noqa: BLE001 + self.log_object.write_log( + "DDB0033", sys.exc_info(), {"error": str(e), "internalID": internal_id} + ) + raise e + + return body + + @timer + def delete_claim_notification(self, internal_id, claim_id): + """ + Delete the claim notification document from the table, and return True if the deletion was successful. + """ + try: + self.client.delete_item( + self.NOTIFICATION_PREFIX + str(claim_id), SortKey.DOCUMENT.value + ) + except Exception: # noqa: BLE001 + self.log_object.write_log( + "EPS0289", sys.exc_info(), {"claimID": claim_id, "internalID": internal_id} + ) + return False + return True + + @timer + def delete_document(self, internal_id, document_key, delete_notification=False): + """ + Delete a document from the table. Return a boolean indicator of success. 
+ """ + if ( + str(document_key).lower().startswith(self.NOTIFICATION_PREFIX.lower()) + and not delete_notification + ): + return True + + item = self.client.get_item( + internal_id, document_key, SortKey.DOCUMENT.value, expect_exists=False + ) + + if not item: + self.log_object.write_log( + "EPS0601b", None, {"documentRef": document_key, "internalID": internal_id} + ) + return False + + self.log_object.write_log( + "EPS0601", None, {"documentRef": document_key, "internalID": internal_id} + ) + self.client.delete_item(document_key, SortKey.DOCUMENT.value) + return True + + @timer + def delete_record(self, internal_id, record_key): + """ + Delete a record from the table. + """ + self.log_object.write_log( + "EPS0602", None, {"recordRef": record_key, "internalID": internal_id} + ) + self.client.delete_item(record_key, SortKey.RECORD.value) + + @timer + def return_pids_due_for_next_activity( + self, _internal_id, next_activity_start, next_activity_end + ): + """ + Returns all the epsRecord keys for prescriptions whose nextActivity is the same as that provided, + and whose next activity date is within the date range provided. + """ + return self.indexes.query_next_activity_date(next_activity_start, next_activity_end) + + @timer + def return_prescription_ids_for_nom_pharm(self, _internal_id, nominated_pharmacy_index_term): + """ + Returns the epsRecord keys relating to the given nominated pharmacy term. + """ + ods_code = nominated_pharmacy_index_term.split("_")[0] + return self.indexes.query_nom_pharm_status(ods_code) + + @timer + def return_claim_notification_ids_between_store_dates(self, internal_id, start_date, end_date): + """ + Returns all the epsDocument keys for claim notification documents whose store dates are in the given window. + """ + return self.indexes.query_claim_notification_store_time(internal_id, start_date, end_date) + + @timer + def get_all_pids_by_nominated_pharmacy(self, _internal_id, nominated_pharmacy): + """ + Run an index query to get all prescriptions for this nominated pharmacy. + """ + return self.indexes.query_nom_pharm_status(nominated_pharmacy, True) + + @timer + def check_item_exists(self, internal_id, pk, sk, expect_exists) -> bool: + """ + Returns False as covered by condition expression. + """ + item = self.client.get_item(internal_id, pk, sk, expect_exists) + if item: + return True + return False + + def find_batch_claim_from_seq_number(self, sequence_number, nwssp=False): + """ + Run a query against the sequence number index looking for the + batch GUID (key) on the basis of sequence number. 
+ """ + return self.indexes.query_batch_claim_id_sequence_number(sequence_number, nwssp) diff --git a/src/eps_spine_shared/common/dynamodb_index.py b/src/eps_spine_shared/common/dynamodb_index.py new file mode 100644 index 0000000..551ada2 --- /dev/null +++ b/src/eps_spine_shared/common/dynamodb_index.py @@ -0,0 +1,433 @@ +import re +from datetime import datetime, timedelta +from typing import Tuple + +from boto3.dynamodb.conditions import Attr +from boto3.dynamodb.conditions import Key as BotoKey + +from eps_spine_shared.common import indexes +from eps_spine_shared.common.dynamodb_client import EpsDynamoDbClient +from eps_spine_shared.common.dynamodb_common import ( + GSI, + NEXT_ACTIVITY_DATE_PARTITIONS, + Attribute, + Key, + ProjectedAttribute, + SortKey, +) +from eps_spine_shared.common.prescription.record import PrescriptionStatus +from eps_spine_shared.logger import EpsLogger +from eps_spine_shared.nhsfundamentals.timeutilities import TimeFormats + + +class EpsDynamoDbIndex: + """ + The prescriptions message store specific DynamoDB client. + """ + + def __init__(self, log_object, client: EpsDynamoDbClient): + """ + Instantiate the DynamoDB client. + """ + self.log_object = EpsLogger(log_object) + self.client = client + + def nhs_number_date(self, range_start, range_end, term_regex): + """ + Query the nhsNumberDate index. + """ + # POC - Use context in these methods, rather than range_start and range_end. + nhs_number, start_date = range_start.split(indexes.SEPERATOR) + end_date = range_end.split(indexes.SEPERATOR)[-1] + + return self.query_nhs_number_date( + indexes.INDEX_NHSNUMBER_DATE, nhs_number, start_date, end_date, term_regex=term_regex + ) + + def nhs_number_presc_disp_date(self, range_start, range_end, term_regex): + """ + Query the nhsNumberDate index, filtering on prescriber and dispenser. + """ + nhs_number, prescriber_org, dispenser_org, start_date = range_start.split(indexes.SEPERATOR) + end_date = range_end.split(indexes.SEPERATOR)[-1] + filter_expression = Attr(Attribute.PRESCRIBER_ORG.name).eq(prescriber_org) & Attr( + Attribute.DISPENSER_ORG.name + ).contains(dispenser_org) + + return self.query_nhs_number_date( + indexes.INDEX_NHSNUMBER_PRDSDATE, + nhs_number, + start_date, + end_date, + filter_expression, + term_regex, + ) + + def nhs_number_presc_date(self, range_start, range_end, term_regex): + """ + Query the nhsNumberDate index, filtering on prescriber. + """ + nhs_number, prescriber_org, start_date = range_start.split(indexes.SEPERATOR) + end_date = range_end.split(indexes.SEPERATOR)[-1] + filter_expression = Attr(Attribute.PRESCRIBER_ORG.name).eq(prescriber_org) + + return self.query_nhs_number_date( + indexes.INDEX_NHSNUMBER_PRDATE, + nhs_number, + start_date, + end_date, + filter_expression, + term_regex, + ) + + def nhs_number_disp_date(self, range_start, range_end, term_regex): + """ + Query the nhsNumberDate index, filtering on dispenser. + """ + nhs_number, dispenser_org, start_date = range_start.split(indexes.SEPERATOR) + end_date = range_end.split(indexes.SEPERATOR)[-1] + filter_expression = Attr(Attribute.DISPENSER_ORG.name).contains(dispenser_org) + + return self.query_nhs_number_date( + indexes.INDEX_NHSNUMBER_DSDATE, + nhs_number, + start_date, + end_date, + filter_expression, + term_regex, + ) + + def presc_disp_date(self, range_start, range_end, term_regex): + """ + Query the prescriberDate index, filtering on dispenser. 
+ """ + prescriber_org, dispenser_org, start_date = range_start.split(indexes.SEPERATOR) + end_date = range_end.split(indexes.SEPERATOR)[-1] + filter_expression = Attr(Attribute.DISPENSER_ORG.name).contains(dispenser_org) + + return self.query_prescriber_date( + indexes.INDEX_PRESCRIBER_DSDATE, + prescriber_org, + start_date, + end_date, + filter_expression, + term_regex, + ) + + def presc_date(self, range_start, range_end, term_regex): + """ + Query the prescriberDate index. + """ + prescriber_org, start_date = range_start.split(indexes.SEPERATOR) + end_date = range_end.split(indexes.SEPERATOR)[-1] + + return self.query_prescriber_date( + indexes.INDEX_PRESCRIBER_DATE, + prescriber_org, + start_date, + end_date, + term_regex=term_regex, + ) + + def disp_date(self, range_start, range_end, term_regex): + """ + Query the dispenserDate index. + """ + dispenser_org, start_date = range_start.split(indexes.SEPERATOR) + end_date = range_end.split(indexes.SEPERATOR)[-1] + + return self.query_dispenser_date( + indexes.INDEX_DISPENSER_DATE, dispenser_org, start_date, end_date, term_regex=term_regex + ) + + def nom_pharm_status(self, range_start, _, term_regex): + """ + Query the nomPharmStatus index for terms. + """ + ods_code, status = range_start.split("_") + + return self.query_nom_pharm_status_terms( + indexes.INDEX_NOMPHARM, ods_code, status, term_regex=term_regex + ) + + def build_terms(self, items, index_name, term_regex): + """ + Build terms from items returned by the index query. + """ + # POC - Project the body into the index and do away with 'terms' altogether. + terms = [] + for item in items: + index_terms = item.get(ProjectedAttribute.INDEXES.name, {}).get(index_name.lower()) + if not index_terms: + continue + [ + terms.append((index_term, item[Key.PK.name])) + for index_term in index_terms + # POC - term_regex can be replaced by filter expressions for status and releaseVersion. + if ((not term_regex) or re.search(term_regex, index_term)) + ] + return terms + + def pad_or_trim_date(self, date): + """ + Ensure the date length is fourteen characters, if present. + """ + if not date: + return None + + if len(date) >= 14: + return date[:14] + + while len(date) < 14: + date = date + "0" + return date + + def query_nhs_number_date( + self, + index, + nhs_number, + start_date=None, + end_date=None, + filter_expression=None, + term_regex=None, + ): + """ + Return the epsRecord terms which match the supplied range and regex for the nhsNumberDate index. 
+ """ + start_date, end_date = [self.pad_or_trim_date(date) for date in [start_date, end_date]] + + pk_expression = BotoKey(Attribute.NHS_NUMBER.name).eq(nhs_number) + sk_expression = None + if start_date and end_date: + [valid, sk_expression] = self._get_valid_range_condition( + Attribute.CREATION_DATETIME.name, start_date, end_date + ) + + if not valid: + return [] + elif start_date: + sk_expression = BotoKey(Attribute.CREATION_DATETIME.name).gte(start_date) + elif end_date: + sk_expression = BotoKey(Attribute.CREATION_DATETIME.name).lte(end_date) + + key_condition_expression = ( + pk_expression if not sk_expression else pk_expression & sk_expression + ) + items = self.client.query_index( + GSI.NHS_NUMBER_DATE.name, key_condition_expression, filter_expression + ) + + return self.build_terms(items, index, term_regex) + + def query_prescriber_date( + self, index, prescriber_org, start_date, end_date, filter_expression=None, term_regex=None + ): + """ + Return the epsRecord terms which match the supplied range and regex for the prescriberDate index. + """ + start_date, end_date = [self.pad_or_trim_date(date) for date in [start_date, end_date]] + + pk_expression = BotoKey(Attribute.PRESCRIBER_ORG.name).eq(prescriber_org) + [valid, sk_expression] = self._get_valid_range_condition( + Attribute.CREATION_DATETIME.name, start_date, end_date + ) + + if not valid: + return [] + + items = self.client.query_index( + GSI.PRESCRIBER_DATE.name, pk_expression & sk_expression, filter_expression + ) + + return self.build_terms(items, index, term_regex) + + def query_dispenser_date( + self, index, dispenser_org, start_date, end_date, filter_expression=None, term_regex=None + ): + """ + Return the epsRecord terms which match the supplied range and regex for the dispenserDate index. + """ + start_date, end_date = [self.pad_or_trim_date(date) for date in [start_date, end_date]] + + pk_expression = BotoKey(Attribute.DISPENSER_ORG.name).eq(dispenser_org) + [valid, sk_expression] = self._get_valid_range_condition( + Attribute.CREATION_DATETIME.name, start_date, end_date + ) + + if not valid: + return [] + + items = self.client.query_index( + GSI.DISPENSER_DATE.name, pk_expression & sk_expression, filter_expression + ) + + return self.build_terms(items, index, term_regex) + + def query_nom_pharm_status(self, ods_code, all_statuses=False, limit=None): + """ + Return the nomPharmStatus prescription keys which match the supplied ODS code. + Query using the nominatedPharmacyStatus index. If all_statuses is False, only return prescriptions + with status TO_BE_DISPENSED (0001). + """ + key_condition_expression = BotoKey(Attribute.NOMINATED_PHARMACY.name).eq(ods_code) + + is_ready_condition = ( + BotoKey(Attribute.IS_READY.name).eq(int(True)) + if not all_statuses + else BotoKey(Attribute.IS_READY.name).between(0, 1) + ) + key_condition_expression = key_condition_expression & is_ready_condition + + items = self.client.query_index_with_limit( + GSI.NOMINATED_PHARMACY_STATUS.name, key_condition_expression, None, limit + ) + + return [item[Key.PK.name] for item in items] + + def query_nom_pharm_status_terms(self, index, ods_code, status, term_regex=None): + """ + Return the nomPharmStatus terms which match the supplied ODS code and status. + Query using the nominatedPharmacyStatus index, with is_ready derived from the status. 
+ """ + is_ready = status == PrescriptionStatus.TO_BE_DISPENSED + + key_condition_expression = BotoKey(Attribute.NOMINATED_PHARMACY.name).eq( + ods_code + ) & BotoKey(Attribute.IS_READY.name).eq(int(is_ready)) + + filter_expression = Attr(ProjectedAttribute.STATUS.name).contains(status) + + items = self.client.query_index( + GSI.NOMINATED_PHARMACY_STATUS.name, key_condition_expression, filter_expression + ) + + return self.build_terms(items, index, term_regex) + + def query_claim_id(self, claim_id): + """ + Search for an existing batch claim containing the given claim_id. + """ + key_condition_expression = BotoKey(Key.SK.name).eq(SortKey.CLAIM.value) + filter_expression = Attr(ProjectedAttribute.CLAIM_IDS.name).contains(claim_id) + + items = self.client.query_index( + GSI.CLAIM_ID.name, key_condition_expression, filter_expression + ) + + return [item[Key.PK.name] for item in items] + + def query_next_activity_date(self, range_start, range_end): + """ + Yields the epsRecord keys which match the supplied nextActivity and date range for the nextActivity index. + + nextActivity is suffix-sharded with NEXT_ACTIVITY_DATE_PARTITIONS to avoid hot partitions on ddb. + This means NEXT_ACTIVITY_DATE_PARITIONS + 1 queries are performed, one for each partition + and one for the non-partitioned nextActivityDate index. + """ + next_activity, start_date = range_start.split("_") + end_date = range_end.split("_")[-1] + + [valid, sk_expression] = self._get_valid_range_condition( + Attribute.NEXT_ACTIVITY_DATE.name, start_date, end_date + ) + + if not valid: + return [] + + shards = [None] + list(range(1, NEXT_ACTIVITY_DATE_PARTITIONS + 1)) + + for shard in shards: + yield from self._query_next_activity_date_shard(next_activity, sk_expression, shard) + + def _query_next_activity_date_shard(self, next_activity, sk_expression, shard): + """ + Return a generator for the epsRecord keys which match the supplied nextActivity and date range + for a given pk shard. + """ + expected_next_activity = next_activity if shard is None else f"{next_activity}.{shard}" + pk_expression = BotoKey(Attribute.NEXT_ACTIVITY.name).eq(expected_next_activity) + + return self.client.query_index_yield( + GSI.NEXT_ACTIVITY_DATE.name, pk_expression & sk_expression + ) + + def _get_date_range_for_query(self, start_datetime_str, end_datetime_str): + """ + Get days included in the given range. For use in claimNotificationStoreTime index query. + """ + start_datetime = datetime.strptime( + start_datetime_str, TimeFormats.STANDARD_DATE_TIME_FORMAT + ) + end_datetime = datetime.strptime(end_datetime_str, TimeFormats.STANDARD_DATE_TIME_FORMAT) + + return [ + (start_datetime + timedelta(days=d)).strftime(TimeFormats.STANDARD_DATE_FORMAT) + for d in range((end_datetime.date() - start_datetime.date()).days + 1) + ] + + def query_claim_notification_store_time( + self, internal_id, start_datetime_str, end_datetime_str + ): + """ + Search for claim notification documents whose store times fall within the specified window. 
+ """ + [valid, sk_expression] = self._get_valid_range_condition( + Attribute.STORE_TIME.name, start_datetime_str, end_datetime_str + ) + + if not valid: + return [] + + dates = self._get_date_range_for_query(start_datetime_str, end_datetime_str) + generators = [] + + for date in dates: + pk_expression = BotoKey(Attribute.CLAIM_NOTIFICATION_STORE_DATE.name).eq(date) + self.log_object.write_log( + "DDB0013", + None, + { + "date": date, + "startTime": start_datetime_str, + "endTime": end_datetime_str, + "internalID": internal_id, + }, + ) + generators.append( + self.client.query_index_yield( + GSI.CLAIM_NOTIFICATION_STORE_TIME.name, pk_expression & sk_expression, None + ) + ) + + for generator in generators: + yield from generator + + def _get_valid_range_condition(self, key, start, end) -> Tuple[bool, object]: + """ + Returns a range condition if the start < end + """ + if end == start: + return True, BotoKey(key).eq(start) + if end < start: + return False, None + else: + return True, BotoKey(key).between(start, end) + + def query_batch_claim_id_sequence_number(self, sequence_number, nwssp=False): + """ + Query the claimIdSequenceNumber index for batch claim IDs based on sequence number. + """ + index_name = ( + GSI.CLAIM_ID_SEQUENCE_NUMBER_NWSSP.name if nwssp else GSI.CLAIM_ID_SEQUENCE_NUMBER.name + ) + key_name = Attribute.SEQUENCE_NUMBER_NWSSP.name if nwssp else Attribute.SEQUENCE_NUMBER.name + + key_condition_expression = BotoKey(key_name).eq(sequence_number) + + items = self.client.query_index(index_name, key_condition_expression, None) + + return [ + item[Key.PK.name] + for item in items + if item[Key.PK.name] not in ["claimSequenceNumber", "claimSequenceNumberNwssp"] + ] diff --git a/src/eps_spine_shared/common/indexes.py b/src/eps_spine_shared/common/indexes.py new file mode 100644 index 0000000..49f53c4 --- /dev/null +++ b/src/eps_spine_shared/common/indexes.py @@ -0,0 +1,246 @@ +from eps_spine_shared.errors import EpsSystemError +from eps_spine_shared.logger import EpsLogger +from eps_spine_shared.nhsfundamentals.timeutilities import timeNowAsString + +INDEX_NHSNUMBER_DATE = "nhsNumberDate_bin" +INDEX_NHSNUMBER_PRDATE = "nhsNumberPrescriberDate_bin" +INDEX_NHSNUMBER_PRDSDATE = "nhsNumberPrescDispDate_bin" +INDEX_NHSNUMBER_DSDATE = "nhsNumberDispenserDate_bin" +INDEX_PRESCRIBER_DATE = "prescriberDate_bin" +INDEX_PRESCRIBER_DSDATE = "prescDispDate_bin" +INDEX_PRESCRIBER_STATUS = "prescribingSiteStatus_bin" +INDEX_DISPENSER_DATE = "dispenserDate_bin" +INDEX_DISPENSER_STATUS = "dispensingSiteStatus_bin" + +INDEX_NEXTACTIVITY = "nextActivityNAD_bin" +INDEX_NOMPHARM = "nomPharmStatus_bin" +INDEX_NHSNUMBER = "nhsNumber_bin" +INDEX_DELETE_DATE = "backstopdeletedate_bin" +INDEX_PRESCRIPTION_ID = "prescriptionid_bin" +INDEX_STORE_TIME_DOC_REF_TITLE = "storetimebydocreftitle_bin" + +REGEX_INDICES = [ + INDEX_NHSNUMBER_DATE, + INDEX_NHSNUMBER_PRDATE, + INDEX_NHSNUMBER_PRDSDATE, + INDEX_NHSNUMBER_DSDATE, + INDEX_PRESCRIBER_DATE, + INDEX_PRESCRIBER_DSDATE, + INDEX_DISPENSER_DATE, +] + +SEPERATOR = "|" +INDEX_DELTA = "delta_bin" + + +class EpsIndexFactory(object): + """ + Factory for building index details for prescription record + """ + + def __init__(self, log_object, internal_id, test_prescribing_sites, nad_reference): + """ + Make internal_id available for logging in indexer + Requires nad_reference - a set of timedeltas to be used when calculating the next + activity index + requires test_prescribing_sites - used to differentiate for claims + """ + self.log_object = EpsLogger(log_object) + 
self.internal_id = internal_id + self.test_prescribing_sites = test_prescribing_sites + self.nad_reference = nad_reference + + def build_indexes(self, context): + """ + Create the index values to be used when storing the epsRecord. There may be + separate index terms for each individual instance (but only unique index terms + for the prescription should be returned). + + There are four potential indexes for the epsRecord store: + nextActivityNAD - the next activity which is due for this prescription and the + date which it is due (should only contain a single term) + prescribingSiteStatus - the statuses of the prescription concatenated with the + prescribing site (to be used in reporting and troubleshooting) + dispensingSiteStatus - as above (not added until release has occurred) + nomPharmStatus - as above for any nominated pharmacy (may also be used when bulk + changes in nomination occur) + nhsNumber - to be used when managing changes in nomination + delta - to be used when confirming changes are synchronised between clusters + """ + index_dict = {} + try: + self._add_prescibing_site_status_index(context.epsRecord, index_dict) + self._add_dispensing_site_status_index(context.epsRecord, index_dict) + self._add_nominated_pharmacy_status_index(context.epsRecord, index_dict) + self._add_next_activity_next_activity_date_index(context, index_dict) + self._add_nhs_number_index(context.epsRecord, index_dict) + + # Adding extra indexes for prescription search + # overloading each of these indexes with Release version and prescription status in preparation for + # Riak 1.4 + self._add_nhs_number_date_index(context.epsRecord, index_dict) + self._add_nhs_number_prescriber_date_index(context.epsRecord, index_dict) + self._add_nhs_number_prescriber_dispenser_date_index(context.epsRecord, index_dict) + self._add_nhs_number_dispenser_date_index(context.epsRecord, index_dict) + self._add_prescriber_date_index(context.epsRecord, index_dict) + self._add_prescriber_dispenser_date_index(context.epsRecord, index_dict) + self._add_dispenser_date_index(context.epsRecord, index_dict) + self._add_delta_index(context.epsRecord, index_dict) + except EpsSystemError as e: + self.log_object.write_log( + "EPS0124", None, {"internalID": self.internal_id, "creatingIndex": e.errorTopic} + ) + raise EpsSystemError(EpsSystemError.MESSAGE_FAILURE) from e + + return index_dict + + def _add_nhs_number_date_index(self, eps_record, index_dict): + """ + See build_indexes + """ + nhs_number = eps_record.return_nhs_number() + prescription_time = eps_record.return_prescription_time() + nhs_number_date_bin = nhs_number + SEPERATOR + prescription_time + index_dict[INDEX_NHSNUMBER_DATE] = eps_record.add_release_and_status(nhs_number_date_bin) + + def _add_nhs_number_prescriber_date_index(self, eps_record, index_dict): + """ + See build_indexes + """ + nhs_number = eps_record.return_nhs_number() + prescriber = eps_record.return_prescribing_organisation() + prescription_time = eps_record.return_prescription_time() + index = nhs_number + SEPERATOR + prescriber + SEPERATOR + prescription_time + new_indexes = eps_record.add_release_and_status(index) + index_dict[INDEX_NHSNUMBER_PRDATE] = new_indexes + + def _add_nhs_number_prescriber_dispenser_date_index(self, eps_record, index_dict): + """ + See build_indexes + """ + result_list = eps_record.return_nhs_number_prescriber_dispenser_date_index() + [success, nhs_number_presc_disp_date_bin] = result_list + if not success: + raise EpsSystemError(INDEX_NHSNUMBER_PRDSDATE) + if 
nhs_number_presc_disp_date_bin: + new_indexes = eps_record.add_release_and_status(nhs_number_presc_disp_date_bin, False) + index_dict[INDEX_NHSNUMBER_PRDSDATE] = new_indexes + + def _add_prescriber_date_index(self, eps_record, index_dict): + """ + See build_indexes + """ + prescriber = eps_record.return_prescribing_organisation() + prescription_time = eps_record.return_prescription_time() + prescriber_date_bin = prescriber + SEPERATOR + prescription_time + index_dict[INDEX_PRESCRIBER_DATE] = eps_record.add_release_and_status(prescriber_date_bin) + + def _add_nhs_number_dispenser_date_index(self, eps_record, index_dict): + """ + See build_indexes + """ + result_list = eps_record.return_nhs_number_dispenser_date_index() + [success, nhs_number_dispenser_date_bin] = result_list + if not success: + raise EpsSystemError(INDEX_NHSNUMBER_DSDATE) + if nhs_number_dispenser_date_bin: + new_indexes = eps_record.add_release_and_status(nhs_number_dispenser_date_bin, False) + index_dict[INDEX_NHSNUMBER_DSDATE] = new_indexes + + def _add_prescriber_dispenser_date_index(self, eps_record, index_dict): + """ + See build_indexes + """ + result_list = eps_record.return_prescriber_dispenser_date_index() + [success, presc_disp_dates] = result_list + if not success: + raise EpsSystemError(INDEX_PRESCRIBER_DSDATE) + if presc_disp_dates: + new_indexes = eps_record.add_release_and_status(presc_disp_dates, False) + index_dict[INDEX_PRESCRIBER_DSDATE] = new_indexes + + def _add_dispenser_date_index(self, eps_record, index_dict): + """ + See build_indexes + """ + result_list = eps_record.return_dispenser_date_index() + [success, disp_dates] = result_list + if not success: + raise EpsSystemError(INDEX_DISPENSER_DATE) + if disp_dates: + new_indexes = eps_record.add_release_and_status(disp_dates, False) + index_dict[INDEX_DISPENSER_DATE] = new_indexes + + def _add_next_activity_next_activity_date_index(self, context, index_dict): + """ + See build_indexes + """ + result_list = context.epsRecord.return_next_activity_index( + self.test_prescribing_sites, self.nad_reference, context + ) + + [next_activity, next_activity_date] = result_list + next_activity_nad_bin = ( + f"{next_activity}_{next_activity_date}" + if next_activity_date and next_activity + else next_activity + ) + index_dict[INDEX_NEXTACTIVITY] = [next_activity_nad_bin] + + def _add_prescibing_site_status_index(self, eps_record, index_dict): + """ + See build_indexes + """ + result_list = eps_record.return_presc_site_status_index() + [success, presc_site, prescription_status] = result_list + if not success: + raise EpsSystemError(INDEX_PRESCRIBER_STATUS) + index_dict[INDEX_PRESCRIBER_STATUS] = [] + for status in prescription_status: + index_dict[INDEX_PRESCRIBER_STATUS].append(presc_site + "_" + status) + + def _add_dispensing_site_status_index(self, eps_record, index_dict): + """ + See build_indexes + """ + result_list = eps_record.return_disp_site_status_index() + [success, disp_site_statuses] = result_list + if not success: + raise EpsSystemError(INDEX_DISPENSER_STATUS) + index_dict[INDEX_DISPENSER_STATUS] = list(disp_site_statuses) + + def _add_nominated_pharmacy_status_index(self, eps_record, index_dict): + """ + See build_indexes + """ + [nom_pharmacy, prescription_status] = eps_record.return_nom_pharm_status_index() + + if nom_pharmacy: + index_dict[INDEX_NOMPHARM] = [] + for status in prescription_status: + index_dict[INDEX_NOMPHARM].append(nom_pharmacy + "_" + status) + + self.log_object.write_log( + "EPS0617", + None, + { + "internalID": 
self.internal_id, + "nomPharmacy": nom_pharmacy, + "indexes": index_dict[INDEX_NOMPHARM], + }, + ) + else: + self.log_object.write_log("EPS0618", None, {"internalID": self.internal_id}) + + def _add_nhs_number_index(self, eps_record, index_dict): + """ + See build_indexes + """ + nhs_number = eps_record.return_nhs_number() + index_dict[INDEX_NHSNUMBER] = [nhs_number] + + def _add_delta_index(self, eps_record, index_dict): + """ + See build_indexes + """ + index_dict[INDEX_DELTA] = [timeNowAsString() + SEPERATOR + str(eps_record.get_scn())] diff --git a/src/eps_spine_shared/common/prescription/claim.py b/src/eps_spine_shared/common/prescription/claim.py new file mode 100644 index 0000000..3049c82 --- /dev/null +++ b/src/eps_spine_shared/common/prescription/claim.py @@ -0,0 +1,39 @@ +from eps_spine_shared.common.prescription import fields + + +class PrescriptionClaim(object): + """ + Wrapper class to simplify interacting with an issue claim portion of a prescription record. + """ + + def __init__(self, claim_dict): + """ + Constructor. + + :type claim_dict: dict + """ + self._claim_dict = claim_dict + + @property + def received_date_str(self): + """ + The date the claim was received. + + :rtype: str + """ + return self._claim_dict[fields.FIELD_CLAIM_RECEIVED_DATE] + + @received_date_str.setter + def received_date_str(self, value): + """ + The date the claim was received. + + :type value: str + """ + self._claim_dict[fields.FIELD_CLAIM_RECEIVED_DATE] = value + + def get_dict(self): + """ + returns claim_dict + """ + return self._claim_dict diff --git a/src/eps_spine_shared/common/prescription/fields.py b/src/eps_spine_shared/common/prescription/fields.py new file mode 100644 index 0000000..bbd03fe --- /dev/null +++ b/src/eps_spine_shared/common/prescription/fields.py @@ -0,0 +1,272 @@ +""" +Field name constants and related configuration for prescription records. 
+""" + +from eps_spine_shared.spinecore.changelog import PrescriptionsChangeLogProcessor + +# Field name constants +FIELD_AGENT_ORGANIZATION = "agentOrganization" +FIELD_BATCH_ID = "batchID" +FIELD_BATCH_NUMBER = "batchNumber" +FIELD_BIRTH_TIME = "birthTime" +FIELD_PREFIX = "prefix" +FIELD_SUFFIX = "suffix" +FIELD_GIVEN = "given" +FIELD_FAMILY = "family" +FIELD_CANCEL_LINE_ITEM_REF = "cancelLineItemRef" +FIELD_CANCELLATION_ID = "cancellationID" +FIELD_CANCELLATION_MSG_REF = "cancellationMsgRef" +FIELD_CANCELLATION_TARGET = "cancellationTarget" +FIELD_CANCELLATION_TIME = "cancellationTime" +FIELD_CANCELLATIONS = "cancellations" +FIELD_CHANGE_LOG = "changeLog" +FIELD_CLAIM = "claim" +FIELD_CLAIM_GUID = "claimGUID" +FIELD_CLAIM_REBUILD = "claimRebuild" +FIELD_CLAIM_RECEIVED_DATE = "claimReceivedDate" +FIELD_CLAIM_SENT_DATE = "claimSentDate" +FIELD_CLAIM_STATUS = "claimStatus" +FIELD_CLAIMED_DISPLAY_NAME = "claimed" +FIELD_COMPLETION_DATE = "completionDate" +FIELD_CURRENT_INSTANCE = "currentInstance" +FIELD_DAYS_SUPPLY = "daysSupply" +FIELD_DAYS_SUPPLY_HIGH = "daysSupplyValidHigh" +FIELD_DAYS_SUPPLY_LOW = "daysSupplyValidLow" +FIELD_DISPENSE = "dispense" +FIELD_DISPENSE_DATE = "dispenseDate" +FIELD_DISPENSE_TIME = "dispenseTime" +FIELD_DISPENSE_CLAIM_MSG_REF = "dispenseClaimMsgRef" +FIELD_DISPENSE_HISTORY = "dispenseHistory" +FIELD_DISPENSE_WINDOW_HIGH_DATE = "dispenseWindowHighDate" +FIELD_DISPENSE_WINDOW_LOW_DATE = "dispenseWindowLowDate" +FIELD_DISPENSING_ORGANIZATION = "dispensingOrganization" +FIELD_EXPIRY_DATE = "expiryDate" +FIELD_EXPIRY_PERIOD = "expiryPeriod" +FIELD_FORMATTED_EXPIRY_DATE = "formattedExpiryDate" +FIELD_HANDLE_TIME = "handleTime" +FIELD_HIGHER_AGE_LIMIT = "higherAgeLimit" +FIELD_HISTORIC_CLAIM_GUIDS = "historicClaimGUIDs" +FIELD_HISTORIC_CLAIMS = "historicClaims" +FIELD_HISTORIC_DISPENSE_CLAIM_MSG_REF = "historicDispenseClaimMsgRef" +FIELD_HL7 = "hl7" +FIELD_ID = "ID" +FIELD_INDEXES = "indexes" +FIELD_INSTANCES = "instances" +FIELD_INSTANCE_NUMBER = "instanceNumber" +FIELD_ISSUE = "issue" +FIELD_LAST_DISPENSE_DATE = "lastDispenseDate" +FIELD_LAST_DISPENSE_NOTIFICATION_GUID = "lastDispenseNotificationGuid" +FIELD_LAST_DISPENSE_NOTIFICATION_MSG_REF = "lastDispenseNotificationMsgRef" +FIELD_LAST_DISPENSE_STATUS = "lastDispenseStatus" +FIELD_LOWER_AGE_LIMIT = "lowerAgeLimit" +FIELD_LINE_ITEMS = "lineItems" +FIELD_MAX_REPEATS = "maxRepeats" +FIELD_NEXT_ACTIVITY = "nextActivity" +FIELD_NHS_NUMBER = "nhsNumber" +FIELD_NOMINATION = "nomination" +FIELD_NOMINATED = "nominated" +FIELD_NOMINATED_DOWNLOAD_DATE = "nominatedDownloadDate" +FIELD_NOMINATED_PERFORMER = "nominatedPerformer" +FIELD_NOMINATED_PERFORMER_TYPE = "nominatedPerformerType" +FIELD_NOMINATION_HISTORY = "nominationHistory" +FIELD_ORDER = "order" +FIELD_PATIENT = "patient" +FIELD_PENDING_CANCELLATIONS = "pendingCancellations" +FIELD_PRESCRIBING_ORG = "prescribingOrganization" +FIELD_PRESCRIBING_SITE_TEST_STATUS = "prescribingSiteTestStatus" +FIELD_PRESCRIPTION = "prescription" +FIELD_PRESCRIPTION_ID = "prescriptionID" +FIELD_PRESCRIPTION_MSG_REF = "prescriptionMsgRef" +FIELD_PRESCRIPTION_PRESENT = "prescriptionPresent" +FIELD_PRESCRIPTION_REPEAT_HIGH = "prescriptionRepeatHigh" +FIELD_PRESCRIPTION_STATUS = "prescriptionStatus" +FIELD_PRESCRIPTION_TIME = "prescriptionTime" +FIELD_PRESCRIPTION_DATE = "prescriptionDate" +# NOTE: be aware of the two similar named fields here: +# - treatment type describes whether the prescription is acute, repeat prescribe or +# repeat dispense +# - prescription type seems to indicate 
where the prescription is from, eg. GP, nurse +# hospital, dental, etc. - see MIM 4.2 for details (vocabulary "PrescriptionType") +# Confusingly, they both accept similar values, ie. numeric codes of the form "000X", +# so take care when examining prescription records! +FIELD_PRESCRIPTION_TREATMENT_TYPE = "prescriptionTreatmentType" +FIELD_PRESCRIPTION_TYPE = "prescriptionType" +FIELD_PREVIOUS_STATUS = "previousStatus" +FIELD_REASONS = "Reasons" +FIELD_RELEASE = "release" +FIELD_RELEASE_DATE = "releaseDate" +FIELD_RELEASE_REQUEST_MGS_REF = "releaseRequestMsgRef" +FIELD_RELEASE_DISPENSER_DETAILS = "releaseDispenserDetails" +FIELD_RELEASE_VERSION = "releaseVersion" +FIELD_SCN = "SCN" +FIELD_SIGNED_TIME = "signedTime" +FIELD_STATUS = "status" +FIELD_UNSUCCESSFUL_CANCELLATIONS = "unsuccessfulCancellations" +FIELD_ACTIVITY = "activity" +FIELD_DATE = "date" +FIELD_CAPITAL_D_DATE = "Date" +FIELD_TIMESTAMP = "Timestamp" + +FIELD_PRESCRIPTION_STATUS_DISPLAY_NAME = "prescriptionStatusDisplayName" +FIELD_PRESCRIPTION_CURRENT_INSTANCE = "prescriptionCurrentInstance" +FIELD_PRESCRIPTION_MAX_REPEATS = "prescriptionMaxRepeats" +FIELD_PREVIOUS_ISSUE_DATE = "priorPreviousIssueDate" + +# Treatment type constants +TREATMENT_TYPE_ACUTE = "0001" +TREATMENT_TYPE_REPEAT_PRESCRIBE = "0002" +TREATMENT_TYPE_REPEAT_DISPENSE = "0003" + +# Default values +DEFAULT_DAYSSUPPLY = 28 + +# Field groups for different sections of prescription records +PATIENT_DETAILS = [ + FIELD_NHS_NUMBER, + FIELD_BIRTH_TIME, + FIELD_LOWER_AGE_LIMIT, + FIELD_HIGHER_AGE_LIMIT, + FIELD_PREFIX, + FIELD_SUFFIX, + FIELD_GIVEN, + FIELD_FAMILY, +] + +PRESCRIPTION_DETAILS = [ + FIELD_PRESCRIPTION_ID, + FIELD_PRESCRIPTION_MSG_REF, + FIELD_PRESCRIPTION_TREATMENT_TYPE, + FIELD_PRESCRIPTION_TYPE, + FIELD_PRESCRIPTION_TIME, + FIELD_PRESCRIBING_ORG, + FIELD_SIGNED_TIME, + FIELD_DAYS_SUPPLY, + FIELD_MAX_REPEATS, + FIELD_PENDING_CANCELLATIONS, + FIELD_UNSUCCESSFUL_CANCELLATIONS, + FIELD_CURRENT_INSTANCE, + FIELD_PRESCRIPTION_PRESENT, + FIELD_HL7, + FIELD_SCN, +] + +NOMINATION_DETAILS = [ + FIELD_NOMINATED, + FIELD_NOMINATED_PERFORMER, + FIELD_NOMINATED_PERFORMER_TYPE, + FIELD_NOMINATION_HISTORY, +] + +INSTANCE_DETAILS = [ + FIELD_NEXT_ACTIVITY, + FIELD_INSTANCE_NUMBER, + FIELD_DISPENSE_WINDOW_LOW_DATE, + FIELD_DISPENSE_WINDOW_HIGH_DATE, + FIELD_PREVIOUS_ISSUE_DATE, + FIELD_COMPLETION_DATE, + FIELD_NOMINATED_DOWNLOAD_DATE, + FIELD_RELEASE_DATE, + FIELD_RELEASE_REQUEST_MGS_REF, + FIELD_EXPIRY_DATE, + FIELD_DISPENSE_HISTORY, + FIELD_PRESCRIPTION_STATUS, + FIELD_PREVIOUS_STATUS, + FIELD_LAST_DISPENSE_STATUS, +] + +DISPENSE_DETAILS = [ + FIELD_DISPENSING_ORGANIZATION, + FIELD_LAST_DISPENSE_NOTIFICATION_GUID, + FIELD_LAST_DISPENSE_NOTIFICATION_MSG_REF, + FIELD_LAST_DISPENSE_DATE, +] + +LINE_ITEM_DETAILS = [ + FIELD_STATUS, + FIELD_ID, + FIELD_PREVIOUS_STATUS, + FIELD_ORDER, + FIELD_MAX_REPEATS, +] + +CLAIM_DETAILS = [ + FIELD_CLAIM_GUID, + FIELD_BATCH_ID, + FIELD_BATCH_NUMBER, + FIELD_DISPENSE_CLAIM_MSG_REF, + FIELD_HISTORIC_DISPENSE_CLAIM_MSG_REF, + FIELD_CLAIM_RECEIVED_DATE, + FIELD_CLAIM_STATUS, + FIELD_CLAIM_REBUILD, + FIELD_HISTORIC_CLAIM_GUIDS, +] + +INSTANCE_CANCELLATION_DETAILS = [ + FIELD_CANCELLATION_ID, + FIELD_AGENT_ORGANIZATION, + FIELD_CANCELLATION_TARGET, + FIELD_CANCELLATION_TIME, + FIELD_CANCELLATION_MSG_REF, + FIELD_CANCEL_LINE_ITEM_REF, + FIELD_REASONS, + FIELD_CANCELLATION_MSG_REF, +] + +# Prescription ID lengths for different versions +R1_PRESCRIPTIONID_LENGTHS = [36, 37] +R2_PRESCRIPTIONID_LENGTHS = [19, 20] + +R1_VERSION = "R1" +R2_VERSION 
= "R2" + +# Other constants +NOMINATED_DOWNLOAD_LEAD_DAYS = 7 + +_YOUNG_AGE_EXEMPTION = 16 +_OLD_AGE_EXEMPTION = 60 + +# Activity constants +NEXTACTIVITY_EXPIRE = "expire" +NEXTACTIVITY_CREATENOCLAIM = "createNoClaim" +NEXTACTIVITY_DELETE = "delete" +NEXTACTIVITY_PURGE = "purge" +NEXTACTIVITY_READY = "ready" +ACTIVITY_NOMINATED_DOWNLOAD = "nominated-download" +BATCH_STATUS_AVAILABLE = "Available" +BATCH_STATUS_ALL = "All" +BATCH_STATUS_CURRENT = "Current" +ADMIN_ACTION_RESET_NAD = "resetNAD" +SPECIAL_DISPENSE_RESET = "specialDispenseReset" +SPECIAL_RESET_CURRENT_INSTANCE = "specialCurrentInstanceReset" +SPECIAL_APPLY_PENDING_CANCELLATIONS = "specialApplyPendingCancellations" + +# Update detail text mapping +UPDATE_DETAIL_TEXT = { + NEXTACTIVITY_EXPIRE: "Batch update for Prescription Expiry", + NEXTACTIVITY_CREATENOCLAIM: "Batch create no claim", + NEXTACTIVITY_DELETE: "Batch prescription deletion", + NEXTACTIVITY_READY: "Batch make prescription available for download", + ACTIVITY_NOMINATED_DOWNLOAD: "Batch make prescription available for nominated download", + ADMIN_ACTION_RESET_NAD: "Administrative reset of Next Activity Date", + SPECIAL_DISPENSE_RESET: "Administrative hard-reset return to Spine", + SPECIAL_RESET_CURRENT_INSTANCE: "Administrative reset current issue number", + SPECIAL_APPLY_PENDING_CANCELLATIONS: "Administrative apply all pending cancellations", + NEXTACTIVITY_PURGE: "Batch prescription purge", +} + +# Activity lookup mapping +ACTIVITY_LOOKUP = {} +ACTIVITY_LOOKUP[NEXTACTIVITY_EXPIRE] = NEXTACTIVITY_EXPIRE +ACTIVITY_LOOKUP[NEXTACTIVITY_CREATENOCLAIM] = NEXTACTIVITY_CREATENOCLAIM +ACTIVITY_LOOKUP[NEXTACTIVITY_DELETE] = NEXTACTIVITY_DELETE +ACTIVITY_LOOKUP[NEXTACTIVITY_PURGE] = NEXTACTIVITY_PURGE +ACTIVITY_LOOKUP[ACTIVITY_NOMINATED_DOWNLOAD] = NEXTACTIVITY_READY +ACTIVITY_LOOKUP[ADMIN_ACTION_RESET_NAD] = ADMIN_ACTION_RESET_NAD +ACTIVITY_LOOKUP[SPECIAL_DISPENSE_RESET] = SPECIAL_DISPENSE_RESET +ACTIVITY_LOOKUP[SPECIAL_RESET_CURRENT_INSTANCE] = SPECIAL_RESET_CURRENT_INSTANCE +ACTIVITY_LOOKUP[SPECIAL_APPLY_PENDING_CANCELLATIONS] = SPECIAL_APPLY_PENDING_CANCELLATIONS + +USER_IMPACTING_ACTIVITY = [NEXTACTIVITY_READY] + +FIELDS_DOCUMENTS = "documents" +FIELDS_SCN = PrescriptionsChangeLogProcessor.RECORD_SCN_REF diff --git a/src/eps_spine_shared/common/prescription/issue.py b/src/eps_spine_shared/common/prescription/issue.py new file mode 100644 index 0000000..bd5591f --- /dev/null +++ b/src/eps_spine_shared/common/prescription/issue.py @@ -0,0 +1,323 @@ +import datetime + +from eps_spine_shared.common.prescription import fields +from eps_spine_shared.common.prescription.claim import PrescriptionClaim +from eps_spine_shared.common.prescription.line_item import PrescriptionLineItem +from eps_spine_shared.common.prescription.statuses import PrescriptionStatus +from eps_spine_shared.nhsfundamentals.timeutilities import TimeFormats + + +class PrescriptionIssue(object): + """ + Wrapper class to simplify interacting with an issue (instance) portion of a prescription record. + + Note: the correct domain terminology is "issue", however there are legacy references + to "instance" in the code and database records. + """ + + def __init__(self, issue_dict): + """ + Constructor. + + :type issue_dict: dict + """ + self._issue_dict = issue_dict + + @property + def number(self): + """ + The number of this issue. 
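+        Issue numbers start at 1 and are held as strings in the stored record.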
+ + :rtype: int + """ + # Note: the number is stored as a string, so we need to convert + number = int(self._issue_dict[fields.FIELD_INSTANCE_NUMBER]) + return number + + @property + def status(self): + """ + The status code of the issue + + :rtype: str + """ + return self._issue_dict[fields.FIELD_PRESCRIPTION_STATUS] + + @status.setter + def status(self, new_status): + """ + The status code of the issue + + NOTE: this does not update the previous status - use update_status() to do that + PAB - should we be using update_status() in places we are using this? + :type new_status: str + """ + self._issue_dict[fields.FIELD_PRESCRIPTION_STATUS] = new_status + + @property + def completion_date_str(self): + """ + The issue completion date as a YYYYMMDD string, if available. + + :rtype: str or None + """ + completion_date_str = self._issue_dict[fields.FIELD_COMPLETION_DATE] + if not completion_date_str: + return None + return completion_date_str + + def expire(self, expired_at_time, parent_prescription): + """ + Update the issue and all its line items to be expired. + + :type expired_at_time: datetime.datetime + :type parent_prescription: PrescriptionRecord + """ + + currentStatus = self.status + + # update the issue status, if appropriate + if currentStatus not in PrescriptionStatus.EXPIRY_IMMUTABLE_STATES: + newStatus = PrescriptionStatus.EXPIRY_LOOKUP[currentStatus] + self.update_status(newStatus, parent_prescription) + + if currentStatus in PrescriptionStatus.UNACTIONED_STATES: + parent_prescription.log_object.write_log( + "EPS0616", + None, + { + "internalID": parent_prescription.internal_id, + "previousStatus": currentStatus, + "releaseVersion": parent_prescription.get_release_version(), + "prescriptionID": str(parent_prescription.return_prescription_id()), + }, + ) + + # make sure all the line items are expired as well + for lineItem in self.line_items: + lineItem.expire(parent_prescription) + + parent_prescription.log_object.write_log( + "EPS0403", + None, + { + "internalID": parent_prescription.internal_id, + }, + ) + + # PAB: this will update the completion time of issues that are + # already in EXPIRY_IMMUTABLE_STATES (ie. already completed) - is + # this correct, or should this be guarded in the above if statement? + self.mark_completed(expired_at_time, parent_prescription) + + def mark_completed(self, completion_datetime, parent_prescription): + """ + Update the completion date of this issue. + + :type completion_datetime: datetime.datetime + :type parent_prescription: PrescriptionRecord + """ + current_completion_date_str = self.completion_date_str + + new_completion_date_str = completion_datetime.strftime(TimeFormats.STANDARD_DATE_FORMAT) + self._issue_dict[fields.FIELD_COMPLETION_DATE] = new_completion_date_str + + parent_prescription.log_attribute_change( + fields.FIELD_COMPLETION_DATE, + (current_completion_date_str or ""), + new_completion_date_str, + None, + ) + + @property + def expiry_date_str(self): + """ + The issue expiry date as a YYYYMMDD string. + + :rtype: str + """ + return self._issue_dict[fields.FIELD_EXPIRY_DATE] + + @property + def line_items(self): + """ + The line items for this issue. + + :rtype: list(PrescriptionLineItem) + """ + line_item_dicts = self._issue_dict[fields.FIELD_LINE_ITEMS] + # wrap the dicts to add convenience methods + line_items = [PrescriptionLineItem(d) for d in line_item_dicts] + return line_items + + @property + def claim(self): + """ + The claim information for this issue. 
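+        The wrapper shares the underlying claim dict with this issue, so assigning to
+        issue.claim.received_date_str, for example, writes straight through to the record.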
+ + :rtype: PrescriptionClaim + """ + claim_dict = self._issue_dict[fields.FIELD_CLAIM] + return PrescriptionClaim(claim_dict) + + def update_status(self, new_status, parent_prescription): + """ + Update the issue status, and record the previous status. + + :type new_status: str + """ + currentStatus = self.status + self._issue_dict[fields.FIELD_PREVIOUS_STATUS] = currentStatus + self._issue_dict[fields.FIELD_PRESCRIPTION_STATUS] = new_status + parent_prescription.log_attribute_change( + fields.FIELD_PRESCRIPTION_STATUS, currentStatus, new_status, None + ) + + @property + def dispensing_organization(self): + """ + Dispensing organization for this issue. + + :rtype: str + """ + dispense_dict = self._issue_dict[fields.FIELD_DISPENSE] + return dispense_dict[fields.FIELD_DISPENSING_ORGANIZATION] + + @property + def last_dispense_date(self): + """ + Dispensing date for this issue. + + :rtype: str + """ + dispense_dict = self._issue_dict[fields.FIELD_DISPENSE] + return dispense_dict[fields.FIELD_LAST_DISPENSE_DATE] + + @property + def last_dispense_notification_msg_ref(self): + """ + Last Dispense Notification MsgRef for this issue. + + :rtype: str + """ + dispense_dict = self._issue_dict[fields.FIELD_DISPENSE] + return dispense_dict[fields.FIELD_LAST_DISPENSE_NOTIFICATION_MSG_REF] + + def clear_dispensing_organisation(self): + """ + Clear the dispensing organisation from this instance. + """ + dispense_dict = self._issue_dict[fields.FIELD_DISPENSE] + dispense_dict[fields.FIELD_DISPENSING_ORGANIZATION] = None + + @property + def dispense_window_low_date(self): + """ + Dispense window low date + + :rtype: datetime or None + """ + low_date_str = self._issue_dict.get(fields.FIELD_DISPENSE_WINDOW_LOW_DATE) + if not low_date_str: + return None + return datetime.datetime.strptime(low_date_str, TimeFormats.STANDARD_DATE_FORMAT) + + def has_active_line_item(self): + """ + See if this instance has any active line items. + + :rtype: bool + """ + return any(lineItem.is_active() for lineItem in self.line_items) + + def get_line_item_by_id(self, line_item_id): + """ + Get a particular line item by its ID. + + Raises a KeyError if no item can be found. + + :type line_item_id: str + :rtype: PrescriptionLineItem + """ + for lineItem in self.line_items: + if lineItem.id == line_item_id: + return lineItem + + raise KeyError("Could not find line item '%s'" % line_item_id) + + @property + def release_date(self): + """ + The releaseDate for this issue, if one is specified + + :rtype: str + """ + release_date = self._issue_dict.get(fields.FIELD_RELEASE_DATE) + return str(release_date) + + @property + def next_activity(self): + """ + The next activity for this issue, if one is specified. + + Note: some migrated prescriptions may not have a next activity specified, + although this should hopefully be rectified. If so, we may be able to tighten + up the return type. + + :rtype: str or None + """ + next_activity_dict = self._issue_dict[fields.FIELD_NEXT_ACTIVITY] + return next_activity_dict.get(fields.FIELD_ACTIVITY, None) + + @property + def next_activity_date_str(self): + """ + The next activity date for this issue, if one is specified. + + :rtype: str or None + """ + next_activity_dict = self._issue_dict[fields.FIELD_NEXT_ACTIVITY] + return next_activity_dict.get(fields.FIELD_DATE, None) + + @property + def cancellations(self): + """ + The cancellations for this issue. 
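+        Each entry is a dict of cancellation details (see fields.INSTANCE_CANCELLATION_DETAILS),
+        typically including cancellationID, cancellationTarget, cancellationTime,
+        cancelLineItemRef and Reasons.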
+ + :rtype: list() + """ + return self._issue_dict[fields.FIELD_CANCELLATIONS] + + def get_line_item_cancellations(self, line_item_id): + """ + Get the cancellations for a particular line item. + + :type line_item_id: str + :rtype: list() + """ + return [ + c for c in self.cancellations if c[fields.FIELD_CANCEL_LINE_ITEM_REF] == line_item_id + ] + + def get_line_item_first_cancellation_time(self, line_item_id): + """ + Get the time of the first cancellation targetting a particular line item. + + :type line_item_id: str + :rtype: str or None + """ + cancellations = self.get_line_item_cancellations(line_item_id) + cancellation_times = [c[fields.FIELD_CANCELLATION_TIME] for c in cancellations] + + if cancellations: + return min(cancellation_times, key=lambda x: int(x)) + return None + + @property + def release_request_msg_ref(self): + """ + The release request message reference for this issue. + + :rtype: str + """ + return self._issue_dict[fields.FIELD_RELEASE_REQUEST_MGS_REF] diff --git a/src/eps_spine_shared/common/prescription/line_item.py b/src/eps_spine_shared/common/prescription/line_item.py new file mode 100644 index 0000000..8515ac9 --- /dev/null +++ b/src/eps_spine_shared/common/prescription/line_item.py @@ -0,0 +1,101 @@ +from eps_spine_shared.common.prescription import fields +from eps_spine_shared.common.prescription.statuses import LineItemStatus + + +class PrescriptionLineItem(object): + """ + Wrapper class to simplify interacting with line item sections of a prescription record. + """ + + def __init__(self, line_item_dict): + """ + Constructor. + + :type line_item_dict: dict + """ + self._line_item_dict = line_item_dict + + @property + def id(self): + """ + The line item's ID. + + :rtype: str + """ + return self._line_item_dict[fields.FIELD_ID] + + @property + def status(self): + """ + The status of this line item. + + :rtype: str + """ + return self._line_item_dict[fields.FIELD_STATUS] + + @property + def previous_status(self): + """ + The previous status of this line item. + + :rtype: str + """ + return self._line_item_dict[fields.FIELD_PREVIOUS_STATUS] + + @property + def order(self): + """ + The order of this line item. + + :rtype: int + """ + return self._line_item_dict[fields.FIELD_ORDER] + + @property + def max_repeats(self): + """ + The maximum number of repeats for this line item. + + :rtype: int + """ + return int(self._line_item_dict[fields.FIELD_MAX_REPEATS]) + + def is_active(self): + """ + Test whether this line item is active. + + :rtype: bool + """ + return self.status in LineItemStatus.ACTIVE_STATES + + def update_status(self, new_status): + """ + Set the line item status, and remember the previous status. + + :type new_status: str + """ + self._line_item_dict[fields.FIELD_PREVIOUS_STATUS] = self._line_item_dict[ + fields.FIELD_STATUS + ] + self._line_item_dict[fields.FIELD_STATUS] = new_status + + def expire(self, parent_prescription): + """ + Expire this line item. 
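+        Only items whose status is not in LineItemStatus.EXPIRY_IMMUTABLE_STATES are moved
+        to the mapped status from LineItemStatus.EXPIRY_LOOKUP, and the change is logged
+        (EPS0072b).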
+
+        :type parent_prescription: PrescriptionRecord
+        """
+        currentStatus = self.status
+        if currentStatus not in LineItemStatus.EXPIRY_IMMUTABLE_STATES:
+            newStatus = LineItemStatus.EXPIRY_LOOKUP[currentStatus]
+            self.update_status(newStatus)
+            parent_prescription.log_object.write_log(
+                "EPS0072b",
+                None,
+                {
+                    "internalID": parent_prescription.internal_id,
+                    "lineItemChanged": self.id,
+                    "previousStatus": currentStatus,
+                    "newStatus": newStatus,
+                },
+            )
diff --git a/src/eps_spine_shared/common/prescription/next_activity_generator.py b/src/eps_spine_shared/common/prescription/next_activity_generator.py
new file mode 100644
index 0000000..5c1aa8b
--- /dev/null
+++ b/src/eps_spine_shared/common/prescription/next_activity_generator.py
@@ -0,0 +1,308 @@
+import datetime
+
+from eps_spine_shared.common.prescription import fields
+from eps_spine_shared.common.prescription.statuses import PrescriptionStatus
+from eps_spine_shared.logger import EpsLogger
+from eps_spine_shared.nhsfundamentals.timeutilities import TimeFormats
+
+
+class NextActivityGenerator(object):
+    """
+    Used to create the next activity for a prescription instance
+    """
+
+    INPUT_LIST_1 = [
+        fields.FIELD_EXPIRY_PERIOD,
+        fields.FIELD_PRESCRIPTION_DATE,
+        fields.FIELD_NOMINATED_DOWNLOAD_DATE,
+        fields.FIELD_DISPENSE_WINDOW_HIGH_DATE,
+    ]
+    INPUT_LIST_2 = [
+        fields.FIELD_EXPIRY_PERIOD,
+        fields.FIELD_PRESCRIPTION_DATE,
+        fields.FIELD_DISPENSE_WINDOW_HIGH_DATE,
+        fields.FIELD_LAST_DISPENSE_DATE,
+    ]
+    INPUT_LIST_3 = [
+        fields.FIELD_EXPIRY_PERIOD,
+        fields.FIELD_PRESCRIPTION_DATE,
+        fields.FIELD_COMPLETION_DATE,
+    ]
+    INPUT_LIST_4 = [
+        fields.FIELD_EXPIRY_PERIOD,
+        fields.FIELD_PRESCRIPTION_DATE,
+        fields.FIELD_COMPLETION_DATE,
+        fields.FIELD_DISPENSE_WINDOW_HIGH_DATE,
+        fields.FIELD_LAST_DISPENSE_DATE,
+        fields.FIELD_CLAIM_SENT_DATE,
+    ]
+    INPUT_LIST_5 = [
+        fields.FIELD_PRESCRIBING_SITE_TEST_STATUS,
+        fields.FIELD_PRESCRIPTION_DATE,
+        fields.FIELD_CLAIM_SENT_DATE,
+    ]
+    INPUT_LIST_6 = [
+        fields.FIELD_EXPIRY_PERIOD,
+        fields.FIELD_PRESCRIPTION_DATE,
+        fields.FIELD_NOMINATED_DOWNLOAD_DATE,
+        fields.FIELD_DISPENSE_WINDOW_LOW_DATE,
+    ]
+    INPUT_LIST_7 = [
+        fields.FIELD_EXPIRY_PERIOD,
+        fields.FIELD_PRESCRIPTION_DATE,
+    ]
+
+    INPUT_BY_STATUS = {}
+    INPUT_BY_STATUS[PrescriptionStatus.TO_BE_DISPENSED] = INPUT_LIST_1
+    INPUT_BY_STATUS[PrescriptionStatus.WITH_DISPENSER] = INPUT_LIST_1
+    INPUT_BY_STATUS[PrescriptionStatus.WITH_DISPENSER_ACTIVE] = INPUT_LIST_2
+    INPUT_BY_STATUS[PrescriptionStatus.EXPIRED] = INPUT_LIST_3
+    INPUT_BY_STATUS[PrescriptionStatus.CANCELLED] = INPUT_LIST_3
+    INPUT_BY_STATUS[PrescriptionStatus.DISPENSED] = INPUT_LIST_4
+    INPUT_BY_STATUS[PrescriptionStatus.NOT_DISPENSED] = INPUT_LIST_3
+    INPUT_BY_STATUS[PrescriptionStatus.CLAIMED] = INPUT_LIST_5
+    INPUT_BY_STATUS[PrescriptionStatus.NO_CLAIMED] = INPUT_LIST_5
+    INPUT_BY_STATUS[PrescriptionStatus.AWAITING_RELEASE_READY] = INPUT_LIST_6
+    INPUT_BY_STATUS[PrescriptionStatus.REPEAT_DISPENSE_FUTURE_INSTANCE] = INPUT_LIST_7
+    INPUT_BY_STATUS[PrescriptionStatus.FUTURE_DATED_PRESCRIPTION] = INPUT_LIST_6
+    INPUT_BY_STATUS[PrescriptionStatus.PENDING_CANCELLATION] = [fields.FIELD_PRESCRIPTION_DATE]
+
+    FIELD_REPEAT_DISPENSE_EXPIRY_PERIOD = "repeatDispenseExpiryPeriod"
+    FIELD_PRESCRIPTION_EXPIRY_PERIOD = "prescriptionExpiryPeriod"
+    FIELD_WITH_DISPENSER_ACTIVE_EXPIRY_PERIOD = "withDispenserActiveExpiryPeriod"
+    FIELD_EXPIRED_DELETE_PERIOD = "expiredDeletePeriod"
+    FIELD_CANCELLED_DELETE_PERIOD = "cancelledDeletePeriod"
+    FIELD_NOTIFICATION_DELAY_PERIOD =
"notificationDelayPeriod" + FIELD_CLAIMED_DELETE_PERIOD = "claimedDeletePeriod" + FIELD_NOT_DISPENSED_DELETE_PERIOD = "notDispensedDeletePeriod" + FIELD_RELEASE_VERSION = "releaseVersion" + + def __init__(self, log_object, internal_id): + self.log_object = EpsLogger(log_object) + self.internal_id = internal_id + + # Map between prescription status and method for calculating index values + self._index_map = {} + self._index_map[PrescriptionStatus.TO_BE_DISPENSED] = self.un_dispensed + self._index_map[PrescriptionStatus.WITH_DISPENSER] = self.un_dispensed + self._index_map[PrescriptionStatus.WITH_DISPENSER_ACTIVE] = self.part_dispensed + self._index_map[PrescriptionStatus.EXPIRED] = self.expired + self._index_map[PrescriptionStatus.CANCELLED] = self.cancelled + self._index_map[PrescriptionStatus.DISPENSED] = self.dispensed + self._index_map[PrescriptionStatus.NO_CLAIMED] = self.completed + self._index_map[PrescriptionStatus.NOT_DISPENSED] = self.not_dispensed + self._index_map[PrescriptionStatus.CLAIMED] = self.completed + self._index_map[PrescriptionStatus.AWAITING_RELEASE_READY] = self.awaiting_nominated_release + self._index_map[PrescriptionStatus.REPEAT_DISPENSE_FUTURE_INSTANCE] = self.un_dispensed + self._index_map[PrescriptionStatus.FUTURE_DATED_PRESCRIPTION] = self.future_dated + self._index_map[PrescriptionStatus.PENDING_CANCELLATION] = self.awaiting_cancellation + + def next_activity_date(self, nad_status, nad_reference): + """ + Function takes prescriptionStatus (this will be the prescriptionStatus to be + if the function is called during an update process) + Function takes nad_status - a dictionary of information relevant to + next-activity-date calculation + Function takes nad_reference - a dictionary of global variables relevant to + next-activity-date calculation + Function should return [nextActivity, nextActivityDate, expiryDate] + """ + prescription_status = nad_status[fields.FIELD_PRESCRIPTION_STATUS] + + for key in NextActivityGenerator.INPUT_BY_STATUS[prescription_status]: + if fields.FIELD_CAPITAL_D_DATE in key: + if nad_status[key]: + nad_status[key] = datetime.datetime.strptime( + nad_status[key], TimeFormats.STANDARD_DATE_FORMAT + ) + elif key not in [ + fields.FIELD_NOMINATED_DOWNLOAD_DATE, + fields.FIELD_DISPENSE_WINDOW_LOW_DATE, + ]: + nad_status[key] = datetime.datetime.now() + + self._calculate_expiry_date(nad_status, nad_reference) + return_value = self._index_map[prescription_status](nad_status, nad_reference) + return return_value + + def _calculate_expiry_date(self, nad_status, nad_reference): + """ + Calculate the expiry date to be used in subsequent Next Activity calculations + """ + if int(nad_status[fields.FIELD_INSTANCE_NUMBER]) > 1: + expiry_date = ( + nad_status[fields.FIELD_PRESCRIPTION_DATE] + + nad_reference[self.FIELD_REPEAT_DISPENSE_EXPIRY_PERIOD] + ) + else: + expiry_date = ( + nad_status[fields.FIELD_PRESCRIPTION_DATE] + + nad_reference[self.FIELD_PRESCRIPTION_EXPIRY_PERIOD] + ) + + nad_status[fields.FIELD_EXPIRY_DATE] = expiry_date + expiry_date_str = expiry_date.strftime(TimeFormats.STANDARD_DATE_FORMAT) + nad_status[fields.FIELD_FORMATTED_EXPIRY_DATE] = expiry_date_str + + def un_dispensed(self, nad_status, _): + """ + return [nextActivity, nextActivityDate, expiryDate] for un_dispensed prescription + messages, covers: + toBeDispensed + withDispenser + RepeatDispenseFutureInstance + """ + next_activity = fields.NEXTACTIVITY_EXPIRE + next_activity_date = nad_status[fields.FIELD_FORMATTED_EXPIRY_DATE] + return [next_activity, 
next_activity_date, nad_status[fields.FIELD_EXPIRY_DATE]] + + def part_dispensed(self, nad_status, nad_reference): + """ + return [nextActivity, nextActivityDate, expiryDate] for part_dispensed prescription + messages + """ + max_dispense_time = nad_status[fields.FIELD_LAST_DISPENSE_DATE] + max_dispense_time += nad_reference[self.FIELD_WITH_DISPENSER_ACTIVE_EXPIRY_PERIOD] + expiry_date = min(max_dispense_time, nad_status[fields.FIELD_EXPIRY_DATE]) + + if nad_status[self.FIELD_RELEASE_VERSION] == fields.R1_VERSION: + next_activity = fields.NEXTACTIVITY_EXPIRE + next_activity_date = expiry_date.strftime(TimeFormats.STANDARD_DATE_FORMAT) + else: + if not nad_status[fields.FIELD_LAST_DISPENSE_NOTIFICATION_MSG_REF]: + next_activity = fields.NEXTACTIVITY_EXPIRE + next_activity_date = expiry_date.strftime(TimeFormats.STANDARD_DATE_FORMAT) + else: + next_activity = fields.NEXTACTIVITY_CREATENOCLAIM + next_activity_date = max_dispense_time.strftime(TimeFormats.STANDARD_DATE_FORMAT) + return [next_activity, next_activity_date, expiry_date] + + def expired(self, nad_status, nad_reference): + """ + return [nextActivity, nextActivityDate, expiryDate] for expired prescription + messages + """ + deletion_date = ( + nad_status[fields.FIELD_COMPLETION_DATE] + + nad_reference[self.FIELD_EXPIRED_DELETE_PERIOD] + ) + next_activity = fields.NEXTACTIVITY_DELETE + next_activity_date = deletion_date.strftime(TimeFormats.STANDARD_DATE_FORMAT) + return [next_activity, next_activity_date, None] + + def cancelled(self, nad_status, nad_reference): + """ + return [nextActivity, nextActivityDate, expiryDate] for cancelled prescription + messages + """ + deletion_date = ( + nad_status[fields.FIELD_COMPLETION_DATE] + + nad_reference[self.FIELD_CANCELLED_DELETE_PERIOD] + ) + next_activity = fields.NEXTACTIVITY_DELETE + next_activity_date = deletion_date.strftime(TimeFormats.STANDARD_DATE_FORMAT) + return [next_activity, next_activity_date, None] + + def dispensed(self, nad_status, nad_reference): + """ + return [nextActivity, nextActivityDate, expiryDate] for dispensed prescription + messages. + Note that if a claim is not received before the notification delay period expires, + a no claim notification is sent to the PPD. + """ + completion_date = nad_status[fields.FIELD_COMPLETION_DATE] + max_notification_date = ( + completion_date + nad_reference[self.FIELD_NOTIFICATION_DELAY_PERIOD] + ) + if nad_status[self.FIELD_RELEASE_VERSION] == fields.R1_VERSION: # noqa: SIM108 + next_activity = fields.NEXTACTIVITY_DELETE + else: + next_activity = fields.NEXTACTIVITY_CREATENOCLAIM + next_activity_date = max_notification_date.strftime(TimeFormats.STANDARD_DATE_FORMAT) + return [next_activity, next_activity_date, None] + + def completed(self, nad_status, nad_reference): + """ + return [nextActivity, nextActivityDate, expiryDate] for completed prescription + messages + + Note, all reference to claim sent date removed as this now only applies to already + claimed and no-claimed prescriptions. 
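+
+        For illustration only (assuming, say, a 90 day claimedDeletePeriod): a claim sent on
+        20240401 would give ["delete", "20240630", None].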
+        """
+        deletion_date = (
+            nad_status[fields.FIELD_CLAIM_SENT_DATE]
+            + nad_reference[self.FIELD_CLAIMED_DELETE_PERIOD]
+        )
+        next_activity = fields.NEXTACTIVITY_DELETE
+        next_activity_date = deletion_date.strftime(TimeFormats.STANDARD_DATE_FORMAT)
+        return [next_activity, next_activity_date, None]
+
+    def not_dispensed(self, nad_status, nad_reference):
+        """
+        return [nextActivity, nextActivityDate, expiryDate] for not_dispensed prescription
+        messages
+        """
+        deletion_date = (
+            nad_status[fields.FIELD_COMPLETION_DATE]
+            + nad_reference[self.FIELD_NOT_DISPENSED_DELETE_PERIOD]
+        )
+        next_activity = fields.NEXTACTIVITY_DELETE
+        next_activity_date = deletion_date.strftime(TimeFormats.STANDARD_DATE_FORMAT)
+        return [next_activity, next_activity_date, None]
+
+    def awaiting_nominated_release(self, nad_status, _):
+        """
+        return [nextActivity, nextActivityDate, expiryDate] for awaiting_nominated_release
+        prescription messages
+        """
+        ready_date = nad_status[fields.FIELD_DISPENSE_WINDOW_LOW_DATE]
+
+        if nad_status[fields.FIELD_NOMINATED_DOWNLOAD_DATE]:
+            ready_date = nad_status[fields.FIELD_NOMINATED_DOWNLOAD_DATE]
+
+        ready_date_string = ready_date.strftime(TimeFormats.STANDARD_DATE_FORMAT)
+
+        if ready_date < nad_status[fields.FIELD_EXPIRY_DATE]:
+            next_activity = fields.NEXTACTIVITY_READY
+            next_activity_date = ready_date_string
+        else:
+            next_activity = fields.NEXTACTIVITY_EXPIRE
+            next_activity_date = nad_status[fields.FIELD_FORMATTED_EXPIRY_DATE]
+        return [next_activity, next_activity_date, nad_status[fields.FIELD_EXPIRY_DATE]]
+
+    def future_dated(self, nad_status, _):
+        """
+        return [nextActivity, nextActivityDate, expiryDate] for future_dated prescription
+        messages
+        """
+        if nad_status[fields.FIELD_DISPENSE_WINDOW_LOW_DATE]:
+            ready_date = max(
+                nad_status[fields.FIELD_DISPENSE_WINDOW_LOW_DATE],
+                nad_status[fields.FIELD_PRESCRIPTION_DATE],
+            )
+        else:
+            ready_date = nad_status[fields.FIELD_PRESCRIPTION_DATE]
+
+        ready_date_string = ready_date.strftime(TimeFormats.STANDARD_DATE_FORMAT)
+
+        if nad_status[fields.FIELD_NOMINATED_DOWNLOAD_DATE]:
+            ready_date = nad_status[fields.FIELD_NOMINATED_DOWNLOAD_DATE]
+        if ready_date < nad_status[fields.FIELD_EXPIRY_DATE]:
+            next_activity = fields.NEXTACTIVITY_READY
+            next_activity_date = ready_date_string
+        else:
+            next_activity = fields.NEXTACTIVITY_EXPIRE
+            next_activity_date = nad_status[fields.FIELD_FORMATTED_EXPIRY_DATE]
+        return [next_activity, next_activity_date, nad_status[fields.FIELD_EXPIRY_DATE]]
+
+    def awaiting_cancellation(self, nad_status, nad_reference):
+        """
+        return [nextActivity, nextActivityDate, expiryDate] for awaiting_cancellation
+        prescription messages
+        """
+        deletion_date = (
+            nad_status[fields.FIELD_HANDLE_TIME] + nad_reference[self.FIELD_CANCELLED_DELETE_PERIOD]
+        )
+        next_activity = fields.NEXTACTIVITY_DELETE
+        next_activity_date = deletion_date.strftime(TimeFormats.STANDARD_DATE_FORMAT)
+        return [next_activity, next_activity_date, None]
diff --git a/src/eps_spine_shared/common/prescription/record.py b/src/eps_spine_shared/common/prescription/record.py
new file mode 100644
index 0000000..295b5bf
--- /dev/null
+++ b/src/eps_spine_shared/common/prescription/record.py
@@ -0,0 +1,3601 @@
+import datetime
+import sys
+from copy import copy
+
+from dateutil.relativedelta import relativedelta
+
+from eps_spine_shared.common import indexes
+from eps_spine_shared.common.prescription import fields
+from eps_spine_shared.common.prescription.issue import PrescriptionIssue
+from
eps_spine_shared.common.prescription.next_activity_generator import NextActivityGenerator +from eps_spine_shared.common.prescription.statuses import LineItemStatus, PrescriptionStatus +from eps_spine_shared.errors import ( + EpsBusinessError, + EpsErrorBase, + EpsSystemError, +) +from eps_spine_shared.logger import EpsLogger +from eps_spine_shared.nhsfundamentals.timeutilities import TimeFormats +from eps_spine_shared.spinecore.baseutilities import handleEncodingOddities, quoted +from eps_spine_shared.spinecore.changelog import PrescriptionsChangeLogProcessor + + +class PrescriptionRecord(object): + """ + Base class for all Prescriptions record objects + + A record object should be created by the validator used by a particular interaction + The validator can then update the attributes of this object. + + The object should then support creating a new record, or existing an updated record + using the attributes which have been bound to it + """ + + SCN_MAX = 512 + # Limit beyond which we should stop updating the change log as almost certainly in an + # uncontrolled loop - and updating the change log may lead to the record being of an + # unbounded size + + def __init__(self, log_object, internal_id): + """ + The basic attributes of an epsRecord + """ + self.log_object = EpsLogger(log_object) + self.internal_id = internal_id + self.nad_generator = NextActivityGenerator(log_object, internal_id) + self.pending_instance_change = None + self.prescription_record = None + self.pre_change_issue_status_dict = {} + self.pre_change_current_issue = None + + def create_initial_record(self, context, prescription=True): + """ + Take the context of a worker object - which should contain validated output, and + use to build an initial prescription object + + The prescription boolean is used to indicate that the creation has been caused + by receipt of an actual prescription. The creation may be triggered on receipt + of a cancellation (prior to a prescription) in which case this should be set to + False. + """ + + self.name_map_on_create(context) + + self.prescription_record = {} + self.prescription_record[fields.FIELDS_DOCUMENTS] = [] + self.prescription_record[fields.FIELD_PRESCRIPTION] = self.create_prescription_snippet( + context + ) + self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PRESCRIPTION_PRESENT + ] = prescription + self.prescription_record[fields.FIELD_PATIENT] = self.create_patient_snippet(context) + self.prescription_record[fields.FIELD_NOMINATION] = self.create_nomination_snippet(context) + line_items = self.create_line_items(context) + self.prescription_record[fields.FIELD_INSTANCES] = self.create_instances( + context, line_items + ) + + def return_prechange_issue_status_dict(self): + """ + Returns a dictionary of the initial statuses by issue number. + """ + return self.pre_change_issue_status_dict + + def return_prechange_current_issue(self): + """ + Returns the current issue as it was prior to this change + """ + return self.pre_change_current_issue + + def return_changed_issue_list( + self, + pre_change_issue_list, + post_change_issue_list, + max_repeats=None, + changed_issues_list=None, + ): + """ + Iterate through the prescription issues comparing the pre and post change status dict + for each issue number, checking for differences. If a difference is found, add the + issue number as a string to the returned changed_issues_list. + + Accept an initial changed_issues_list as this may need to include other issues, e.g. 
in the pending cancellation
+        case, an issue can be changed by adding a pending cancellation, even though the statuses don't change.
+        """
+
+        if not changed_issues_list:
+            changed_issues_list = []
+
+        if not max_repeats:
+            max_repeats = self.max_repeats
+        for i in range(1, int(max_repeats) + 1):
+            issue_ref = self.generate_status_dict_issue_reference(i)
+            # The get will handle missing issues from the change log
+            if pre_change_issue_list.get(issue_ref, {}) == post_change_issue_list.get(
+                issue_ref, {}
+            ):
+                continue
+            changed_issues_list.append(str(i))
+
+        return changed_issues_list
+
+    def generate_status_dict_issue_reference(self, issue_number):
+        """
+        Create the status dict issue reference. Moved into a separate function as it is used
+        in a couple of places.
+        """
+        return fields.FIELD_ISSUE + str(issue_number)
+
+    def create_issue_current_status_dict(self):
+        """
+        Cycle through all of the issues in the prescription and add the current prescription
+        status and the status of each line item (by order not ID) to a dictionary keyed on issue number
+        """
+        status_dict = {}
+        prescription_issues = self.prescription_record[fields.FIELD_INSTANCES]
+        for issue in prescription_issues:
+            issue_dict = {}
+            issue_dict[fields.FIELD_PRESCRIPTION] = str(
+                prescription_issues[issue][fields.FIELD_PRESCRIPTION_STATUS]
+            )
+            issue_dict[fields.FIELD_LINE_ITEMS] = {}
+            for line_item in prescription_issues[issue][fields.FIELD_LINE_ITEMS]:
+                line_order = line_item[fields.FIELD_ORDER]
+                line_status = line_item[fields.FIELD_STATUS]
+                issue_dict[fields.FIELD_LINE_ITEMS][str(line_order)] = str(line_status)
+            status_dict[self.generate_status_dict_issue_reference(issue)] = issue_dict
+        return status_dict
+
+    def add_event_to_change_log(self, message_id, event_log):
+        """
+        Add the event_log to the change log under the key of message_id. If the changeLog does
+        not exist it will be created.
+
+        Prescriptions change logs will not be pruned and will grow unbounded.
+        """
+        # Set the SCN on the change log to be the same as on the record
+        event_log[PrescriptionsChangeLogProcessor.SCN] = self.get_scn()
+        length_before = len(self.prescription_record.get(fields.FIELD_CHANGE_LOG, []))
+        try:
+            PrescriptionsChangeLogProcessor.updateChangeLog(
+                self.prescription_record, event_log, message_id, self.SCN_MAX
+            )
+        except Exception as e:  # noqa: BLE001
+            self.log_object.write_log(
+                "EPS0336",
+                sys.exc_info(),
+                {"internalID": self.internal_id, "prescriptionID": self.id, "error": str(e)},
+            )
+            raise EpsSystemError(EpsSystemError.SYSTEM_FAILURE) from e
+        length_after = len(self.prescription_record.get(fields.FIELD_CHANGE_LOG, []))
+        if length_after != length_before + 1:
+            self.log_object.write_log(
+                "EPS0672",
+                None,
+                {
+                    "internalID": self.internal_id,
+                    "lengthBefore": str(length_before),
+                    "lengthAfter": str(length_after),
+                },
+            )
+
+    def add_index_to_record(self, index_dict):
+        """
+        Replace the existing index information with a new set of index information
+        """
+        self.prescription_record[fields.FIELD_INDEXES] = index_dict
+
+    def increment_scn(self):
+        """
+        Check for an SCN on the record, if one does not already exist, add it.
+        If it does exist, increment it - but throw a system error if this exceeds a
+        maximum to prevent a prescription ending up in an uncontrolled loop - SPII-14250.
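+
+        For example, a record currently at SCN 3 moves to SCN 4, while a record with no SCN
+        yet is initialised to PrescriptionsChangeLogProcessor.INITIAL_SCN.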
+ """ + if fields.FIELDS_SCN not in self.prescription_record: + self.prescription_record[fields.FIELDS_SCN] = ( + PrescriptionsChangeLogProcessor.INITIAL_SCN + ) + else: + self.prescription_record[fields.FIELDS_SCN] += 1 + + def get_scn(self): + """ + Check for an SCN on the record, if one does not already exist, create it. + If it already exists, return it. + """ + if fields.FIELDS_SCN not in self.prescription_record: + self.prescription_record[fields.FIELDS_SCN] = ( + PrescriptionsChangeLogProcessor.INITIAL_SCN + ) + + return self.prescription_record[fields.FIELDS_SCN] + + def add_document_references(self, document_refs): + """ + Adds a document reference to the high-level document list. + """ + if fields.FIELDS_DOCUMENTS not in self.prescription_record: + self.prescription_record[fields.FIELDS_DOCUMENTS] = [] + + for document in document_refs: + self.prescription_record[fields.FIELDS_DOCUMENTS].append(document) + + def return_record_to_be_stored(self): + """ + Return a copy of the record in a storable format (i.e. note that this is not json + encoded here - it will be encoded as it is placed onto the WDO) + """ + return self.prescription_record + + def return_next_activity_nad_bin(self): + """ + Return the nextActivityNAD_bin index of the prescription record + """ + if fields.FIELD_INDEXES in self.prescription_record: + if indexes.INDEX_NEXTACTIVITY in self.prescription_record[fields.FIELD_INDEXES]: + return self.prescription_record[fields.FIELD_INDEXES][indexes.INDEX_NEXTACTIVITY] + if indexes.INDEX_NEXTACTIVITY.lower() in self.prescription_record[fields.FIELD_INDEXES]: + return self.prescription_record[fields.FIELD_INDEXES][ + indexes.INDEX_NEXTACTIVITY.lower() + ] + return None + + def create_record_from_store(self, record): + """ + Convert the stored format into a self.prescription_record + """ + self.prescription_record = record + self.pre_change_issue_status_dict = self.create_issue_current_status_dict() + self.pre_change_current_issue = self.prescription_record.get( + fields.FIELD_PRESCRIPTION, {} + ).get(fields.FIELD_CURRENT_INSTANCE) + + def name_map_on_create(self, context): + """ + Map any additional names from the original context (e.g. 
if the property here is + named differently at the point of extract from the message such as with + agentOrganization) + """ + + context.prescribingOrganization = context.agentOrganization + if hasattr(context, fields.FIELD_PRESCRIPTION_REPEAT_HIGH): + context.maxRepeats = context.prescriptionRepeatHigh + if hasattr(context, fields.FIELD_DAYS_SUPPLY_LOW): + context.dispenseWindowLowDate = context.daysSupplyValidLow + if hasattr(context, fields.FIELD_DAYS_SUPPLY_HIGH): + context.dispenseWindowHighDate = context.daysSupplyValidHigh + + def create_instances(self, context, line_items): + """ + Create all prescription instances + """ + instance_snippet = self.set_all_snippet_details(fields.INSTANCE_DETAILS, context) + instance_snippet[fields.FIELD_LINE_ITEMS] = line_items + instance_snippet[fields.FIELD_INSTANCE_NUMBER] = "1" + instance_snippet[fields.FIELD_DISPENSE] = self.set_all_snippet_details( + fields.DISPENSE_DETAILS, context + ) + instance_snippet[fields.FIELD_CLAIM] = self.set_all_snippet_details( + fields.CLAIM_DETAILS, context + ) + instance_snippet[fields.FIELD_CANCELLATIONS] = [] + instance_snippet[fields.FIELD_DISPENSE_HISTORY] = {} + instance_snippet[fields.FIELD_NEXT_ACTIVITY] = {} + instance_snippet[fields.FIELD_NEXT_ACTIVITY][fields.FIELD_ACTIVITY] = None + instance_snippet[fields.FIELD_NEXT_ACTIVITY][fields.FIELD_DATE] = None + + return {"1": instance_snippet} + + def create_prescription_snippet(self, context): + """ + Create the prescription snippet from the prescription details + """ + presc_details = self.set_all_snippet_details(fields.PRESCRIPTION_DETAILS, context) + presc_details[fields.FIELD_CURRENT_INSTANCE] = str(1) + return presc_details + + def create_patient_snippet(self, context): + """ + Create the patient snippet from the patient details + """ + return self.set_all_snippet_details(fields.PATIENT_DETAILS, context) + + def create_nomination_snippet(self, context): + """ + Create the nomination snippet from the nomination details + """ + nomination_snippet = self.set_all_snippet_details(fields.NOMINATION_DETAILS, context) + if hasattr(context, fields.FIELD_NOMINATED_PERFORMER): + if context.nominatedPerformer: + nomination_snippet[fields.FIELD_NOMINATED] = True + if not nomination_snippet[fields.FIELD_NOMINATION_HISTORY]: + nomination_snippet[fields.FIELD_NOMINATION_HISTORY] = [] + return nomination_snippet + + def set_all_snippet_details(self, details_list, context): + """ + Default any missing value to False + """ + snippet = {} + for item_detail in details_list: + if hasattr(context, item_detail): + value = getattr(context, item_detail) + elif isinstance(context, dict) and item_detail in context: + value = context[item_detail] + else: + snippet[item_detail] = False + continue + + if isinstance(value, datetime.datetime): + value = value.strftime(TimeFormats.STANDARD_DATE_TIME_FORMAT) + snippet[item_detail] = value + return snippet + + def create_line_items(self, context): + """ + Create individual line items + """ + + complete_line_items = [] + + for line_item in context.lineItems: + line_item_snippet = self.set_all_snippet_details(fields.LINE_ITEM_DETAILS, line_item) + complete_line_items.append(line_item_snippet) + + return complete_line_items + + def _get_prescription_instance_data(self, instance_number, raise_exception_on_missing=True): + """ + Internal method to support record access + """ + prescription_instance_data = self.prescription_record[fields.FIELD_INSTANCES].get( + instance_number + ) + if not prescription_instance_data: + if 
raise_exception_on_missing: + self._handle_missing_issue(instance_number) + else: + return {} + return prescription_instance_data + + def get_prescription_instance_data(self, instance_number, raise_exception_on_missing=True): + """ + Public method to support record access + """ + return self._get_prescription_instance_data(instance_number, raise_exception_on_missing) + + @property + def future_issues_available(self): + """ + Return boolean to indicate if future issues are available or not. Always False for + Acute and Repeat Prescribe + """ + return False + + def get_issue(self, issue_number): + """ + Get a particular issue of this prescription. + + :type issue_number: int + :rtype: PrescriptionIssue + """ + # explicitly check that we are receiving an int, as legacy code used strs + if not isinstance(issue_number, int): + raise TypeError("Issue number must be an int") + + issue_number_str = str(issue_number) + issue_data = self.prescription_record[fields.FIELD_INSTANCES].get(issue_number_str) + + if not issue_data: + self._handle_missing_issue(issue_number) + + issue = PrescriptionIssue(issue_data) + return issue + + def _handle_missing_issue(self, issue_number): + """ + Missing instances are a data migration specific issue, and will throw + a prescription not found error after after being logged + """ + self.log_object.write_log( + "EPS0073c", + None, + {"internalID": self.internal_id, "prescriptionID": self.id, "issue": issue_number}, + ) + # Re-raise this as SpineBusinessError with equivalent errorCode from ErrorBase1722. + raise EpsBusinessError(EpsErrorBase.PRESCRIPTION_NOT_FOUND) + + @property + def id(self): + """ + The prescription's ID. + + :rtype: str + """ + return self.prescription_record[fields.FIELD_PRESCRIPTION][fields.FIELD_PRESCRIPTION_ID] + + @property + def issue_numbers(self): + """ + Sorted list of issue numbers. + + Note: migrated prescriptions may have missing issues (before the current one) + so do not be surprised if the list returned here is not the complete range. + + :rtype: list(int) + """ + # we have to convert instance numbers to ints, as they're stored as strings + issue_numbers = [int(i) for i in list(self.prescription_record["instances"].keys())] + return sorted(issue_numbers) + + def get_issue_numbers_in_range(self, lowest=None, highest=None): + """ + Sorted list of issue numbers in the specified range (inclusive). + + If either lowest or highest threshold is set to None then it will be ignored. + + :type lowest: int or None + :type highest: int or None + :rtype: list(int) + """ + candidate_numbers = self.issue_numbers + + if lowest is not None: + candidate_numbers = [i for i in candidate_numbers if i >= lowest] + + if highest is not None: + candidate_numbers = [i for i in candidate_numbers if i <= highest] + + return candidate_numbers + + def get_issues_in_range(self, lowest=None, highest=None): + """ + Sorted list of issues in the specified range (inclusive). + + If either lowest or highest threshold is set to None then it will be ignored. + + :type lowest: int or None + :type highest: int or None + :rtype: list(PrescriptionIssue) + """ + issues = [self.get_issue(i) for i in self.get_issue_numbers_in_range(lowest, highest)] + return issues + + def get_issues_from_current_upwards(self): + """ + Sorted list of issues, starting at the current one. 
+ + :rtype: list(PrescriptionIssue) + """ + return self.get_issues_in_range(self.current_issue_number, None) + + @property + def missing_issue_numbers(self): + """ + Sorted list of numbers of instances missing from the prescription. + + :rtype: list(int) + """ + expected_issue_numbers = range(1, self.max_repeats + 1) + actual_issue_numbers = self.issue_numbers + missing_issue_numbers = set(expected_issue_numbers) - set(actual_issue_numbers) + + return sorted(list(missing_issue_numbers)) + + @property + def issues(self): + """ + List of issues, ordered by issue number. + + :rtype: list(PrescriptionIssue) + """ + issues = [self.get_issue(i) for i in self.issue_numbers] + return issues + + @property + def _current_instance_data(self): + """ + Internal property to support record access + """ + return self._get_prescription_instance_data(str(self.current_issue_number)) + + @property + def current_issue_number(self): + """ + The current issue number of this prescription. + + :rtype: int + """ + current_issue_number_str = self.prescription_record[fields.FIELD_PRESCRIPTION].get( + fields.FIELD_CURRENT_INSTANCE + ) + if not current_issue_number_str: + self._handle_missing_issue(fields.FIELD_CURRENT_INSTANCE) + return int(current_issue_number_str) + + @current_issue_number.setter + def current_issue_number(self, value): + """ + The current issue number of this prescription. + + :type value: int + """ + # explicitly check that we are receiving an int, as legacy code used strs + if not isinstance(value, int): + raise TypeError("Issue number must be an int") + + current_issue_number_str = str(value) + self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_CURRENT_INSTANCE + ] = current_issue_number_str + + @property + def current_issue(self): + """ + The current issue of this prescription. + + :rtype: PrescriptionIssue + """ + return self.get_issue(self.current_issue_number) + + @property + def _current_instance_status(self): + """ + Internal property to support record access + + .. deprecated:: + use "current_issue.status" instead + """ + return self._current_instance_data[fields.FIELD_PRESCRIPTION_STATUS] + + @property + def _pending_cancellations(self): + """ + Internal property to support record access + """ + return self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PENDING_CANCELLATIONS + ] + + @property + def _pending_cancellation_flag(self): + """ + Internal property to support record access + """ + obj = self.prescription_record.get(fields.FIELD_PRESCRIPTION, {}).get( + fields.FIELD_PENDING_CANCELLATIONS + ) + if not obj: + return False + if isinstance(obj, list) and obj: + return True + return False + + @_pending_cancellations.setter + def _pending_cancellations(self, value): + """ + Internal property to support record access + """ + self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PENDING_CANCELLATIONS + ] = value + + @property + def _nhs_number(self): + """ + Internal property to support record access + """ + return self.prescription_record[fields.FIELD_PATIENT][fields.FIELD_NHS_NUMBER] + + @property + def _prescription_time(self): + """ + Internal property to support record access + + .. deprecated:: + use "time" instead (which returns a datetime instead of a str) + PAB - but note - this field may contain just a date str, not a datetime?! + :rtype: str + """ + return self.prescription_record[fields.FIELD_PRESCRIPTION][fields.FIELD_PRESCRIPTION_TIME] + + @property + def time(self): + """ + The datetime of the prescription. 
+ + PAB - what does this time actually signify? It needs better naming + + :rtype: datetime.datetime + """ + prescription_time_str = self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PRESCRIPTION_TIME + ] + prescription_time = datetime.datetime.strptime( + prescription_time_str, TimeFormats.STANDARD_DATE_TIME_FORMAT + ) + return prescription_time + + @property + def _release_version(self): + """ + Internal property to support record access + """ + prescriptionID = str(self.return_prescription_id()) + idLength = len(prescriptionID) + if idLength in fields.R1_PRESCRIPTIONID_LENGTHS: + return fields.R1_VERSION + if idLength in fields.R2_PRESCRIPTIONID_LENGTHS: + return fields.R2_VERSION + + def get_release_version(self): + """ + Return the prescription release version (R1 or R2) + """ + return self._release_version + + def add_release_and_status(self, index_prefix, is_string=True): + """ + Returns a list containing the index prefix concatenated with all applicable release + versions and Prescription Statuses + """ + _release_version = self._release_version + _status_list = self.return_prescription_status_set() + return_set = [] + for each_status in _status_list: + if not is_string: + for each_index in index_prefix: + _new_value = each_index + "|" + _release_version + "|" + each_status + return_set.append(_new_value) + else: + _new_value = index_prefix + "|" + _release_version + "|" + each_status + return_set.append(_new_value) + + return return_set + + def update_nominated_performer(self, context): + """ + Update the "nominated performer" field and log the change. + """ + nomination = self.prescription_record[fields.FIELD_NOMINATION] + self.log_attribute_change( + fields.FIELD_NOMINATED_PERFORMER, + nomination[fields.FIELD_NOMINATED_PERFORMER], + context.nominatedPerformer, + context.fieldsToUpdate, + ) + nomination[fields.FIELD_NOMINATED_PERFORMER] = context.nominatedPerformer + + def return_presc_site_status_index(self): + """ + Return the prescribing organization and the prescription status + """ + _presc_site = self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PRESCRIBING_ORG + ] + _presc_status = self.return_prescription_status_set() + return [True, _presc_site, _presc_status] + + def return_nom_pharm_status_index(self): + """ + Return the Nominated Pharmacy and the prescription status + """ + nom_pharm = self.return_nom_pharm() + if not nom_pharm: + return [None, None] + presc_status = self.return_prescription_status_set() + return [nom_pharm, presc_status] + + def return_nom_pharm(self): + """ + Return the Nominated Pharmacy + """ + return self.prescription_record.get(fields.FIELD_NOMINATION, {}).get( + fields.FIELD_NOMINATED_PERFORMER + ) + + def return_disp_site_or_nom_pharm(self, instance): + """ + Returns the Dispensing Site if available, otherwise, returns the Nominated Pharmacy + or None if neither exist + """ + _disp_site = instance.get(fields.FIELD_DISPENSE, {}).get( + fields.FIELD_DISPENSING_ORGANIZATION + ) + if not _disp_site: + _disp_site = self.return_nom_pharm() + return _disp_site + + def return_disp_site_status_index(self): + """ + Return the dispensing organization and the prescription status. 
+ If nominated but not yet downloaded, return NomPharm instead of dispensing org + """ + dispensing_site_statuses = set() + for instance_key in self.prescription_record[fields.FIELD_INSTANCES]: + instance = self._get_prescription_instance_data(instance_key) + _disp_site = self.return_disp_site_or_nom_pharm(instance) + if not _disp_site: + continue + _presc_status = instance[fields.FIELD_PRESCRIPTION_STATUS] + dispensing_site_statuses.add(_disp_site + "_" + _presc_status) + + return [True, dispensing_site_statuses] + + def return_nhs_number_prescriber_dispenser_date_index(self): + """ + Return the NHS Number Prescribing organization dispensingOrganization and the prescription date + """ + nhs_number = self.return_nhs_number() + prescriber = self.return_prescribing_organisation() + index_start = nhs_number + "|" + prescriber + "|" + prescription_time = self.return_prescription_time() + nhs_number_presc_disp_dates = set() + for instance_key in self.prescription_record[fields.FIELD_INSTANCES]: + instance = self._get_prescription_instance_data(instance_key) + _disp_site = self.return_disp_site_or_nom_pharm(instance) + if not _disp_site: + continue + nhs_number_presc_disp_dates.add(index_start + _disp_site + "|" + prescription_time) + + return [True, nhs_number_presc_disp_dates] + + def return_prescriber_dispenser_date_index(self): + """ + Return the Prescribing organization dispensingOrganization and the prescription date + """ + prescriber = self.return_prescribing_organisation() + index_start = prescriber + "|" + prescription_time = self.return_prescription_time() + presc_disp_dates = set() + for instance_key in self.prescription_record[fields.FIELD_INSTANCES]: + instance = self._get_prescription_instance_data(instance_key) + disp_site = self.return_disp_site_or_nom_pharm(instance) + if not disp_site: + continue + presc_disp_dates.add(index_start + disp_site + "|" + prescription_time) + + return [True, presc_disp_dates] + + def return_dispenser_date_index(self): + """ + Return the dispensingOrganization and the prescription date + """ + index_start = "" + prescription_time = self.return_prescription_time() + presc_disp_dates = set() + for instance_key in self.prescription_record[fields.FIELD_INSTANCES]: + instance = self._get_prescription_instance_data(instance_key) + disp_site = self.return_disp_site_or_nom_pharm(instance) + if not disp_site: + continue + presc_disp_dates.add(index_start + disp_site + "|" + prescription_time) + + return [True, presc_disp_dates] + + def return_nhs_number_dispenser_date_index(self): + """ + Return the NHS Number dispensingOrganization and the prescription date + """ + nhs_number = self.return_nhs_number() + index_start = nhs_number + "|" + prescription_time = self.return_prescription_time() + nhs_number_disp_dates = set() + for instance_key in self.prescription_record[fields.FIELD_INSTANCES]: + instance = self._get_prescription_instance_data(instance_key) + _disp_site = self.return_disp_site_or_nom_pharm(instance) + if not _disp_site: + continue + nhs_number_disp_dates.add(index_start + _disp_site + "|" + prescription_time) + + return [True, nhs_number_disp_dates] + + def return_nominated_performer(self): + """ + Return the nominated performer (called when determining routing key extension) + """ + nom_performer = None + _nomination = self.prescription_record.get(fields.FIELD_NOMINATION) + if _nomination: + nom_performer = _nomination.get(fields.FIELD_NOMINATED_PERFORMER) + return nom_performer + + def return_nominated_performer_type(self): + """ + Return the 
nominated performer type + """ + nom_performer_type = None + _nomination = self.prescription_record.get(fields.FIELD_NOMINATION) + if _nomination: + nom_performer_type = _nomination.get(fields.FIELD_NOMINATED_PERFORMER_TYPE) + return nom_performer_type + + def return_prescription_status_set(self): + """ + For single instance prescription - the prescription status is always the current + status of the first (and only) instance + """ + status_set = set() + for instance_key in self.prescription_record[fields.FIELD_INSTANCES]: + instance = self._get_prescription_instance_data(instance_key) + status_set.add(instance[fields.FIELD_PRESCRIPTION_STATUS]) + return list(status_set) + + def return_nhs_number(self): + """ + Return the NHS Number + """ + return self._nhs_number + + def return_prescription_time(self): + """ + Return the Prescription Time + """ + return self._prescription_time + + def return_prescription_id(self): + """ + Return the Prescription ID + """ + return self.prescription_record[fields.FIELD_PRESCRIPTION][fields.FIELD_PRESCRIPTION_ID] + + def return_pending_cancellations_flag(self): + """ + Return the pending cancellations flag + """ + _prescription = self.prescription_record[fields.FIELD_PRESCRIPTION] + _max_repeats = _prescription.get(fields.FIELD_MAX_REPEATS) + + if not _max_repeats: + _max_repeats = 1 + + for prescription_issue in range(1, int(_max_repeats) + 1): + _prescription_issue = self.prescription_record[fields.FIELD_INSTANCES].get( + str(prescription_issue) + ) + # handle missing issues + if not _prescription_issue: + continue + issue_specific_cancellations = {} + _applied_cancellations_for_issue = _prescription_issue.get( + fields.FIELD_CANCELLATIONS, [] + ) + _cancellation_status_string_prefix = "" + self._create_cancellation_summary_dict( + _applied_cancellations_for_issue, + issue_specific_cancellations, + _cancellation_status_string_prefix, + ) + if str(_prescription_issue[fields.FIELD_INSTANCE_NUMBER]) == str( + _prescription[fields.FIELD_CURRENT_INSTANCE] + ): + _pending_cancellations = _prescription[fields.FIELD_PENDING_CANCELLATIONS] + _cancellation_status_string_prefix = "Pending: " + self._create_cancellation_summary_dict( + _pending_cancellations, + issue_specific_cancellations, + _cancellation_status_string_prefix, + ) + for _, val in issue_specific_cancellations.items(): + if val.get(fields.FIELD_REASONS, "")[:7] == "Pending": + return True + + return False + + def _create_cancellation_summary_dict( + self, recorded_cancellations, issue_cancellation_dict, cancellation_status + ): + """ + Process a list of cancellations, creating a dictionary of cancellation reason text + and applied SCN for each prescription and issue. + + cancellationStatus is used to seed the reasons in the pending scenario. 
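+
+ Illustrative sketch of a resulting entry, using hypothetical field values (not
+ taken from a real record). A pending cancellation of one line item would add:
+
+     issue_cancellation_dict["<lineItemRef>"] = {
+         fields.FIELD_SCN: "5",
+         fields.FIELD_REASONS: "Pending: Clinical grounds; Prescribing error",
+         fields.FIELD_ID: "<cancellationID>",
+     }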
+ """ + + if not recorded_cancellations: + return + + for _cancellation in recorded_cancellations: + _subsequent_reason = False + _cancellation_reasons = str(cancellation_status) + + _cancellation_id = _cancellation.get(fields.FIELD_CANCELLATION_ID, []) + _scn = PrescriptionsChangeLogProcessor.getSCN( + self.prescription_record["changeLog"].get(_cancellation_id, {}) + ) + for _cancellation_reason in _cancellation.get(fields.FIELD_REASONS, []): + _cancellation_text = _cancellation_reason.split(":")[1].strip() + if _subsequent_reason: + _cancellation_reasons += "; " + _subsequent_reason = True + _cancellation_reasons += str(handleEncodingOddities(_cancellation_text)) + + if ( + _cancellation.get(fields.FIELD_CANCELLATION_TARGET) == "Prescription" + ): # noqa: SIM108 + _cancellation_target = fields.FIELD_PRESCRIPTION + else: + _cancellation_target = _cancellation.get(fields.FIELD_CANCEL_LINE_ITEM_REF) + + if ( + issue_cancellation_dict.get(_cancellation_target, {}).get(fields.FIELD_ID) + == _cancellation_id + ): + # Cancellation has already been added and this is pending as multiple cancellations are not possible + return + + issue_cancellation_dict[_cancellation_target] = { + fields.FIELD_SCN: _scn, + fields.FIELD_REASONS: _cancellation_reasons, + fields.FIELD_ID: _cancellation_id, + } + + def return_current_instance(self): + """ + Return the current instance + + .. deprecated:: + use "current_issue_number" instead (which returns int instead of string) + """ + return str(self.current_issue_number) + + def return_prescription_status(self, instance_number, raise_exception_on_missing=True): + """ + For single instance prescription - the prescription status is always the current + status of the first (and only) instance + """ + return self._get_prescription_instance_data( + str(instance_number), raise_exception_on_missing + ).get(fields.FIELD_PRESCRIPTION_STATUS) + + def return_previous_prescription_status(self, instance_number, raise_exception_on_missing=True): + """ + For single instance prescription - the previous prescription status is always the + previous status of the first (and only) instance + """ + return self._get_prescription_instance_data( + str(instance_number), raise_exception_on_missing + ).get(fields.FIELD_PREVIOUS_STATUS) + + def return_line_item_by_ref(self, instance_number, line_item_ref): + """ + Return the line item from the instance that matches the reference provided + """ + for line_item in self._get_prescription_instance_data(instance_number)[ + fields.FIELD_LINE_ITEMS + ]: + if line_item[fields.FIELD_ID] == line_item_ref: + return line_item + return None + + def return_prescribing_organisation(self): + """ + Return the prescribing organisation from the record + """ + return self.prescription_record[fields.FIELD_PRESCRIPTION][fields.FIELD_PRESCRIBING_ORG] + + def return_last_dn_guid(self, instance_number): + """ + Return references to the last dispense notification messages + """ + instance = self._get_prescription_instance_data(instance_number) + try: + dispn_msg_guid = instance[fields.FIELD_DISPENSE][ + fields.FIELD_LAST_DISPENSE_NOTIFICATION_GUID + ] + return dispn_msg_guid + except KeyError: + return None + + def return_last_dc_guid(self, instance_number): + """ + Return references to the last dispense notification messages + """ + instance = self._get_prescription_instance_data(instance_number) + try: + claim_msg_guid = instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_GUID] + return claim_msg_guid + except KeyError: + return None + + def 
return_document_references_for_claim(self, instance_number): + """ + Return references to prescription, dispense notification and claim messages + """ + presc_msg_ref = self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PRESCRIPTION_MSG_REF + ] + instance = self._get_prescription_instance_data(instance_number) + dispn_msg_ref = instance[fields.FIELD_DISPENSE][ + fields.FIELD_LAST_DISPENSE_NOTIFICATION_MSG_REF + ] + claim_msg_ref = instance[fields.FIELD_CLAIM][fields.FIELD_DISPENSE_CLAIM_MSG_REF] + return [presc_msg_ref, dispn_msg_ref, claim_msg_ref] + + def return_claim_date(self, instance_number): + """ + Returns the claim date recorded for an instance + """ + instance = self._get_prescription_instance_data(instance_number) + claim_rcv_date = instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_RECEIVED_DATE] + return claim_rcv_date + + def check_real(self): + """ + Check that the prescription object is real (as opposed to an empty one created + by a pendingCancellation) + + If the prescriptionPresent flag is not there - act as if True + """ + try: + return self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PRESCRIPTION_PRESENT + ] + except KeyError: + return True + + def check_returned_record_is_real(self, returned_record): + """ + Check that the returned_record is real (as opposed to an empty one created + by a pending cancellation). Look for a valid prescription treatment type + """ + if returned_record[fields.FIELD_PRESCRIPTION][fields.FIELD_PRESCRIPTION_TREATMENT_TYPE]: + return True + + return False + + def _get_dispense_list_to_check(self, prescription_status): + """ + Consistency check fields + """ + if prescription_status == PrescriptionStatus.WITH_DISPENSER: + check_list = [fields.FIELD_DISPENSING_ORGANIZATION] + elif prescription_status == PrescriptionStatus.WITH_DISPENSER_ACTIVE: + check_list = [fields.FIELD_DISPENSING_ORGANIZATION, fields.FIELD_LAST_DISPENSE_DATE] + elif prescription_status in [PrescriptionStatus.DISPENSED, PrescriptionStatus.CLAIMED]: + check_list = [fields.FIELD_LAST_DISPENSE_DATE] + else: + check_list = [] + + return check_list + + def _get_instance_list_to_check(self, prescription_status): + """ + Consistency check fields + """ + if prescription_status == PrescriptionStatus.EXPIRED: + check_list = [fields.FIELD_COMPLETION_DATE, fields.FIELD_EXPIRY_DATE] + elif prescription_status in [ + PrescriptionStatus.CANCELLED, + PrescriptionStatus.NOT_DISPENSED, + ]: + check_list = [fields.FIELD_COMPLETION_DATE] + elif prescription_status in [ + PrescriptionStatus.AWAITING_RELEASE_READY, + PrescriptionStatus.REPEAT_DISPENSE_FUTURE_INSTANCE, + ]: + check_list = [ + fields.FIELD_DISPENSE_WINDOW_LOW_DATE, + fields.FIELD_NOMINATED_DOWNLOAD_DATE, + ] + else: + check_list = [] + + return check_list + + def _get_prescription_list_to_check(self, prescription_status): + """ + Consistency check fields + """ + if prescription_status in [ + PrescriptionStatus.AWAITING_RELEASE_READY, + PrescriptionStatus.REPEAT_DISPENSE_FUTURE_INSTANCE, + ]: + check_list = [fields.FIELD_PRESCRIPTION_TIME] + else: + check_list = [fields.FIELD_PRESCRIPTION_TREATMENT_TYPE, fields.FIELD_PRESCRIPTION_TIME] + + return check_list + + def _get_claim_list_to_check(self, prescription_status): + """ + Consistency check fields + """ + return ( + [fields.FIELD_CLAIM_RECEIVED_DATE] + if prescription_status == PrescriptionStatus.CLAIMED + else [] + ) + + def _get_nominate_list_to_check(self): + """ + Consistency check fields + """ + p_t_type = 
self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PRESCRIPTION_TREATMENT_TYPE + ] + return ( + [fields.FIELD_NOMINATED_PERFORMER] + if p_t_type == fields.TREATMENT_TYPE_REPEAT_DISPENSE + else [] + ) + + def check_record_consistency(self, context): + """ + Check each line item to ensure consistency with the prescription status for + this instance - the epsAdminUpdate can only impact a single instance + + *** Should be called targetInstance not currentInstance *** + + Check for the prescription status for that instance that required data exists + Check a nominatedPerformer is set for repeat prescriptions (although this may + not be required as a check due to DPR rules) + """ + + test_failures = [] + + instance_dict = self._get_prescription_instance_data(context.currentInstance) + + for line_item_dict in instance_dict[fields.FIELD_LINE_ITEMS]: + valid = self.validate_line_prescription_status( + instance_dict[fields.FIELD_PRESCRIPTION_STATUS], line_item_dict[fields.FIELD_STATUS] + ) + if not valid: + test_failures.append("lineItemStatus check for " + line_item_dict[fields.FIELD_ID]) + + prescription_status = instance_dict[fields.FIELD_PRESCRIPTION_STATUS] + + prescription = self.prescription_record[fields.FIELD_PRESCRIPTION] + prescription_list = self._get_prescription_list_to_check(prescription_status) + self.individual_consistency_checks(prescription_list, prescription, test_failures) + + instance_list = self._get_instance_list_to_check(prescription_status) + self.individual_consistency_checks(instance_list, instance_dict, test_failures) + + nomination = self.prescription_record[fields.FIELD_NOMINATION] + nominate_list = self._get_nominate_list_to_check() + self.individual_consistency_checks(nominate_list, nomination, test_failures, False) + + dispense_list = self._get_dispense_list_to_check(prescription_status) + self.individual_consistency_checks( + dispense_list, instance_dict[fields.FIELD_DISPENSE], test_failures + ) + + claim_list = self._get_claim_list_to_check(prescription_status) + self.individual_consistency_checks( + claim_list, instance_dict[fields.FIELD_CLAIM], test_failures + ) + + if not test_failures: + return [True, None] + + for failure_reason in test_failures: + self.log_object.write_log( + "EPS0073", + None, + { + "internalID": self.internal_id, + "failureReason": failure_reason, + }, + ) + + return [False, "Record consistency check failure"] + + def individual_consistency_checks( + self, list_of_checks, record_part, test_failures, fail_on_none=True + ): + """ + Loop through field names in a list to confirm there is a value on the record_part + for each field + """ + for req_field in list_of_checks: + if req_field not in record_part: + test_failures.append("Mandatory item " + req_field + " missing") + if not record_part[req_field]: + if fail_on_none: + test_failures.append("Mandatory item " + req_field + " set to None") + return + self.log_object.write_log( + "EPS0073b", None, {"internalID": self.internal_id, "mandatoryItem": req_field} + ) + + def determine_if_final_issue(self, issue_number): + """ + Check if the issue is the final one, this may be because the current issue is + already at max_repeats, or becuase subsequent issues are missing + """ + if issue_number == self.max_repeats: + return True + + for i in range(int(issue_number) + 1, int(self.max_repeats + 1)): + issue_data = self._get_prescription_instance_data(str(i), False) + if issue_data.get(fields.FIELD_PRESCRIPTION_STATUS): + return False + return True + + def 
return_next_activity_index(self, test_sites, nad_reference, context): + """ + Iterate through all prescription instances, determining the Next Activity and Date + for each, and then set the lowest to the record. + Ignore a next activity of delete for all but the last instance + In the case of a tie-break, set the priority based on user impact (making a + prescription instance 'ready' for download takes precedence over deleting or + expiring an instance) + """ + earliest_activity_date = "99991231" + delete_date = "99991231" + + earliest_activity = None + + for instance_key in self.prescription_record[fields.FIELD_INSTANCES]: + instance_dict = self._get_prescription_instance_data(instance_key, False) + if not instance_dict.get(fields.FIELD_PRESCRIPTION_STATUS): + continue + + issue = PrescriptionIssue(instance_dict) + nad_status = self.set_nad_status(test_sites, context, str(issue.number)) + [next_activity, next_activity_date, expiry_date] = ( + self.nad_generator.next_activity_date(nad_status, nad_reference) + ) + + if fields.FIELD_NEXT_ACTIVITY not in instance_dict: + instance_dict[fields.FIELD_NEXT_ACTIVITY] = {} + + instance_dict[fields.FIELD_NEXT_ACTIVITY][fields.FIELD_ACTIVITY] = next_activity + instance_dict[fields.FIELD_NEXT_ACTIVITY][fields.FIELD_DATE] = next_activity_date + + if isinstance(expiry_date, datetime.datetime): + expiry_date = expiry_date.strftime(TimeFormats.STANDARD_DATE_FORMAT) + + instance_dict[fields.FIELD_EXPIRY_DATE] = expiry_date + + issue_is_final = self.determine_if_final_issue(issue.number) + + if not self._include_next_activity_for_instance( + next_activity, + issue.number, + self.current_issue_number, + self.max_repeats, + issue_is_final, + ): + continue + + # treat deletion separately to next activities + if next_activity == fields.NEXTACTIVITY_DELETE: + delete_date = next_activity_date + continue + + # Note: string comparison of dates in YYYYMMDD format + if next_activity_date < earliest_activity_date: + earliest_activity_date = next_activity_date + earliest_activity = next_activity + + # Note: string comparison of dates in YYYYMMDD format + if next_activity_date <= earliest_activity_date: + for activity in fields.USER_IMPACTING_ACTIVITY: + if next_activity == activity or earliest_activity == activity: + earliest_activity = activity + break + + if earliest_activity: + return [earliest_activity, earliest_activity_date] + + return [fields.NEXTACTIVITY_DELETE, delete_date] + + def _include_next_activity_for_instance( + self, next_activity, issue_number, current_issue_number, max_repeats, issue_is_final=None + ): + """ + Check whether the next_activity should be included for the issue as a position + within the prescription repeat issues. + - The final issue (issue_number == max_repeats) supports everything + - The previous issue(s) (issue_number < currentInstance) support createNoClaim + - The current issue supports everything other than delete and purge + - Future issues support nothing + + Note: we shouldn't really need to pass in the current_issue_number and max_repeats + parameters as these are available from self. However, the unit tests are + currently written to expect these to be passed in. + + Also note that due to missing prescription issues from Spine1, we need to be extra + cautious and cannot just assume that later issues are present. 
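+
+ A minimal illustrative sketch with hypothetical issue numbers (issue_is_final
+ left to default), reflecting the behaviour implemented below:
+
+     # current and final issue (3 of 3): all activities, including delete, permitted
+     self._include_next_activity_for_instance(fields.NEXTACTIVITY_DELETE, 3, 3, 3)  # True
+     # current but not final issue (2 of 3): ready permitted, delete not
+     self._include_next_activity_for_instance(fields.NEXTACTIVITY_READY, 2, 2, 3)   # True
+     self._include_next_activity_for_instance(fields.NEXTACTIVITY_DELETE, 2, 2, 3)  # False
+     # an earlier issue (1 of 3, current issue 2): only createNoClaim permitted
+     self._include_next_activity_for_instance(fields.NEXTACTIVITY_CREATENOCLAIM, 1, 2, 3)  # True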
+ + :type next_activity: str + :type issue_number: int + :type current_issue_number: int + :type max_repeats: int + :rtype: bool + """ + + issue_is_current = issue_number == current_issue_number + if not issue_is_final: + issue_is_final = issue_number == max_repeats + issue_is_before_current = issue_number < current_issue_number + all_remaining_issues_missing = (issue_number < current_issue_number) and (issue_is_final) + + # default for future issue + permitted_activities = [] + + if (issue_is_current and issue_is_final) or all_remaining_issues_missing: + # final issue + permitted_activities = [ + fields.NEXTACTIVITY_EXPIRE, + fields.NEXTACTIVITY_CREATENOCLAIM, + fields.NEXTACTIVITY_READY, + fields.NEXTACTIVITY_DELETE, + fields.NEXTACTIVITY_PURGE, + ] + + elif issue_is_before_current: + # previous issue + permitted_activities = [fields.NEXTACTIVITY_CREATENOCLAIM] + + elif issue_is_current: + # current issue + permitted_activities = [ + fields.NEXTACTIVITY_EXPIRE, + fields.NEXTACTIVITY_READY, + fields.NEXTACTIVITY_CREATENOCLAIM, + ] + + return next_activity in permitted_activities + + def set_nad_status(self, test_prescribing_sites, context, instance_number_str): + """ + Create the status fields that are required for the Next Activity Index calculation + + *** Shortcut taken converting time to date for prescriptionTime - relies on + relationship between standardDate format and standardDateTimeFormat staying + consistent *** + """ + presc_details = self.prescription_record[fields.FIELD_PRESCRIPTION] + inst_details = self._get_prescription_instance_data(instance_number_str, False) + + nad_status = {} + nad_status[fields.FIELD_PRESCRIPTION_TREATMENT_TYPE] = presc_details[ + fields.FIELD_PRESCRIPTION_TREATMENT_TYPE + ] + nad_status[fields.FIELD_PRESCRIPTION_DATE] = presc_details[fields.FIELD_PRESCRIPTION_TIME][ + :8 + ] + nad_status[fields.FIELD_RELEASE_VERSION] = self._release_version + + if presc_details[fields.FIELD_PRESCRIBING_ORG] in test_prescribing_sites: + nad_status[fields.FIELD_PRESCRIBING_SITE_TEST_STATUS] = True + else: + nad_status[fields.FIELD_PRESCRIBING_SITE_TEST_STATUS] = False + + nad_status[fields.FIELD_DISPENSE_WINDOW_HIGH_DATE] = inst_details[ + fields.FIELD_DISPENSE_WINDOW_HIGH_DATE + ] + nad_status[fields.FIELD_DISPENSE_WINDOW_LOW_DATE] = inst_details[ + fields.FIELD_DISPENSE_WINDOW_LOW_DATE + ] + nad_status[fields.FIELD_NOMINATED_DOWNLOAD_DATE] = inst_details[ + fields.FIELD_NOMINATED_DOWNLOAD_DATE + ] + nad_status[fields.FIELD_LAST_DISPENSE_DATE] = inst_details[fields.FIELD_DISPENSE][ + fields.FIELD_LAST_DISPENSE_DATE + ] + nad_status[fields.FIELD_LAST_DISPENSE_NOTIFICATION_MSG_REF] = inst_details[ + fields.FIELD_DISPENSE + ][fields.FIELD_LAST_DISPENSE_NOTIFICATION_MSG_REF] + nad_status[fields.FIELD_COMPLETION_DATE] = inst_details[fields.FIELD_COMPLETION_DATE] + nad_status[fields.FIELD_CLAIM_SENT_DATE] = inst_details[fields.FIELD_CLAIM][ + fields.FIELD_CLAIM_RECEIVED_DATE + ] + nad_status[fields.FIELD_HANDLE_TIME] = context.handleTime + nad_status[fields.FIELD_PRESCRIPTION_STATUS] = self.return_prescription_status( + instance_number_str + ) + nad_status[fields.FIELD_INSTANCE_NUMBER] = instance_number_str + + return nad_status + + def roll_forward_instance(self): + """ + If the currentInstance is changed, it is first stored as a pending_instance_change + - so that the update can be applied at the end of the process + """ + if self.pending_instance_change is not None: + self.current_issue_number = int(self.pending_instance_change) + + def compare_line_items_for_dispense( 
+ self, passed_line_items, valid_status_changes, instance_number + ): + """ + Compare the line items provided on a dispense message with the previous (stored) + state on the record to determine if this is a valid dispense notification for + each line items. + + passed_line_items will be a list of line_item dictionaries - with each line_item + having and: + fields.FIELD_ID - to match to an ID on the record + 'DN_ID' - a GUID for the dispense notification for that specific line item (this + will actually be ignored) + fields.FIELD_STATUS - A changed status following the dispense of which this is a + notification + fields.FIELD_MAX_REPEATS - to match the max_repeats of the original record + fields.FIELD_CURRENT_INSTANCE - to match the instanceNumber of the current record + + Note that as per SPII-6085, we should permit a Repeat Prescribe message without a + repeat number. + """ + treatment_type = self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PRESCRIPTION_TREATMENT_TYPE + ] + instance = self._get_prescription_instance_data(instance_number) + + stored_line_items = instance[fields.FIELD_LINE_ITEMS] + [stored_ids, passed_ids] = [set(), set()] + for line_item in stored_line_items: + stored_ids.add(str(line_item[fields.FIELD_ID])) + for line_item in passed_line_items: + passed_ids.add(str(line_item[fields.FIELD_ID])) + if stored_ids != passed_ids: + self.log_object.write_log( + "EPS0146", + None, + { + "internalID": self.internal_id, + "storedIDs": str(stored_ids), + "passedIDs": str(passed_ids), + }, + ) + # Re-raise this as SpineBusinessError with equivalent errorCode from ErrorBase1722. + raise EpsBusinessError(EpsErrorBase.ITEM_NOT_FOUND) + + for line_item in passed_line_items: + stored_line_item = self._return_matching_line_item(stored_line_items, line_item) + if not stored_line_item: + continue + + previous_status = stored_line_item[fields.FIELD_STATUS] + new_status = line_item[fields.FIELD_STATUS] + if [previous_status, new_status] not in valid_status_changes: + self.log_object.write_log( + "EPS0148", + None, + { + "internalID": self.internal_id, + "lineItemID": line_item[fields.FIELD_ID], + "previousStatus": previous_status, + "newStatus": new_status, + }, + ) + # Re-raise this as SpineBusinessError with equivalent errorCode from ErrorBase1722. + raise EpsBusinessError(EpsErrorBase.INVALID_LINE_STATE_TRANSITION) + + if treatment_type == fields.TREATMENT_TYPE_ACUTE: + continue + + if line_item[fields.FIELD_MAX_REPEATS] != stored_line_item[fields.FIELD_MAX_REPEATS]: + if treatment_type == fields.TREATMENT_TYPE_REPEAT_PRESCRIBE: + self.log_object.write_log( + "EPS0147b", + None, + { + "internalID": self.internal_id, + "providedRepeatCount": (line_item[fields.FIELD_MAX_REPEATS]), + "storedRepeatCount": str(stored_line_item[fields.FIELD_MAX_REPEATS]), + "lineItemID": line_item[fields.FIELD_ID], + }, + ) + continue + + # SPII-14044 - permit the max_repeats for line items to be equal to the + # prescription max_repeats as is normal when the line item expires sooner + # than the prescription. + if line_item.get(fields.FIELD_MAX_REPEATS) is None or self.max_repeats is None: + self.log_object.write_log( + "EPS0147d", + None, + { + "internalID": self.internal_id, + "providedRepeatCount": line_item.get(fields.FIELD_MAX_REPEATS), + "storedRepeatCount": ( + self.max_repeats + if self.max_repeats is None + else str(self.max_repeats) + ), + "lineItemID": line_item.get(fields.FIELD_ID), + }, + ) + # Re-raise this as SpineBusinessError with equivalent errorCode from ErrorBase1722. 
+ raise EpsBusinessError(EpsErrorBase.MAX_REPEAT_MISMATCH) + + if int(line_item[fields.FIELD_MAX_REPEATS]) == int(self.max_repeats): + self.log_object.write_log( + "EPS0147c", + None, + { + "internalID": self.internal_id, + "providedRepeatCount": (line_item[fields.FIELD_MAX_REPEATS]), + "storedRepeatCount": str(stored_line_item[fields.FIELD_MAX_REPEATS]), + "lineItemID": line_item[fields.FIELD_ID], + }, + ) + continue + + self.log_object.write_log( + "EPS0147", + None, + { + "internalID": self.internal_id, + "providedRepeatCount": (line_item[fields.FIELD_MAX_REPEATS]), + "storedRepeatCount": str(stored_line_item[fields.FIELD_MAX_REPEATS]), + "lineItemID": line_item[fields.FIELD_ID], + }, + ) + # Re-raise this as SpineBusinessError with equivalent errorCode from ErrorBase1722. + raise EpsBusinessError(EpsErrorBase.MAX_REPEAT_MISMATCH) + + def _return_matching_line_item(self, stored_line_items, line_item): + """ + Match on line item ID + """ + for stored_line_item in stored_line_items: + if stored_line_item[fields.FIELD_ID] == line_item[fields.FIELD_ID]: + return stored_line_item + return None + + def return_details_for_release(self): + """ + Need to return the status and expiryDate of the current instance - which can then + be used in validity checks for release request messages + """ + current_issue = self.current_issue + details = [ + current_issue.status, + current_issue.expiry_date_str, + self.return_nominated_performer(), + ] + return details + + def return_details_for_dispense(self): + """ + For dispense messages the following details are required: + - Instance status + - NHS Number + - Dispensing Organisation + - Max repeats (if repeat type, otherwise return None) + """ + current_issue = self.current_issue + max_repeats = str( + self.prescription_record[fields.FIELD_PRESCRIPTION][fields.FIELD_MAX_REPEATS] + ) + details = [ + str(current_issue.number), + current_issue.status, + self._nhs_number, + current_issue.dispensing_organization, + max_repeats, + ] + return details + + def return_last_dispense_status(self, instance_number): + """ + Return the last_dispense_status for the requested instance + """ + instance = self._get_prescription_instance_data(instance_number) + last_dispense_status = instance[fields.FIELD_LAST_DISPENSE_STATUS] + return last_dispense_status + + def return_last_dispense_date(self, instance_number): + """ + Return the last_dispense_date for the requested instance + """ + instance = self._get_prescription_instance_data(instance_number) + last_dispense_date = instance[fields.FIELD_DISPENSE][fields.FIELD_LAST_DISPENSE_DATE] + return last_dispense_date + + def return_details_for_claim(self, instance_number_str): + """ + For claim messages the following details are required: + - Instance status + - NHS Number + - Dispensing Organisation + - Max repeats (if repeat type, otherwise return None) + """ + issue_number = int(instance_number_str) + issue = self.get_issue(issue_number) + max_repeats = str( + self.prescription_record[fields.FIELD_PRESCRIPTION][fields.FIELD_MAX_REPEATS] + ) + details = [ + issue.claim, + issue.status, + self._nhs_number, + issue.dispensing_organization, + max_repeats, + ] + return details + + def return_last_disp_msg_ref(self, instance_number_str): + """ + returns the last dispense Msg Ref for the issue + """ + issue_number = int(instance_number_str) + issue = self.get_issue(issue_number) + return issue.last_dispense_notification_msg_ref + + def return_details_for_dispense_proposal_return(self): + """ + For DPR changes currentInstance, 
instanceStatus and dispensing_org required + """ + dispensing_org = self._current_instance_data[fields.FIELD_DISPENSE][ + fields.FIELD_DISPENSING_ORGANIZATION + ] + return (self.current_issue_number, self._current_instance_status, dispensing_org) + + def update_for_release(self, context): + """ + Update a prescription to indicate valid release request: + prescription instance to be changed to with-dispenser + add dispense section onto the instance - with dispensingOrganization + update status of individual line items + """ + self.update_instance_status(self._current_instance_data, PrescriptionStatus.WITH_DISPENSER) + self._current_instance_data[fields.FIELD_DISPENSE][ + fields.FIELD_DISPENSING_ORGANIZATION + ] = context.agentOrganization + release_date = context.handleTime.strftime(TimeFormats.STANDARD_DATE_FORMAT) + self._current_instance_data[fields.FIELD_RELEASE_DATE] = release_date + + self.update_line_item_status( + self._current_instance_data, + LineItemStatus.TO_BE_DISPENSED, + LineItemStatus.WITH_DISPENSER, + ) + self.set_exemption_dates() + + def update_for_dispense( + self, + context, + days_supply, + nom_down_lead_days, + nom_download_date_enabled, + maintain_instance=False, + ): + """ + Update a prescription to indicate valid dispense notification: + prescription instance to be changed to reflect passed-in status + update status of individual line items to reflect passed-in status + + """ + if context.isAmendment: # noqa: SIM108 - More readable as is + instance = self._get_prescription_instance_data(context.targetInstance) + else: + instance = self._current_instance_data + + instance[fields.FIELD_DISPENSE][fields.FIELD_LAST_DISPENSE_DATE] = context.dispenseDate + instance[fields.FIELD_LAST_DISPENSE_STATUS] = context.prescriptionStatus + + if hasattr(context, "agentOrganization"): + if context.agentOrganization: + instance[fields.FIELD_DISPENSE][ + fields.FIELD_DISPENSING_ORGANIZATION + ] = context.agentOrganization + + if context.prescriptionStatus in PrescriptionStatus.COMPLETED_STATES: + instance[fields.FIELD_COMPLETION_DATE] = context.dispenseDate + self.set_next_instance_prior_issue_date(context) + self.release_next_instance( + context, days_supply, nom_down_lead_days, nom_download_date_enabled + ) + self.update_line_item_status_from_dispense(instance, context.lineItems) + + if maintain_instance: + return + + self.update_instance_status(instance, context.prescriptionStatus) + + def update_for_rebuild( + self, context, days_supply, nom_down_lead_days, dispense_dict, nom_download_date_enabled + ): + """ + Complete the actions required to update the prescription instance with the changes + made in the interaction worker + """ + + instance = self._get_prescription_instance_data(context.targetInstance) + instance[fields.FIELD_DISPENSE][fields.FIELD_LAST_DISPENSE_DATE] = dispense_dict[ + fields.FIELD_DISPENSE_DATE + ] + instance[fields.FIELD_LAST_DISPENSE_STATUS] = dispense_dict[ + fields.FIELD_PRESCRIPTION_STATUS + ] + if dispense_dict[fields.FIELD_PRESCRIPTION_STATUS] in PrescriptionStatus.COMPLETED_STATES: + instance[fields.FIELD_COMPLETION_DATE] = dispense_dict[fields.FIELD_DISPENSE_DATE] + self.set_next_instance_prior_issue_date(context, context.targetInstance) + self.release_next_instance( + context, + days_supply, + nom_down_lead_days, + nom_download_date_enabled, + context.targetInstance, + ) + self.update_line_item_status_from_dispense(instance, dispense_dict[fields.FIELD_LINE_ITEMS]) + self.update_instance_status(instance, 
dispense_dict[fields.FIELD_PRESCRIPTION_STATUS]) + + def update_for_claim(self, context, instance_number): + """ + Update a prescription to indicate valid dispense claim received: + prescription instance to be changed to reflect passed-in status + Do not update status of individual line items + Add Claim details to record + """ + instance = self._get_prescription_instance_data(instance_number) + self.update_instance_status(instance, PrescriptionStatus.CLAIMED) + instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_RECEIVED_DATE] = context.claimDate + instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_STATUS] = fields.FIELD_CLAIMED_DISPLAY_NAME + instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_REBUILD] = False + instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_GUID] = context.dispenseClaimID + + def update_for_claim_amend(self, context, instance_number): + """ + Modification of update_for_claim for use when the claim is an amendment. + - Do not change the claimReceivedDate from the original value + - Change claimRebuild to True + Update a prescription to indicate valid dispense claim received: + prescription instance to be changed to reflect passed-in status + Do not update status of individual line items + Append the existing claimGUID into the historicClaimGUID List + Add Claim details to record + """ + instance = self._get_prescription_instance_data(instance_number) + self.update_instance_status(instance, PrescriptionStatus.CLAIMED) + instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_RECEIVED_DATE] = context.claimDate + instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_STATUS] = fields.FIELD_CLAIMED_DISPLAY_NAME + instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_REBUILD] = True + if fields.FIELD_HISTORIC_CLAIMS not in instance[fields.FIELD_CLAIM]: + instance[fields.FIELD_CLAIM][fields.FIELD_HISTORIC_CLAIM_GUIDS] = [] + claim_guid = instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_GUID] + instance[fields.FIELD_CLAIM][fields.FIELD_HISTORIC_CLAIM_GUIDS].append(claim_guid) + instance[fields.FIELD_CLAIM][fields.FIELD_CLAIM_GUID] = context.dispenseClaimID + + def update_for_return(self, _, retain_nomination=False): + """ + If this is a nominated prescription then check that the nominated performer is in + the nomination history and clear the current value. 
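+
+ Illustrative before/after sketch of the nomination section, with hypothetical
+ field names and ODS code (assuming retain_nomination is False):
+
+     # before: nominated=True, nominatedPerformer="FA123", nominationHistory=[]
+     # after:  nominated=True, nominatedPerformer=None,    nominationHistory=["FA123"]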
+ + The status then needs to be changed for the prescription and the line items + """ + + self.clear_dispensing_organisation(self._current_instance_data) + + self.update_instance_status(self._current_instance_data, PrescriptionStatus.TO_BE_DISPENSED) + self.update_line_item_status( + self._current_instance_data, + LineItemStatus.WITH_DISPENSER, + LineItemStatus.TO_BE_DISPENSED, + ) + if retain_nomination: + return + + nom_details = self.prescription_record[fields.FIELD_NOMINATION] + if nom_details[fields.FIELD_NOMINATED]: + if ( + nom_details[fields.FIELD_NOMINATED_PERFORMER] + not in nom_details[fields.FIELD_NOMINATION_HISTORY] + ): + nom_details[fields.FIELD_NOMINATION_HISTORY].append( + nom_details[fields.FIELD_NOMINATED_PERFORMER] + ) + nom_details[fields.FIELD_NOMINATED_PERFORMER] = None + + def clear_dispensing_organisation(self, instance): + """ + Clear the dispensing organisation from the instance + """ + instance[fields.FIELD_DISPENSE][fields.FIELD_DISPENSING_ORGANIZATION] = None + + def check_action_applicability(self, target_instance, action, context): + """ + The batch worker will always use 'Available' as the target reference, if this isn't + the target instance then the update has come from a test or admin system that needs + to take action on a specific instance, so skip the applicability test. + """ + + if target_instance != fields.BATCH_STATUS_AVAILABLE: + self.set_instance_to_action_update(target_instance, context, action) + else: + self.find_instances_to_action_update(context, action) + + def set_instance_to_action_update(self, target_instance, context, action): + """ + Set the instance to action update based on the value passed in the request + """ + context.instancesToUpdate = str(target_instance) + self.log_object.write_log( + "EPS0407b", + None, + { + "internalID": self.internal_id, + "passedAction": str(action), + "instancesToUpdate": str(target_instance), + }, + ) + + def find_instances_to_action_update(self, context, action): + """ + Check all available instances for any that match the activity and have passed the + next activity date. This date check is important, as all instances of a prescription + will have 'expire' as the NAD status to start with. + """ + issues_to_update = [] + rejected_list = [] + + activity_to_look_for = fields.ACTIVITY_LOOKUP[action] + handle_date = context.handleTime.strftime(TimeFormats.STANDARD_DATE_FORMAT) + + for issue in self.issues: + # Special case to reset the NextActivityDate for prescriptions that were migrated without a NAD + if (issue.status == PrescriptionStatus.AWAITING_RELEASE_READY) and ( + action == fields.ADMIN_ACTION_RESET_NAD + ): + issues_to_update.append(issue) + # Special case to allow the reset of the current instance + if action == fields.SPECIAL_RESET_CURRENT_INSTANCE: + issues_to_update.append(issue) + # break the loop once at least one issue has been identified. + if issues_to_update: + break + # Special case to return the dispense notification to Spine in the case that it is 'hung' + if action == fields.SPECIAL_DISPENSE_RESET: + self._confirm_dispense_reset_on_issue(issues_to_update, issue) + # Special case to apply cancellations to those that weren't set post migration - issue 110898 + if action == fields.SPECIAL_APPLY_PENDING_CANCELLATIONS: + self._confirm_cancellations_to_apply(issues_to_update, issue) + # break the loop once the first issue has been identified. 
+ if issues_to_update: + break + # NOTE: SPII-10495 some migrated prescriptions don't have the 'activity' field + # populated, so guard against this to avoid killing process. + if issue.next_activity is not None: + # Note: string comparison of dates in YYYYMMDD format + action_is_due = issue.next_activity_date_str <= handle_date + + if (activity_to_look_for == issue.next_activity) and action_is_due: + issues_to_update.append(issue) + else: + rejection_ref = str(issue.number) + rejection_ref += "|" + issue.next_activity + rejection_ref += "|" + issue.next_activity_date_str + rejected_list.append(rejection_ref) + + if issues_to_update: + # Note: calling code currently expects issue numbers as strings + context.instancesToUpdate = [str(issue.number) for issue in issues_to_update] + self.log_object.write_log( + "EPS0407", + None, + { + "internalID": self.internal_id, + "passedAction": str(action), + "instancesToUpdate": context.instancesToUpdate, + }, + ) + else: + self.log_object.write_log( + "EPS0405", + None, + { + "internalID": self.internal_id, + "handleDate": handle_date, + "passedAction": activity_to_look_for, + "recordAction": str(rejected_list), + }, + ) + + def _confirm_cancellations_to_apply(self, issues_to_update, issue): + """ + Only apply pending cancellations to those issuse that are safe to cancel. It is + fine to reapply cancellations that have already been successful, and cancellation + takes precedence over expiry so no need to check the detailed status, only that + the prescription is in a cancellable state. + The cancellation worker will apply the cancellation to the first available issue and + all subsequent issues (due to constraints with active prescriptions, issue n+x must + be cancellable if issue n is cancellable). So only need to identify the first issue + """ + if issue.status in PrescriptionStatus.CANCELLABLE_STATES: + issues_to_update.append(issue) + + def _confirm_dispense_reset_on_issue(self, issues_to_update, issue): + """ + This code is to handle an exception that happened at go-live whereby some + prescriptions could not be read and need to be reset in bulk. The conditions for + reset are: + 1) The issue state is still 0002 - With Dispenser, i.e. it has not progressed to + with-dispenser active, dispensed or been returned, cancelled or expired. + 2) The prescription issue was downloaded on the 24th, 25th, 26th or 27th August 2014, + (this is the time that the issue was resolved in Live.) + The second check is required to protect against the scenario where the one issue + was downloaded within the target window, but this was successfully processed and + subsequently dispensed, releasing a new issue which may be status 0002, but will + not have a release date within the target window. + """ + # declared here as this whole method should be removed post clean-up + special_dispense_reset_dates = [ + "20140824", + "20140825", + "20140826", + "20140827", + "20140828", + "20140829", + "20140830", + "20140831", + "20140901", + "20140902", + "20140903", + "20140904", + "20140905", + "20140906", + "20140907", + "20140908", + ] + + if issue.status != PrescriptionStatus.WITH_DISPENSER: + return + + release_date = issue.release_date + if release_date and str(release_date) in special_dispense_reset_dates: + issues_to_update.append(issue) + + def update_by_action(self, context, nom_download_date_enabled=True): + """ + Update the record by performing the necessary logic to carry out the specified + action. 
+ + These actions are responsible for maintaining consistent record state, so the + calling code does not need to do this. + + Deletion is applied to the whole record (all issues), but other actions will + apply to all issues in instancesToUpdate. Note that expiring an issue will + expire all future issues as well. + """ + action = context.action + + # prescription-wide actions + if action == fields.NEXTACTIVITY_DELETE: + self._update_delete(context) + else: + # instance-specific actions + if context.instancesToUpdate: + for issue_number in context.instancesToUpdate: + # make sure this is really an int, and not a str + issue_number_int = int(issue_number) + self.perform_instance_specific_updates( + issue_number_int, context, nom_download_date_enabled + ) + + def perform_instance_specific_updates( + self, target_issue_number, context, nom_download_date_enabled + ): + """ + Perform the actions that would be specific to an instance and could apply to more + than one instance. + Return after nominated download as only Expire and Create No Claim should add a + completion date and release the next instance + Release next instance and roll forward instance are both safe to re-apply as they + check first for the correct instance state (awaiting release ready). + + :type target_issue_number: int + :type context: ??? + """ + issue = self.get_issue(target_issue_number) + + # dispatch based on action + + if context.action == fields.ACTIVITY_NOMINATED_DOWNLOAD: + # make an issue available for download + self._update_make_available_for_nominated_download(issue) + + elif context.action == fields.SPECIAL_RESET_CURRENT_INSTANCE: + old_current_issue_number, new_current_issue_number = self.reset_current_instance() + if old_current_issue_number != new_current_issue_number: + self.log_object.write_log( + "EPS0401c", + None, + { + "internalID": self.internal_id, + "oldCurrentIssue": old_current_issue_number, + "newCurrentIssue": new_current_issue_number, + "prescriptionID": context.prescriptionID, + }, + ) + self.current_issue_number = new_current_issue_number + else: + context.updatesToApply = False + + elif context.action == fields.SPECIAL_DISPENSE_RESET: + # Special case to reset the dispense status. This needs to perform a dispense + # proposal return and then re-set the nominated performer + self.update_for_return(None, True) + + elif context.action == fields.SPECIAL_APPLY_PENDING_CANCELLATIONS: + # No action to be taken at this level, just pass. 
+ pass + + elif context.action == fields.NEXTACTIVITY_EXPIRE: + # NOTE (SPII-10316): when requested to expire an issue, we must expire all + # subsequent issues as well, and set the current issue indicator to point at + # the last issue + issues_to_expire = self.get_issues_in_range(issue.number, None) + for issue_to_expire in issues_to_expire: + issue_to_expire.expire(context.handleTime, self) + + self.current_issue_number = self.max_repeats + + elif context.action == fields.NEXTACTIVITY_CREATENOCLAIM: + self._create_no_claim(issue, context.handleTime) + issue.mark_completed(context.handleTime, self) + self._move_to_next_issue_if_possible(issue.number, context, nom_download_date_enabled) + + elif context.action == fields.ADMIN_ACTION_RESET_NAD: + # Log that the prescription has been touched, but no change should be made + self.log_object.write_log( + "EPS0401b", + None, + {"internalID": self.internal_id, "prescriptionID": context.prescriptionID}, + ) + else: + # invalid action + self.log_object.write_log( + "EPS0401", + None, + { + "internalID": self.internal_id, + "action": str(context.action), + }, + ) + + def _move_to_next_issue_if_possible(self, issue_number, context, nom_download_date_enabled): + """ + Release the next issue, if possible, and mark it as the current issue + + :type issue_number: int + :type context : ??? + """ + # if this isn't the last issue... + if issue_number < self.max_repeats: + # Note: we know this is a Repeat Dispensing prescription, as it has multiple + # issues + context.prescriptionRepeatLow = context.targetInstance + self.release_next_instance( + context, + self.get_days_supply(), + fields.NOMINATED_DOWNLOAD_LEAD_DAYS, + nom_download_date_enabled, + str(issue_number), + ) + self.roll_forward_instance() + + def get_days_supply(self): + """ + Return the days supply from the prescription record, this will have been set to the + value passed in the original prescription, or the default 28 days + """ + days_supply = self.prescription_record[fields.FIELD_PRESCRIPTION][fields.FIELD_DAYS_SUPPLY] + # Habdle records that were migrated with null daysSupply rather than 0. + if not days_supply: + return 0 + if isinstance(days_supply, int): + return days_supply + # Habdle records that were migrated with blank space in the daysSupply rather than 0. + if not days_supply.strip(): + return 0 + return int(days_supply) + + def _create_no_claim(self, issue, handle_time): + """ + Update the prescription status to No Claimed. 
+ + :type issue: PrescriptionIssue + :type handle_time: datetime.datetime + """ + issue.update_status(PrescriptionStatus.NO_CLAIMED, self) + + handle_time_str = handle_time.strftime(TimeFormats.STANDARD_DATE_FORMAT) + issue.claim.received_date_str = handle_time_str + self.log_attribute_change(fields.FIELD_CLAIM_RECEIVED_DATE, "", handle_time_str, None) + + self.log_object.write_log("EPS0406", None, {"internalID": self.internal_id}) + + def _update_make_available_for_nominated_download(self, issue): + """ + Update the prescription state to make it available for nominated download + + :type issue: PrescriptionIssue + """ + issue.update_status(PrescriptionStatus.TO_BE_DISPENSED, self) + + self.log_object.write_log("EPS0402", None, {"internalID": self.internal_id}) + + def _verify_record_deletion(self): + """ + Confirm that it is ok to delete the record by checking through the next activities + of each of the prescription issues, if not then log and return false + """ + for issue_key in self.prescription_record[fields.FIELD_INSTANCES]: + issue = self._get_prescription_instance_data(issue_key) + next_activity_for_issue = issue.get(fields.FIELD_NEXT_ACTIVITY, {}).get( + fields.FIELD_ACTIVITY + ) + if next_activity_for_issue == fields.NEXTACTIVITY_DELETE: + continue + + self.log_object.write_log( + "EPS0404b", + None, + { + "internalID": self.internal_id, + "prescriptionID": self.id, + "nextActivity": next_activity_for_issue, + "issue": issue_key, + }, + ) + return False + return True + + def _update_delete(self, context): + """ + Update the entire prescription to delete it + """ + if not self._verify_record_deletion(): + return + + doc_list = [] + if self.prescription_record.get(fields.FIELDS_DOCUMENTS) is not None: + for document in self.prescription_record[fields.FIELDS_DOCUMENTS]: + doc_list.append(document) + if doc_list: + context.documentsToDelete = doc_list + + context.recordToDelete = context.prescriptionID[:-1] + + context.updatesToApply = False + + self.log_object.write_log( + "EPS0404", + None, + { + "internalID": self.internal_id, + "recordRef": context.recordToDelete, + "documentRefs": context.documentsToDelete, + }, + ) + + def update_by_admin(self, context): + """ + Set values from admin message straight into record + Log each change + Changes are not validated - the whole record will be validated once the full lot + of amendments have been made + + If record is a prescription that has not yet been acted upon, there will be no + previous status + + Perform the prescription level changes + Determine the instance or range of instances to be updated + Reset the context.currentInstance as this is used later in the validation + Run the instance update(s) + """ + current_instance = context.currentInstance + + if context.handleOverdueExpiry: + self.handle_overdue_expiry(context) + # nominatedPerformer will be None in the removal scenario so check for nominatedPerformerType too + if context.nominatedPerformerType or context.nominatedPerformer: + self.update_nominated_performer(context) + + [range_flag, start_instance, end_instance] = self.instances_to_update(current_instance) + context.currentInstance = self.return_current_instance() + + # find out which issues need updating + lowest = int(start_instance) + highest = int(end_instance) if range_flag else lowest + issue_numbers_to_update = self.get_issue_numbers_in_range(lowest, highest) + + # update the issues + for issue_number in issue_numbers_to_update: + self._make_admin_instance_updates(context, issue_number) + + return [True, None, 
None] + + def is_expiry_overdue(self): + """ + Check the expected Expiry date on the record, if in the past return True + """ + nad = self.return_next_activity_nad_bin() + return self._is_expiry_overdue(nad) + + def is_next_activity_purge(self): + """ + Check if records next activity is purge + """ + next_activity = self.return_next_activity_nad_bin() + if next_activity: + if next_activity[0].startswith(fields.NEXTACTIVITY_PURGE): + return True + return False + + @staticmethod + def _is_expiry_overdue(nad): + """ + return True if Expiry is overdue or index isn't set + """ + if not nad: + return False + if nad[0] is None: # badly behaved prescriptions from pre-golive + return False + if not nad[0][:6] == fields.NEXTACTIVITY_EXPIRE: + return False + if nad[0][7:15] >= datetime.datetime.now().strftime(TimeFormats.STANDARD_DATE_FORMAT): + return False + return True + + def handle_overdue_expiry(self, context): + """ + Check the expected Expiry date on the record, if in the past, expire the line + and prescription. + """ + nad = context.epsRecord.return_next_activity_nad_bin() + if not self._is_expiry_overdue(nad): + return + + self.log_object.write_log("EPS0335", None, {"internalID": self.internal_id}) + context.overdueExpiry = True + + # Only set the status to Expired if not already part of the admin update + if ( + not context.prescriptionStatus + or context.prescriptionStatus not in PrescriptionStatus.EXPIRY_IMMUTABLE_STATES + ): + context.prescriptionStatus = PrescriptionStatus.EXPIRED + + # Set the completion date if not already part of the admin update + if not context.completionDate: + context.completionDate = datetime.datetime.now().strftime( + TimeFormats.STANDARD_DATE_FORMAT + ) + + # Create a LineDict if one does not already exist and ensure that all LineItems are included + if not context.lineDict: + context.lineDict = {} + for line_item in context.epsRecord.current_issue.line_items: + if line_item.id in context.lineDict: + continue + context.lineDict[line_item.id] = LineItemStatus.EXPIRED + + def instances_to_update(self, target_instance): + """ + Check the target_instance value passed in the admin update request and set a + range or single instance target accordingly. + + The target_instance will be provided as either a integer or 'All', 'Available' + or 'Current', where the behaviour is: + All = all instances, including any past (complete) instances + Available = current through to final instance, not including any past instances + Current = the recorded current instance only, not a range + + Otherwise, the target_instance passed is an integer identifying the target + instance. 
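+
+ Illustrative sketch of the returned [range_flag, start, end] values for a
+ hypothetical record whose current instance is "3" and max_repeats is 6:
+
+     self.instances_to_update(fields.BATCH_STATUS_ALL)        # [True, "1", "6"]
+     self.instances_to_update(fields.BATCH_STATUS_AVAILABLE)  # [True, "3", "6"]
+     self.instances_to_update(fields.BATCH_STATUS_CURRENT)    # [False, "3", None]
+     self.instances_to_update("5")                            # [False, "5", None]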
+ """ + recorded_current_instance = self.return_current_instance() + recorded_max_instance = str(self.max_repeats) + + instance_range = False + end_instance = None + + if target_instance == fields.BATCH_STATUS_ALL: + instance_range = True + start_instance = "1" + end_instance = recorded_max_instance + elif target_instance == fields.BATCH_STATUS_AVAILABLE: + instance_range = True + start_instance = recorded_current_instance + end_instance = recorded_max_instance + elif target_instance == fields.BATCH_STATUS_CURRENT: + start_instance = recorded_current_instance + else: + start_instance = target_instance + + if instance_range: + self.log_object.write_log( + "EPS0297a", + None, + dict( + { + "internalID": self.internal_id, + "startInstance": start_instance, + "endInstance": end_instance, + } + ), + ) + else: + self.log_object.write_log( + "EPS0297b", + None, + dict({"internalID": self.internal_id, "startInstance": start_instance}), + ) + + return [instance_range, start_instance, end_instance] + + def make_withdrawal_updates(self, context): + """ + Apply instance specific updates into record + """ + + target_instance = context.targetInstance + prescription = self.prescription_record + instance = prescription[fields.FIELD_INSTANCES][target_instance] + instance[fields.FIELD_DISPENSE] = context.dispenseElement + instance[fields.FIELD_LINE_ITEMS] = context.lineItems + instance[fields.FIELD_PREVIOUS_STATUS] = instance[fields.FIELD_PRESCRIPTION_STATUS] + instance[fields.FIELD_PRESCRIPTION_STATUS] = context.prescriptionStatus + instance[fields.FIELD_LAST_DISPENSE_STATUS] = context.lastDispenseStatus + instance[fields.FIELD_COMPLETION_DATE] = context.completionDate + + def _make_admin_instance_updates(self, context, instance_number): + """ + Apply instance specific updates into record + """ + + current_instance = str(instance_number) + context.updateInstance = instance_number + prescription = self.prescription_record + instance = prescription[fields.FIELD_INSTANCES][current_instance] + dispense = instance[fields.FIELD_DISPENSE] + claim = instance[fields.FIELD_CLAIM] + + if context.prescriptionStatus: + self.log_attribute_change( + fields.FIELD_PRESCRIPTION_STATUS, + instance[fields.FIELD_PRESCRIPTION_STATUS], + context.prescriptionStatus, + context.fieldsToUpdate, + ) + instance[fields.FIELD_PREVIOUS_STATUS] = instance[fields.FIELD_PRESCRIPTION_STATUS] + instance[fields.FIELD_PRESCRIPTION_STATUS] = context.prescriptionStatus + + if context.completionDate: + self.log_attribute_change( + fields.FIELD_COMPLETION_DATE, + instance[fields.FIELD_COMPLETION_DATE], + context.completionDate, + context.fieldsToUpdate, + ) + instance[fields.FIELD_COMPLETION_DATE] = context.completionDate + + if context.dispenseWindowLowDate: + self.log_attribute_change( + fields.FIELD_DISPENSE_WINDOW_LOW_DATE, + instance[fields.FIELD_DISPENSE_WINDOW_LOW_DATE], + context.dispenseWindowLowDate, + context.fieldsToUpdate, + ) + instance[fields.FIELD_DISPENSE_WINDOW_LOW_DATE] = context.dispenseWindowLowDate + + if context.nominatedDownloadDate: + self.log_attribute_change( + fields.FIELD_NOMINATED_DOWNLOAD_DATE, + instance[fields.FIELD_NOMINATED_DOWNLOAD_DATE], + context.nominatedDownloadDate, + context.fieldsToUpdate, + ) + instance[fields.FIELD_NOMINATED_DOWNLOAD_DATE] = context.nominatedDownloadDate + + if context.releaseDate: + self.log_attribute_change( + fields.FIELD_RELEASE_DATE, + instance[fields.FIELD_RELEASE_DATE], + context.releaseDate, + context.fieldsToUpdate, + ) + instance[fields.FIELD_RELEASE_DATE] = context.releaseDate 
+ + if context.dispensingOrganization: + self.log_attribute_change( + fields.FIELD_DISPENSING_ORGANIZATION, + dispense[fields.FIELD_DISPENSING_ORGANIZATION], + context.dispensingOrganization, + context.fieldsToUpdate, + ) + dispense[fields.FIELD_DISPENSING_ORGANIZATION] = context.dispensingOrganization + + # This is to reset the dispensing org + if context.dispensingOrgNullFlavor: + self.log_attribute_change( + fields.FIELD_DISPENSING_ORGANIZATION, + dispense[fields.FIELD_DISPENSING_ORGANIZATION], + "None", + context.fieldsToUpdate, + ) + dispense[fields.FIELD_DISPENSING_ORGANIZATION] = None + + if context.lastDispenseDate: + self.log_attribute_change( + fields.FIELD_LAST_DISPENSE_DATE, + dispense[fields.FIELD_LAST_DISPENSE_DATE], + context.lastDispenseDate, + context.fieldsToUpdate, + ) + dispense[fields.FIELD_LAST_DISPENSE_DATE] = context.lastDispenseDate + + if context.claimSentDate: + self.log_attribute_change( + fields.FIELD_CLAIM_SENT_DATE, + claim[fields.FIELD_CLAIM_RECEIVED_DATE], + context.claimSentDate, + context.fieldsToUpdate, + ) + claim[fields.FIELD_CLAIM_RECEIVED_DATE] = context.claimSentDate + + for line_item_id in context.lineDict: + for current_line_item in instance[fields.FIELD_LINE_ITEMS]: + if current_line_item[fields.FIELD_ID] != line_item_id: + continue + current_line_status = current_line_item[fields.FIELD_STATUS] + if context.overdueExpiry: + if current_line_status in LineItemStatus.EXPIRY_IMMUTABLE_STATES: + continue + changed_line_status = LineItemStatus.EXPIRED + else: + changed_line_status = context.lineDict[line_item_id] + self.log_object.write_log( + "EPS0072", + None, + { + "internalID": self.internal_id, + "prescriptionID": context.prescriptionID, + "lineItemChanged": line_item_id, + "previousStatus": current_line_status, + "newStatus": changed_line_status, + }, + ) + current_line_item[fields.FIELD_STATUS] = changed_line_status + + def log_attribute_change(self, item_changed, previous_value, new_value, fields_to_update): + """ + Used by the update record function to change an existing attribute on the record + Both old and new values as well as the field name are logged + """ + if fields_to_update is not None: + fields_to_update.append(item_changed) + + self.log_object.write_log( + "EPS0071", + None, + { + "internalID": self.internal_id, + "itemChanged": item_changed, + "previousValue": previous_value, + "newValue": new_value, + }, + ) + + def _extract_dispense_date_from_context(self, context): + """ + Get the Dispense date from context, or use handleTime if not available. + + :type context: ??? + :rtype: str + """ + dispense_date = context.handleTime.strftime(TimeFormats.STANDARD_DATE_FORMAT) + if hasattr(context, fields.FIELD_DISPENSE_DATE): + if context.dispenseDate is not None: + dispense_date = context.dispenseDate + return dispense_date + + def _extract_dispense_datetime_from_context(self, context): + """ + Get the Dispense datetime from context, or use handleTime if not available. + + :type context: ??? + :rtype: str + """ + dispense_time = context.handleTime.strftime(TimeFormats.STANDARD_DATE_TIME_FORMAT) + if hasattr(context, fields.FIELD_DISPENSE_TIME): + if context.dispenseTime is not None: + dispense_time = context.dispenseTime + return dispense_time + + def _calculate_nominated_download_date( + self, prescribe_date, days_supply, lead_days, next_issue_number + ): + """ + Calculate the date for nominated download, taking into account lead time and supply length. 
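+
+ Worked illustration with hypothetical values: for prescribe_date "20240101",
+ days_supply 28, lead_days 7 and next_issue_number "3", the duration added is
+ 28 * (3 - 1) = 56 days, so the result is 20240101 + 56 days - 7 days = 20240219.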
+ + :type prescribe_date: str + :type days_supply: int + :type lead_days: int + :rtype: datetime.datetime + :type next_issue_number: str + """ + nominated_download_date = datetime.datetime.strptime( + prescribe_date, TimeFormats.STANDARD_DATE_FORMAT + ) + duration = days_supply * (int(next_issue_number) - 1) + nominated_download_date += relativedelta(days=+duration) + nominated_download_date += relativedelta(days=-lead_days) + return nominated_download_date + + def _calculate_nominated_download_date_old(self, dispense_date, days_supply, lead_days): + """ + Calculate the date for nominated download, taking into account lead time and supply length. + + :type dispense_date: str + :type days_supply: int + :type lead_days: int + :rtype: datetime.datetime + """ + nominated_download_date = datetime.datetime.strptime( + dispense_date, TimeFormats.STANDARD_DATE_FORMAT + ) + nominated_download_date += relativedelta(days=+days_supply) + nominated_download_date += relativedelta(days=-lead_days) + return nominated_download_date + + def return_next_issue_number(self, issue_number=None): + """ + Wrapper for _find_next_future_issue_number, allows an optional start issue to be passed in + otherwise will use the current issue number + """ + if not issue_number: + issue_number = self.current_issue_number + + return self._find_next_future_issue_number(str(issue_number)) + + def _find_next_future_issue_number(self, issue_number_str, skip_check_for_correct_status=False): + """ + Find the next issue number after the specified one, if valid. + + :type issue_number_str: str or ??? + :rtype: str or None + """ + if not issue_number_str: + return None + + next_issue_number = int(issue_number_str) + 1 + + # make sure the prescription actually has this issue + if next_issue_number not in self.issue_numbers: + return None + + if skip_check_for_correct_status: + return str(next_issue_number) + + # examine the issue to make sure it's in the correct state + next_issue = self.get_issue(next_issue_number) + if not next_issue.status == PrescriptionStatus.REPEAT_DISPENSE_FUTURE_INSTANCE: + return None + + # if we get this far, then we have a valid next issue, so return its number + # Note: calling code is currently expecting a str, so convert,until we've had + # a chance to refactor properly + return str(next_issue_number) + + def set_next_instance_prior_issue_date(self, context, current_issue_number_str=None): + """ + Set the prior issue date for the next instance, this is done as part of the + dispense notification process, but may form part of a standard dispense, a + dispense amendment or a rebuild dispense history. + """ + if not current_issue_number_str: + current_issue_number_str = context.prescriptionRepeatLow + + # find the number of the next issue, if there is a valid one. Don't check for + # valid status of the next instance as this could be a rebuild or amendment + # and the next issue may already be active. + next_issue_number_str = self._find_next_future_issue_number( + current_issue_number_str, skip_check_for_correct_status=True + ) + if next_issue_number_str: + instance = self._get_prescription_instance_data(next_issue_number_str) + instance[fields.FIELD_PREVIOUS_ISSUE_DATE] = ( + self._extract_dispense_datetime_from_context(context) + ) + + def release_next_instance( + self, + context, + days_supply, + nom_down_lead_days, + nom_download_date_enabled, + current_issue_number_str=None, + ): + """ + If not a repeat prescription (and no prescriptionRepeatLow provided), + no future issue to release. 
Otherwise, use the prescriptionRepeatLow to + determine the next issue - if it is there then change the status of that + issue to awaiting-release-ready, and set the dispenseWindowLowDate + + Note that it is possible that this will be invoked as part of an amendment. + """ + if not current_issue_number_str: + current_issue_number_str = context.prescriptionRepeatLow + + # find the number of the next issue, if there is a valid one + next_issue_number_str = self._find_next_future_issue_number(current_issue_number_str) + if next_issue_number_str is None: + # give up if there is no next issue + self.pendingInstanceChange = None + return + + # update the issue + dispense_date = self._extract_dispense_date_from_context(context) + prescribe_date = context.epsRecord.return_prescription_time() + if nom_download_date_enabled: + if prescribe_date is None: + self.log_object.write_log( + "EPS0676", + None, + dict( + {"internalID": self.internal_id, "prescriptionID": context.prescriptionID} + ), + ) + nominated_download_date = self._calculate_nominated_download_date( + prescribe_date[:8], days_supply, nom_down_lead_days, next_issue_number_str + ) + self.log_object.write_log( + "EPS0675", + None, + dict( + { + "internalID": self.internal_id, + "prescriptionID": context.prescriptionID, + "nominatedDownloadDate": nominated_download_date.strftime( + TimeFormats.STANDARD_DATE_FORMAT + ), + "prescribeDate": prescribe_date, + "daysSupply": str(days_supply), + "leadDays": str(nom_down_lead_days), + "issueNumber": next_issue_number_str, + } + ), + ) + else: + nominated_download_date = self._calculate_nominated_download_date_old( + dispense_date, days_supply, nom_down_lead_days + ) + + if nominated_download_date >= datetime.datetime( + context.handleTime.year, context.handleTime.month, context.handleTime.day + ): + new_prescription_status = PrescriptionStatus.AWAITING_RELEASE_READY + else: + new_prescription_status = PrescriptionStatus.TO_BE_DISPENSED + + instance = self._get_prescription_instance_data(next_issue_number_str) + instance[fields.FIELD_PREVIOUS_STATUS] = instance[fields.FIELD_PRESCRIPTION_STATUS] + instance[fields.FIELD_PRESCRIPTION_STATUS] = new_prescription_status + instance[fields.FIELD_DISPENSE_WINDOW_LOW_DATE] = dispense_date + instance[fields.FIELD_NOMINATED_DOWNLOAD_DATE] = nominated_download_date.strftime( + TimeFormats.STANDARD_DATE_FORMAT + ) + + # mark so that we know to update the prescription's current issue number + self.pendingInstanceChange = next_issue_number_str + + def add_release_document_ref(self, rel_req_document_ref): + """ + Add the reference to the release request document to the instance. + """ + self._current_instance_data[fields.FIELD_RELEASE_REQUEST_MGS_REF] = rel_req_document_ref + + def add_release_dispenser_details(self, rel_dispenser_details): + """ + Add the dispenser details from the release request document to the instance. + """ + self._current_instance_data[fields.FIELD_RELEASE_DISPENSER_DETAILS] = rel_dispenser_details + + def add_dispense_document_ref(self, dn_document_ref, target_instance=None): + """ + Add the reference to the dispense notification document to the instance. 
+ """ + instance = ( + self._get_prescription_instance_data(target_instance) + if target_instance + else self._current_instance_data + ) + instance[fields.FIELD_DISPENSE][ + fields.FIELD_LAST_DISPENSE_NOTIFICATION_MSG_REF + ] = dn_document_ref + + def check_status_complete(self, prescription_status): + """ + Check if the passed prescription status is in a complete state and return the + appropriate boolean + """ + return prescription_status in PrescriptionStatus.COMPLETED_STATES + + def clear_dispense_notifications_from_history(self, target_instance): + """ + Clear all but the release from the dispense history + """ + + instance = self._get_prescription_instance_data(target_instance) + new_dispense_history = {} + if fields.FIELD_RELEASE in instance[fields.FIELD_DISPENSE_HISTORY]: + release_snippet = copy(instance[fields.FIELD_DISPENSE_HISTORY][fields.FIELD_RELEASE]) + new_dispense_history[fields.FIELD_RELEASE] = release_snippet + instance[fields.FIELD_DISPENSE_HISTORY] = copy(new_dispense_history) + + def create_dispense_history_entry(self, dn_document_guid, target_instance=None): + """ + Create a dispense history entry to be used in future if the dispense notification + is withdrawn. Also need to include the current prescription status + + Use the copy function to take a copy of it as it is prior to the changes + otherwise a link is created and the data will be added at the post-update state. + + Use the last dispense date from the record unless the last dispense time is passed + in (used for release only). + """ + instance = ( + self._get_prescription_instance_data(target_instance) + if target_instance + else self._current_instance_data + ) + instance[fields.FIELD_DISPENSE_HISTORY][dn_document_guid] = {} + dispense_entry = instance[fields.FIELD_DISPENSE_HISTORY][dn_document_guid] + dispense_entry[fields.FIELD_DISPENSE] = copy(instance[fields.FIELD_DISPENSE]) + dispense_entry[fields.FIELD_PRESCRIPTION_STATUS] = copy( + instance[fields.FIELD_PRESCRIPTION_STATUS] + ) + dispense_entry[fields.FIELD_LAST_DISPENSE_STATUS] = copy( + instance[fields.FIELD_LAST_DISPENSE_STATUS] + ) + line_items = [] + for line_item in instance[fields.FIELD_LINE_ITEMS]: + line_item_copy = copy(line_item) + line_items.append(line_item_copy) + dispense_entry[fields.FIELD_LINE_ITEMS] = copy(line_items) + dispense_entry[fields.FIELD_COMPLETION_DATE] = copy(instance[fields.FIELD_COMPLETION_DATE]) + + instance_last_dispense = copy( + instance[fields.FIELD_DISPENSE][fields.FIELD_LAST_DISPENSE_DATE] + ) + if not instance_last_dispense: + release_date = copy(instance[fields.FIELD_RELEASE_DATE]) + dispense_entry[fields.FIELD_DISPENSE][fields.FIELD_LAST_DISPENSE_DATE] = release_date + else: + dispense_entry[fields.FIELD_DISPENSE][ + fields.FIELD_LAST_DISPENSE_DATE + ] = instance_last_dispense + + def create_release_history_entry(self, release_time, dispensing_org): + """ + Create a dispense history entry specific to the release action + + Use the copy function to take a copy of it as it is prior to the changes + otherwise a link is created and the data will be added at the post-update state. + + Set the line item status to 0008 as any withdrawal can only return the + prescription back to 'with dispenser' state. + + Use the release date as the last dispense date to support next activity + calculation if the dispense history is withdrawn. 
+ """ + + instance = self._current_instance_data + + instance[fields.FIELD_DISPENSE_HISTORY][fields.FIELD_RELEASE] = {} + dispense_entry = instance[fields.FIELD_DISPENSE_HISTORY][fields.FIELD_RELEASE] + dispense_entry[fields.FIELD_DISPENSE] = copy(instance[fields.FIELD_DISPENSE]) + dispense_entry[fields.FIELD_PRESCRIPTION_STATUS] = copy( + instance[fields.FIELD_PRESCRIPTION_STATUS] + ) + dispense_entry[fields.FIELD_LAST_DISPENSE_STATUS] = copy( + instance[fields.FIELD_LAST_DISPENSE_STATUS] + ) + line_items = [] + for line_item in instance[fields.FIELD_LINE_ITEMS]: + line_item_copy = copy(line_item) + if ( + line_item_copy[fields.FIELD_STATUS] != LineItemStatus.CANCELLED + and line_item_copy[fields.FIELD_STATUS] != LineItemStatus.EXPIRED + ): + line_item_copy[fields.FIELD_STATUS] = LineItemStatus.WITH_DISPENSER + line_items.append(line_item_copy) + dispense_entry[fields.FIELD_LINE_ITEMS] = line_items + dispense_entry[fields.FIELD_COMPLETION_DATE] = copy(instance[fields.FIELD_COMPLETION_DATE]) + release_time_str = release_time.strftime(TimeFormats.STANDARD_DATE_FORMAT) + dispense_entry[fields.FIELD_DISPENSE][fields.FIELD_LAST_DISPENSE_DATE] = release_time_str + dispense_entry[fields.FIELD_DISPENSE][fields.FIELD_DISPENSING_ORGANIZATION] = dispensing_org + + def add_dispense_document_guid(self, dn_document_guid, target_instance=None): + """ + Add the reference to the dispense notification document to the instance. + """ + instance = ( + self._get_prescription_instance_data(target_instance) + if target_instance + else self._current_instance_data + ) + instance[fields.FIELD_DISPENSE][ + fields.FIELD_LAST_DISPENSE_NOTIFICATION_GUID + ] = dn_document_guid + + def add_claim_document_ref(self, dn_claim_ref, instance_number): + """ + Add the reference to the dispense claim document to the instance. + """ + instance = self._get_prescription_instance_data(instance_number) + instance[fields.FIELD_CLAIM][fields.FIELD_DISPENSE_CLAIM_MSG_REF] = dn_claim_ref + + def return_completion_date(self, instance_number): + """ + Return the completion date for the requested instance + """ + instance = self._get_prescription_instance_data(instance_number) + return instance[fields.FIELD_COMPLETION_DATE] + + def add_claim_amend_document_ref(self, dn_claim_ref, instance_number): + """ + Add the old claim reference to the dispense claim MsgRef history and add the new + document to the instance. 
+        """
+        instance = self._get_prescription_instance_data(instance_number)
+
+        if not instance[fields.FIELD_CLAIM][fields.FIELD_HISTORIC_DISPENSE_CLAIM_MSG_REF]:
+            instance[fields.FIELD_CLAIM][fields.FIELD_HISTORIC_DISPENSE_CLAIM_MSG_REF] = []
+
+        historic_claim_msg_ref = instance[fields.FIELD_CLAIM][fields.FIELD_DISPENSE_CLAIM_MSG_REF]
+
+        instance[fields.FIELD_CLAIM][fields.FIELD_HISTORIC_DISPENSE_CLAIM_MSG_REF].append(
+            historic_claim_msg_ref
+        )
+        instance[fields.FIELD_CLAIM][fields.FIELD_DISPENSE_CLAIM_MSG_REF] = dn_claim_ref
+
+    def update_instance_status(self, instance, new_status):
+        """
+        Method for updating the status of the current instance
+        """
+        if fields.FIELD_PRESCRIPTION_STATUS in instance:
+            instance[fields.FIELD_PREVIOUS_STATUS] = instance[fields.FIELD_PRESCRIPTION_STATUS]
+        else:
+            instance[fields.FIELD_PREVIOUS_STATUS] = False
+        instance[fields.FIELD_PRESCRIPTION_STATUS] = new_status
+
+    def update_line_item_status(self, issue_dict, status_to_check, new_status):
+        """
+        Roll through the line items checking for those which have a current status of
+        status_to_check, then update to new_status and change the previous status.
+        Note that this is safe for cancelled and expired line items as it will only update
+        if the 'status_to_check' matches.
+
+        :type issue_dict: dict
+        :type status_to_check: str
+        :type new_status: str
+        """
+        issue = PrescriptionIssue(issue_dict)
+        for line_item in issue.line_items:
+            if line_item.status == status_to_check:
+                line_item.update_status(new_status)
+
+    def update_line_item_status_from_dispense(self, instance, dn_line_items):
+        """
+        Roll through the line items on the dispense notification, and update the
+        prescription record line items to the revised previousStatus and status
+        """
+        for dn_line_item in dn_line_items:
+            for line_item in instance[fields.FIELD_LINE_ITEMS]:
+                if line_item[fields.FIELD_ID] == dn_line_item[fields.FIELD_ID]:
+                    line_item[fields.FIELD_PREVIOUS_STATUS] = line_item[fields.FIELD_STATUS]
+                    line_item[fields.FIELD_STATUS] = dn_line_item[fields.FIELD_STATUS]
+
+    def set_exemption_dates(self):
+        """
+        Set the exemption dates
+        """
+        patient_details = self.prescription_record[fields.FIELD_PATIENT]
+        birth_time = patient_details[fields.FIELD_BIRTH_TIME]
+
+        lower_age_limit = datetime.datetime.strptime(birth_time, TimeFormats.STANDARD_DATE_FORMAT)
+        lower_age_limit += relativedelta(years=fields._YOUNG_AGE_EXEMPTION, days=-1)
+        lower_age_limit = lower_age_limit.isoformat()[0:10].replace("-", "")
+        higher_age_limit = datetime.datetime.strptime(birth_time, TimeFormats.STANDARD_DATE_FORMAT)
+        higher_age_limit += relativedelta(years=fields._OLD_AGE_EXEMPTION)
+        higher_age_limit = higher_age_limit.isoformat()[0:10].replace("-", "")
+        patient_details[fields.FIELD_LOWER_AGE_LIMIT] = lower_age_limit
+        patient_details[fields.FIELD_HIGHER_AGE_LIMIT] = higher_age_limit
+
+    def return_message_ref(self, doc_type):
+        """
+        Return message references for different document types
+        """
+        if doc_type == "Prescription":
+            return self.prescription_record[fields.FIELD_PRESCRIPTION][
+                fields.FIELD_PRESCRIPTION_MSG_REF
+            ]
+        if doc_type == "ReleaseRequest":
+            return self._current_instance_data[fields.FIELD_RELEASE_REQUEST_MGS_REF]
+        else:
+            raise EpsSystemError("developmentFailure")
+
+    def return_release_dispenser_details(self, target_instance):
+        """
+        Return release dispenser details of the target instance
+        """
+        instance = self._get_prescription_instance_data(target_instance)
+        return instance.get(fields.FIELD_RELEASE_DISPENSER_DETAILS)
+
+    def
fetch_release_response_parameters(self): + """ + A dictionary of response parameters is required for generating the response + message to the release request - these are parameters which will be used to + translate and update the original prescription message + """ + release_data = {} + patient_details = self.prescription_record[fields.FIELD_PATIENT] + presc_details = self.prescription_record[fields.FIELD_PRESCRIPTION] + + release_data[fields.FIELD_LOWER_AGE_LIMIT] = quoted( + patient_details[fields.FIELD_LOWER_AGE_LIMIT] + ) + release_data[fields.FIELD_HIGHER_AGE_LIMIT] = quoted( + patient_details[fields.FIELD_HIGHER_AGE_LIMIT] + ) + + if self._current_instance_data.get(fields.FIELD_PREVIOUS_ISSUE_DATE): + # SPII-10490 - handle this date not being present + previous_issue_data = quoted( + self._current_instance_data[fields.FIELD_PREVIOUS_ISSUE_DATE] + ) + release_data[fields.FIELD_PREVIOUS_ISSUE_DATE] = previous_issue_data + + # !!! This is for backwards compatibility - does not make sense, should really be + # the current status. However Spine 1 returns previous status !!! + # Note that we also have to remap the prescription status here if this is a GUID + # release for a '0000' (internal only) prescription status. + previous_presc_status = self._current_instance_data[fields.FIELD_PREVIOUS_STATUS] + if previous_presc_status == PrescriptionStatus.AWAITING_RELEASE_READY: + previous_presc_status = PrescriptionStatus.TO_BE_DISPENSED + + release_data[fields.FIELD_PRESCRIPTION_STATUS] = quoted(previous_presc_status) + + display_name = PrescriptionStatus.PRESCRIPTION_DISPLAY_LOOKUP[previous_presc_status] + release_data[fields.FIELD_PRESCRIPTION_STATUS_DISPLAY_NAME] = quoted(display_name) + release_data[fields.FIELD_PRESCRIPTION_CURRENT_INSTANCE] = quoted( + str(self.current_issue_number) + ) + release_data[fields.FIELD_PRESCRIPTION_MAX_REPEATS] = quoted( + presc_details[fields.FIELD_MAX_REPEATS] + ) + + for line_item in self.current_issue.line_items: + line_item_ref = "lineItem" + str(line_item.order) + item_status = ( + line_item.previousStatus + if line_item.status == LineItemStatus.WITH_DISPENSER + else line_item.status + ) + + release_data[line_item_ref + "Status"] = quoted(item_status) + item_display_name = LineItemStatus.ITEM_DISPLAY_LOOKUP[item_status] + release_data[line_item_ref + "StatusDisplayName"] = quoted(item_display_name) + + self.add_line_item_repeat_data(release_data, line_item_ref, line_item) + + return release_data + + def add_line_item_repeat_data(self, release_data, line_item_ref, line_item): + """ + Add line item information (only done for repeat prescriptions) + Note that due to inconsistency of repeat numbers, it is possible that the + current instance for the whole prescription is greater than the line item max_repeats + in which case the line item max_repeats should be used. 
+ + :type release_data: dict + :type line_item_ref: str + :type line_item: PrescriptionLineItem + """ + line_instance = self.current_issue_number + + if line_item.max_repeats < self.current_issue_number: + line_instance = line_item.max_repeats + + release_data[line_item_ref + "MaxRepeats"] = quoted(str(line_item.max_repeats)) + release_data[line_item_ref + "CurrentInstance"] = quoted(str(line_instance)) + + def validate_line_prescription_status(self, prescription_status, line_item_status): + """ + Compare lineItem status with the prescription status and confirm that the combination is valid + """ + if line_item_status in LineItemStatus.VALID_STATES[prescription_status]: + return True + + self.log_object.write_log( + "EPS0259", + None, + { + "internalID": self.internal_id, + "lineItemStatus": line_item_status, + "prescriptionStatus": prescription_status, + }, + ) + + return False + + def force_current_instance_increment(self): + """ + Force the current instance number to be incremented. + This is a serious undertaking, but is required where an issue is missing. + """ + old_current_issue_number = self.current_issue_number + + if self.current_issue_number == self.max_repeats: + self.log_object.write_log( + "EPS0625b", + None, + { + "internalID": self.internal_id, + "currentIssueNumber": old_current_issue_number, + "reason": "already at max_repeats", + }, + ) + return + + # Count upwards from the current issue number to max_repeats, looking either for + # an issue that exists + new_current_issue_number = False + for i in range(self.current_issue_number, self.max_repeats + 1): + try: + new_current_issue_number = i + break + except KeyError: + continue + + if not new_current_issue_number: + self.log_object.write_log( + "EPS0625b", + None, + { + "internalID": self.internal_id, + "currentIssueNumber": old_current_issue_number, + "reason": "no issues available", + }, + ) + return + + self.log_object.write_log( + "EPS0625", + None, + { + "internalID": self.internal_id, + "oldCurrentIssueNumber": old_current_issue_number, + "newCurrentIssueNumber": new_current_issue_number, + }, + ) + + self.current_issue_number = new_current_issue_number + + def reset_current_instance(self): + """ + Rotate through the instances to find the first instance which is either in a + future or active state. Then reset the currentInstance to be this instance. + This is used in Admin updates. 
If no future/active instances - then it should + be the last instance + + :returns: a list containing the old and new "current instance" number as strings + :rtype: [str, str] + """ + + # see if we can find an issue from the current one upwards in an active or future state + new_current_issue_number = None + acceptable_states = PrescriptionStatus.ACTIVE_STATES + PrescriptionStatus.FUTURE_STATES + for issue in self.get_issues_from_current_upwards(): + if issue.status in acceptable_states: + new_current_issue_number = issue.number + break + + # if we didn't find one, then just set to the last issue + if new_current_issue_number is None: + new_current_issue_number = self.issue_numbers[-1] + + # update the current instance number + old_current_issue_number = self.current_issue_number + self.current_issue_number = new_current_issue_number + + return (old_current_issue_number, new_current_issue_number) + + def check_current_instance_to_cancel_by_pr_id(self): + """ + Check for the prescription being in a cancellable status + """ + return self._current_instance_status in PrescriptionStatus.CANCELLABLE_STATES + + def check_current_instance_w_dispenser_by_pr_id(self): + """ + Check for the prescription being in a with dispenser status + """ + return self._current_instance_status in PrescriptionStatus.WITH_DISPENSER_STATES + + def check_include_performer_detail_by_pr_id(self): + """ + Check whether the prescription status is such that the performer node should be + included in the cancellation response message. + """ + return self._current_instance_status in PrescriptionStatus.INCLUDE_PERFORMER_STATES + + def check_current_instance_to_cancel_by_li_id(self, line_item_ref): + """ + Check for the line item being in a cancellable status + """ + return self._check_current_instance_by_line_item( + line_item_ref, LineItemStatus.ITEM_CANCELLABLE_STATES + ) + + def check_current_instance_w_dispenser_by_li_id(self, line_item_ref): + """ + Check for the line item being in a with dispenser status + """ + return self._check_current_instance_by_line_item( + line_item_ref, LineItemStatus.ITEM_WITH_DISPENSER_STATES + ) + + def check_include_performer_detail_by_li_id(self, line_item_ref): + """ + Check whether the line item status is such that the performer node should be + included in the cancellation response message. + """ + return self._check_current_instance_by_line_item( + line_item_ref, LineItemStatus.INCLUDE_PERFORMER_STATES + ) + + def _check_current_instance_by_line_item(self, line_item_ref, line_item_states): + """ + Check for the line item being in one of the specified states + """ + for line_item in self._current_instance_data[ + fields.FIELD_LINE_ITEMS + ]: # noqa: SIM110 - More readable as is + if (line_item_ref == line_item[fields.FIELD_ID]) and ( + line_item[fields.FIELD_STATUS] in line_item_states + ): + return True + return False + + def check_nhs_number_match(self, context): + """ + Check if the nhsNumber on the prescription record matches the nhsNumber in the + cancellation. Return True or False. 
+ """ + return self._nhs_number == context.nhsNumber + + def return_error_for_invalid_cancel_by_pr_id(self): + """ + Raise the correct cancellation code matching the status of the current + instance + """ + presc_status = self._current_instance_status + + self.log_object.write_log( + "EPS0262", + None, + { + "internalID": self.internal_id, + "currentInstance": str(self.current_issue_number), + "cancellationType": fields.FIELD_PRESCRIPTION, + "currentStatus": presc_status, + }, + ) + + # return values below are to be mapped to equivalent ErrorBase1719 in Spine. + if presc_status in PrescriptionStatus.COMPLETED_STATES: + if presc_status == PrescriptionStatus.EXPIRED: + return EpsErrorBase.NOT_CANCELLED_EXPIRED + elif presc_status == PrescriptionStatus.CANCELLED: + return EpsErrorBase.NOT_CANCELLED_CANCELLED + elif presc_status == PrescriptionStatus.NOT_DISPENSED: + return EpsErrorBase.NOT_CANCELLED_NOT_DISPENSED + else: + return EpsErrorBase.NOT_CANCELLED_DISPENSED + + if presc_status == PrescriptionStatus.WITH_DISPENSER: + return EpsErrorBase.NOT_CANCELLED_WITH_DISPENSER + if presc_status == PrescriptionStatus.WITH_DISPENSER_ACTIVE: + return EpsErrorBase.NOT_CANCELLED_WITH_DISPENSER_ACTIVE + + def return_error_for_invalid_cancel_by_li_id(self, context): + """ + Confirm if line item exists. If it does raise the error associated with the + line item status + """ + line_item_status = None + for line_item in self._current_instance_data[fields.FIELD_LINE_ITEMS]: + if context.cancelLineItemRef != line_item[fields.FIELD_ID]: + continue + line_item_status = line_item[fields.FIELD_STATUS] + + self.log_object.write_log( + "EPS0262", + None, + { + "internalID": self.internal_id, + "currentInstance": str(self.current_issue_number), + "cancellationType": "lineItem", + "currentStatus": line_item_status, + }, + ) + + # return values below are to be mapped to equivalent ErrorBase1719 in Spine. + if not line_item_status: + return EpsErrorBase.PRESCRIPTION_NOT_FOUND + + if line_item_status == LineItemStatus.FULLY_DISPENSED: + return EpsErrorBase.NOT_CANCELLED_DISPENSED + if line_item_status == LineItemStatus.NOT_DISPENSED: + return EpsErrorBase.NOT_CANCELLED_NOT_DISPENSED + if line_item_status == LineItemStatus.CANCELLED: + return EpsErrorBase.NOT_CANCELLED_CANCELLED + if line_item_status == LineItemStatus.EXPIRED: + return EpsErrorBase.NOT_CANCELLED_EXPIRED + else: + return EpsErrorBase.NOT_CANCELLED_WITH_DISPENSER_ACTIVE + + def apply_cancellation(self, cancellation_obj, range_to_cancel_start_issue=None): + """ + Loop through the valid cancellations on the context and change the prescription + status as appropriate + """ + instances = self.prescription_record[fields.FIELD_INSTANCES] + + # only apply from the start issue upwards + if not range_to_cancel_start_issue: + range_to_cancel_start_issue = self.current_issue_number + range_to_update = self.get_issues_in_range(int(range_to_cancel_start_issue), None) + + issue_numbers = [issue.number for issue in range_to_update] + for issue_number in issue_numbers: + instance = instances[str(issue_number)] + if cancellation_obj[fields.FIELD_CANCELLATION_TARGET] == "LineItem": + self.process_line_cancellation(instance, cancellation_obj) + else: + self.process_instance_cancellation(instance, cancellation_obj) + # the current issue may have become cancelled, so find the new current one? 
+ self.reset_current_instance() + return [cancellation_obj[fields.FIELD_CANCELLATION_ID], issue_numbers] + + def remove_pending_cancellations(self): + """ + Once the pending cancellations have been completed, remove any pending + cancellations from the record + """ + self.prescription_record[fields.FIELD_PENDING_CANCELLATIONS] = False + + def process_instance_cancellation(self, instance, cancellation_obj): + """ + Change the prescription status, and set the completion date + """ + instance[fields.FIELD_PREVIOUS_STATUS] = instance[fields.FIELD_PRESCRIPTION_STATUS] + instance[fields.FIELD_PRESCRIPTION_STATUS] = PrescriptionStatus.CANCELLED + instance[fields.FIELD_CANCELLATIONS].append(cancellation_obj) + completion_date = datetime.datetime.strptime( + cancellation_obj[fields.FIELD_CANCELLATION_TIME], TimeFormats.STANDARD_DATE_TIME_FORMAT + ) + instance[fields.FIELD_COMPLETION_DATE] = completion_date.strftime( + TimeFormats.STANDARD_DATE_FORMAT + ) + + def process_line_cancellation(self, instance, cancellation_obj): + """ + Loop through the line items to find one relevant to the cancellation, + If all line items now inactive then cancel the instance + """ + active_line_item = False + for line_item in instance[fields.FIELD_LINE_ITEMS]: + if cancellation_obj[fields.FIELD_CANCEL_LINE_ITEM_REF] != line_item[fields.FIELD_ID]: + if line_item[fields.FIELD_STATUS] in LineItemStatus.ACTIVE_STATES: + active_line_item = True + continue + line_item[fields.FIELD_PREVIOUS_STATUS] = line_item[fields.FIELD_STATUS] + line_item[fields.FIELD_STATUS] = LineItemStatus.CANCELLED + instance[fields.FIELD_CANCELLATIONS].append(cancellation_obj) + + if not active_line_item: + self.process_instance_cancellation(instance, cancellation_obj) + + def return_pending_cancellations(self): + """ + Return the list of pendingCancellations (should be False if none exist) + """ + return self._pending_cancellations + + def return_cancellation_object(self, context, hl7, reasons): + """ + Create an object (dict) which describes a cancellation + """ + cancellation_obj = self.set_all_snippet_details( + fields.INSTANCE_CANCELLATION_DETAILS, context + ) + cancellation_obj[fields.FIELD_REASONS] = reasons + cancellation_obj[fields.FIELD_HL7] = hl7 + return cancellation_obj + + def check_pending_cancellation_unique_w_disp(self, cancellation_obj): + """ + Check whether the pending cancellation is unique. If not unique, return false and + a boolean to indicate whether the requesting organisation matches. + If there are no pendingCancellations already on the prescription then return + immediately, indicating that the cancellation is unique. + + For both the pending cancellation (if exists) and the cancellationObject, if the + target is a LineItem, set the target variable to be a string of + LineItem_<> for logging purposes. + + This method is used for pending cancellations when the prescription is with + dispenser, therefore whilst it is similar to the method used when + the prescription has not yet been received by Spine + (check_pending_cancellation_unique), in this case a whole prescription cancellation + is treated independently to individual line item cancellations, as the action of + the dispenser could mean that either one, both or neither cancellations are + possible. 
+        """
+        if not self._pending_cancellations:
+            return [True, None]
+
+        cancellation_target = str(cancellation_obj[fields.FIELD_CANCELLATION_TARGET])
+        cancellation_org = str(cancellation_obj[fields.FIELD_AGENT_ORGANIZATION])
+        if cancellation_target == "LineItem":
+            cancellation_target = "LineItem_" + str(
+                cancellation_obj[fields.FIELD_CANCEL_LINE_ITEM_REF]
+            )
+
+        org_match = True
+        for pending_cancellation in self._pending_cancellations:
+            pending_target = str(pending_cancellation[fields.FIELD_CANCELLATION_TARGET])
+            if pending_target == "LineItem":
+                pending_target = "LineItem_" + str(
+                    pending_cancellation[fields.FIELD_CANCEL_LINE_ITEM_REF]
+                )
+            pending_org = str(pending_cancellation[fields.FIELD_AGENT_ORGANIZATION])
+            if pending_target == cancellation_target:
+                if pending_org != cancellation_org:
+                    org_match = False
+                self.log_object.write_log(
+                    "EPS0264a",
+                    None,
+                    dict(
+                        {
+                            "internalID": self.internal_id,
+                            "pendingOrg": pending_org,
+                            "cancellationTarget": cancellation_target,
+                            "cancellationOrg": cancellation_org,
+                        }
+                    ),
+                )
+                return [False, org_match]
+
+        return [True, None]
+
+    def check_pending_cancellation_unique(self, cancellation_obj):
+        """
+        Check whether the pending cancellation is unique. If not unique, return false and
+        a boolean to indicate whether the requesting organisation matches.
+        If there are no pendingCancellations already on the prescription then return
+        immediately, indicating that the cancellation is unique.
+
+        For both the pending cancellation (if exists) and the cancellationObject, if the
+        target is a LineItem, set the target variable to be a string of
+        LineItem_<> for logging purposes.
+
+        This method is used for pending cancellations when the prescription has not yet
+        been received by Spine, therefore whilst it is similar to the method used when
+        the prescription is With Dispenser (check_pending_cancellation_unique_w_disp),
+        in this case a whole prescription cancellation takes precedence over
+        individual line item cancellations.
+        """
+
+        if not self._pending_cancellations:
+            return [True, None]
+
+        cancellation_target = str(cancellation_obj[fields.FIELD_CANCELLATION_TARGET])
+        cancellation_org = str(cancellation_obj[fields.FIELD_AGENT_ORGANIZATION])
+        if cancellation_target == "LineItem":
+            cancellation_target = "LineItem_" + str(
+                cancellation_obj[fields.FIELD_CANCEL_LINE_ITEM_REF]
+            )
+
+        whole_prescription_cancellation = False
+        org_match = True
+        for pending_cancellation in self._pending_cancellations:
+            pending_target = str(pending_cancellation[fields.FIELD_CANCELLATION_TARGET])
+            pending_org = str(pending_cancellation[fields.FIELD_AGENT_ORGANIZATION])
+            if pending_target == fields.FIELD_PRESCRIPTION:
+                whole_prescription_cancellation = True
+            if pending_target == "LineItem":
+                pending_target = "LineItem_" + str(
+                    pending_cancellation[fields.FIELD_CANCEL_LINE_ITEM_REF]
+                )
+            if (pending_target == cancellation_target) or whole_prescription_cancellation:
+                if pending_org != cancellation_org:
+                    org_match = False
+                self.log_object.write_log(
+                    "EPS0264a",
+                    None,
+                    dict(
+                        {
+                            "internalID": self.internal_id,
+                            "pendingOrg": pending_org,
+                            "cancellationTarget": cancellation_target,
+                            "cancellationOrg": cancellation_org,
+                        }
+                    ),
+                )
+                return [False, org_match]
+
+        return [True, None]
+
+    def set_unsuccessful_cancellation(self, cancellation_obj, failure_reason):
+        """
+        Set on the record details of the cancellation that has been unsuccessful,
+        including a reason.
Note that this is used for unsuccessful pending + cancellations and where a cancellation is a duplicate, and does not apply to + cancellations that are simply not valid. + """ + failed_cs = self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_UNSUCCESSFUL_CANCELLATIONS + ] + cancellation_obj["failureReason"] = failure_reason + + if not failed_cs: + failed_cs = [] + failed_cs.append(cancellation_obj) + + self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_UNSUCCESSFUL_CANCELLATIONS + ] = failed_cs + + def set_pending_cancellation(self, cancellation_obj, prescription_present): + """ + Set the default Prescription Pending Cancellation status code and then + Append a cancellation object to the pendingCancellations + """ + + if not prescription_present: + instance = self._get_prescription_instance_data("1") + self.update_instance_status(instance, PrescriptionStatus.PENDING_CANCELLATION) + + pending_cs = self._pending_cancellations + + if not pending_cs: + pending_cs = [cancellation_obj] + cancellation_date_obj = datetime.datetime.strptime( + cancellation_obj[fields.FIELD_CANCELLATION_TIME], + TimeFormats.STANDARD_DATE_TIME_FORMAT, + ) + cancellation_date = cancellation_date_obj.strftime(TimeFormats.STANDARD_DATE_FORMAT) + if not self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PRESCRIPTION_TIME + ]: + self.prescription_record[fields.FIELD_PRESCRIPTION][ + fields.FIELD_PRESCRIPTION_TIME + ] = cancellation_date + self.log_object.write_log( + "EPS0340", + None, + dict( + { + "internalID": self.internal_id, + "cancellationDate": cancellation_date, + "prescriptionID": self.return_prescription_id(), + } + ), + ) + else: + pending_cs.append(cancellation_obj) + + self._pending_cancellations = pending_cs + + def set_initial_prescription_status(self, handle_time): + """ + Create the initial prescription status. For repeat dispense prescriptions, this + needs to consider both the prescription date and the dispense window low dates, + therefore this common method will be overridden. + + A prescription should not be available for download before its start date. + + :type handle_time: datetime.datetime + """ + first_issue = self.get_issue(1) + + future_threshold = handle_time + datetime.timedelta(days=1) + if self.time > future_threshold: + first_issue.status = PrescriptionStatus.FUTURE_DATED_PRESCRIPTION + else: + first_issue.status = PrescriptionStatus.TO_BE_DISPENSED + + @property + def max_repeats(self): + """ + The maximum number of issues of this prescription. 
+ + :rtype: int + """ + return 1 + + def return_instance_details_for_amend(self, instance_number): + """ + For dispense messages the following details are required: + Instance status + NHS Number + Dispensing Organisation + None (indicating not a repeat prescription so no max_repeats) + """ + instance = self._get_prescription_instance_data(instance_number) + instance_status = instance[fields.FIELD_PRESCRIPTION_STATUS] + dispensing_org = instance[fields.FIELD_DISPENSE][fields.FIELD_DISPENSING_ORGANIZATION] + + return [ + str(self.current_issue_number), + instance_status, + self._nhs_number, + dispensing_org, + None, + ] + + def return_dispense_history_events(self, target_instance): + """ + Return the dispense history events for a specific instance + """ + instance = self._get_prescription_instance_data(target_instance) + return instance[fields.FIELD_DISPENSE_HISTORY] + + def get_withdrawn_status(self, passed_status): + """ + Dispense Return can only go back as far as 'with dispenser-active' for repeat dispense + prescriptions, so convert the status for with dispenser, otherwise, return what was provided. + """ + return passed_status + + def return_prescription_type(self): + """ + Return the prescription type from the prescription record + """ + return self.prescription_record[fields.FIELD_PRESCRIPTION].get( + fields.FIELD_PRESCRIPTION_TYPE, "" + ) + + def return_prescription_treatment_type(self): + """ + Return the prescription treatment type from the prescription record + """ + return self.prescription_record[fields.FIELD_PRESCRIPTION].get( + fields.FIELD_PRESCRIPTION_TREATMENT_TYPE, "" + ) + + def return_parent_prescription_document_key(self): + """ + Return the parent prescription document key from the prescription record + """ + return self.prescription_record.get(fields.FIELD_PRESCRIPTION, {}).get( + fields.FIELD_PRESCRIPTION_MSG_REF + ) + + def return_signed_time(self): + """ + Return the signed date/time from the prescription record + """ + return self.prescription_record[fields.FIELD_PRESCRIPTION].get(fields.FIELD_SIGNED_TIME, "") + + def return_change_log(self): + """ + Return the change log from the prescription record + """ + return self.prescription_record.get(fields.FIELD_CHANGE_LOG, []) + + def return_nomination_data(self): + """ + Return the nomination data from the prescription record + """ + return self.prescription_record.get(fields.FIELD_NOMINATION) + + def return_prescription_field(self): + """ + Return the complete prescription field + """ + return self.prescription_record[fields.FIELD_PRESCRIPTION] diff --git a/src/eps_spine_shared/common/prescription/repeat_dispense.py b/src/eps_spine_shared/common/prescription/repeat_dispense.py new file mode 100644 index 0000000..e73e48b --- /dev/null +++ b/src/eps_spine_shared/common/prescription/repeat_dispense.py @@ -0,0 +1,116 @@ +import datetime +from copy import copy + +from eps_spine_shared.common.prescription import fields +from eps_spine_shared.common.prescription.record import PrescriptionRecord +from eps_spine_shared.common.prescription.statuses import LineItemStatus, PrescriptionStatus + + +class RepeatDispenseRecord(PrescriptionRecord): + """ + Class defined to handle repeat dispense prescriptions + """ + + def __init__(self, log_object, internal_id): + """ + Allow the record_type attribute to be set + """ + super(RepeatDispenseRecord, self).__init__(log_object, internal_id) + self.record_type = "RepeatDispense" + + def create_instances(self, context, line_items): + """ + Create all prescription instances + + Expire any 
lineItems that have a lower max_repeats number than the instance number + """ + + instance_snippets = {} + + range_max = int(context.maxRepeats) + 1 + future_instance_status = PrescriptionStatus.REPEAT_DISPENSE_FUTURE_INSTANCE + + for instance_number in range(1, range_max): + instance_snippet = self.set_all_snippet_details(fields.INSTANCE_DETAILS, context) + instance_snippet[fields.FIELD_LINE_ITEMS] = [] + for line_item in line_items: + line_item_copy = copy(line_item) + if int(line_item_copy[fields.FIELD_MAX_REPEATS]) < instance_number: + line_item_copy[fields.FIELD_STATUS] = LineItemStatus.EXPIRED + instance_snippet[fields.FIELD_LINE_ITEMS].append(line_item_copy) + + instance_snippet[fields.FIELD_INSTANCE_NUMBER] = str(instance_number) + if instance_number != 1: + instance_snippet[fields.FIELD_PRESCRIPTION_STATUS] = future_instance_status + instance_snippet[fields.FIELD_DISPENSE] = self.set_all_snippet_details( + fields.DISPENSE_DETAILS, context + ) + instance_snippet[fields.FIELD_CLAIM] = self.set_all_snippet_details( + fields.CLAIM_DETAILS, context + ) + instance_snippet[fields.FIELD_CANCELLATIONS] = [] + instance_snippet[fields.FIELD_DISPENSE_HISTORY] = {} + instance_snippets[str(instance_number)] = instance_snippet + instance_snippet[fields.FIELD_NEXT_ACTIVITY] = {} + instance_snippet[fields.FIELD_NEXT_ACTIVITY][fields.FIELD_ACTIVITY] = None + instance_snippet[fields.FIELD_NEXT_ACTIVITY][fields.FIELD_DATE] = None + + return instance_snippets + + def set_initial_prescription_status(self, handle_time): + """ + Create the initial prescription status. For repeat dispense prescriptions, this + needs to consider both the prescription date and the dispense window low dates. + + If either the prescriptionTime or dispenseWindowLow date is in the future then + the prescription needs to have a Future Dated Prescription status set and can + not yet be downloaded. + If the prescription is not Future Dated, the default To Be Dispensed should be used. + + Note that this only applies to the first instance, the remaining instances will + already have a Future Repeat Dispense Instance status set. + + :type handle_time: datetime.datetime + """ + first_issue = self.get_issue(1) + + future_threshold = handle_time + datetime.timedelta(days=1) + is_future_dated = self.time > future_threshold + + dispense_low_date = first_issue.dispense_window_low_date + if dispense_low_date is not None and dispense_low_date > future_threshold: + is_future_dated = True + + if is_future_dated: + first_issue.status = PrescriptionStatus.FUTURE_DATED_PRESCRIPTION + else: + first_issue.status = PrescriptionStatus.TO_BE_DISPENSED + + def get_withdrawn_status(self, passed_status): + """ + Dispense Return can only go back as far as 'with dispenser-active' for repeat dispense + prescriptions, so convert the status for with dispenser, otherwise, return what was provided. + """ + if passed_status == PrescriptionStatus.WITH_DISPENSER: + return PrescriptionStatus.WITH_DISPENSER_ACTIVE + return passed_status + + @property + def max_repeats(self): + """ + The maximum number of issues of this prescription. + + :rtype: int + """ + max_repeats = self.prescription_record[fields.FIELD_PRESCRIPTION][fields.FIELD_MAX_REPEATS] + return int(max_repeats) + + @property + def future_issues_available(self): + """ + Return boolean to indicate if future issues are available or not. 
For Repeat Dispense prescriptions this is True while
+        the current issue number is below max_repeats; always False for
+        Acute and Repeat Prescribe.
+
+        :rtype: bool
+        """
+        return self.current_issue_number < self.max_repeats
diff --git a/src/eps_spine_shared/common/prescription/repeat_prescribe.py b/src/eps_spine_shared/common/prescription/repeat_prescribe.py
new file mode 100644
index 0000000..4ffd8b8
--- /dev/null
+++ b/src/eps_spine_shared/common/prescription/repeat_prescribe.py
@@ -0,0 +1,14 @@
+from eps_spine_shared.common.prescription.record import PrescriptionRecord
+
+
+class RepeatPrescribeRecord(PrescriptionRecord):
+    """
+    Class defined to handle repeat prescribe prescriptions
+    """
+
+    def __init__(self, log_object, internal_id):
+        """
+        Allow the record_type attribute to be set
+        """
+        super(RepeatPrescribeRecord, self).__init__(log_object, internal_id)
+        self.record_type = "RepeatPrescribe"
diff --git a/src/eps_spine_shared/common/prescription/single_prescribe.py b/src/eps_spine_shared/common/prescription/single_prescribe.py
new file mode 100644
index 0000000..2b94b96
--- /dev/null
+++ b/src/eps_spine_shared/common/prescription/single_prescribe.py
@@ -0,0 +1,90 @@
+from eps_spine_shared.common.prescription import fields
+from eps_spine_shared.common.prescription.record import PrescriptionRecord
+from eps_spine_shared.spinecore.baseutilities import quoted
+
+
+class SinglePrescribeRecord(PrescriptionRecord):
+    """
+    Class defined to handle single instance (acute) prescriptions
+    """
+
+    def __init__(self, log_object, internal_id):
+        """
+        Allow the record_type attribute to be set
+        """
+        super(SinglePrescribeRecord, self).__init__(log_object, internal_id)
+        self.record_type = "Acute"
+
+    def add_line_item_repeat_data(self, release_data, line_item_ref, line_item):
+        """
+        Add line item information. (This is not required for Acute prescriptions, but
+        the signature will be invalidated if it is provided in the prescription and not
+        returned in the release.)
+        If the lineItem.max_repeats is false (not provided inbound), then do not include
+        it in the response; otherwise, both MaxRepeats and CurrentInstance will be 1 for Acute.
+ + :type release_data: dict + :type line_item_ref: str + :type line_item: PrescriptionLineItem + """ + # Handle the missing inbound max_repeats + if not line_item.max_repeats: + return + + # Acute, so both values may only be '1' + release_data[line_item_ref + "MaxRepeats"] = quoted(str(1)) + release_data[line_item_ref + "CurrentInstance"] = quoted(str(1)) + + def return_details_for_dispense(self): + """ + For dispense messages the following details are required: + - Issue number + - Issue status + - NHS Number + - Dispensing Organisation + - None (indicating not a repeat prescription so no max_repeats) + """ + current_issue = self.current_issue + details = [ + str(current_issue.number), + current_issue.status, + self._nhs_number, + current_issue.dispensing_organization, + None, + ] + return details + + def return_details_for_claim(self, instance_number_str): + """ + For dispense messages the following details are required: + - Issue status + - NHS Number + - Dispensing Organisation + - None (indicating not a repeat prescription so no max_repeats) + """ + issue_number = int(instance_number_str) + issue = self.get_issue(issue_number) + details = [ + issue.claim, + issue.status, + self._nhs_number, + issue.dispensing_organization, + None, + ] + return details + + def return_last_dispense_date(self, instance_number): + """ + Return the last_dispense_date for the requested instance + """ + instance = self._get_prescription_instance_data(instance_number) + last_dispense_date = instance[fields.FIELD_DISPENSE][fields.FIELD_LAST_DISPENSE_DATE] + return last_dispense_date + + def return_last_disp_msg_ref(self, instance_number_str): + """ + returns the last dispense Msg Ref for the issue + """ + issue_number = int(instance_number_str) + issue = self.get_issue(issue_number) + return issue.last_dispense_notification_msg_ref diff --git a/src/eps_spine_shared/common/prescription/statuses.py b/src/eps_spine_shared/common/prescription/statuses.py new file mode 100644 index 0000000..b448fa0 --- /dev/null +++ b/src/eps_spine_shared/common/prescription/statuses.py @@ -0,0 +1,187 @@ +class PrescriptionStatus(object): + """ + Prescription states and related information + """ + + AWAITING_RELEASE_READY = "0000" + TO_BE_DISPENSED = "0001" + WITH_DISPENSER = "0002" + WITH_DISPENSER_ACTIVE = "0003" + EXPIRED = "0004" + CANCELLED = "0005" + DISPENSED = "0006" + NOT_DISPENSED = "0007" + CLAIMED = "0008" + NO_CLAIMED = "0009" + REPEAT_DISPENSE_FUTURE_INSTANCE = "9000" + FUTURE_DATED_PRESCRIPTION = "9001" + PENDING_CANCELLATION = "9005" + + PRESCRIPTION_DISPLAY_LOOKUP = {} + PRESCRIPTION_DISPLAY_LOOKUP[AWAITING_RELEASE_READY] = "Awaiting Release Ready" + PRESCRIPTION_DISPLAY_LOOKUP[TO_BE_DISPENSED] = "To Be Dispensed" + PRESCRIPTION_DISPLAY_LOOKUP[WITH_DISPENSER] = "With Dispenser" + PRESCRIPTION_DISPLAY_LOOKUP[WITH_DISPENSER_ACTIVE] = "With Dispenser - Active" + PRESCRIPTION_DISPLAY_LOOKUP[EXPIRED] = "Expired" + PRESCRIPTION_DISPLAY_LOOKUP[CANCELLED] = "Cancelled" + PRESCRIPTION_DISPLAY_LOOKUP[DISPENSED] = "Dispensed" + PRESCRIPTION_DISPLAY_LOOKUP[NOT_DISPENSED] = "Not Dispensed" + PRESCRIPTION_DISPLAY_LOOKUP[CLAIMED] = "Claimed" + PRESCRIPTION_DISPLAY_LOOKUP[NO_CLAIMED] = "No-Claimed" + PRESCRIPTION_DISPLAY_LOOKUP[REPEAT_DISPENSE_FUTURE_INSTANCE] = "Repeat Dispense future instance" + PRESCRIPTION_DISPLAY_LOOKUP[FUTURE_DATED_PRESCRIPTION] = "Prescription future instance" + PRESCRIPTION_DISPLAY_LOOKUP[PENDING_CANCELLATION] = "Cancelled future instance" + + CANCELLABLE_STATES = [ + AWAITING_RELEASE_READY, + 
TO_BE_DISPENSED, + REPEAT_DISPENSE_FUTURE_INSTANCE, + FUTURE_DATED_PRESCRIPTION, + ] + + WITH_DISPENSER_STATES = [WITH_DISPENSER, WITH_DISPENSER_ACTIVE] + + ACTIVE_STATES = [AWAITING_RELEASE_READY, TO_BE_DISPENSED, WITH_DISPENSER, WITH_DISPENSER_ACTIVE] + + FUTURE_STATES = [FUTURE_DATED_PRESCRIPTION, REPEAT_DISPENSE_FUTURE_INSTANCE] + + COMPLETED_STATES = [EXPIRED, CANCELLED, DISPENSED, NOT_DISPENSED, CLAIMED, NO_CLAIMED] + + NOT_COMPLETED_STATES = [ + AWAITING_RELEASE_READY, + TO_BE_DISPENSED, + WITH_DISPENSER, + WITH_DISPENSER_ACTIVE, + FUTURE_DATED_PRESCRIPTION, + REPEAT_DISPENSE_FUTURE_INSTANCE, + ] + + INCLUDE_PERFORMER_STATES = [ + WITH_DISPENSER, + WITH_DISPENSER_ACTIVE, + DISPENSED, + NOT_DISPENSED, + CLAIMED, + NO_CLAIMED, + ] + + EXPIRY_IMMUTABLE_STATES = [EXPIRED, CANCELLED, DISPENSED, NOT_DISPENSED, CLAIMED, NO_CLAIMED] + + UNACTIONED_STATES = [ + AWAITING_RELEASE_READY, + TO_BE_DISPENSED, + WITH_DISPENSER, + REPEAT_DISPENSE_FUTURE_INSTANCE, + PENDING_CANCELLATION, + ] + + ALL_VALID_STATES = [ + AWAITING_RELEASE_READY, + TO_BE_DISPENSED, + WITH_DISPENSER, + WITH_DISPENSER_ACTIVE, + EXPIRED, + CANCELLED, + DISPENSED, + NOT_DISPENSED, + CLAIMED, + NO_CLAIMED, + REPEAT_DISPENSE_FUTURE_INSTANCE, + FUTURE_DATED_PRESCRIPTION, + PENDING_CANCELLATION, + ] + + EXPIRY_LOOKUP = {} + EXPIRY_LOOKUP[AWAITING_RELEASE_READY] = EXPIRED + EXPIRY_LOOKUP[TO_BE_DISPENSED] = EXPIRED + EXPIRY_LOOKUP[WITH_DISPENSER] = EXPIRED + EXPIRY_LOOKUP[WITH_DISPENSER_ACTIVE] = DISPENSED + EXPIRY_LOOKUP[REPEAT_DISPENSE_FUTURE_INSTANCE] = EXPIRED + EXPIRY_LOOKUP[FUTURE_DATED_PRESCRIPTION] = EXPIRED + EXPIRY_LOOKUP[PENDING_CANCELLATION] = EXPIRED + + +class LineItemStatus(object): + """ + Prescription line item states and related information + """ + + FULLY_DISPENSED = "0001" + NOT_DISPENSED = "0002" + PARTIAL_DISPENSED = "0003" + NOT_DISPENSED_OWING = "0004" + CANCELLED = "0005" + EXPIRED = "0006" + TO_BE_DISPENSED = "0007" + WITH_DISPENSER = "0008" + + ITEM_CANCELLABLE_STATES = [TO_BE_DISPENSED] + ITEM_WITH_DISPENSER_STATES = [WITH_DISPENSER, PARTIAL_DISPENSED] + + ACTIVE_STATES = [TO_BE_DISPENSED, WITH_DISPENSER, PARTIAL_DISPENSED, NOT_DISPENSED_OWING] + + INCLUDE_PERFORMER_STATES = [ + WITH_DISPENSER, + PARTIAL_DISPENSED, + FULLY_DISPENSED, + NOT_DISPENSED, + NOT_DISPENSED_OWING, + ] + + ITEM_DISPLAY_LOOKUP = {} + ITEM_DISPLAY_LOOKUP[FULLY_DISPENSED] = "Item fully dispensed" + ITEM_DISPLAY_LOOKUP[NOT_DISPENSED] = "Item not dispensed" + ITEM_DISPLAY_LOOKUP[PARTIAL_DISPENSED] = "Item dispensed - partial" + ITEM_DISPLAY_LOOKUP[NOT_DISPENSED_OWING] = "Item not dispensed owing" + ITEM_DISPLAY_LOOKUP[EXPIRED] = "Expired" + ITEM_DISPLAY_LOOKUP[CANCELLED] = "Item Cancelled" + ITEM_DISPLAY_LOOKUP[TO_BE_DISPENSED] = "To Be Dispensed" + ITEM_DISPLAY_LOOKUP[WITH_DISPENSER] = "Item with dispenser" + + VALID_STATES = {} + VALID_STATES[PrescriptionStatus.AWAITING_RELEASE_READY] = [CANCELLED, EXPIRED, TO_BE_DISPENSED] + VALID_STATES[PrescriptionStatus.TO_BE_DISPENSED] = [CANCELLED, EXPIRED, TO_BE_DISPENSED] + VALID_STATES[PrescriptionStatus.WITH_DISPENSER] = [CANCELLED, EXPIRED, WITH_DISPENSER] + VALID_STATES[PrescriptionStatus.WITH_DISPENSER_ACTIVE] = [ + FULLY_DISPENSED, + NOT_DISPENSED, + PARTIAL_DISPENSED, + NOT_DISPENSED_OWING, + CANCELLED, + EXPIRED, + WITH_DISPENSER, + ] + VALID_STATES[PrescriptionStatus.EXPIRED] = [CANCELLED, EXPIRED] + VALID_STATES[PrescriptionStatus.CANCELLED] = [CANCELLED, EXPIRED] + VALID_STATES[PrescriptionStatus.DISPENSED] = [ + FULLY_DISPENSED, + NOT_DISPENSED, + CANCELLED, + EXPIRED, + ] + 
VALID_STATES[PrescriptionStatus.NOT_DISPENSED] = [NOT_DISPENSED, CANCELLED, EXPIRED] + VALID_STATES[PrescriptionStatus.CLAIMED] = [FULLY_DISPENSED, NOT_DISPENSED, CANCELLED, EXPIRED] + VALID_STATES[PrescriptionStatus.NO_CLAIMED] = [ + FULLY_DISPENSED, + NOT_DISPENSED, + CANCELLED, + EXPIRED, + ] + VALID_STATES[PrescriptionStatus.REPEAT_DISPENSE_FUTURE_INSTANCE] = [ + CANCELLED, + EXPIRED, + TO_BE_DISPENSED, + ] + VALID_STATES[PrescriptionStatus.FUTURE_DATED_PRESCRIPTION] = [ + CANCELLED, + EXPIRED, + TO_BE_DISPENSED, + ] + + EXPIRY_IMMUTABLE_STATES = [FULLY_DISPENSED, NOT_DISPENSED, EXPIRED, CANCELLED] + + EXPIRY_LOOKUP = {} + EXPIRY_LOOKUP[TO_BE_DISPENSED] = "0006" + EXPIRY_LOOKUP[PARTIAL_DISPENSED] = "0001" + EXPIRY_LOOKUP[NOT_DISPENSED_OWING] = "0002" + EXPIRY_LOOKUP[WITH_DISPENSER] = "0006" diff --git a/src/eps_spine_shared/common/prescription/types.py b/src/eps_spine_shared/common/prescription/types.py new file mode 100644 index 0000000..c9270e3 --- /dev/null +++ b/src/eps_spine_shared/common/prescription/types.py @@ -0,0 +1,151 @@ +# flake8: noqa: E501 + + +class PrescriptionTreatmentType(object): + """ + Constants for prescription treatment type. + """ + + ACUTE_PRESCRIBING = "0001" # "one-off" prescriptions + REPEAT_PRESCRIBING = "0002" # may be re-issued by prescribing site + REPEAT_DISPENSING = "0003" # may be automatically reissued by Spine + + prescriptionTreatmentTypes = { + ACUTE_PRESCRIBING: "Acute Prescription", + REPEAT_PRESCRIBING: "Repeat Prescribing", + REPEAT_DISPENSING: "Repeat Dispensing", + } + + +class PrescriptionTypes(object): + """ + Constants for prescription type. + """ + + # Translate prescription type codes to their text value + prescriptionTypeCodes = { + "0001": "GENERAL PRACTITIONER PRESCRIBING", + "0002": "INTENTIONALLY LEFT BLANK", + "0003": "NURSE PRACTITIONER PRESCRIBING", + "0004": "HOSPITAL PRESCRIBING", + "0006": "DENTAL PRESCRIBING", + "0007": "SUPPLEMENTARY PRESCRIBER PRESCRIBING", + "0009": "GENERAL PRACTITIONER PRESCRIBING: PRIVATE", + "0012": "EXTENDED FORUMULARY PRESCRIBER", + "0101": "PRIMARY CARE PRESCRIBER - MEDICAL PRESCRIBER", + "0102": "GENERAL PRACTITIONER PRESCRIBING - TRAINEE DOCTOR/GP REGISTRAR", + "0103": "GENERAL PRACTITIONER PRESCRIBING - DEPUTISING SERVICES", + "0104": "PRIMARY CARE PRESCRIBER - NURSE INDEPENDENT/SUPPLEMENTARY PRESCRIBER", + "0105": "PRIMARY CARE PRESCRIBER - COMMUNITY PRACTITIONER NURSE PRESCRIBER", + "0106": "GENERAL PRACTITIONER PRESCRIBING - PCT EMPLOYED NURSE INDEPENDENT/SUPPLEMENTARY PRESCRIBER", + "0107": "GENERAL PRACTITIONER PRESCRIBING - PCT EMPLOYED COMMUNITY PRACTITIONER NURSE PRESCRIBER", + "0108": "PRIMARY CARE PRESCRIBER - PHARMACIST INDEPENDENT/SUPPLEMENTARY PRESCRIBER", + "0109": "GENERAL PRACTITIONER PRESCRIBING - PCT EMPLOYED PHARMACIST PRESCRIBER", + "0113": "PRIMARY CARE PRESCRIBER - OPTOMETRIST INDEPENDENT/SUPPLEMENTARY PRESCRIBER", + "0114": "PRIMARY CARE PRESCRIBER - PODIATRIST/CHIROPODIST INDEPENDENT/SUPPLEMENTARY PRESCRIBER", + "0116": "PRIMARY CARE PRESCRIBER - RADIOGRAPHER INDEPENDENT/SUPPLEMENTARY PRESCRIBER", + "0117": "PRIMARY CARE PRESCRIBER - PHYSIOTHERAPIST INDEPENDENT/SUPPLEMENTARY PRESCRIBER", + "0119": "GENERAL PRACTITIONER PRESCRIBING - PCT EMPLOYED PODIATRIST/CHIROPODIST", + "0120": "GENERAL PRACTITIONER PRESCRIBING - PCT EMPLOYED OPTOMETRIST", + "0121": "GENERAL PRACTITIONER PRESCRIBING - PCT EMPLOYED RADIOGRAPHER", + "0122": "GENERAL PRACTITIONER PRESCRIBING - PCT EMPLOYED PHYSIOTHERAPIST", + "0123": "PRIMARY CARE PRESCRIBER - HOSPITAL PRESCRIBER", + "0124": "PRIMARY CARE 
PRESCRIBER - DIETICIAN SUPPLEMENTARY PRESCRIBER", + "0125": "PRIMARY CARE PRESCRIBER - PARAMEDIC INDEPENDENT/SUPPLEMENTARY PRESCRIBER", + "0304": "NURSE PRACTITIONER - PRACTICE EMPLOYED NURSE INDEPENDENT/SUPPLEMENTARY PRESCRIBER", + "0305": "NURSE PRACTITIONER - PRACTICE EMPLOYED COMMUNITY PRACTITIONER NURSE PRESCRIBER", + "0306": "NURSE PRACTITIONER - PRACTICE EMPLOYED NURSE INDEPENDENT/SUPPLEMENTARY PRESCRIBER", + "0307": "NURSE PRACTITIONER - PRACTICE EMPLOYED COMMUNITY PRACTITIONER NURSE PRESCRIBER", + "0406": "HOSPITAL PRESCRIBING - HOSPITAL PRESCRIBER", + "0607": "DENTAL PRESCRIBING - DENTIST", + "0708": "SUPPLEMENTARY PRESCRIBING - PRACTICE EMPLOYED PHARMACIST", + "0709": "SUPPLEMENTARY PRESCRIBING - PCT EMPLOYED PHARMACIST", + "0713": "SUPPLEMENTARY PRESCRIBING - PRACTICE EMPLOYED OPTOMETRIST", + "0714": "SUPPLEMENTARY PRESCRIBING - PRACTICE EMPLOYED PODIATRIST/CHIROPODIST", + "0716": "SUPPLEMENTARY PRESCRIBING - PRACTICE EMPLOYED RADIOGRAPHER", + "0717": "SUPPLEMENTARY PRESCRIBING - PRACTICE EMPLOYED PHYSIOTHERAPIST", + "0718": "SUPPLEMENTARY PRESCRIBING - PCT EMPLOYED OPTOMETRIST", + "0719": "SUPPLEMENTARY PRESCRIBING - PCT EMPLOYED PODIATRIST/CHIROPODIST", + "0721": "SUPPLEMENTARY PRESCRIBING - PCT EMPLOYED RADIOGRAPHER", + "0722": "SUPPLEMENTARY PRESCRIBING - PCT EMPLOYED PHYSIOTHERAPIST", + "0901": "PRIVATE PRESCRIBING - GP", + "0904": "PRIVATE PRESCRIBING - NURSE PRESCRIBING", + "0908": "PRIVATE PRESCRIBING - PHARMACIST PRESCRIBING", + "0913": "PRIVATE PRESCRIBING - OPTOMETRIST", + "0914": "PRIVATE PRESCRIBING - PODIATRIST/CHIROPODIST", + "0915": "PRIVATE PRESCRIBING - PHYSIOTHERAPIST", + "0916": "PRIVATE PRESCRIBING - RADIOGRAPHER", + "1004": "Outpatient Community Prescriber - Nurse Independent Supplementary prescriber - FP10SS (HP) Hospital outpatient prescriptions dispensed in a community pharmacy", + "1005": "Outpatient Community Prescriber - Community Practitioner Nurse prescriber - FP10SS (HP) Hospital outpatient prescriptions dispensed in a community pharmacy", + "1008": "Outpatient Community Prescriber - Pharmacist Independent Supplementary prescriber - FP10SS (HP) Hospital outpatient prescriptions dispensed in a community pharmacy", + "1013": "Outpatient Community Prescriber - Optometrist Independent Supplementary prescriber - FP10SS (HP) Hospital outpatient prescriptions dispensed in a community pharmacy", + "1014": "Outpatient Community Prescriber - Podiatrist Chiropodist Independent Supplementary prescriber - FP10SS (HP) Hospital outpatient prescriptions dispensed in a community pharmacy", + "1016": "Outpatient Community Prescriber - Radiographer Independent Supplementary prescriber - FP10SS (HP) Hospital outpatient prescriptions dispensed in a community pharmacy", + "1017": "Outpatient Community Prescriber - Physiotherapist Independent Supplementary prescriber - FP10SS (HP) Hospital outpatient prescriptions dispensed in a community pharmacy", + "1024": "Outpatient Community Prescriber - Dietician Supplementary prescriber - FP10SS (HP) Hospital outpatient prescriptions dispensed in a community pharmacy", + "1025": "Outpatient Community Prescriber - Paramedic Independent Supplementary prescriber - FP10SS (HP) Hospital outpatient prescriptions dispensed in a community pharmacy", + "1104": "Outpatient Hospital Pharmacy Prescriber - Nurse Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed in their own hospital pharmacy", + "1105": "Outpatient Hospital Pharmacy Prescriber - Community Practitioner Nurse prescriber - NON- FP10 
Hospital outpatient prescriptions dispensed in their own hospital pharmacy", + "1108": "Outpatient Hospital Pharmacy Prescriber - Pharmacist Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed in their own hospital pharmacy", + "1113": "Outpatient Hospital Pharmacy Prescriber - Optometrist Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed in their own hospital pharmacy", + "1114": "Outpatient Hospital Pharmacy Prescriber - Podiatrist Chiropodist Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed in their own hospital pharmacy", + "1116": "Outpatient Hospital Pharmacy Prescriber - Radiographer Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed in their own hospital pharmacy", + "1117": "Outpatient Hospital Pharmacy Prescriber - Physiotherapist Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed in their own hospital pharmacy", + "1124": "Outpatient Hospital Pharmacy Prescriber - Dietician Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed in their own hospital pharmacy", + "1125": "Outpatient Hospital Pharmacy Prescriber - Paramedic Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed in their own hospital pharmacy", + "1204": "Outpatient Homecare Prescriber - Nurse Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed by Homecare", + "1205": "Outpatient Homecare Prescriber - Community Practitioner Nurse prescriber - NON- FP10 Hospital outpatient prescriptions dispensed by Homecare", + "1208": "Outpatient Homecare Prescriber - Pharmacist Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed by Homecare", + "1213": "Outpatient Homecare Prescriber - Optometrist Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed by Homecare", + "1214": "Outpatient Homecare Prescriber - Podiatrist Chiropodist Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed by Homecare", + "1216": "Outpatient Homecare Prescriber - Radiographer Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed by Homecare", + "1217": "Outpatient Homecare Prescriber - Physiotherapist Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed by Homecare", + "1224": "Outpatient Homecare Prescriber - Dietician Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed by Homecare", + "1225": "Outpatient Homecare Prescriber - Paramedic Independent Supplementary prescriber - NON- FP10 Hospital outpatient prescriptions dispensed by Homecare", + "1001": "Outpatient Community Prescriber - Medical Prescriber", + "1101": "Outpatient Pharmacy Prescriber - Medical Prescriber", + "1201": "Outpatient Homecare Prescriber - Medical Prescriber", + # WELSH CODES + "0201": "Primary Care Prescriber - Medical Prescriber (Wales)", + "0204": "Primary Care Prescriber - Nurse Independent/Supplementary prescriber (Wales)", + "0205": "Primary Care Prescriber - Community Practitioner Nurse prescriber (Wales)", + "0208": "Primary Care Prescriber - Pharmacist Independent/Supplementary prescriber (Wales)", + "0213": "Primary Care Prescriber - Optometrist Independent/Supplementary prescriber (Wales)", + "0214": "Primary Care Prescriber - 
Podiatrist/Chiropodist Independent/Supplementary prescriber (Wales)", + "0216": "Primary Care Prescriber - Radiographer Independent/Supplementary prescriber (Wales)", + "0217": "Primary Care Prescriber - Physiotherapist Independent/Supplementary prescriber (Wales)", + "0224": "Primary Care Prescriber - Dietician Supplementary prescriber (Wales)", + "0225": "Primary Care Prescriber - Paramedic Independent/Supplementary prescriber (Wales)", + "2001": "Outpatient Community Prescriber - Medical Prescriber (Wales)", + "2004": "Outpatient Community Prescriber - Nurse Independent/Supplementary prescriber (Wales)", + "2005": "Outpatient Community Prescriber - Community Practitioner Nurse prescriber (Wales)", + "2008": "Outpatient Community Prescriber - Pharmacist Independent/Supplementary prescriber (Wales)", + "2013": "Outpatient Community Prescriber - Optometrist Independent/Supplementary prescriber (Wales)", + "2014": "Outpatient Community Prescriber - Podiatrist/Chiropodist Independent/Supplementary (Wales)", + "2016": "Outpatient Community Prescriber - Radiographer Independent/Supplementary prescriber (Wales)", + "2017": "Outpatient Community Prescriber - Physiotherapist Independent/Supplementary prescriber (Wales)", + "2024": "Outpatient Community Prescriber - Dietician Supplementary prescriber (Wales)", + "2025": "Outpatient Community Prescriber - Paramedic Independent/Supplementary prescriber (Wales)", + "0707": "Dental Prescribing - Dentist (Wales)", + # ISLE OF MANN CODES + "0501": "Primary Care Prescriber - Medical Prescriber (IOM)", + "0504": "Primary Care Prescriber - Nurse Independent/Supplementary prescriber (IOM)", + "0505": "Primary Care Prescriber - Community Practitioner Nurse prescriber (IOM)", + "0508": "Primary Care Prescriber - Pharmacist Independent/Supplementary prescriber (IOM)", + "0513": "Primary Care Prescriber - Optometrist Independent/Supplementary prescriber (IOM)", + "0514": "Primary Care Prescriber - Podiatrist/Chiropodist Independent/Supplementary prescriber (IOM)", + "0516": "Primary Care Prescriber - Radiographer Independent/Supplementary prescriber (IOM)", + "0517": "Primary Care Prescriber - Physiotherapist Independent/Supplementary prescriber (IOM)", + "0524": "Primary Care Prescriber - Dietician Supplementary prescriber (IOM)", + "0525": "Primary Care Prescriber - Paramedic Independent/Supplementary prescriber (IOM)", + "5001": "Outpatient Community Prescriber - Medical Prescriber (IOM)", + "5004": "Outpatient Community Prescriber - Nurse Independent/Supplementary prescriber (IOM)", + "5005": "Outpatient Community Prescriber - Community Practitioner Nurse prescriber (IOM)", + "5008": "Outpatient Community Prescriber - Pharmacist Independent/Supplementary prescriber (IOM)", + "5013": "Outpatient Community Prescriber - Optometrist Independent/Supplementary prescriber (IOM)", + "5014": "Outpatient Community Prescriber - Podiatrist/Chiropodist Independent/Supplementary (IOM)", + "5016": "Outpatient Community Prescriber - Radiographer Independent/Supplementary prescriber (IOM)", + "5017": "Outpatient Community Prescriber - Physiotherapist Independent/Supplementary prescriber (IOM)", + "5024": "Outpatient Community Prescriber - Dietician Supplementary prescriber (IOM)", + "5025": "Outpatient Community Prescriber - Paramedic Independent/Supplementary prescriber (IOM)", + } diff --git a/src/eps_spine_shared/errors.py b/src/eps_spine_shared/errors.py new file mode 100644 index 0000000..f9825ef --- /dev/null +++ b/src/eps_spine_shared/errors.py @@ -0,0 +1,70 @@ +from 
enum import Enum + +from botocore.exceptions import NoCredentialsError + + +class EpsNoCredentialsErrorWithRetry(NoCredentialsError): + """ + Extends NoCredentialsError to provide information about retry attempts. + To be caught in Spine application code and re-raised as NoCredentialsErrorWithRetry. + """ + + fmt = "Unable to locate credentials after {attempts} attempts" + + +class EpsSystemError(Exception): + """ + Exception to be raised if an unexpected system error occurs. + To be caught in Spine application code and re-raised as SpineSystemError. + """ + + MESSAGE_FAILURE = "messageFailure" + DEVELOPMENT_FAILURE = "developmentFailure" + SYSTEM_FAILURE = "systemFailure" + IMMEDIATE_REQUEUE = "immediateRequeue" + RETRY_EXPIRED = "retryExpired" + PUBLISHER_HANDLES_REQUEUE = "publisherHandlesRequeue" + UNRELIABLE_MESSAGE = "unreliableMessage" + + def __init__(self, errorTopic, *args): # noqa: B042 + """ + errorTopic is the topic to be used when writing the WDO to the error exchange + """ + super(EpsSystemError, self).__init__(*args) + self.errorTopic = errorTopic + + +class EpsBusinessError(Exception): + """ + Exception to be raised by a message worker if an expected error condition is hit, + one that is expected to cause a HL7 error response with a set errorCode. + To be caught in Spine application code and re-raised as SpineBusinessError. + """ + + def __init__(self, errorCode, suppInfo=None, messageId=None): # noqa: B042 + super(EpsBusinessError, self).__init__() + self.errorCode = errorCode + self.supplementaryInformation = suppInfo + self.messageId = messageId + + def __str__(self): + if self.supplementaryInformation: + return "{} {}".format(self.errorCode, self.supplementaryInformation) + return str(self.errorCode) + + +class EpsErrorBase(Enum): + """ + To be used in Spine application code to remap to ErrorBases. + """ + + INVALID_LINE_STATE_TRANSITION = 1 + ITEM_NOT_FOUND = 2 + MAX_REPEAT_MISMATCH = 3 + NOT_CANCELLED_EXPIRED = 4 + NOT_CANCELLED_CANCELLED = 5 + NOT_CANCELLED_NOT_DISPENSED = 6 + NOT_CANCELLED_DISPENSED = 7 + NOT_CANCELLED_WITH_DISPENSER = 8 + NOT_CANCELLED_WITH_DISPENSER_ACTIVE = 9 + PRESCRIPTION_NOT_FOUND = 10 diff --git a/src/eps_spine_shared/hello.py b/src/eps_spine_shared/hello.py deleted file mode 100644 index 0d2b345..0000000 --- a/src/eps_spine_shared/hello.py +++ /dev/null @@ -1,2 +0,0 @@ -def hello(name: str = "World") -> str: - return f"Hello, {name}!" diff --git a/src/eps_spine_shared/logger.py b/src/eps_spine_shared/logger.py new file mode 100644 index 0000000..8f8db96 --- /dev/null +++ b/src/eps_spine_shared/logger.py @@ -0,0 +1,14 @@ +class EpsLogger: + """ + Wrapper for logging to handle either EPS or Spine logger. 
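+
+    If the wrapped logger exposes a Spine-style writeLog method it is used directly;
+    otherwise entries fall back to print. A minimal usage sketch (the log code
+    "EX0001" is illustrative only):
+
+        log = EpsLogger()                      # no Spine logger supplied
+        log.write_log("EX0001", None, {"step": "example"})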
+ """ + + def __init__(self, logger=None): + self.logger = logger + self.is_spine = hasattr(logger, "writeLog") + + def write_log(self, code: str, exc_info, data: dict = None): + if self.is_spine: + self.logger.writeLog(code, exc_info, data) + else: + print({"code": code, "exc_info": exc_info, "data": data}) diff --git a/src/eps_spine_shared/nhsfundamentals/timeutilities.py b/src/eps_spine_shared/nhsfundamentals/timeutilities.py new file mode 100644 index 0000000..829dafb --- /dev/null +++ b/src/eps_spine_shared/nhsfundamentals/timeutilities.py @@ -0,0 +1,93 @@ +from datetime import datetime + + +class TimeFormats: + STANDARD_DATE_TIME_UTC_ZONE_FORMAT = "%Y%m%d%H%M%S+0000" + STANDARD_DATE_TIME_FORMAT = "%Y%m%d%H%M%S" + STANDARD_DATE_TIME_LENGTH = 14 + DATE_TIME_WITHOUT_SECONDS_FORMAT = "%Y%m%d%H%M" + STANDARD_DATE_FORMAT = "%Y%m%d" + STANDARD_DATE_FORMAT_YEAR_MONTH = "%Y%m" + STANDARD_DATE_FORMAT_YEAR_ONLY = "%Y" + HL7_DATETIME_FORMAT = "%Y%m%dT%H%M%S.%f" + SPINE_DATETIME_MS_FORMAT = "%Y%m%d%H%M%S.%f" + SPINE_DATE_FORMAT = "%Y%m%d" + EBXML_FORMAT = "%Y-%m-%dT%H:%M:%S" + SMSP_FORMAT = "%Y-%m-%dT%H:%M:%SZ" + EXTENDED_SMSP_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" + EXTENDED_SMSP_PLUS_Z_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" + + +_TIMEFORMAT_LENGTH_MAP = { + TimeFormats.STANDARD_DATE_TIME_LENGTH: TimeFormats.STANDARD_DATE_TIME_FORMAT, + 12: TimeFormats.DATE_TIME_WITHOUT_SECONDS_FORMAT, + 8: TimeFormats.STANDARD_DATE_FORMAT, + 6: TimeFormats.STANDARD_DATE_FORMAT_YEAR_MONTH, + 4: TimeFormats.STANDARD_DATE_FORMAT_YEAR_ONLY, + 22: TimeFormats.HL7_DATETIME_FORMAT, + 21: TimeFormats.SPINE_DATETIME_MS_FORMAT, + 20: TimeFormats.SMSP_FORMAT, + 23: TimeFormats.EXTENDED_SMSP_FORMAT, + 26: TimeFormats.EXTENDED_SMSP_FORMAT, + 24: TimeFormats.EXTENDED_SMSP_PLUS_Z_FORMAT, + 27: TimeFormats.EXTENDED_SMSP_PLUS_Z_FORMAT, +} + + +def _guessCommonDateTimeFormat(timeString, raiseErrorIfUnknown=False): + """ + Guess the date time format from the commonly used list + + Args: + timeString (str): + The datetime string to try determine the format of. + raiseErrorIfUnknown (bool): + Determines the action when the format cannot be determined. + False (default) will return None, True will raise an error. + """ + _format = None + if len(timeString) == 19: + try: + datetime.strptime(timeString, TimeFormats.EBXML_FORMAT) + _format = TimeFormats.EBXML_FORMAT + except ValueError: + _format = TimeFormats.STANDARD_DATE_TIME_UTC_ZONE_FORMAT + else: + _format = _TIMEFORMAT_LENGTH_MAP.get(len(timeString), None) + + if not _format and raiseErrorIfUnknown: + raise ValueError("Could not determine datetime format of '{}'".format(timeString)) + + return _format + + +def convertSpineDate(dateString, dateFormat=None): + """ + Try to convert a Spine date using the passed format - if it fails - try the most + appropriate + """ + if dateFormat: + try: + dateObject = datetime.strptime(dateString, dateFormat) + return dateObject + except ValueError: + pass + + dateFormat = _guessCommonDateTimeFormat(dateString, raiseErrorIfUnknown=True) + return datetime.strptime(dateString, dateFormat) + + +def timeNowAsString(_dateFormat=TimeFormats.STANDARD_DATE_TIME_FORMAT): + """ + Return the current date and time as a string in standard format + """ + return now().strftime(_dateFormat) + + +def now(): + """ + Utility to gets the current date and time. + The intention is for this to be easier to replace when testing. 
+ :returns: a datetime representing the current date and time + """ + return datetime.now() diff --git a/src/eps_spine_shared/spine/dynamodb_datastore.py b/src/eps_spine_shared/spine/dynamodb_datastore.py new file mode 100644 index 0000000..790e10e --- /dev/null +++ b/src/eps_spine_shared/spine/dynamodb_datastore.py @@ -0,0 +1,196 @@ +from eps_spine_shared.common.dynamodb_datastore import EpsDynamoDbDataStore + + +class DynamoDbDataStore(EpsDynamoDbDataStore): + """ + Wrapper class for EpsDynamoDbDataStore that provides backward compatibility + with camelCase method signatures. + """ + + def __init__( + self, + logObject, + awsEndpointUrl: str, + tableName: str, + roleArn: str = None, + roleSessionName: str = None, + stsEndpointUrl: str = None, + ): + super().__init__( + log_object=logObject, + aws_endpoint_url=awsEndpointUrl, + table_name=tableName, + role_arn=roleArn, + role_session_name=roleSessionName, + sts_endpoint_url=stsEndpointUrl, + ) + # Maintain backward compatibility with camelCase attributes + self.logObject = logObject + self.tableName = tableName + self.awsEndpointUrl = awsEndpointUrl + self.roleArn = roleArn + self.roleSessionName = roleSessionName + self.stsEndpointUrl = stsEndpointUrl + + # Override parent methods with camelCase signatures for backward compatibility + + def base64DecodeDocumentContent(self, internalID, document): + """base64 decode document content in order to store as binary type in DynamoDB.""" + return self.base64_decode_document_content(internalID, document) + + def getExpireAt(self, delta, fromDatetime=None): + """Returns an int timestamp to be used as an expireAt attribute.""" + return self.get_expire_at(delta, fromDatetime) + + def buildDocument(self, internalID, document, index): + """Build EPS Document object to be inserted into DynamoDB.""" + return self.build_document(internalID, document, index) + + def insertEpsDocumentObject(self, internalID, documentKey, document, index=None): + """Insert EPS Document object into the configured table.""" + return self.insert_eps_document_object(internalID, documentKey, document, index) + + def convertIndexKeysToLowerCase(self, index): + """Convert all keys in an index dict to lower case.""" + return self.convert_index_keys_to_lower_case(index) + + def buildRecord(self, prescriptionId, record, recordType, indexes): + """Build EPS Record object to be inserted into DynamoDB.""" + return self.build_record(prescriptionId, record, recordType, indexes) + + def insertEpsRecordObject( + self, internalID, prescriptionId, record, index=None, recordType=None, isUpdate=False + ): + """Insert EPS Record object into the configured table.""" + return self.insert_eps_record_object( + internalID, prescriptionId, record, index, recordType, isUpdate + ) + + def insertEpsWorkList(self, internalID, messageId, workList, index=None): + """Insert EPS WorkList object into the configured table.""" + return self.insert_eps_work_list(internalID, messageId, workList, index) + + def isRecordPresent(self, internalID, prescriptionId) -> bool: + """Returns a boolean indicating the presence of a record.""" + return self.is_record_present(internalID, prescriptionId) + + def returnTermsByNhsNumberDate(self, internalID, rangeStart, rangeEnd, termRegex=None): + """Return the epsRecord terms which match the supplied range and regex for the nhsNumberDate index.""" + return self.return_terms_by_nhs_number_date(internalID, rangeStart, rangeEnd, termRegex) + + def returnTermsByIndexDate(self, internalID, index, rangeStart, rangeEnd=None, 
termRegex=None): + """Return the epsRecord terms which match the supplied range and regex for the supplied index.""" + return self.return_terms_by_index_date(internalID, index, rangeStart, rangeEnd, termRegex) + + def returnTermsByNhsNumber(self, internalID, nhsNumber): + """Return the epsRecord terms which match the supplied NHS number.""" + return self.return_terms_by_nhs_number(internalID, nhsNumber) + + def returnPidsForNominationChange(self, internalID, nhsNumber): + """Return the epsRecord list which match the supplied NHS number.""" + return self.return_pids_for_nomination_change(internalID, nhsNumber) + + def getNominatedPharmacyRecords(self, nominatedPharmacy, batchSize, internalID): + """Run an index query to get the to-be-dispensed prescriptions for this nominated pharmacy.""" + return self.get_nominated_pharmacy_records(nominatedPharmacy, batchSize, internalID) + + def getNomPharmRecordsUnfiltered(self, internalID, nominatedPharmacy, limit=None): + """ + Query the nomPharmStatus index to get the unfiltered, + to-be-dispensed prescriptions for the given pharmacy. + """ + return self.get_nom_pharm_records_unfiltered(internalID, nominatedPharmacy, limit) + + def returnRecordForProcess(self, internalID, prescriptionId, expectExists=True): + """Look for and return an epsRecord object.""" + return self.return_record_for_process(internalID, prescriptionId, expectExists) + + def base64EncodeDocumentContent(self, internalID, documentBody): + """base64 encode document content and convert to string, to align with return type of original datastore.""" + return self.base64_encode_document_content(internalID, documentBody) + + def returnDocumentForProcess(self, internalID, documentKey, expectExists=True): + """Look for and return an epsDocument object.""" + return self.return_document_for_process(internalID, documentKey, expectExists) + + def returnRecordForUpdate(self, internalID, prescriptionId): + """Look for and return an epsRecord object, but with dataObject on self so that an update can be applied.""" + return self.return_record_for_update(internalID, prescriptionId) + + def getPrescriptionRecordData(self, internalID, prescriptionId, expectExists=True): + """Gets the prescription record from the data store and return just the data.""" + return self.get_prescription_record_data(internalID, prescriptionId, expectExists) + + def getWorkList(self, internalID, messageId): + """Look for and return a workList object.""" + return self.get_work_list(internalID, messageId) + + def compressWorkListXml(self, internalID, workList): + """Compresses the XML contained in the work list, if present.""" + return self.compress_work_list_xml(internalID, workList) + + def decompressWorkListXml(self, internalID, body): + """Decompresses the XML contained in the work list, if present.""" + return self.decompress_work_list_xml(internalID, body) + + def fetchNextSequenceNumber(self, internalID, maxSequenceNumber, readOnly=False): + """Fetch the next sequence number for a batch claim message.""" + return self.fetch_next_sequence_number(internalID, maxSequenceNumber, readOnly) + + def fetchNextSequenceNumberNwssp(self, internalID, maxSequenceNumber, readOnly=False): + """Fetch the next sequence number for a welsh batch claim message.""" + return self.fetch_next_sequence_number_nwssp(internalID, maxSequenceNumber, readOnly) + + def storeBatchClaim(self, internalID, batchClaimOriginal): + """batchClaims need to be stored by their GUIDs with a claims sort key.""" + return self.store_batch_claim(internalID, 
batchClaimOriginal) + + def fetchBatchClaim(self, internalID, batchClaimId): + """Retrieves the batch claim and returns the batch message for the calling application to handle.""" + return self.fetch_batch_claim(internalID, batchClaimId) + + def deleteClaimNotification(self, internalID, claimId): + """Delete the claim notification document from the table, and return True if the deletion was successful.""" + return self.delete_claim_notification(internalID, claimId) + + def deleteDocument(self, internalID, documentKey, deleteNotification=False): + """Delete a document from the table. Return a boolean indicator of success.""" + return self.delete_document(internalID, documentKey, deleteNotification) + + def deleteRecord(self, internalID, recordKey): + """Delete a record from the table.""" + return self.delete_record(internalID, recordKey) + + def returnPIDsDueForNextActivity(self, internalID, nextActivityStart, nextActivityEnd): + """Returns all the epsRecord keys for prescriptions whose nextActivity is the same as that provided.""" + return self.return_pids_due_for_next_activity( + internalID, nextActivityStart, nextActivityEnd + ) + + def returnPrescriptionIdsForNomPharm(self, internalID, nominatedPharmacyIndexTerm): + """Returns the epsRecord keys relating to the given nominated pharmacy term.""" + return self.return_prescription_ids_for_nom_pharm(internalID, nominatedPharmacyIndexTerm) + + def returnClaimNotificationIDsBetweenStoreDates(self, internalID, startDate, endDate): + """ + Returns all the epsDocument keys for claim notification documents + whose store dates are in the given window. + """ + return self.return_claim_notification_ids_between_store_dates( + internalID, startDate, endDate + ) + + def getAllPIDsByNominatedPharmacy(self, internalID, nominatedPharmacy): + """Run an index query to get all prescriptions for this nominated pharmacy.""" + return self.get_all_pids_by_nominated_pharmacy(internalID, nominatedPharmacy) + + def checkItemExists(self, internalID, pk, sk, expectExists) -> bool: + """Returns False as covered by condition expression.""" + return self.check_item_exists(internalID, pk, sk, expectExists) + + def findBatchClaimfromSeqNumber(self, sequenceNumber, nwssp=False): + """ + Run a query against the sequence number index looking for + the batch GUID (key) on the basis of sequence number. + """ + return self.find_batch_claim_from_seq_number(sequenceNumber, nwssp) diff --git a/src/eps_spine_shared/spinecore/baseutilities.py b/src/eps_spine_shared/spinecore/baseutilities.py new file mode 100644 index 0000000..e841a8a --- /dev/null +++ b/src/eps_spine_shared/spinecore/baseutilities.py @@ -0,0 +1,43 @@ +import unicodedata + +import six + + +def handleEncodingOddities(text, attemptEscapedReplacement=False): + """ + Strip accents and non-ascii characters from unicode strings + """ + if not isinstance(text, (six.text_type, six.binary_type)): + text = six.text_type(text) + + # By default use decomposed characters and simply ignore the combining characters + form = "NFKD" + mode = "ignore" + + # Attempt to convert bytes to text + if isinstance(text, six.binary_type): + try: + # We expect UTF-8 normally + text = text.decode("utf8") + except UnicodeDecodeError: + # If that didn't work, use latin1 which basically always works + text = text.decode("latin1") + + # if replacement is not requested, use composed characters + # and replace them with question marks when encoding to ascii. 
+ # This is only done if using the fallback latin1 encoding as a last resort + if not attemptEscapedReplacement: + form = "NFKC" + mode = "replace" + + return unicodedata.normalize(form, text).encode("ascii", mode).decode("ascii") + + +def quoted(value): + """ + Utility function that returns the value as a string surrounded by double quotes + """ + try: + return '"' + str(value) + '"' + except (UnicodeEncodeError, UnicodeDecodeError): + return '"' + handleEncodingOddities(value) + '"' diff --git a/src/eps_spine_shared/spinecore/changelog.py b/src/eps_spine_shared/spinecore/changelog.py new file mode 100644 index 0000000..d0a3855 --- /dev/null +++ b/src/eps_spine_shared/spinecore/changelog.py @@ -0,0 +1,461 @@ +import datetime +import re +import uuid + +from eps_spine_shared.errors import EpsSystemError +from eps_spine_shared.nhsfundamentals.timeutilities import TimeFormats + + +class ChangeLogProcessor(object): + """ + Keep the change log within the record + + The methods here assume that a None is never passed as change log, if necessary pass {} instead. + """ + + TIMESTAMP = "Timestamp" + SCN = "SCN" + SYS_SDS = "agentSystemSDS1" + PRS_SDS = "agentPersonSDSPerson" + UPDATES = "updatesApplied" + XSLT = "Source XSLT" + RSP_PARAMS = "Response Parameters" + NOTIFICATIONS = "Notifications" + INTERNAL_ID = "InternalID" + INTERACTION_ID = "interactionID" + TIME_PREPARED = "timePreparedForUpdate" + INSTANCE = "instance" + + RECORD_SCN_REF = "SCN" + RECORD_CHANGELOG_REF = "changeLog" + + INITIAL_SCN = 1 + + DO_NOT_PRUNE = -1 + PRUNE_POINT = 12 + INVALID_SCN = -1 + + @classmethod + def logForGeneralUpdate(cls, sCN, internalID=None, xslt=None, rspParameters=None): + """ + Add a general change log update, nothing specific to a domain + """ + if not rspParameters: + rspParameters = {} + + logOfChange = {} + _timeOfChange = datetime.datetime.now().strftime(TimeFormats.STANDARD_DATE_TIME_FORMAT) + logOfChange[cls.TIMESTAMP] = _timeOfChange + logOfChange[cls.SCN] = sCN + logOfChange[cls.INTERNAL_ID] = internalID + logOfChange[cls.XSLT] = xslt + logOfChange[cls.RSP_PARAMS] = rspParameters + return logOfChange + + @classmethod + def updateChangeLog(cls, record, newLog, messageID, prunePoint=None): + """ + Take a change log from the record, add the new log to it, and prune to the prune + point + """ + if not prunePoint: + prunePoint = cls.PRUNE_POINT + + changeLog = record.get(cls.RECORD_CHANGELOG_REF, {}) + changeLog[messageID] = newLog + + cls.pruneChangeLog(changeLog, prunePoint) + + record[cls.RECORD_CHANGELOG_REF] = changeLog + return record + + @classmethod + def pruneChangeLog(cls, changeLog, prunePoint): + """ + Prune to the prune point + """ + if prunePoint != cls.DO_NOT_PRUNE: + _, _highestSCN = cls.getHighestSCN(changeLog) + if _highestSCN != cls.INVALID_SCN: + _scnToPrune = _highestSCN - prunePoint + pruneList = [] + for guid, changeLogEntry in changeLog.items(): + _entrySCN = int(changeLogEntry.get(cls.SCN, cls.INVALID_SCN)) + if _entrySCN < _scnToPrune: + pruneList.append(guid) + + for guid in pruneList: + del changeLog[guid] + + @classmethod + def getHighestSCN(cls, changeLog): + """ + Return the (guid, scn) from the first changeLog found with the highest SCN + """ + (highestGUID, highestSCN) = (None, cls.INVALID_SCN) + for _guid in changeLog: + _scn = int(changeLog[_guid].get(cls.SCN, cls.INVALID_SCN)) + if _scn > highestSCN: + highestGUID = _guid + highestSCN = _scn + return (highestGUID, highestSCN) + + @classmethod + def getSCN(cls, changeLogEntry): + """ + Retrieve the SCN as an int 
from the provided changeLog entry + """ + scnNumber = int(changeLogEntry.get(cls.SCN, cls.INVALID_SCN)) + return scnNumber + + @classmethod + def listSCNs(cls, changeLog): + """ + Performs list comprehension on the changeLog dictionary to retrieve all the SCNs from changeLog + + Duplicates will be present and changeLog entries with no SCN will be represented with the + INVALID_SCN constant + """ + scnNumberList = [cls.getSCN(changeLog[x]) for x in changeLog] + return scnNumberList + + @classmethod + def getMaxSCN(cls, changeLog): + """ + Return the highest SCN value from the provided changeLog + """ + scnNumberList = cls.listSCNs(changeLog) + if not scnNumberList: + return cls.INVALID_SCN + highestSCN = max(scnNumberList) + return highestSCN + + @classmethod + def getAllGuidsForSCN(cls, changeLog, searchScn): + """ + For the provided SCN return the GUID Keys of all the changeLog entries that have that SCN + + Usually this will be a single GUID, but in the case of tickled records there can be multiple. + """ + searchScn = int(searchScn) + guidList = [k for k in changeLog if cls.getSCN(changeLog[k]) == searchScn] + return guidList + + @classmethod + def getMaxSCNGuids(cls, changeLog): + """ + Finds the highest SCN in the changeLog and returns all the GUIDs that have that SCN + """ + highestSCN = cls.getMaxSCN(changeLog) + guidList = cls.getAllGuidsForSCN(changeLog, highestSCN) + return guidList + + @classmethod + def getAllGuids(cls, changeLog): + """ + Return a list of all the GUID keys from the provided changeLog + """ + return list(changeLog.keys()) + + @classmethod + def getLastChangeTime(cls, changeLog): + """ + Returns the last change time + """ + try: + guid = cls.getMaxSCNGuids(changeLog)[0] + except IndexError: + return None + return changeLog[guid].get(cls.TIMESTAMP) + + @classmethod + def setInitialChangeLog(cls, record, internalID, reasonGUID=None): + """ + If no change log is present set an initial change log on the record. It may + use a GUID as a key or a string explaining the reason for initiating the + change log. 
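+
+        The resulting shape is roughly (values illustrative):
+
+            record["changeLog"] == {
+                "<reasonGUID>": {
+                    "Timestamp": "20250911101112",
+                    "SCN": 1,
+                    "InternalID": internalID,
+                    "Source XSLT": None,
+                    "Response Parameters": {},
+                }
+            }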
+ """ + changeLog = record.get(cls.RECORD_CHANGELOG_REF) + if changeLog: + return + + scn = int(record.get(cls.RECORD_SCN_REF, cls.INITIAL_SCN)) + if not reasonGUID: + reasonGUID = str(uuid.uuid4()).upper() + changeLog = {} + changeLog[reasonGUID] = cls.logForGeneralUpdate(scn, internalID) + + record[cls.RECORD_CHANGELOG_REF] = changeLog + + +class DemographicsChangeLogProcessor(ChangeLogProcessor): + """ + Change Log Processor specifically for demographic records + """ + + # Demographic record uses 'serialChangeNumber' rather than the default 'SCN' + RECORD_SCN_REF = "serialChangeNumber" + + @classmethod + def logForDomainUpdate(cls, updateContext, internalID): + """ + Create a change log for this expected change - requires attributes to be set on + context object + """ + logOfChange = cls.logForGeneralUpdate( + updateContext.pdsRecord.get(cls.RECORD_SCN_REF, cls.INITIAL_SCN), + internalID, + updateContext.responseDetails.get(cls.XSLT), + updateContext.responseDetails.get(cls.RSP_PARAMS), + ) + + logOfChange[cls.SYS_SDS] = updateContext.agentSystem + logOfChange[cls.PRS_SDS] = updateContext.agentPerson + logOfChange[cls.UPDATES] = updateContext.updatesApplied + logOfChange[cls.NOTIFICATIONS] = updateContext.notificationsToQueue + return logOfChange + + @staticmethod + def getHighestGpLinksTransactionNumber(changeLog, sender, recipient): + """ + Return the highest GP Links transaction number which has been included in the change log, or None (if there + aren't any). + """ + maxNumber = -1 + + gpLinksKeyPattern = re.compile( + "^{}_{}_[0-9]+_[0-9]+_(?P[0-9]+)$".format( + sender.upper(), recipient.upper() + ) + ) + + for key in changeLog.keys(): # noqa: SIM118 + match = gpLinksKeyPattern.match(key) + # Ignore keys which aren't related to GP Links transactions + if match is None: + continue + transactionNumber = int(match.group("transactionNumber")) + if transactionNumber > maxNumber: + maxNumber = transactionNumber + + return maxNumber + + +class PrescriptionsChangeLogProcessor(ChangeLogProcessor): + """ + Change Log Processor specifically for prescriptions records + """ + + FROM_STATUS = "fromStatus" + TO_STATUS = "toStatus" + INS_FROM_STATUS = "instanceFromStatus" + INS_TO_STATUS = "instanceToStatus" + PRE_CHANGE_STATUS_DICT = "preChangeStatusDict" + POST_CHANGE_STATUS_DICT = "postChangeStatusDict" + CHANGED_ISSUES_LIST = "issuesAlteredByChange" + PRE_CHANGE_CURRENT_ISSUE = "preChangeCurrentIssue" + POST_CHANGE_CURRENT_ISSUE = "postChangeCurrentIssue" + TOUCHED = "touched" + AGENT_ROLE_PROFILE_CODE_ID = "agentRoleProfileCodeId" + AGENT_PERSON_ROLE = "agentPersonRole" + AGENT_PERSON_ORG_CODE = "agentPersonOrgCode" + + MIN_INITIALHISTORY = 16 + MIN_RECENTHISTORY = 16 + REPEATING_ACTIONS = [ + "PORX_IN060102UK30", + "PORX_IN060102SM30", + "PORX_IN132004UK30", + "PORX_IN132004SM30", + "PORX_IN132004UK04", + "PORX_IN100101UK31", + "PORX_IN100101SM31", + "PORX_IN100101UK04", + "PORX_IN020101UK31", + "PORX_IN020102UK31", + "PORX_IN020101SM31", + "PORX_IN020102SM31", + "PORX_IN020101UK04", + "PORX_IN020102UK04", + "PORX_IN060102GB01", + "PRESCRIPTION_DISPENSE_PROPOSAL_RETURN", + ] + + REGEX_ALPHANUMERIC8 = re.compile(r"^[A-Za-z0-9\-]{1,8}$") + + @classmethod + def logForDomainUpdate(cls, updateContext, internalID): + """ + Create a change log for this expected change - requires attribute to be set on + context object + """ + + logOfChange = cls.logForGeneralUpdate( + updateContext.epsRecord.get_scn(), + internalID, + updateContext.responseDetails.get(cls.XSLT), + 
updateContext.responseDetails.get(cls.RSP_PARAMS), + ) + logOfChange = updateContext.workDescriptionObject.createInitialEventLog(logOfChange) + + _instance = ( + str(updateContext.updateInstance) + if updateContext.updateInstance + else str(updateContext.instanceID) + ) + + logOfChange[cls.TIME_PREPARED] = updateContext.handleTime.strftime( + TimeFormats.STANDARD_DATE_TIME_FORMAT + ) + + # NOTE: FROM_STATUS and TO_STATUS seem to be legacy fields, that have been + # superceded by the INS_FROM_STATUS and INS_TO_STATUS fields set below. + # The only reference to TO_STATUS seems to be in PrescriptionJsonQueryResponse.cfg + # template used by the prescription detail view web service + logOfChange[cls.FROM_STATUS] = updateContext.epsRecord.return_previous_prescription_status( + updateContext.instanceID, False + ) + logOfChange[cls.TO_STATUS] = updateContext.epsRecord.return_prescription_status( + updateContext.instanceID, False + ) + + # Event history lines for UI + # **** NOTE THAT THESE ARE WRONG, THEY REFER TO THE FINAL ISSUE, WHICH MAY NOT BE THE ISSUE THAT WAS UPDATED + logOfChange[cls.INSTANCE] = _instance + logOfChange[cls.INS_FROM_STATUS] = ( + updateContext.epsRecord.return_previous_prescription_status(_instance, False) + ) + logOfChange[cls.INS_TO_STATUS] = updateContext.epsRecord.return_prescription_status( + _instance, False + ) + logOfChange[cls.AGENT_ROLE_PROFILE_CODE_ID] = updateContext.agentRoleProfileCodeId + logOfChange[cls.AGENT_PERSON_ROLE] = updateContext.agentPersonRole + orgCode = updateContext.agentOrganization + hasDispenserCode = hasattr(updateContext, "dispenserCode") and updateContext.dispenserCode + if ( + not orgCode + and hasDispenserCode + and cls.REGEX_ALPHANUMERIC8.match(updateContext.dispenserCode) + ): + orgCode = updateContext.dispenserCode + logOfChange[cls.AGENT_PERSON_ORG_CODE] = orgCode + + # To help with troubleshooting, the following change entris are added + _preChangeIssueStatuses = updateContext.epsRecord.return_prechange_issue_status_dict() + _postChangeIssueStatuses = updateContext.epsRecord.create_issue_current_status_dict() + logOfChange[cls.PRE_CHANGE_STATUS_DICT] = _preChangeIssueStatuses + logOfChange[cls.POST_CHANGE_STATUS_DICT] = _postChangeIssueStatuses + logOfChange[cls.CHANGED_ISSUES_LIST] = updateContext.epsRecord.return_changed_issue_list( + _preChangeIssueStatuses, _postChangeIssueStatuses, None, updateContext.changedIssuesList + ) + # To help with troubleshooting, the following currentIssue values are added + logOfChange[cls.PRE_CHANGE_CURRENT_ISSUE] = ( + updateContext.epsRecord.return_prechange_current_issue() + ) + logOfChange[cls.POST_CHANGE_CURRENT_ISSUE] = updateContext.epsRecord.current_issue_number + if hasattr(updateContext, cls.TOUCHED) and updateContext.touched: + logOfChange[cls.TOUCHED] = updateContext.touched + + return logOfChange + + @classmethod + def pruneChangeLog(cls, changeLog, prunePoint): + """ + Prune if other the prune point + Prune the change log where there is a series of change log entries for the same + interactionID - and the change is neither recent nor part of the early history + + The intention if we get a repeating interaction we don't continue to explode the + changeLog with all the history + """ + invertedChangeLog = {} + maxSCN = 0 + for guid, changeLogEntry in changeLog.items(): + _SCN = int(changeLogEntry.get(cls.SCN, cls.INVALID_SCN)) + invertedChangeLog[_SCN] = (guid, changeLogEntry.get(cls.INTERACTION_ID)) + maxSCN = max(maxSCN, _SCN) + + if maxSCN <= prunePoint: + # Don't make any changes + 
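+            # the log has not yet grown past the prune threshold, so there is nothing to trim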
return + + _iclSCNKeys = list(invertedChangeLog.keys()) + _iclSCNKeys.sort(reverse=True) + _guidsToPrune = [] + for _iclSCN in _iclSCNKeys: + if _iclSCN > (maxSCN - cls.MIN_RECENTHISTORY) or _iclSCN < cls.MIN_INITIALHISTORY: + continue + _thisIntID = invertedChangeLog.get(_iclSCN, (None, None))[1] + (_previousGUID, _previousIntID) = invertedChangeLog.get(_iclSCN - 1, (None, None)) + _oneBeforeIntID = invertedChangeLog.get(_iclSCN - 2, (None, None))[1] + if ( + _thisIntID + and _thisIntID in cls.REPEATING_ACTIONS + and _thisIntID == _previousIntID + and _previousIntID == _oneBeforeIntID + ): + _guidsToPrune.append(_previousGUID) + + for guid in _guidsToPrune: + del changeLog[guid] + + if len(changeLog) > prunePoint: + # If we have breached the prune point but can't safely prune - stop before + # The un-pruned record becomes an issue + raise EpsSystemError(EpsSystemError.SYSTEM_FAILURE) + + +class ClinicalsChangeLogProcessor(ChangeLogProcessor): + """ + Change Log Processor specifically for clinicals patient records + """ + + SYS_SDS = "agentSystemSDS" + PRS_SDS = "agentPerson" + PRUNE_POINT = 48 + + @classmethod + def logForDomainUpdate(cls, updateContext, internalID, interactionID=None): + """ + Create a change log for this expected change - requires attributes to be set on + context object + """ + logOfChange = cls.logForGeneralUpdate( + updateContext.patientRecord.get_scn(), + internalID, + updateContext.responseDetails.get(cls.XSLT), + updateContext.responseDetails.get(cls.RSP_PARAMS), + ) + + logOfChange[cls.TIME_PREPARED] = updateContext.handleTime.strftime( + TimeFormats.STANDARD_DATE_TIME_FORMAT + ) + logOfChange[cls.INTERACTION_ID] = interactionID + logOfChange[cls.SYS_SDS] = updateContext.agentSystem + logOfChange[cls.PRS_SDS] = updateContext.agentPerson + return logOfChange + + @classmethod + def logForNotificationUpdate(cls, interactionID, updateTime, scn, internalID): + """ + Create a change log for this expected change from a notification worker - doesn't use + context and sets a subset of the items used by logForDomainUpdate + """ + logOfChange = cls.logForGeneralUpdate(scn, internalID) + logOfChange[cls.TIME_PREPARED] = updateTime.strftime(TimeFormats.STANDARD_DATE_TIME_FORMAT) + logOfChange[cls.INTERACTION_ID] = interactionID + return logOfChange + + @classmethod + def logForTickleClinicalRecord(cls, updateContext, interactionID, internalID): + """ + Create a change log for this expected change from a notification worker - doesn't use + context and sets a subset of the items used by logForDomainUpdate + """ + logOfChange = cls.logForGeneralUpdate(updateContext.patientRecord.get_scn(), internalID) + logOfChange[cls.TIME_PREPARED] = updateContext.handleTime.strftime( + TimeFormats.STANDARD_DATE_TIME_FORMAT + ) + logOfChange[cls.INTERACTION_ID] = interactionID + logOfChange[cls.SYS_SDS] = "SYSTEM" + return logOfChange diff --git a/tests/common/dynamodb_client_test.py b/tests/common/dynamodb_client_test.py new file mode 100644 index 0000000..fdfc972 --- /dev/null +++ b/tests/common/dynamodb_client_test.py @@ -0,0 +1,177 @@ +import zlib +from uuid import uuid4 + +import simplejson + +from eps_spine_shared.common.dynamodb_client import EpsDataStoreError +from eps_spine_shared.common.dynamodb_common import Key, ProjectedAttribute, SortKey +from tests.dynamodb_test import DynamoDbTest + + +class EpsDynamoDbClientTest(DynamoDbTest): + """ + Tests relating to DynamoDbClient. + """ + + def test_log_item_size_default(self): + """ + Test logging size of items using default size fn. 
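+        The expected size is assumed to be the byte size of the serialised DynamoDB item,
+        as reported in the DDB0011 log entry asserted below.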
+ """ + key = str(uuid4()) + item = { + Key.PK.name: key, + Key.SK.name: "DEF", + ProjectedAttribute.BODY.name: {"a": 1, "b": True}, + } + serialised_item = self.datastore.client.serialise_for_dynamodb(item) + internal_id = self.internal_id + self.datastore.client._log_item_size(internal_id, serialised_item) + + expected = { + "itemType": "DEF", + "key": key, + "size": 177, + "table": self.datastore.client.table_name, + "internalID": internal_id, + } + + logs = self.logger.get_log_occurrences("DDB0011") + self.assertEqual(logs[0], expected) + + def test_log_item_size_record(self): + """ + Test logging size of record items. + """ + key = str(uuid4()) + body = zlib.compress(simplejson.dumps({"a": 1, "b": True}).encode("utf-8")) + item = { + Key.PK.name: key, + Key.SK.name: SortKey.RECORD.value, + ProjectedAttribute.BODY.name: body, + } + serialised_item = self.datastore.client.serialise_for_dynamodb(item) + internal_id = self.internal_id + self.datastore.client._log_item_size(internal_id, serialised_item) + + expected = { + "itemType": SortKey.RECORD.value, + "key": key, + "size": 192, + "table": self.datastore.client.table_name, + "internalID": internal_id, + } + + logs = self.logger.get_log_occurrences("DDB0011") + self.assertEqual(logs[0], expected) + + def test_log_item_size_document(self): + """ + Test logging size of document items. + """ + key = str(uuid4()) + content = self.get_document_content() + internal_id = self.internal_id + document = self.datastore.build_document(internal_id, {"content": content}, None) + document[Key.PK.name] = key + serialised_item = self.datastore.client.serialise_for_dynamodb(document) + self.datastore.client._log_item_size(internal_id, serialised_item) + + expected = { + "itemType": SortKey.DOCUMENT.value, + "key": key, + "size": 272, + "table": self.datastore.client.table_name, + "internalID": internal_id, + } + + logs = self.logger.get_log_occurrences("DDB0011") + self.assertEqual(logs[0], expected) + + def test_log_item_size_document_no_content(self): + """ + Test logging size of document items when no content is present. + """ + key = str(uuid4()) + internal_id = self.internal_id + document = self.datastore.build_document(internal_id, {}, None) + document[Key.PK.name] = key + serialised_item = self.datastore.client.serialise_for_dynamodb(document) + self.datastore.client._log_item_size(internal_id, serialised_item) + + expected = { + "itemType": SortKey.DOCUMENT.value, + "key": key, + "size": 201, + "table": self.datastore.client.table_name, + "internalID": internal_id, + } + + logs = self.logger.get_log_occurrences("DDB0011") + self.assertEqual(logs[0], expected) + + def test_log_item_size_claim(self): + """ + Test logging size of items using bespoke claim fn. + """ + key = str(uuid4()) + item = { + Key.PK.name: key, + Key.SK.name: SortKey.CLAIM.value, + ProjectedAttribute.BODY.name: {"a": 1, "b": True, "Batch XML": b""}, + } + serialised_item = self.datastore.client.serialise_for_dynamodb(item) + internal_id = self.internal_id + self.datastore.client._log_item_size(internal_id, serialised_item) + + expected = { + "itemType": SortKey.CLAIM.value, + "key": key, + "size": 234, + "table": self.datastore.client.table_name, + "internalID": internal_id, + } + + logs = self.logger.get_log_occurrences("DDB0011") + self.assertEqual(logs[0], expected) + + def test_log_item_size_work_list(self): + """ + Test logging size of items using bespoke workList fn. + WorkList may or may not have responseDetails containing compressed xml. 
+ """ + key = str(uuid4()) + bodies = [ + ({"a": 1, "b": True}, 177), + ({"a": 1, "b": True, "responseDetails": {"XML": b""}}, 40), + ] + + for i, (body, size) in enumerate(bodies): + item = { + Key.PK.name: key, + Key.SK.name: SortKey.WORK_LIST.value, + ProjectedAttribute.BODY.name: body, + } + serialised_item = self.datastore.client.serialise_for_dynamodb(item) + internal_id = self.internal_id + self.datastore.client._log_item_size(internal_id, serialised_item) + + expected = { + "itemType": SortKey.WORK_LIST.value, + "key": key, + "size": size, + "table": self.datastore.client.table_name, + "internalID": internal_id, + } + + logs = self.logger.get_log_occurrences("DDB0011") + self.assertEqual(logs[i], expected) + + def test_get_item_raises_data_store_error_when_pk_is_falsy(self): + """ + Test that the get_item method throws an EpsDataStoreError matching that thrown by the original datastore, + when the given key is falsy. + """ + keys = [False, "", [], {}] + for key in keys: + with self.assertRaises(EpsDataStoreError): + self.datastore.client.get_item(self.internal_id, key, "SK") diff --git a/tests/common/dynamodb_common_test.py b/tests/common/dynamodb_common_test.py new file mode 100644 index 0000000..8757664 --- /dev/null +++ b/tests/common/dynamodb_common_test.py @@ -0,0 +1,47 @@ +from decimal import Decimal +from unittest import TestCase + +from parameterized import parameterized + +from eps_spine_shared.common.dynamodb_common import ( + prescription_id_without_check_digit, + replace_decimals, +) + + +class DynamoDbCommonTest(TestCase): + """ + Tests relating to DynamoDbCommon. + """ + + def test_replace_decimals(self): + """ + Test replacing values of Decimal type in object. + """ + with_decimals = {"a": Decimal(1), "b": [Decimal(2)], "c": {"d": Decimal(3)}} + + expected = {"a": 1, "b": [2], "c": {"d": 3}} + + self.assertEqual(replace_decimals(with_decimals), expected) + + @parameterized.expand( + [ + ("r1_with_check", "1A23FF-Z3F5D8-11F0BE", "1A23FF-Z3F5D8-11F0B"), + ( + "r2_with_check", + "297BDA4D-5D80-11F0-BB47-57D6E4EB747DO", + "297BDA4D-5D80-11F0-BB47-57D6E4EB747D", + ), + ("r1_without_check", "1A23FF-Z3F5D8-11F0B", "1A23FF-Z3F5D8-11F0B"), + ( + "r2_without_check", + "297BDA4D-5D80-11F0-BB47-57D6E4EB747D", + "297BDA4D-5D80-11F0-BB47-57D6E4EB747D", + ), + ] + ) + def test_prescription_id_without_check_digit(self, _, with_check_digit, without_check_digit): + """ + Test removing the check digit from R1 and R2 prescriptions IDs. 
+ """ + self.assertEqual(prescription_id_without_check_digit(with_check_digit), without_check_digit) diff --git a/tests/common/dynamodb_datastore_test.py b/tests/common/dynamodb_datastore_test.py new file mode 100644 index 0000000..b9aa249 --- /dev/null +++ b/tests/common/dynamodb_datastore_test.py @@ -0,0 +1,906 @@ +import base64 +import binascii +import zlib +from datetime import datetime, timedelta, timezone +from decimal import Decimal +from threading import Thread +from unittest.mock import Mock, patch +from uuid import uuid4 + +import simplejson +from boto3.dynamodb.types import Binary +from freezegun import freeze_time +from parameterized import parameterized + +from eps_spine_shared.common import indexes +from eps_spine_shared.common.dynamodb_client import EpsDataStoreError +from eps_spine_shared.common.dynamodb_common import ( + NEXT_ACTIVITY_DATE_PARTITIONS, + Attribute, + Key, + ProjectedAttribute, + SortKey, + replace_decimals, +) +from eps_spine_shared.common.dynamodb_datastore import EpsDynamoDbDataStore +from eps_spine_shared.common.prescription.record import PrescriptionStatus +from eps_spine_shared.nhsfundamentals.timeutilities import TimeFormats +from tests.dynamodb_test import DynamoDbTest +from tests.mock_logger import MockLogObject + + +class EpsDynamoDbDataStoreTest(DynamoDbTest): + """ + Tests relating to DynamoDbDataStore. + """ + + def test_insert_record(self): + """ + Test datastore can insert records. + """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + + response = self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, record + ) + + self.assertEqual(response["ResponseMetadata"]["HTTPStatusCode"], 200) + + def test_include_record_type(self): + """ + Test datastore can insert records including recordType and retrieve records with it included. + """ + repeat_dispense = "RepeatDispense" + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + + self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, record, None, repeat_dispense + ) + returned_record = self.datastore.return_record_for_process( + self.internal_id, prescription_id + ) + + self.assertEqual(returned_record["recordType"], repeat_dispense) + + def test_insert_duplicate(self): + """ + Test datastore will not overwrite records. + """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + record["instances"]["1"]["prescriptionStatus"] = PrescriptionStatus.AWAITING_RELEASE_READY + + with self.assertRaises(EpsDataStoreError) as cm: + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + self.assertEqual(cm.exception.error_topic, EpsDataStoreError.DUPLICATE_ERROR) + + returned_record = self.datastore.return_record_for_process( + self.internal_id, prescription_id + ) + returned_record_status = returned_record["value"]["instances"]["1"]["prescriptionStatus"] + + self.assertEqual(returned_record_status, PrescriptionStatus.TO_BE_DISPENSED) + self.assertEqual(self.logger.log_occurrence_count("DDB0021"), 1) + + def test_insert_multiple(self): + """ + Test client can insert multiple items. 
+ """ + items = [] + for _ in range(2): + record_key, _ = self.get_new_record_keys() + items.append({Key.PK.name: record_key, Key.SK.name: "DEF"}) + + response = self.datastore.client.insert_items(self.internal_id, items) + + self.assertEqual(response["ResponseMetadata"]["HTTPStatusCode"], 200) + + def test_client_put(self): + """ + Test put_item is used when one item. + """ + mock_client = Mock() + self.datastore.client.client = mock_client + self.datastore.client.insert_items(self.internal_id, [{}], log_item_size=False) + mock_client.put_item.assert_called_once() + + def test_client_transact(self): + """ + Test transact_write_items is used when multiple items. + """ + mock_client = Mock() + self.datastore.client.client = mock_client + self.datastore.client.insert_items(self.internal_id, [{}, {}], log_item_size=False) + mock_client.transact_write_items.assert_called_once() + + def test_return_record_for_process(self): + """ + Test querying against the prescriptionId index and + returning a record with additional required attributes. + """ + prescription_id, nhs_number = self.get_new_record_keys() + self.assertFalse(self.datastore.is_record_present(self.internal_id, prescription_id)) + + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + returned_record = self.datastore.return_record_for_process( + self.internal_id, prescription_id + ) + + expected_record = {"value": record, "vectorClock": "vc", "releaseVersion": "R2"} + + self.assertEqual(expected_record, returned_record) + self.assertEqual(type(returned_record["value"]["prescription"]["daysSupply"]), int) + + def test_return_record_for_update(self): + """ + Test querying against the prescriptionId index and + returning a record with additional required attributes, including setting it on the dataStore. + """ + prescription_id, nhs_number = self.get_new_record_keys() + self.assertFalse(self.datastore.is_record_present(self.internal_id, prescription_id)) + + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + returned_record = self.datastore.return_record_for_update(self.internal_id, prescription_id) + + expected_record = {"value": record, "vectorClock": "vc", "releaseVersion": "R2"} + + self.assertEqual(expected_record, returned_record) + self.assertEqual(record, self.datastore.dataObject) + + def test_change_eps_object(self): + """ + Test update to existing record. + """ + prescription_id, nhs_number = self.get_new_record_keys() + self.assertFalse(self.datastore.is_record_present(self.internal_id, prescription_id)) + + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + record["SCN"] = 2 + self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, record, is_update=True + ) + + updated_record = self.datastore.return_record_for_process(self.internal_id, prescription_id) + + expected_record = {"value": record, "vectorClock": "vc", "releaseVersion": "R2"} + + self.assertEqual(expected_record, updated_record) + + def test_change_eps_object_same_scn(self): + """ + Test failed update to existing record due to no increment to SCN. 
+ """ + prescription_id, nhs_number = self.get_new_record_keys() + self.assertFalse(self.datastore.is_record_present(self.internal_id, prescription_id)) + + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + modified_record = self.get_record(nhs_number) + modified_record["instances"]["1"][ + "prescriptionStatus" + ] = PrescriptionStatus.AWAITING_RELEASE_READY + + with self.assertRaises(EpsDataStoreError) as cm: + self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, modified_record, is_update=True + ) + self.assertEqual(cm.exception.error_topic, EpsDataStoreError.CONDITIONAL_UPDATE_FAILURE) + + self.assertEqual(self.logger.log_occurrence_count("DDB0022"), 1) + + updated_record = self.datastore.return_record_for_process(self.internal_id, prescription_id) + + expected_record = {"value": record, "vectorClock": "vc", "releaseVersion": "R2"} + + self.assertEqual(expected_record, updated_record) + + def test_timer(self): + """ + Test timer decorator writes desired log. + """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + occurrences = self.logger.get_log_occurrences("DDB0002") + self.assertEqual(len(occurrences), 1) + self.assertEqual(occurrences[0]["func"], "insert_eps_record_object") + self.assertEqual(occurrences[0]["cls"], "EpsDynamoDbDataStore") + + def test_insert_and_get_eps_work_list(self): + """ + Test insertion and retrieval of EPS worklist, compressing/decompressing its XML. + """ + message_id = str(uuid4()) + self.keys.append((message_id, SortKey.WORK_LIST.value)) + + xml = "" + xml_bytes = xml.encode("utf-8") + + for response_details in [xml, xml_bytes]: + work_list = { + Key.SK.name: SortKey.WORK_LIST.value, + "keyList": [], + "responseDetails": {"XML": response_details}, + } + self.datastore.insert_eps_work_list(self.internal_id, message_id, work_list) + + returned_work_list = self.datastore.get_work_list(self.internal_id, message_id) + + self.assertEqual(returned_work_list["responseDetails"]["XML"], xml_bytes) + self.assertEqual(work_list["responseDetails"]["XML"], response_details) + + def test_fetch_next_sequence_number(self): + """ + Test fetching and incrementing claims sequence number. + """ + self.keys.append((self.datastore.CLAIM_SEQUENCE_NUMBER_KEY, SortKey.SEQUENCE_NUMBER.value)) + self.datastore.client.delete_item( + self.datastore.CLAIM_SEQUENCE_NUMBER_KEY, SortKey.SEQUENCE_NUMBER.value + ) + + sequence_number = self.datastore.fetch_next_sequence_number(self.internal_id, 2) + self.assertEqual(sequence_number, 1) + + sequence_number = self.datastore.fetch_next_sequence_number(self.internal_id, 2, True) + self.assertEqual(sequence_number, 2) + + sequence_number = self.datastore.fetch_next_sequence_number(self.internal_id, 2) + self.assertEqual(sequence_number, 2) + + sequence_number = self.datastore.fetch_next_sequence_number(self.internal_id, 2) + self.assertEqual(sequence_number, 1) + + def test_fetch_next_sequence_number_nwssp(self): + """ + Test fetching and incrementing claims sequence number. 
+ """ + self.keys.append( + (self.datastore.NWSSP_CLAIM_SEQUENCE_NUMBER_KEY, SortKey.SEQUENCE_NUMBER.value) + ) + self.datastore.client.delete_item( + self.datastore.NWSSP_CLAIM_SEQUENCE_NUMBER_KEY, SortKey.SEQUENCE_NUMBER.value + ) + + sequence_number = self.datastore.fetch_next_sequence_number_nwssp(self.internal_id, 2) + self.assertEqual(sequence_number, 1) + + sequence_number = self.datastore.fetch_next_sequence_number_nwssp(self.internal_id, 2, True) + self.assertEqual(sequence_number, 2) + + sequence_number = self.datastore.fetch_next_sequence_number_nwssp(self.internal_id, 2) + self.assertEqual(sequence_number, 2) + + sequence_number = self.datastore.fetch_next_sequence_number_nwssp(self.internal_id, 2) + self.assertEqual(sequence_number, 1) + + @patch("random.randint") + def test_store_batch_claim(self, patched_randint): + """ + Test creating and storing a batch claim. + """ + patched_randint.return_value = 7 + + self.keys.append(("batchGuid", SortKey.CLAIM.value)) + batch_claim = { + "Batch GUID": "batchGuid", + "Claim ID List": ["claimId1", "claimId2"], + "Handle Time": "handleTime", + "Sequence Number": 1, + "Nwssp Sequence Number": 2, + "Batch XML": b"", + } + dt_now = datetime.now(timezone.utc) + with freeze_time(dt_now): + self.datastore.store_batch_claim(self.internal_id, batch_claim) + + returned_batch_claim = self.datastore.client.get_item( + self.internal_id, "batchGuid", SortKey.CLAIM.value + ) + replace_decimals(returned_batch_claim) + returned_batch_claim["body"]["Batch XML"] = bytes(returned_batch_claim["body"]["Batch XML"]) + + expected = { + Key.PK.name: "batchGuid", + Key.SK.name: SortKey.CLAIM.value, + ProjectedAttribute.BODY.name: batch_claim, + ProjectedAttribute.INDEXES.name: { + self.datastore.INDEX_CLAIMID: ["claimId1", "claimId2"], + self.datastore.INDEX_CLAIMHANDLETIME: ["handleTime"], + self.datastore.INDEX_CLAIM_SEQNUMBER: [1], + self.datastore.INDEX_SCN: [ + f"{dt_now.strftime(TimeFormats.STANDARD_DATE_TIME_FORMAT)}|1" + ], + self.datastore.INDEX_CLAIM_SEQNUMBER_NWSSP: [2], + }, + ProjectedAttribute.CLAIM_IDS.name: ["claimId1", "claimId2"], + Attribute.SEQUENCE_NUMBER_NWSSP.name: 2, + ProjectedAttribute.EXPIRE_AT.name: int( + (dt_now + timedelta(days=self.datastore.DEFAULT_EXPIRY_DAYS)).timestamp() + ), + Attribute.RIAK_LM.name: float(str(dt_now.timestamp())), + Attribute.LM_DAY.name: dt_now.strftime("%Y%m%d") + ".7", + Attribute.BATCH_CLAIM_ID.name: "batchGuid", + } + self.assertEqual(returned_batch_claim, expected) + + fetched_batch_claim = self.datastore.fetch_batch_claim(self.internal_id, "batchGuid") + batch_xml = fetched_batch_claim["Batch XML"] + self.assertEqual(batch_xml, "") + + def test_delete_claim_notification(self): + """ + Test deleting a claim notification from the table. + """ + document_key = uuid4() + notification_key = self.datastore.NOTIFICATION_PREFIX + str(document_key) + content = self.get_document_content() + self.datastore.insert_eps_document_object( + self.internal_id, notification_key, {"content": content} + ) + + returned_body = self.datastore.return_document_for_process( + self.internal_id, notification_key + ) + self.assertEqual(returned_body, {"content": content}) + + self.datastore.delete_claim_notification(self.internal_id, document_key) + self.assertRaises( + EpsDataStoreError, + self.datastore.return_document_for_process, + notification_key, + self.internal_id, + ) + + def test_return_claim_notification(self): + """ + Test returning a claim notification from the table. 
+ Claim notification has content under payload key instead of content, so won't be b64 decoded/encoded. + """ + document_key = uuid4() + notification_key = self.datastore.NOTIFICATION_PREFIX + str(document_key) + content = self.get_document_content() + index = { + indexes.INDEX_STORE_TIME_DOC_REF_TITLE: ["ClaimNotification_20250911"], + indexes.INDEX_DELETE_DATE: ["20250911"], + indexes.INDEX_PRESCRIPTION_ID: str(uuid4()), + } + self.datastore.insert_eps_document_object( + self.internal_id, notification_key, {"payload": content}, index + ) + + returned_body = self.datastore.return_document_for_process( + self.internal_id, notification_key + ) + self.assertEqual(returned_body, {"payload": content}) + + def test_delete_document(self): + """ + Test deleting a document from the table. + """ + document_key = self.generate_document_key() + content = self.get_document_content() + self.datastore.insert_eps_document_object( + self.internal_id, document_key, {"content": content} + ) + + self.assertTrue(self.datastore.delete_document(self.internal_id, document_key)) + + def test_delete_record(self): + """ + Test deleting a record from the table. + """ + record_key = self.generate_record_key() + nhs_number = self.generate_nhs_number() + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, record_key, record) + + self.datastore.delete_record(self.internal_id, record_key) + + self.assertFalse( + self.datastore.client.get_item( + self.internal_id, record_key, SortKey.RECORD.value, expect_exists=False + ) + ) + + def test_convert_index_keys_to_lower_case(self): + """ + Test converting all keys in a dict to lower case. Returns unchanged if unexpected type. + """ + index_dict = { + "nhsNumber_bin": ["nhsNumberA", "nhsNumberB"], + "nhsNumberPrescDispDate_bin": [ + "nhsNumberA|prescA|dispA|dateA", + "nhsNumberB|prescB|dispB|dateB", + ], + "nextActivityNAD_bin": ["purge", "delete"], + } + + expected = { + "nhsnumber_bin": ["nhsNumberA", "nhsNumberB"], + "nhsnumberprescdispdate_bin": [ + "nhsNumberA|prescA|dispA|dateA", + "nhsNumberB|prescB|dispB|dateB", + ], + "nextactivitynad_bin": ["purge", "delete"], + } + + converted_dict = self.datastore.convert_index_keys_to_lower_case(index_dict) + + self.assertEqual(converted_dict, expected) + + index_wrong_type = "NoTaDiCt" + converted_wrong_type = self.datastore.convert_index_keys_to_lower_case(index_wrong_type) + + self.assertEqual(converted_wrong_type, index_wrong_type) + + @patch("random.randint") + def test_add_last_modified_to_item(self, patched_randint): + """ + Test adding last modified timestamp and date to items. + """ + patched_randint.return_value = 7 + + item = {"a": 1} + + date_time = datetime( + year=2025, month=9, day=11, hour=10, minute=11, second=12, microsecond=123456 + ) + with freeze_time(date_time): + self.datastore.client.add_last_modified_to_item(item) + + expected = {"a": 1, "_riak_lm": Decimal("1757585472.123456"), "_lm_day": "20250911.7"} + self.assertEqual(item, expected) + + @parameterized.expand( + [ + ["string that is not base64 encoded", ValueError, "Document content not b64 encoded"], + ["xxx", binascii.Error, "Incorrect padding"], + ] + ) + def test_document_decode_error(self, content, expected_error_type, expected_log_value): + """ + Test error handling when base64 decoding the document. 
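+        Parameterized with a plain string that is not valid base64 and a value with incorrect padding.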
+ """ + document = {"content": content} + with self.assertRaises(expected_error_type): + self.datastore.insert_eps_document_object(self.internal_id, None, document) + + log_value = self.datastore.log_object.logger.get_logged_value("DDB0031", "error") + self.assertEqual(log_value, expected_log_value) + + def test_document_encode_error(self): + """ + Test error handling when base64 encoding the document. + """ + document_key = "testDocument" + self.keys.append((document_key, SortKey.DOCUMENT.value)) + document = { + Key.PK.name: document_key, + Key.SK.name: SortKey.DOCUMENT.value, + ProjectedAttribute.BODY.name: {"content": None}, + } + self.datastore.client.put_item(self.internal_id, document, log_item_size=False) + + with self.assertRaises(TypeError): + self.datastore.return_document_for_process(self.internal_id, document_key) + + was_logged = self.datastore.log_object.logger.was_logged("DDB0032") + self.assertTrue(was_logged) + + def test_batch_claim_xml_decode_error(self): + """ + Test error handling when decoding the batch claim xml. + """ + batch_claim_key = "testBatchClaim" + self.keys.append((batch_claim_key, SortKey.CLAIM.value)) + batch_claim = { + Key.PK.name: batch_claim_key, + Key.SK.name: SortKey.CLAIM.value, + ProjectedAttribute.BODY.name: {"Batch XML": None}, + } + self.datastore.client.put_item(self.internal_id, batch_claim, log_item_size=False) + + with self.assertRaises(TypeError): + self.datastore.fetch_batch_claim(self.internal_id, batch_claim_key) + + was_logged = self.datastore.log_object.logger.was_logged("DDB0033") + self.assertTrue(was_logged) + + def test_record_expire_at_datetime_format(self): + """ + Test that the expireAt attribute added to a record defaults to 18 months from its creation. + Provided prescriptionTime is in %Y%m%d%H%M%S format. + """ + prescription_id, nhs_number = self.get_new_record_keys() + + date_time = datetime( + year=2025, + month=9, + day=11, + hour=10, + minute=11, + second=12, + microsecond=123456, + tzinfo=timezone.utc, + ) + date_time_string = datetime.strftime(date_time, TimeFormats.STANDARD_DATE_TIME_FORMAT) + record = self.get_record(nhs_number, date_time_string) + + expected_timestamp = int( + datetime( + year=2027, month=3, day=11, hour=10, minute=11, second=12, tzinfo=timezone.utc + ).timestamp() + ) + + built_record = self.datastore.build_record(prescription_id, record, None, None) + + expire_at = built_record["expireAt"] + self.assertEqual(expire_at, expected_timestamp) + + def test_record_expire_at_date_format(self): + """ + Test that the expireAt attribute added to a record defaults to 18 months from its creation. + Provided prescriptionTime is in %Y%m%d format. + """ + prescription_id, nhs_number = self.get_new_record_keys() + + date_time = datetime( + year=2025, + month=9, + day=11, + hour=10, + minute=11, + second=12, + microsecond=123456, + tzinfo=timezone.utc, + ) + date_string = datetime.strftime(date_time, TimeFormats.STANDARD_DATE_FORMAT) + record = self.get_record(nhs_number, date_string) + + expected_timestamp = int( + datetime(year=2027, month=3, day=11, tzinfo=timezone.utc).timestamp() + ) + + built_record = self.datastore.build_record(prescription_id, record, None, None) + + expire_at = built_record["expireAt"] + self.assertEqual(expire_at, expected_timestamp) + + def test_document_expire_at(self): + """ + Test that the expireAt attribute added to a document + defaults to 18 months from when it is written to the database. 
+ """ + content = self.get_document_content() + document = {"content": content} + + date_time = datetime( + year=2025, + month=9, + day=11, + hour=10, + minute=11, + second=12, + microsecond=123456, + tzinfo=timezone.utc, + ) + + expected_timestamp = int( + datetime( + year=2027, month=3, day=11, hour=10, minute=11, second=12, tzinfo=timezone.utc + ).timestamp() + ) + + with freeze_time(date_time): + built_document = self.datastore.build_document(self.internal_id, document, None) + + expire_at = built_document["expireAt"] + self.assertEqual(expire_at, expected_timestamp) + + def test_document_expire_at_from_index(self): + """ + Test that the expireAt attribute added to a document matches that provided in the index. + """ + content = self.get_document_content() + document = {"content": content} + index = { + indexes.INDEX_STORE_TIME_DOC_REF_TITLE: [ + f"{self.datastore.STORE_TIME_DOC_REF_TITLE_PREFIX}_20250911" + ], + indexes.INDEX_DELETE_DATE: ["20250911"], + indexes.INDEX_PRESCRIPTION_ID: str(uuid4()), + } + + expected_timestamp = int( + datetime(year=2025, month=9, day=11, tzinfo=timezone.utc).timestamp() + ) + + built_document = self.datastore.build_document(self.internal_id, document, index) + + expire_at = built_document["expireAt"] + self.assertEqual(expire_at, expected_timestamp) + + def test_concurrent_inserts(self): + """ + Test that concurrent inserts to a record will raise a EpsDataStoreError and log correctly + """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + + exceptions_thrown = [] + + def insert_record(datastore: EpsDynamoDbDataStore, insert_args): + try: + datastore.insert_eps_record_object(*insert_args) + except Exception as e: + exceptions_thrown.append(e) + + # Create several processes that try to insert the record concurrently + processes = [] + loggers = [] + for _ in range(2): + logger = MockLogObject() + loggers.append(logger) + + datastore = EpsDynamoDbDataStore(logger, None, "spine-eps-datastore") + + process = Thread( + target=insert_record, args=(datastore, (self.internal_id, prescription_id, record)) + ) + processes.append(process) + + # Start processes + for process in processes: + process.start() + + # Wait for processes to finish + for process in processes: + process.join() + + logs = set() + [logs.add(log) for logger in loggers for log in logger.called_references] + self.assertTrue("DDB0021" in logs, "Expected a log DDB0021 for concurrent insert failure") + + self.assertEqual( + len(exceptions_thrown), 1, "Expected exception to be thrown for concurrent insertions" + ) + self.assertTrue( + isinstance(exceptions_thrown[0], EpsDataStoreError), + "Expected EpsDataStoreError for concurrent insertions", + ) + self.assertEqual( + exceptions_thrown[0].error_topic, + EpsDataStoreError.DUPLICATE_ERROR, + "Expected EpsDataStoreError.DUPLICATE_ERROR for concurrent insertions", + ) + + def test_concurrent_updates(self): + """ + Test that concurrent updates to a record will raise a EpsDataStoreError and log correctly + """ + # Insert the initial record + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + + response = self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, record + ) + + self.assertEqual(response["ResponseMetadata"]["HTTPStatusCode"], 200) + + # Make a change to the record + record["prescription"]["daysSupply"] = 30 + record["SCN"] = 5 + + exceptions_thrown = [] + + def change_record(datastore, change_args): + try: + 
datastore.insert_eps_record_object(*change_args) + except Exception as e: + exceptions_thrown.append(e) + + # Create several processes that try to update the record concurrently + processes = [] + loggers = [] + for _ in range(2): + logger = MockLogObject() + loggers.append(logger) + + datastore = EpsDynamoDbDataStore(logger, None, "spine-eps-datastore") + + index = None + record_type = None + is_update = True + + process = Thread( + target=change_record, + args=( + datastore, + (self.internal_id, prescription_id, record, index, record_type, is_update), + ), + ) + processes.append(process) + + # Start processes + for process in processes: + process.start() + + # Wait for processes to finish + for process in processes: + process.join() + + logs = set() + [logs.add(log) for logger in loggers for log in logger.called_references] + self.assertTrue("DDB0022" in logs, "Expected a log DDB0022 for concurrent update failure") + + self.assertEqual( + len(exceptions_thrown), 1, "Expected exception to be thrown for concurrent updates" + ) + self.assertTrue( + isinstance(exceptions_thrown[0], EpsDataStoreError), + "Expected EpsDataStoreError for concurrent updates", + ) + self.assertEqual( + exceptions_thrown[0].error_topic, + EpsDataStoreError.CONDITIONAL_UPDATE_FAILURE, + "Expected EpsDataStoreError.CONDITIONAL_UPDATE_FAILURE for concurrent updates", + ) + + def test_add_claim_notification_store_date(self): + """ + Test that the claimNotificationStoreDate attribute is added only when docRefTitle is ClaimNotification. + """ + content = self.get_document_content() + document = {"content": content} + + for doc_ref_title in ["ClaimNotification", "Other"]: + index = { + indexes.INDEX_STORE_TIME_DOC_REF_TITLE: [f"{doc_ref_title}_20250911"], + indexes.INDEX_DELETE_DATE: ["20250911"], + indexes.INDEX_PRESCRIPTION_ID: str(uuid4()), + } + + built_document = self.datastore.build_document(self.internal_id, document, index) + + if doc_ref_title == "ClaimNotification": + claim_notification_store_date = built_document["claimNotificationStoreDate"] + self.assertEqual("20250911", claim_notification_store_date) + else: + self.assertTrue("claimNotificationStoreDate" not in built_document) + + def test_record_next_activity_sharding(self): + """ + Test that building a record correctly shards the nextActivity attribute + """ + prescription_id, nhs_number = self.get_new_record_keys() + + record = self.get_record(nhs_number) + + item = self.datastore.build_record(prescription_id, record, None, None) + + next_activity = item[Attribute.NEXT_ACTIVITY.name] + activity, shard = next_activity.split(".") + shard = int(shard) + + self.assertEqual(activity, "createNoClaim") + self.assertTrue(shard >= 1 and shard <= NEXT_ACTIVITY_DATE_PARTITIONS) + + @parameterized.expand( + [ + [ + ["C51BB3D6-6948-11F0-9F54-EDAF56A204B4N", "C51BB3D6-6948-11F0-9F54-EDAF56A204B4"], + "R1.7", + ], + [["5HLBWE-U5QENL-24XBU", "5HLBWE-U5QENL-24XBUX"], "R2.7"], + [["5HLBWE-U5QENL-24XB"], "UNKNOWN"], + ] + ) + def test_build_record_adds_release_version(self, prescription_ids, expected): + """ + Test that the build_record method adds an R1/R2 releaseVersion attribute to a record. + Defaults to UNKNOWN when id is too short. 
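+        The parameterized IDs cover long-form (R1), short-form (R2) and too-short (UNKNOWN) formats.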
+ """ + nhs_number = self.generate_nhs_number() + record = self.get_record(nhs_number) + + for prescription_id in prescription_ids: + with patch("random.randint") as patched_randint: + patched_randint.return_value = 7 + item = self.datastore.build_record(prescription_id, record, None, None) + self.assertEqual(item["releaseVersion"], expected) + + @parameterized.expand( + [ + [ + ["C51BB3D6-6948-11F0-9F54-EDAF56A204B4N", "C51BB3D6-6948-11F0-9F54-EDAF56A204B4"], + "R1", + ], + [["5HLBWE-U5QENL-24XBU", "5HLBWE-U5QENL-24XBUX"], "R2"], + [["5HLBWE-U5QENL-24XB"], "UNKNOWN"], + ] + ) + def test_build_record_to_return_adds_release_version(self, prescription_ids, expected): + """ + Test that the _build_record_to_return method adds an R1/R2 releaseVersion attribute to a record + if it is missing. Defaults to UNKNOWN when id is too short. + """ + for prescription_id in prescription_ids: + item = {"pk": prescription_id} + record = self.datastore._build_record_to_return(item, {}) + self.assertEqual(record["releaseVersion"], expected) + + def test_is_record_present(self): + """ + Ensure that the is_record_present returns the correct boolean depending on presence of a record. + """ + prescription_id, nhs_number = self.get_new_record_keys() + self.assertFalse(self.datastore.is_record_present(self.internal_id, prescription_id)) + + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + self.assertTrue(self.datastore.is_record_present(self.internal_id, prescription_id)) + + def test_claim_notification_binary_encoding(self): + """ + Ensure that fetching documents handles stringified and binary payloads + """ + document_key = self.generate_document_key() + content = self.get_document_content() + index = { + indexes.INDEX_STORE_TIME_DOC_REF_TITLE: ["ClaimNotification_20250911"], + indexes.INDEX_DELETE_DATE: ["20250911"], + } + self.datastore.insert_eps_document_object( + self.internal_id, document_key, {"payload": content}, index + ) + + # Document should be stored as a string in DynamoDB + self.assertTrue( + isinstance( + self.datastore.client.get_item( + self.internal_id, document_key, SortKey.DOCUMENT.value + )["body"]["payload"], + str, + ) + ) + + string_response = self.datastore.return_document_for_process(self.internal_id, document_key) + + binary_content = base64.b64encode( + zlib.compress(simplejson.dumps({"a": 1, "b": True}).encode("utf-8")) + ) + document_key2 = self.generate_document_key() + self.datastore.insert_eps_document_object( + self.internal_id, document_key2, {"payload": binary_content}, index + ) + + # Document should be stored as a binary in DynamoDB + self.assertTrue( + isinstance( + self.datastore.client.get_item( + self.internal_id, document_key2, SortKey.DOCUMENT.value + )["body"]["payload"], + Binary, + ) + ) + + binary_response = self.datastore.return_document_for_process( + self.internal_id, document_key2 + ) + + self.assertEqual(string_response, binary_response) diff --git a/tests/common/dynamodb_index_test.py b/tests/common/dynamodb_index_test.py new file mode 100644 index 0000000..757bc7c --- /dev/null +++ b/tests/common/dynamodb_index_test.py @@ -0,0 +1,905 @@ +import copy +import datetime +import json +import sys +from decimal import Decimal +from sys import getsizeof +from types import SimpleNamespace +from unittest.mock import patch +from uuid import uuid4 + +from boto3.dynamodb.conditions import Key as BotoKey +from freezegun import freeze_time +from parameterized import parameterized + +from 
eps_spine_shared.common import indexes +from eps_spine_shared.common.dynamodb_common import ( + GSI, + NEXT_ACTIVITY_DATE_PARTITIONS, + Attribute, + Key, + ProjectedAttribute, + SortKey, +) +from eps_spine_shared.common.prescription import fields +from eps_spine_shared.common.prescription.record import ( + PrescriptionStatus, +) +from tests.dynamodb_test import ( + CREATION_TIME, + DISP_ORG, + NOM_ORG, + PRESC_ORG, + DynamoDbTest, +) + + +class EpsDynamoDbIndexTest(DynamoDbTest): + """ + Tests relating to DynamoDbIndex. + """ + + def get_erd_record(self, nhs_number, creation_time=CREATION_TIME): + """ + Get record and add instance and index entry to represent eRD. + """ + record = self.get_record(nhs_number, creation_time) + record["instances"]["2"] = { + "prescriptionStatus": PrescriptionStatus.REPEAT_DISPENSE_FUTURE_INSTANCE, + "dispense": {"dispensingOrganization": "X28"}, + } + record["indexes"]["nhsNumberDate_bin"].append( + f"{nhs_number}|{creation_time}|R2|{PrescriptionStatus.REPEAT_DISPENSE_FUTURE_INSTANCE}" + ) + return record + + def get_nominated_record(self, nhs_number, creation_time=CREATION_TIME): + """ + Get record and add nomination and index entry to represent nominated. + """ + record = self.get_record(nhs_number, creation_time) + record.update({"nomination": {"nominatedPerformer": NOM_ORG}}) + record["indexes"]["nomPharmStatus_bin"] = [ + f"{NOM_ORG}_{PrescriptionStatus.TO_BE_DISPENSED}" + ] + return record + + def modify_prescriber(self, record): + """ + Modify prescriber org of given record. + """ + record["prescription"]["prescribingOrganization"] = "NOPE" + + def modify_dispenser(self, record): + """ + Modify dispenser org of given record. + """ + record["instances"]["1"]["dispense"]["dispensingOrganization"] = "NOPE" + + def modify_status(self, record): + """ + Modify status of given record. + """ + record["instances"]["1"][ + "prescriptionStatus" + ] = PrescriptionStatus.FUTURE_DATED_PRESCRIPTION + + def add_ballast_to_record(self, record): + """ + Add ballast to the index attribute of the record to increase its size. + """ + built_record = self.datastore.build_record("", record, "Acute", None) + + body_size = sys.getsizeof(built_record["body"]) + item_deep_copy = copy.deepcopy(built_record) + del item_deep_copy["body"] + record_without_body_size = sys.getsizeof(json.dumps(item_deep_copy)) + record_size = body_size + record_without_body_size + + ballast = "" + while (getsizeof(ballast) * 2) + record_size < 400_000: + ballast = ballast + "a" + record["indexes"]["ballast"] = ballast + + def create_modify_insert_record( + self, internal_id, nhs_number, modification=None, nominated=False + ): + """ + Create a record, modifying so as not to be returned by a query and adding its keys to those to be cleaned-up. + """ + record_id = self.generate_record_key() + self.keys.append((record_id, SortKey.RECORD.value)) + record = self.get_nominated_record(nhs_number) if nominated else self.get_record(nhs_number) + if modification: + modification(record) + self.datastore.insert_eps_record_object(internal_id, record_id, record) + return record_id + + def test_build_terms_with_regex(self): + """ + Test building terms from indexes of returned records, including regex checks. 
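+        Only the term matching the release version and prescription status encoded in the regex should be returned.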
+ """ + nhs_number = self.generate_nhs_number() + release_version = "R2" + items = [ + { + Key.PK.name: self.generate_prescription_id(), + ProjectedAttribute.INDEXES.name: { + indexes.INDEX_NHSNUMBER_DATE.lower(): [ + f"{nhs_number}|{CREATION_TIME}|R2|{PrescriptionStatus.TO_BE_DISPENSED}", + f"{nhs_number}|{CREATION_TIME}|R1|{PrescriptionStatus.TO_BE_DISPENSED}", + f"{nhs_number}|{CREATION_TIME}|R2|{PrescriptionStatus.AWAITING_RELEASE_READY}", + ] + }, + } + ] + term_regex = r"\|\d{8,14}\|" + release_version + r"\|" + PrescriptionStatus.TO_BE_DISPENSED + terms = self.datastore.indexes.build_terms(items, indexes.INDEX_NHSNUMBER_DATE, term_regex) + + self.assertEqual(len(terms), 1) + + def test_return_terms_by_nhs_number_date(self): + """ + Test querying against the nhsNumberDate index and returning nhsNumberDate records. + """ + nhs_number = self.generate_nhs_number() + creation_times = ["20230911000000", "20230912000000", "20230913000000", "20230914000000"] + + record_values = [ + SimpleNamespace(id=self.generate_record_key(), creation_time=time) + for time in creation_times + ] + + for values in record_values: + record = self.get_record(nhs_number, values.creation_time) + self.datastore.insert_eps_record_object(self.internal_id, values.id, record) + self.keys.append((values.id, SortKey.RECORD.value)) + + start_date = "20230912" + end_date = "20230913" + range_start = indexes.SEPERATOR.join([nhs_number, start_date]) + range_end = indexes.SEPERATOR.join([nhs_number, end_date]) + + terms = self.datastore.return_terms_by_nhs_number_date( + self.internal_id, range_start, range_end + ) + + expected = [ + ( + f"{nhs_number}|{values.creation_time}|R2|{PrescriptionStatus.TO_BE_DISPENSED}", + values.id, + ) + for values in record_values[1:-1] + ] + + self.assertEqual(expected, terms) + + def test_return_terms_by_nhs_number_same_date(self): + """ + Test querying against the nhsNumberDate index and returning nhsNumberDate records. + Start and end date are the same. + """ + nhs_number = self.generate_nhs_number() + creation_times = ["20230911000000", "20230911000000"] + + record_values = [ + SimpleNamespace(id=self.generate_record_key(), creation_time=time) + for time in creation_times + ] + + for values in record_values: + record = self.get_record(nhs_number, values.creation_time) + self.datastore.insert_eps_record_object(self.internal_id, values.id, record) + self.keys.append((values.id, SortKey.RECORD.value)) + + date = "20230911" + range_start = indexes.SEPERATOR.join([nhs_number, date]) + range_end = indexes.SEPERATOR.join([nhs_number, date]) + + terms = self.datastore.return_terms_by_nhs_number_date( + self.internal_id, range_start, range_end + ) + + expected = [ + ( + f"{nhs_number}|{values.creation_time}|R2|{PrescriptionStatus.TO_BE_DISPENSED}", + values.id, + ) + for values in record_values + ] + + self.assertEqual(sorted(expected), sorted(terms)) + + def test_return_terms_by_nhs_number(self): + """ + Test querying against the nhsNumberDate index and returning nhsNumberDate records, without startDate. 
+ """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + terms = self.datastore.return_terms_by_nhs_number(self.internal_id, nhs_number) + + expected = [(nhs_number, prescription_id)] + + self.assertEqual(expected, terms) + + def test_exclude_next_activity_purge(self): + """ + Test querying against a record index and excluding records with a nextActivity of purge. + """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + prescription_id2 = self.generate_record_key() + self.keys.append((prescription_id2, SortKey.RECORD.value)) + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id2, record) + + terms = self.datastore.return_terms_by_nhs_number(self.internal_id, nhs_number) + + expected = [(nhs_number, prescription_id), (nhs_number, prescription_id2)] + self.assertEqual(sorted(expected), sorted(terms)) + + record["indexes"]["nextActivityNAD_bin"] = ["purge_20241114"] + record["SCN"] = record["SCN"] + 1 + self.datastore.insert_eps_record_object( + self.internal_id, prescription_id2, record, is_update=True + ) + + terms = self.datastore.return_terms_by_nhs_number(self.internal_id, nhs_number) + + expected = [(nhs_number, prescription_id)] + self.assertEqual(expected, terms) + + def test_return_terms_by_nhs_number_multiple(self): + """ + Test querying against the nhsNumberDate index and returning multiple nhsNumberDate records, without startDate. + """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + self.create_modify_insert_record(self.internal_id, nhs_number) + + terms = self.datastore.return_terms_by_nhs_number(self.internal_id, nhs_number) + + self.assertEqual(len(terms), 2) + + def test_return_terms_by_nom_pharm_status(self): + """ + Test querying against the nomPharmStatus index and returning nomPharmStatus records. + """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_nominated_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + self.create_modify_insert_record( + self.internal_id, nhs_number, self.modify_status, nominated=True + ) + + terms = self.datastore.get_nom_pharm_records_unfiltered(self.internal_id, NOM_ORG) + + expected = [prescription_id] + + self.assertEqual(expected, terms) + + def test_return_terms_by_nom_pharm_status_with_batch_size(self): + """ + Test querying against the nomPharmStatus index via the get_nominated_pharmacy_records method and returning + a defined number of nomPharmStatus records. 
+ """ + prescription_ids = [] + for _ in range(3): + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_nominated_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + self.create_modify_insert_record( + self.internal_id, nhs_number, self.modify_status, nominated=True + ) + prescription_ids.append(prescription_id) + + returned_prescription_ids, discarded_count = self.datastore.get_nominated_pharmacy_records( + NOM_ORG, 2, self.internal_id + ) + + self.assertEqual(discarded_count, 1) + self.assertEqual(len(returned_prescription_ids), 2) + self.assertTrue(set(returned_prescription_ids).issubset(set(prescription_ids))) + + def test_return_terms_by_nom_pharm_status_with_pagination(self): + """ + Test querying against the nomPharmStatus index and returning nomPharmStatus records. + Index attribute value is made artificially large, so that when projected into the index, + the combined returned items breach the pagination threshold. + """ + total_terms = 7 + nhs_number = self.generate_nhs_number() + [ + self.create_modify_insert_record( + self.internal_id, nhs_number, self.add_ballast_to_record, nominated=True + ) + for _ in range(total_terms) + ] + + terms = self.datastore.get_nom_pharm_records_unfiltered(self.internal_id, NOM_ORG) + + self.assertEqual(len(terms), total_terms) + + def test_return_terms_by_nom_pharm_status_unfiltered_with_limit(self): + """ + Test querying against the nomPharmStatus index and returning nomPharmStatus records. + Provide a limit for the query to adhere to. + """ + total_terms = 3 + limit = 2 + nhs_number = self.generate_nhs_number() + [ + self.create_modify_insert_record(self.internal_id, nhs_number, nominated=True) + for _ in range(total_terms) + ] + + terms = self.datastore.get_nom_pharm_records_unfiltered( + self.internal_id, NOM_ORG, limit=limit + ) + + self.assertEqual(len(terms), limit) + + def test_return_terms_by_nom_pharm_status_unfiltered_with_limit_and_pagination(self): + """ + Test querying against the nomPharmStatus index and returning nomPharmStatus records. + Provide a limit for the query to adhere to combined with pagination. + """ + total_terms = 7 + limit = 6 + nhs_number = self.generate_nhs_number() + [ + self.create_modify_insert_record( + self.internal_id, nhs_number, self.add_ballast_to_record, nominated=True + ) + for _ in range(total_terms) + ] + + terms = self.datastore.get_nom_pharm_records_unfiltered( + self.internal_id, NOM_ORG, limit=limit + ) + + self.assertEqual(len(terms), limit) + + def test_return_terms_by_nom_pharm(self): + """ + Test querying against the nomPharmStatus index using only the odsCode and returning nomPharmStatus records. + """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_nominated_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + id_of_prescription_with_other_status = self.create_modify_insert_record( + self.internal_id, nhs_number, self.modify_status, nominated=True + ) + + terms = self.datastore.get_all_pids_by_nominated_pharmacy(self.internal_id, NOM_ORG) + + expected = [prescription_id, id_of_prescription_with_other_status] + + expected.sort() + terms.sort() + + self.assertEqual(expected, terms) + + def test_return_terms_by_nhs_number_date_erd(self): + """ + Test querying against the nhsNumberDate index and returning multiple nhsNumberDates per record. 
+ """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_erd_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + range_start = f"{nhs_number}|20230911" + range_end = f"{nhs_number}|20230912" + terms = self.datastore.return_terms_by_nhs_number_date( + self.internal_id, range_start, range_end + ) + + expected = [ + ( + f"{nhs_number}|{CREATION_TIME}|R2|{PrescriptionStatus.TO_BE_DISPENSED}", + prescription_id, + ), + ( + f"{nhs_number}|{CREATION_TIME}|R2|{PrescriptionStatus.REPEAT_DISPENSE_FUTURE_INSTANCE}", + prescription_id, + ), + ] + + self.assertEqual(expected, terms) + + def test_return_terms_by_nhs_number_prescriber_dispenser_date(self): + """ + Test querying against the nhsNumberDate index and returning nhsNumberPrescriberDispenserDate records. + """ + prescription_id, nhs_number = self.get_new_record_keys() + self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, self.get_record(nhs_number) + ) + + self.create_modify_insert_record(self.internal_id, nhs_number, self.modify_prescriber) + self.create_modify_insert_record(self.internal_id, nhs_number, self.modify_dispenser) + + start_date = "20230911" + end_date = "20230912" + range_start = indexes.SEPERATOR.join([nhs_number, PRESC_ORG, DISP_ORG, start_date]) + range_end = indexes.SEPERATOR.join([nhs_number, PRESC_ORG, DISP_ORG, end_date]) + + terms = self.datastore.return_terms_by_index_date( + self.internal_id, indexes.INDEX_NHSNUMBER_PRDSDATE, range_start, range_end + ) + + expected = [ + ( + f"{nhs_number}|{PRESC_ORG}|{DISP_ORG}|{CREATION_TIME}|R2|{PrescriptionStatus.TO_BE_DISPENSED}", + prescription_id, + ) + ] + + self.assertEqual(expected, terms) + + def test_return_terms_by_nhs_number_prescriber_date(self): + """ + Test querying against the nhsNumberDate index and returning nhsNumberPrescriberDate records. + """ + prescription_id, nhs_number = self.get_new_record_keys() + self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, self.get_record(nhs_number) + ) + + self.create_modify_insert_record(self.internal_id, nhs_number, self.modify_prescriber) + + start_date = "20230911" + end_date = "20230912" + range_start = indexes.SEPERATOR.join([nhs_number, PRESC_ORG, start_date]) + range_end = indexes.SEPERATOR.join([nhs_number, PRESC_ORG, end_date]) + + terms = self.datastore.return_terms_by_index_date( + self.internal_id, indexes.INDEX_NHSNUMBER_PRDATE, range_start, range_end + ) + + expected = [ + ( + f"{nhs_number}|{PRESC_ORG}|{CREATION_TIME}|R2|{PrescriptionStatus.TO_BE_DISPENSED}", + prescription_id, + ) + ] + + self.assertEqual(expected, terms) + + def test_return_terms_by_nhs_number_dispenser_date(self): + """ + Test querying against the nhsNumberDate index and returning nhsNumberDispenserDate records. 
+ """ + prescription_id, nhs_number = self.get_new_record_keys() + self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, self.get_record(nhs_number) + ) + + self.create_modify_insert_record(self.internal_id, nhs_number, self.modify_dispenser) + + start_date = "20230911" + end_date = "20230912" + range_start = indexes.SEPERATOR.join([nhs_number, DISP_ORG, start_date]) + range_end = indexes.SEPERATOR.join([nhs_number, DISP_ORG, end_date]) + + terms = self.datastore.return_terms_by_index_date( + self.internal_id, indexes.INDEX_NHSNUMBER_DSDATE, range_start, range_end + ) + + expected = [ + ( + f"{nhs_number}|{DISP_ORG}|{CREATION_TIME}|R2|{PrescriptionStatus.TO_BE_DISPENSED}", + prescription_id, + ) + ] + + self.assertEqual(expected, terms) + + def test_return_terms_by_prescriber_dispenser_date(self): + """ + Test querying against the prescriberDate index and returning prescDispDate records. + """ + prescription_id, nhs_number = self.get_new_record_keys() + self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, self.get_record(nhs_number) + ) + + self.create_modify_insert_record(self.internal_id, nhs_number, self.modify_prescriber) + self.create_modify_insert_record(self.internal_id, nhs_number, self.modify_dispenser) + + start_date = "20230911" + end_date = "20230912" + range_start = indexes.SEPERATOR.join([PRESC_ORG, DISP_ORG, start_date]) + range_end = indexes.SEPERATOR.join([PRESC_ORG, DISP_ORG, end_date]) + + terms = self.datastore.return_terms_by_index_date( + self.internal_id, indexes.INDEX_PRESCRIBER_DSDATE, range_start, range_end + ) + + expected = [ + ( + f"{PRESC_ORG}|{DISP_ORG}|{CREATION_TIME}|R2|{PrescriptionStatus.TO_BE_DISPENSED}", + prescription_id, + ) + ] + + self.assertEqual(expected, terms) + + def test_return_terms_by_prescriber_date(self): + """ + Test querying against the prescriberDate index and returning prescriberDate records. + """ + prescription_id, nhs_number = self.get_new_record_keys() + self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, self.get_record(nhs_number) + ) + + self.create_modify_insert_record(self.internal_id, nhs_number, self.modify_prescriber) + + start_date = "20230911" + end_date = "20230912" + range_start = indexes.SEPERATOR.join([PRESC_ORG, start_date]) + range_end = indexes.SEPERATOR.join([PRESC_ORG, end_date]) + + terms = self.datastore.return_terms_by_index_date( + self.internal_id, indexes.INDEX_PRESCRIBER_DATE, range_start, range_end + ) + + expected = [ + ( + f"{PRESC_ORG}|{CREATION_TIME}|R2|{PrescriptionStatus.TO_BE_DISPENSED}", + prescription_id, + ) + ] + + self.assertEqual(expected, terms) + + def test_return_terms_by_dispenser_date(self): + """ + Test querying against the dispenserDate index and returning dispenserDate records. 
+ """ + prescription_id, nhs_number = self.get_new_record_keys() + self.datastore.insert_eps_record_object( + self.internal_id, prescription_id, self.get_record(nhs_number) + ) + + self.create_modify_insert_record(self.internal_id, nhs_number, self.modify_dispenser) + + start_date = "20230911" + end_date = "20230912" + range_start = indexes.SEPERATOR.join([DISP_ORG, start_date]) + range_end = indexes.SEPERATOR.join([DISP_ORG, end_date]) + + terms = self.datastore.return_terms_by_index_date( + self.internal_id, indexes.INDEX_DISPENSER_DATE, range_start, range_end + ) + + expected = [ + (f"{DISP_ORG}|{CREATION_TIME}|R2|{PrescriptionStatus.TO_BE_DISPENSED}", prescription_id) + ] + + self.assertEqual(expected, terms) + + def test_items_without_batch_claim_id_not_added_to_claim_id_index(self): + """ + Test claimId index doesn't contain any items without a batchClaimId attribute. + """ + batch_claim_ids = [] + for _ in range(2): + batch_id = str(uuid4()) + batch_claim_ids.append(batch_id) + self.keys.append((batch_id, SortKey.CLAIM.value)) + + batch_claims = [ + { + Key.PK.name: batch_claim_ids[0], + Key.SK.name: SortKey.CLAIM.value, + Attribute.BATCH_CLAIM_ID.name: batch_claim_ids[0], + ProjectedAttribute.BODY.name: "testBody", + }, + { + Key.PK.name: batch_claim_ids[1], + Key.SK.name: SortKey.CLAIM.value, + ProjectedAttribute.BODY.name: "testBody", + }, + ] + [ + self.datastore.client.put_item(self.internal_id, batch_claim) + for batch_claim in batch_claims + ] + + key_condition_expression = BotoKey(Key.SK.name).eq(SortKey.CLAIM.value) + items = self.datastore.client.query_index(GSI.CLAIM_ID.name, key_condition_expression, None) + self.assertEqual(len(items), 1) + + def test_query_next_activity_date(self): + """ + Test querying against the nextActivityDate index and returning lists of prescription keys. + """ + expected = [] + for _ in range(3): + prescription_id, nhs_number = self.get_new_record_keys() + expected.append(prescription_id) + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + actual = self.datastore.return_pids_due_for_next_activity( + self.internal_id, "createNoClaim_20250103", "createNoClaim_20250105" + ) + flat = [i for generator in actual for i in generator] + self.assertEqual(len(flat), 3) + + def test_query_next_activity_same_date(self): + """ + Test querying against the nextActivityDate index and + returning lists of prescription keys when dates are the same. 
+ """ + expected = [] + for _ in range(3): + prescription_id, nhs_number = self.get_new_record_keys() + expected.append(prescription_id) + record = self.get_record(nhs_number) + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + actual = self.datastore.return_pids_due_for_next_activity( + self.internal_id, "createNoClaim_20250104", "createNoClaim_20250104" + ) + flat = [i for generator in actual for i in generator] + self.assertEqual(len(flat), 3) + + # (a,) notation is to force a single item tuple as expected by parameterized.expand + @parameterized.expand( + [ + (fields.NEXTACTIVITY_EXPIRE,), + (fields.NEXTACTIVITY_CREATENOCLAIM,), + (fields.NEXTACTIVITY_DELETE,), + (fields.NEXTACTIVITY_PURGE,), + (fields.NEXTACTIVITY_READY,), + ] + ) + def test_query_next_activity_date_all_activities(self, next_activity): + """ + Test query works against all next activities + """ + next_activity_nad_bin = f"{next_activity}_20250104" + + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + record["indexes"]["nextActivityNAD_bin"] = [next_activity_nad_bin] + self.datastore.insert_eps_record_object(self.internal_id, prescription_id, record) + + actual = self.datastore.return_pids_due_for_next_activity( + self.internal_id, next_activity_nad_bin, next_activity_nad_bin + ) + flat = [i for generator in actual for i in generator] + self.assertEqual(flat, [prescription_id]) + + def test_query_next_activity_date_shards(self): + """ + Test query works against records on all shards + """ + expected = [] + + def add_record(next_activity): + """ + Add a record to the table with a given next activity shard, and append its prescriptionId to expected list + """ + prescription_id, nhs_number = self.get_new_record_keys() + record = self.get_record(nhs_number) + item = self.datastore.build_record(prescription_id, record, None, None) + + item[Attribute.NEXT_ACTIVITY.name] = next_activity + + self.datastore.client.insert_items(self.internal_id, [item], False) + expected.append([prescription_id]) + + # Add unsharded record + add_record("createNoClaim") + + # Add a record on each shard + for shard in range(1, NEXT_ACTIVITY_DATE_PARTITIONS + 1): + add_record(f"createNoClaim.{shard}") + + actual = self.datastore.return_pids_due_for_next_activity( + self.internal_id, "createNoClaim_20250104", "createNoClaim_20250104" + ) + consumed = [list(generator) for generator in list(actual)] + + self.assertEqual(expected, consumed) + + def test_query_claim_notification_store_time(self): + """ + Test querying against the claimNotificationStoreTime index and returning lists of document keys. 
+ """ + document_keys = [] + + def create_documents(doc_ref_title): + for i in range(3): + index = { + indexes.INDEX_STORE_TIME_DOC_REF_TITLE: [f"{doc_ref_title}_2024091110111{i}"], + indexes.INDEX_DELETE_DATE: ["20250911"], + indexes.INDEX_PRESCRIPTION_ID: [self.generate_prescription_id()], + } + + document_key = f"20240911_{doc_ref_title}_{i}" + document_keys.append(document_key) + self.keys.append((document_key, SortKey.DOCUMENT.value)) + + content = self.get_document_content() + self.datastore.insert_eps_document_object( + self.internal_id, document_key, {"content": content}, index + ) + + [create_documents(doc_ref_title) for doc_ref_title in ["ClaimNotification", "Other"]] + + query_response = self.datastore.return_claim_notification_ids_between_store_dates( + self.internal_id, "20240911101111", "20240912101111" + ) + + actual = list(query_response) + expected = [["20240911_ClaimNotification_1", "20240911_ClaimNotification_2"], []] + + self.assertEqual(actual, expected) + + def test_query_claim_notification_store_time_boundaries(self): + """ + Test querying against the claimNotificationStoreTime index and returning lists of document keys. + Creates two documents relating to each boundary argument. Asserts that one of each pair is returned. + """ + document_keys = [] + + def create_documents(store_date): + for i in range(2): + index = { + indexes.INDEX_STORE_TIME_DOC_REF_TITLE: [ + f"ClaimNotification_{store_date}10111{i}" + ], + indexes.INDEX_DELETE_DATE: ["20250911"], + indexes.INDEX_PRESCRIPTION_ID: [self.generate_prescription_id()], + } + + document_key = f"{store_date}_ClaimNotification_{i}" + document_keys.append(document_key) + self.keys.append((document_key, SortKey.DOCUMENT.value)) + + content = self.get_document_content() + self.datastore.insert_eps_document_object( + self.internal_id, document_key, {"content": content}, index + ) + + [create_documents(store_date) for store_date in ["20240911", "20240912"]] + + query_response = self.datastore.return_claim_notification_ids_between_store_dates( + self.internal_id, "20240911101111", "20240912101110" + ) + + actual = list(query_response) + expected = [["20240911_ClaimNotification_1"], ["20240912_ClaimNotification_0"]] + + self.assertEqual(actual, expected) + + def test_get_date_range_for_query(self): + """ + Test method for creating dates to query indexes against. + Method is inclusive, so slightly less than one day gives both relevant days. + """ + start_datetime_str = "20250911101112" + end_datetime_str = "20250912101111" + + actual = self.datastore.indexes._get_date_range_for_query( + start_datetime_str, end_datetime_str + ) + expected = ["20250911", "20250912"] + + self.assertEqual(actual, expected) + + @parameterized.expand( + [ + ["query_nhs_number_date", ["index", "nhsNumber"], [], str], + ["query_prescriber_date", ["index", "org"], [], str], + ["query_dispenser_date", ["index", "org"], [], str], + ["query_next_activity_date", [], [], lambda x: f"_{x}"], + ] + ) + def test_invalid_ranges(self, index, preargs, postargs, input_formatter=None): + """ + Test querying against indexes with invalid ranges. + """ + input_values = [2, 1] + if input_formatter: + input_values = [input_formatter(i) for i in input_values] + + args = preargs + input_values + postargs + + self.assertEqual(list(getattr(self.datastore.indexes, index)(*args)), []) + + def test_query_batch_claim_id_sequence_number(self): + """ + Test querying against the claimIdSequenceNumber(Nwssp) indexes and returning lists of batch claim IDs. 
+ """ + batch_claim1 = [str(uuid4()), 1, False] + batch_claim2 = [str(uuid4()), 2, False] + + nwssp_batch_claim1 = [str(uuid4()), 1, True] + nwssp_batch_claim2 = [str(uuid4()), 2, True] + + for batch_claim in [batch_claim1, batch_claim2, nwssp_batch_claim1, nwssp_batch_claim2]: + batch_claim_id, sqn_value, nwssp = batch_claim + self.keys.append((batch_claim_id, SortKey.CLAIM.value)) + + batch_claim = { + "Batch GUID": batch_claim_id, + "Claim ID List": [], + "Handle Time": "20241111121314", + "Sequence Number": sqn_value, + "Batch XML": b"", + } + if nwssp: + batch_claim["Nwssp Sequence Number"] = sqn_value + + self.datastore.store_batch_claim(self.internal_id, batch_claim) + + returned_batch_claim_ids = self.datastore.find_batch_claim_from_seq_number(1) + self.assertEqual(returned_batch_claim_ids, [batch_claim1[0]]) + + returned_batch_claim_ids = self.datastore.find_batch_claim_from_seq_number(2, True) + self.assertEqual(returned_batch_claim_ids, [nwssp_batch_claim2[0]]) + + @parameterized.expand( + [ + [("20240911", "20240912"), ("20240911000000", "20240912000000")], + [("2024091112", "2024091213"), ("20240911120000", "20240912130000")], + [("20240911121314", "20240912131415"), ("20240911121314", "20240912131415")], + ] + ) + def test_pad_or_trim_date(self, input_dates, expected_dates): + """ + Test padding or trimming dates used in index queries. + """ + start_date, end_date = input_dates + expected_start_date, expected_end_date = expected_dates + + actual_start_date = self.datastore.indexes.pad_or_trim_date(start_date) + actual_end_date = self.datastore.indexes.pad_or_trim_date(end_date) + + self.assertEqual(expected_start_date, actual_start_date) + self.assertEqual(expected_end_date, actual_end_date) + + @patch("random.randint") + def test_last_modified_index(self, patched_randint): + """ + Test lastModified index by calling directly. It is not used from application code. 
+ """ + patched_randint.return_value = 7 + + index_name = GSI.LAST_MODIFIED.name + pk = str(uuid4()) + self.keys.append((pk, "SK")) + + date_time = datetime.datetime.now() + datetime.timedelta(weeks=30) + + date_time_decimal = Decimal(str(date_time.timestamp())) + date_time_int = int(date_time.timestamp()) + + day = date_time.strftime("%Y%m%d") + item = {Key.PK.name: pk, Key.SK.name: "SK"} + + with freeze_time(date_time): + self.datastore.client.insert_items(self.internal_id, [item], log_item_size=False) + + for timestamp in [date_time_decimal, date_time_int]: + key_condition_expression = BotoKey(Attribute.LM_DAY.name).eq(f"{day}.7") & BotoKey( + Attribute.RIAK_LM.name + ).gte(timestamp) + + items = self.datastore.client.query_index(index_name, key_condition_expression, None) + + self.assertEqual(len(items), 1) diff --git a/tests/common/indexes_test.py b/tests/common/indexes_test.py new file mode 100644 index 0000000..9982101 --- /dev/null +++ b/tests/common/indexes_test.py @@ -0,0 +1,121 @@ +from os import path +from unittest.case import TestCase +from unittest.mock import Mock + +import simplejson +from dateutil.relativedelta import relativedelta +from freezegun import freeze_time + +from eps_spine_shared.common.indexes import EpsIndexFactory +from eps_spine_shared.common.prescription.repeat_dispense import RepeatDispenseRecord +from eps_spine_shared.common.prescription.repeat_prescribe import RepeatPrescribeRecord +from eps_spine_shared.common.prescription.single_prescribe import SinglePrescribeRecord +from eps_spine_shared.common.prescription.types import PrescriptionTreatmentType +from tests.mock_logger import MockLogObject + + +def get_nad_references(): + """ + Reference dictionary of information to be used during next activity + date calculation + """ + return { + "prescriptionExpiryPeriod": relativedelta(months=6), + "repeatDispenseExpiryPeriod": relativedelta(months=12), + "dataCleansePeriod": relativedelta(months=6), + "withDispenserActiveExpiryPeriod": relativedelta(days=180), + "expiredDeletePeriod": relativedelta(days=90), + "cancelledDeletePeriod": relativedelta(days=180), + "claimedDeletePeriod": relativedelta(days=9), + "notDispensedDeletePeriod": relativedelta(days=30), + "nominatedDownloadDateLeadTime": relativedelta(days=5), + "notificationDelayPeriod": relativedelta(days=180), + } + + +def _load_test_prescription(mock_log_object, prescription_id): + """ + Load prescription data from JSON files in the test resources directory. 
+ """ + test_dir_path = path.dirname(__file__) + full_path = path.join(test_dir_path, "prescription", "resources", prescription_id + ".json") + with open(full_path) as json_file: + prescription_dict = simplejson.load(json_file) + json_file.close() + + treatment_type = prescription_dict["prescription"]["prescriptionTreatmentType"] + if treatment_type == PrescriptionTreatmentType.ACUTE_PRESCRIBING: + prescription = SinglePrescribeRecord(mock_log_object, "test") + elif treatment_type == PrescriptionTreatmentType.REPEAT_PRESCRIBING: + prescription = RepeatPrescribeRecord(mock_log_object, "test") + elif treatment_type == PrescriptionTreatmentType.REPEAT_DISPENSING: + prescription = RepeatDispenseRecord(mock_log_object, "test") + else: + raise ValueError("Unknown treatment type %s" % str(treatment_type)) + + prescription.create_record_from_store(prescription_dict) + + return prescription + + +class PrescriptionIndexFactoryTest(TestCase): + """ + Tests for PrescriptionIndexFactory + """ + + def setUp(self): + """ + Common init code + """ + self.log_object = MockLogObject() + + @freeze_time("2025-07-15") + def test_build_indexes(self): + """ + Test that build_indexes method creates indexes as expected. + """ + prescription_id = "7D9625-Z72BF2-11E3A" + nad_references = get_nad_references() + index_factory = EpsIndexFactory(self.log_object, prescription_id, [], nad_references) + + context = Mock() + context.epsRecord = _load_test_prescription(self.log_object, prescription_id) + + record_indexes = index_factory.build_indexes(context) + for key, value in record_indexes.items(): + record_indexes[key] = sorted(value) + + expected_indexes = { + "prescribingSiteStatus_bin": ["Z99901_0006", "Z99901_0009"], + "dispensingSiteStatus_bin": ["F001M_0006", "F001M_0009"], + "nomPharmStatus_bin": ["F001M_0006", "F001M_0009"], + "nextActivityNAD_bin": ["createNoClaim_20141005"], + "nhsNumber_bin": ["9990406707"], + "nhsNumberDate_bin": [ + "9990406707|20140408144130|R2|0006", + "9990406707|20140408144130|R2|0009", + ], + "nhsNumberPrescriberDate_bin": [ + "9990406707|Z99901|20140408144130|R2|0006", + "9990406707|Z99901|20140408144130|R2|0009", + ], + "nhsNumberPrescDispDate_bin": [ + "9990406707|Z99901|F001M|20140408144130|R2|0006", + "9990406707|Z99901|F001M|20140408144130|R2|0009", + ], + "nhsNumberDispenserDate_bin": [ + "9990406707|F001M|20140408144130|R2|0006", + "9990406707|F001M|20140408144130|R2|0009", + ], + "prescriberDate_bin": [ + "Z99901|20140408144130|R2|0006", + "Z99901|20140408144130|R2|0009", + ], + "prescDispDate_bin": [ + "Z99901|F001M|20140408144130|R2|0006", + "Z99901|F001M|20140408144130|R2|0009", + ], + "dispenserDate_bin": ["F001M|20140408144130|R2|0006", "F001M|20140408144130|R2|0009"], + "delta_bin": ["20250715000000|10"], + } + self.assertEqual(record_indexes, expected_indexes) diff --git a/tests/common/prescription/build_indexes_test.py b/tests/common/prescription/build_indexes_test.py new file mode 100644 index 0000000..53b2793 --- /dev/null +++ b/tests/common/prescription/build_indexes_test.py @@ -0,0 +1,260 @@ +from unittest.case import TestCase +from unittest.mock import Mock + +from eps_spine_shared.common.prescription.record import PrescriptionRecord + + +class BuildIndexesTest(TestCase): + """ + Test Case for testing that indexes are built correctly + """ + + def setUp(self): + """ + Set up all valid values - tests will overwrite these where required. 
+ """ + + mock = Mock() + attrs = {"writeLog.return_value": None} + mock.configure_mock(**attrs) + log_object = mock + internal_id = "test" + + self.prescription = PrescriptionRecord(log_object, internal_id) + self.prescription.prescription_record = {} + self.prescription.prescription_record["prescription"] = {} + self.prescription.prescription_record["instances"] = {} + self.prescription.prescription_record["patient"] = {} + self.prescription.prescription_record["patient"]["nhsNumber"] = "TESTPatient" + + def test_add_release_and_status_string(self): + """ + tests that release and status are added to the passed in index. + """ + is_string = True + index_prefix = "indexPrefix" + # set prescription to be 37 characters long ie R1 + temp = "0123456789012345678901234567890123456" + self.prescription.prescription_record["prescription"]["prescriptionID"] = temp + self.prescription.prescription_record["instances"]["0"] = {} + self.prescription.prescription_record["instances"]["0"]["prescriptionStatus"] = "0001" + result_set = self.prescription.add_release_and_status(index_prefix, is_string) + self.assertEqual( + result_set, + ["indexPrefix|R1|0001"], + "Failed to create expected release and status suffix", + ) + + def test_add_release_and_status_list(self): + """ + tests that release and status are added to the passed in index where the passed in index is a list of indexes. + """ + is_string = False + index_prefix = ["indexPrefix1", "indexPrefix2"] + # set prescription to be 37 characters long ie R1 + temp = "0123456789012345678901234567890123456" + self.prescription.prescription_record["prescription"]["prescriptionID"] = temp + self.prescription.prescription_record["instances"]["0"] = {} + self.prescription.prescription_record["instances"]["0"]["prescriptionStatus"] = "0001" + result_set = self.prescription.add_release_and_status(index_prefix, is_string) + self.assertEqual( + result_set, + ["indexPrefix1|R1|0001", "indexPrefix2|R1|0001"], + "Failed to create expected release and status suffix for list of indexes", + ) + + def test_add_release_and_status_string_multiple_status(self): + """ + tests that release and multiple status are added to the passed in index. 
+ """ + is_string = True + index_prefix = "indexPrefix" + # set prescription to be 37 characters long ie R1 + temp = "0123456789012345678901234567890123456" + self.prescription.prescription_record["prescription"]["prescriptionID"] = temp + self.prescription.prescription_record["instances"]["0"] = {} + self.prescription.prescription_record["instances"]["0"]["prescriptionStatus"] = "0001" + self.prescription.prescription_record["instances"]["1"] = {} + self.prescription.prescription_record["instances"]["1"]["prescriptionStatus"] = "0002" + result_set = self.prescription.add_release_and_status(index_prefix, is_string) + self.assertEqual( + sorted(result_set), + sorted(["indexPrefix|R1|0001", "indexPrefix|R1|0002"]), + "Failed to create expected release and status suffix", + ) + + def test_nhs_num_presc_disp_index(self): + """ + Given a prescription for a specific NHS Number and Prescriber that has been dispensed + that the correct index is created + """ + self.prescription.prescription_record["prescription"][ + "prescribingOrganization" + ] = "TESTPrescriber" + self.prescription.prescription_record["prescription"]["prescriptionTime"] = "TESTtime" + self.prescription.prescription_record["instances"]["0"] = {} + self.prescription.prescription_record["instances"]["0"]["dispense"] = {} + self.prescription.prescription_record["instances"]["0"]["dispense"][ + "dispensingOrganization" + ] = "TESTdispenser" + + [success, created_index] = ( + self.prescription.return_nhs_number_prescriber_dispenser_date_index() + ) + self.assertEqual(success, True, "Failed to successfully create index") + expected_index = set(["TESTPatient|TESTPrescriber|TESTdispenser|TESTtime"]) + self.assertEqual( + created_index, + expected_index, + "Created index " + str(created_index) + " expecting " + str(expected_index), + ) + + def test_nhs_num_presc_disp_index_no_dispenser(self): + """ + Given a prescription for a specific NHS Number and Prescriber that has been dispensed + that the correct index is created + """ + self.prescription.prescription_record["prescription"][ + "prescribingOrganization" + ] = "TESTPrescriber" + self.prescription.prescription_record["prescription"]["prescriptionTime"] = "TESTtime" + + [success, created_index] = ( + self.prescription.return_nhs_number_prescriber_dispenser_date_index() + ) + self.assertEqual(success, True, "Failed to successfully create index") + expected_index = set([]) + self.assertEqual( + created_index, + expected_index, + "Created index " + str(created_index) + " expecting " + str(expected_index), + ) + + def test_presc_disp_index(self): + """ + Given a prescription for a specific NHS Number and Prescriber that has been dispensed + that the correct index is created + """ + self.prescription.prescription_record["prescription"][ + "prescribingOrganization" + ] = "TESTPrescriber" + self.prescription.prescription_record["prescription"]["prescriptionTime"] = "TESTtime" + self.prescription.prescription_record["instances"]["0"] = {} + self.prescription.prescription_record["instances"]["0"]["dispense"] = {} + self.prescription.prescription_record["instances"]["0"]["dispense"][ + "dispensingOrganization" + ] = "TESTdispenser" + + [success, created_index] = self.prescription.return_prescriber_dispenser_date_index() + self.assertEqual(success, True, "Failed to successfully create index") + expected_index = set(["TESTPrescriber|TESTdispenser|TESTtime"]) + self.assertEqual( + created_index, + expected_index, + "Created index " + str(created_index) + " expecting " + str(expected_index), + ) + + 
def test_presc_disp_index_no_dispenser(self): + """ + Given a prescription for a specific NHS Number and Prescriber that has been dispensed + that the correct index is created + """ + self.prescription.prescription_record["prescription"][ + "prescribingOrganization" + ] = "TESTPrescriber" + self.prescription.prescription_record["prescription"]["prescriptionTime"] = "TESTtime" + + [success, created_index] = self.prescription.return_prescriber_dispenser_date_index() + self.assertEqual(success, True, "Failed to successfully create index") + expected_index = set([]) + self.assertEqual( + created_index, + expected_index, + "Created index " + str(created_index) + " expecting " + str(expected_index), + ) + + def test_disp_index(self): + """ + Given a prescription for a specific NHS Number and Prescriber that has been dispensed + that the correct index is created + """ + self.prescription.prescription_record["prescription"][ + "prescribingOrganization" + ] = "TESTPrescriber" + self.prescription.prescription_record["prescription"]["prescriptionTime"] = "TESTtime" + self.prescription.prescription_record["instances"]["0"] = {} + self.prescription.prescription_record["instances"]["0"]["dispense"] = {} + self.prescription.prescription_record["instances"]["0"]["dispense"][ + "dispensingOrganization" + ] = "TESTdispenser" + + [success, created_index] = self.prescription.return_dispenser_date_index() + self.assertEqual(success, True, "Failed to successfully create index") + expected_index = set(["TESTdispenser|TESTtime"]) + self.assertEqual( + created_index, + expected_index, + "Created index " + str(created_index) + " expecting " + str(expected_index), + ) + + def test_disp_index_no_dispenser(self): + """ + Given a prescription for a specific NHS Number and Prescriber that has been dispensed + that the correct index is created + """ + self.prescription.prescription_record["prescription"][ + "prescribingOrganization" + ] = "TESTPrescriber" + self.prescription.prescription_record["prescription"]["prescriptionTime"] = "TESTtime" + + [success, created_index] = self.prescription.return_dispenser_date_index() + self.assertEqual(success, True, "Failed to successfully create index") + expected_index = set([]) + self.assertEqual( + created_index, + expected_index, + "Created index " + str(created_index) + " expecting " + str(expected_index), + ) + + def test_nhs_num_disp_index(self): + """ + Given a prescription for a specific NHS Number and Prescriber that has been dispensed + that the correct index is created + """ + self.prescription.prescription_record["prescription"][ + "prescribingOrganization" + ] = "TESTPrescriber" + self.prescription.prescription_record["prescription"]["prescriptionTime"] = "TESTtime" + self.prescription.prescription_record["instances"]["0"] = {} + self.prescription.prescription_record["instances"]["0"]["dispense"] = {} + self.prescription.prescription_record["instances"]["0"]["dispense"][ + "dispensingOrganization" + ] = "TESTdispenser" + + [success, created_index] = self.prescription.return_nhs_number_dispenser_date_index() + self.assertEqual(success, True, "Failed to successfully create index") + expected_index = set(["TESTPatient|TESTdispenser|TESTtime"]) + self.assertEqual( + created_index, + expected_index, + "Created index " + str(created_index) + " expecting " + str(expected_index), + ) + + def test_nhs_num_disp_index_no_dispenser(self): + """ + Given a prescription for a specific NHS Number and Prescriber that has been dispensed + that the correct index is created + """ + 
self.prescription.prescription_record["prescription"][ + "prescribingOrganization" + ] = "TESTPrescriber" + self.prescription.prescription_record["prescription"]["prescriptionTime"] = "TESTtime" + + [success, created_index] = self.prescription.return_nhs_number_dispenser_date_index() + self.assertEqual(success, True, "Failed to successfully create index") + expected_index = set([]) + self.assertEqual( + created_index, + expected_index, + "Created index " + str(created_index) + " expecting " + str(expected_index), + ) diff --git a/tests/common/prescription/include_next_activity_for_instance_test.py b/tests/common/prescription/include_next_activity_for_instance_test.py new file mode 100644 index 0000000..668d48d --- /dev/null +++ b/tests/common/prescription/include_next_activity_for_instance_test.py @@ -0,0 +1,354 @@ +from unittest.case import TestCase +from unittest.mock import Mock + +from eps_spine_shared.common.prescription import fields +from eps_spine_shared.common.prescription.record import PrescriptionRecord + + +class IncludeNextActivityForInstanceTest(TestCase): + """ + Test Case for testing the Include Next Activity for Instance Test + """ + + def setUp(self): + """ + Set up all valid values - tests will overwrite these where required. + """ + + mock = Mock() + attrs = {"writeLog.return_value": None} + mock.configure_mock(**attrs) + log_object = mock + internal_id = "test" + + self.mock_record: PrescriptionRecord = PrescriptionRecord(log_object, internal_id) + + def test_include_next_activity_1(self): + """ + Test that 'True' is returned for acute, current, first and final issue when: + - currentInstance = 1, + - instanceNumber = 1, + - max_repeats = 1, + - nextActivity = expire + """ + activity = fields.NEXTACTIVITY_EXPIRE + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 1, 1, 1)) + + def test_include_next_activity_2(self): + """ + Test that 'True' is returned for acute, current, first and final issue when: + - currentInstance = 1, + - instanceNumber = 1, + - max_repeats = 1, + - nextActivity = createNoClaim + """ + activity = fields.NEXTACTIVITY_CREATENOCLAIM + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 1, 1, 1)) + + def test_include_next_activity_3(self): + """ + Test that 'True' is returned for acute, current, first and final issue when: + - currentInstance = 1, + - instanceNumber = 1, + - max_repeats = 1, + - nextActivity = ready + """ + activity = fields.NEXTACTIVITY_READY + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 1, 1, 1)) + + def test_include_next_activity_4(self): + """ + Test that 'True' is returned for acute, current, first and final issue when: + - currentInstance = 1, + - instanceNumber = 1, + - max_repeats = 1, + - nextActivity = delete + """ + activity = fields.NEXTACTIVITY_DELETE + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 1, 1, 1)) + + def test_include_next_activity_5(self): + """ + Test that 'True' is returned for repeat dispense, current and first issue when: + - currentInstance = 1, + - instanceNumber = 1, + - max_repeats = 3, + - nextActivity = expire + """ + activity = fields.NEXTACTIVITY_EXPIRE + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 1, 1, 3)) + + def test_include_next_activity_6(self): + """ + Test that 'True' is returned for repeat dispense, current but not final issue when: + - currentInstance = 1, + - instanceNumber = 1, + - max_repeats = 3, + - nextActivity = 
createNoClaim + """ + activity = fields.NEXTACTIVITY_CREATENOCLAIM + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 1, 1, 3)) + + def test_include_next_activity_7(self): + """ + Test that 'True' is returned for repeat dispense, current but not final issue when: + - currentInstance = 1, + - instanceNumber = 1, + - max_repeats = 3, + - nextActivity = ready + """ + activity = fields.NEXTACTIVITY_READY + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 1, 1, 3)) + + def test_include_next_activity_8(self): + """ + Test that 'False' is returned for repeat dispense, current but not final issue when: + - currentInstance = 1, + - instanceNumber = 1, + - max_repeats = 3, + - nextActivity = delete + """ + activity = fields.NEXTACTIVITY_DELETE + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 1, 1, 3)) + + def test_include_next_activity_9(self): + """ + Test that 'False' is returned for repeat dispense, previous issue when: + - currentInstance = 2, + - instanceNumber = 1, + - max_repeats = 3, + - nextActivity = expire + """ + activity = fields.NEXTACTIVITY_EXPIRE + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 1, 2, 3)) + + def test_include_next_activity_10(self): + """ + Test that 'True' is returned for repeat dispense, previous issue when: + - currentInstance = 2, + - instanceNumber = 1, + - max_repeats = 3, + - nextActivity = createNoClaim + """ + activity = fields.NEXTACTIVITY_CREATENOCLAIM + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 1, 2, 3)) + + def test_include_next_activity_11(self): + """ + Test that 'False' is returned for repeat dispense, previous issue when: + - currentInstance = 2, + - instanceNumber = 1, + - max_repeats = 3, + - nextActivity = ready + """ + activity = fields.NEXTACTIVITY_READY + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 1, 2, 3)) + + def test_include_next_activity_12(self): + """ + Test that 'False' is returned for repeat dispense, previous issue when: + - currentInstance = 2, + - instanceNumber = 1, + - max_repeats = 3, + - nextActivity = delete + """ + activity = fields.NEXTACTIVITY_DELETE + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 1, 2, 3)) + + def test_include_next_activity_13(self): + """ + Test that 'True' is returned for repeat dispense, current but not first or final issue when: + - currentInstance = 2, + - instanceNumber = 2, + - max_repeats = 3, + - nextActivity = expire + """ + activity = fields.NEXTACTIVITY_EXPIRE + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 2, 2, 3)) + + def test_include_next_activity_14(self): + """ + Test that 'True' is returned for repeat dispense, current but not first or final issue when: + - currentInstance = 2, + - instanceNumber = 2, + - max_repeats = 3, + - nextActivity = createNoClaim + """ + activity = fields.NEXTACTIVITY_CREATENOCLAIM + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 2, 2, 3)) + + def test_include_next_activity_15(self): + """ + Test that 'True' is returned for repeat dispense, current but not first or final issue when: + - currentInstance = 2, + - instanceNumber = 2, + - max_repeats = 3, + - nextActivity = ready + """ + activity = fields.NEXTACTIVITY_READY + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 2, 2, 3)) + + def test_include_next_activity_16(self): + """ + Test that 
'False' is returned for repeat dispense, current but not first or final issue when: + - currentInstance = 2, + - instanceNumber = 2, + - max_repeats = 3, + - nextActivity = delete + """ + activity = fields.NEXTACTIVITY_DELETE + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 2, 2, 3)) + + def test_include_next_activity_17(self): + """ + Test that 'True' is returned for repeat dispense, current and final issue when: + - currentInstance = 3, + - instanceNumber = 3, + - max_repeats = 3, + - nextActivity = expire + """ + activity = fields.NEXTACTIVITY_EXPIRE + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 3, 3, 3)) + + def test_include_next_activity_18(self): + """ + Test that 'True' is returned for repeat dispense, current and final issue when: + - currentInstance = 3, + - instanceNumber = 3, + - max_repeats = 3, + - nextActivity = createNoClaim + """ + activity = fields.NEXTACTIVITY_CREATENOCLAIM + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 3, 3, 3)) + + def test_include_next_activity_19(self): + """ + Test that 'True' is returned for repeat dispense, current and final issue when: + - currentInstance = 3, + - instanceNumber = 3, + - max_repeats = 3, + - nextActivity = ready + """ + activity = fields.NEXTACTIVITY_READY + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 3, 3, 3)) + + def test_include_next_activity_20(self): + """ + Test that 'True' is returned for repeat dispense, current and final issue when: + - currentInstance = 3, + - instanceNumber = 3, + - max_repeats = 3, + - nextActivity = delete + """ + activity = fields.NEXTACTIVITY_DELETE + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 3, 3, 3)) + + def test_include_next_activity_21(self): + """ + Test that 'False' is returned for repeat dispense, future issue when: + - currentInstance = 1, + - instanceNumber = 3, + - max_repeats = 3, + - nextActivity = expire + """ + activity = fields.NEXTACTIVITY_EXPIRE + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 3, 1, 3)) + + def test_include_next_activity_22(self): + """ + Test that 'False' is returned for repeat dispense, future issue when: + - currentInstance = 1, + - instanceNumber = 3, + - max_repeats = 3, + - nextActivity = createNoClaim + """ + activity = fields.NEXTACTIVITY_CREATENOCLAIM + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 3, 1, 3)) + + def test_include_next_activity_23(self): + """ + Test that 'False' is returned for repeat dispense, future issue when: + - currentInstance = 1, + - instanceNumber = 3, + - max_repeats = 3, + - nextActivity = ready + """ + activity = fields.NEXTACTIVITY_READY + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 3, 1, 3)) + + def test_include_next_activity_24(self): + """ + Test that 'False' is returned for repeat dispense, future issue when: + - currentInstance = 1, + - instanceNumber = 3, + - max_repeats = 3, + - nextActivity = delete + """ + activity = fields.NEXTACTIVITY_DELETE + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 3, 1, 3)) + + def test_include_next_activity_25(self): + """ + Test that 'True' is returned for acute, current, first and final issue when: + - currentInstance = 1, + - instanceNumber = 1, + - max_repeats = 1, + - nextActivity = purge + """ + activity = fields.NEXTACTIVITY_PURGE +
self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 1, 1, 1)) + + def test_include_next_activity_26(self): + """ + Test that 'False' is returned for repeat dispense, current but not final issue when: + - currentInstance = 1, + - instanceNumber = 1, + - max_repeats = 3, + - nextActivity = purge + """ + activity = fields.NEXTACTIVITY_PURGE + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 1, 1, 3)) + + def test_include_next_activity_27(self): + """ + Test that 'False' is returned for repeat dispense, previous issue when: + - currentInstance = 2, + - instanceNumber = 1, + - max_repeats = 3, + - nextActivity = purge + """ + activity = fields.NEXTACTIVITY_PURGE + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 1, 2, 3)) + + def test_include_next_activity_28(self): + """ + Test that 'False' is returned for repeat dispense, current but not first or final issue when: + - currentInstance = 2, + - instanceNumber = 2, + - max_repeats = 3, + - nextActivity = purge + """ + activity = fields.NEXTACTIVITY_PURGE + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 2, 2, 3)) + + def test_include_next_activity_29(self): + """ + Test that 'True' is returned for repeat dispense, current and final issue when: + - currentInstance = 3, + - instanceNumber = 3, + - max_repeats = 3, + - nextActivity = purge + """ + activity = fields.NEXTACTIVITY_PURGE + self.assertTrue(self.mock_record._include_next_activity_for_instance(activity, 3, 3, 3)) + + def test_include_next_activity_30(self): + """ + Test that 'False' is returned for repeat dispense, future issue when: + - currentInstance = 1, + - instanceNumber = 3, + - max_repeats = 3, + - nextActivity = purge + """ + activity = fields.NEXTACTIVITY_PURGE + self.assertFalse(self.mock_record._include_next_activity_for_instance(activity, 3, 1, 3)) diff --git a/tests/common/prescription/next_activity_generator_test.py b/tests/common/prescription/next_activity_generator_test.py new file mode 100644 index 0000000..0f8fa76 --- /dev/null +++ b/tests/common/prescription/next_activity_generator_test.py @@ -0,0 +1,613 @@ +from datetime import datetime +from unittest.case import TestCase + +from dateutil.relativedelta import relativedelta + +from eps_spine_shared.common.prescription.record import NextActivityGenerator + + +class NextActivityGeneratorTest(TestCase): + """ + Test Case for the next activity index generator + """ + + def setUp(self): + """ + Set up all valid values - tests will overwrite these where required. 
+ """ + self.next_activity_generator = NextActivityGenerator(None, None) + + self.nad_reference = {} + self.nad_reference["prescriptionExpiryPeriod"] = relativedelta(months=+6) + self.nad_reference["repeatDispenseExpiryPeriod"] = relativedelta(months=+12) + self.nad_reference["dataCleansePeriod"] = relativedelta(months=+6) + self.nad_reference["withDispenserActiveExpiryPeriod"] = relativedelta(days=+180) + self.nad_reference["expiredDeletePeriod"] = relativedelta(days=+90) + self.nad_reference["cancelledDeletePeriod"] = relativedelta(days=+180) + self.nad_reference["claimedDeletePeriod"] = relativedelta(days=+9) + self.nad_reference["notDispensedDeletePeriod"] = relativedelta(days=+30) + self.nad_reference["nominatedDownloadDateLeadTime"] = relativedelta(days=+5) + self.nad_reference["notificationDelayPeriod"] = relativedelta(days=+180) + self.nad_reference["purgedDeletePeriod"] = relativedelta(days=+365) + + self.nad_status = {} + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionDate"] = "20120101" + self.nad_status["prescribingSiteTestStatus"] = True + self.nad_status["dispenseWindowHighDate"] = "20121231" + self.nad_status["dispenseWindowLowDate"] = "20120101" + # The nominated download date is the date that the next issue should be released + # for download (already taking account of the lead time) + self.nad_status["nominatedDownloadDate"] = "20120101" + self.nad_status["lastDispenseDate"] = "20120101" + self.nad_status["completionDate"] = "20120101" + self.nad_status["claimSentDate"] = "20120101" + self.nad_status["handleTime"] = "20120101" + self.nad_status["prescriptionStatus"] = "0001" + self.nad_status["instanceNumber"] = 1 + self.nad_status["releaseVersion"] = "R2" + self.nad_status["lastDispenseNotificationMsgRef"] = "20180918150922275520_2FA340_2" + + def perform_test_next_activity_date(self, expected_result): + """ + Test Runner for next activity and next activity date method. 
Takes the created + nad_status (on self) and compares it to the expected result + """ + results = self.next_activity_generator.next_activity_date( + self.nad_status, self.nad_reference + ) + [next_activity, next_activity_date, _] = results + self.assertTrue([next_activity, next_activity_date] == expected_result) + + def test_next_activity_date_scenario_1(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0001" + self.nad_status["prescriptionDate"] = "20111031" + self.perform_test_next_activity_date(["expire", "20120430"]) + + def test_next_activity_date_scenario_2(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0001" + self.nad_status["prescriptionDate"] = "20110829" + self.perform_test_next_activity_date(["expire", "20120229"]) + + def test_next_activity_date_scenario_3(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0001" + self.nad_status["prescriptionDate"] = "20111031" + self.perform_test_next_activity_date(["expire", "20120430"]) + + def test_next_activity_date_scenario_4(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0001" + self.nad_status["prescriptionDate"] = "20110829" + self.perform_test_next_activity_date(["expire", "20120229"]) + + def test_next_activity_date_scenario_5(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0001" + self.nad_status["prescriptionDate"] = "20111031" + self.nad_status["dispenseWindowHighDate"] = "20120601" + self.perform_test_next_activity_date(["expire", "20120430"]) + + def test_next_activity_date_scenario_6(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - check that expiry is not limited by Dispense Window + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0001" + self.nad_status["prescriptionDate"] = "20120131" + self.nad_status["dispenseWindowHighDate"] = "20120401" + self.perform_test_next_activity_date(["expire", "20120731"]) + + def test_next_activity_date_scenario_7(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0002" + self.nad_status["prescriptionDate"] = "20110829" + self.perform_test_next_activity_date(["expire", "20120229"]) + + def test_next_activity_date_scenario_8(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0002" + self.nad_status["prescriptionDate"] = "20111031" + self.perform_test_next_activity_date(["expire", 
"20120430"]) + + def test_next_activity_date_scenario_9(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0002" + self.nad_status["prescriptionDate"] = "20110829" + self.perform_test_next_activity_date(["expire", "20120229"]) + + def test_next_activity_date_scenario_10(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0002" + self.nad_status["prescriptionDate"] = "20111031" + self.perform_test_next_activity_date(["expire", "20120430"]) + + def test_next_activity_date_scenario_11(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0002" + self.nad_status["prescriptionDate"] = "20110829" + self.nad_status["dispenseWindowHighDate"] = "20120601" + self.perform_test_next_activity_date(["expire", "20120229"]) + + def test_next_activity_date_scenario_12(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0002" + self.nad_status["prescriptionDate"] = "20111031" + self.nad_status["dispenseWindowHighDate"] = "20120601" + self.perform_test_next_activity_date(["expire", "20120430"]) + + def test_next_activity_date_scenario_13(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - check that expiry is not limited by Dispense Window + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0002" + self.nad_status["prescriptionDate"] = "20111031" + self.nad_status["dispenseWindowHighDate"] = "20120401" + self.perform_test_next_activity_date(["expire", "20120430"]) + + def test_next_activity_date_scenario_14(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0003" + self.nad_status["prescriptionDate"] = "20110829" + self.nad_status["lastDispenseDate"] = "20110928" + self.perform_test_next_activity_date(["createNoClaim", "20120326"]) + + def test_next_activity_date_scenario_14b(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute R1 - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0003" + self.nad_status["prescriptionDate"] = "20110829" + self.nad_status["lastDispenseDate"] = "20110928" + self.nad_status["releaseVersion"] = "R1" + self.perform_test_next_activity_date(["expire", "20120229"]) + + def test_next_activity_date_scenario_15(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0003" + self.nad_status["prescriptionDate"] = "20111031" + self.nad_status["lastDispenseDate"] = "20111130" + self.perform_test_next_activity_date(["createNoClaim", "20120528"]) + + 
def test_next_activity_date_scenario_15b(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute R1 - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0003" + self.nad_status["prescriptionDate"] = "20111031" + self.nad_status["lastDispenseDate"] = "20111130" + self.nad_status["releaseVersion"] = "R1" + self.perform_test_next_activity_date(["expire", "20120430"]) + + def test_next_activity_date_scenario_16(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0003" + self.nad_status["prescriptionDate"] = "20110829" + self.nad_status["lastDispenseDate"] = "20110928" + self.perform_test_next_activity_date(["createNoClaim", "20120326"]) + + def test_next_activity_date_scenario_17(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0003" + self.nad_status["prescriptionDate"] = "20111031" + self.nad_status["lastDispenseDate"] = "20111130" + self.perform_test_next_activity_date(["createNoClaim", "20120528"]) + + def test_next_activity_date_scenario_18(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0003" + self.nad_status["prescriptionDate"] = "20110829" + self.nad_status["dispenseWindowHighDate"] = "20120601" + self.nad_status["lastDispenseDate"] = "20110928" + self.perform_test_next_activity_date(["createNoClaim", "20120326"]) + + def test_next_activity_date_scenario_19(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0003" + self.nad_status["prescriptionDate"] = "20111031" + self.nad_status["dispenseWindowHighDate"] = "20120601" + self.nad_status["lastDispenseDate"] = "20111130" + self.perform_test_next_activity_date(["createNoClaim", "20120528"]) + + def test_next_activity_date_scenario_20(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - check that expiry date is not limited by Dispense Window + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0003" + self.nad_status["prescriptionDate"] = "20111031" + self.nad_status["dispenseWindowHighDate"] = "20120401" + self.nad_status["lastDispenseDate"] = "20120301" + self.perform_test_next_activity_date(["createNoClaim", "20120828"]) + + def test_next_activity_date_scenario_21(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - no claim window falls before expiry + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0003" + self.nad_status["prescriptionDate"] = "20111031" + self.nad_status["dispenseWindowHighDate"] = "20120601" + self.nad_status["lastDispenseDate"] = "20111031" + self.perform_test_next_activity_date(["createNoClaim", "20120428"]) + + def test_next_activity_date_scenario_22(self): + """ + Unit test for Next Activity and 
Next Activity Date Generator: + Acute - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0004" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["completionDate"] = "20120329" + self.perform_test_next_activity_date(["delete", "20120627"]) + + def test_next_activity_date_scenario_23(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0004" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["completionDate"] = "20120329" + self.perform_test_next_activity_date(["delete", "20120627"]) + + def test_next_activity_date_scenario_24(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0004" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["completionDate"] = "20120329" + self.perform_test_next_activity_date(["delete", "20120627"]) + + def test_next_activity_date_scenario_25(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0005" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["completionDate"] = "20120329" + self.perform_test_next_activity_date(["delete", "20120925"]) + + def test_next_activity_date_scenario_25a(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Specific test for the migrated data scenario where completionDate is False, not a valid + date.
+ """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0005" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["completionDate"] = False + expectedDate = datetime.now() + relativedelta(days=+180) + self.perform_test_next_activity_date(["delete", expectedDate.strftime("%Y%m%d")]) + + def test_next_activity_date_scenario_26(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0005" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["completionDate"] = "20120329" + self.perform_test_next_activity_date(["delete", "20120925"]) + + def test_next_activity_date_scenario_27(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0005" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["completionDate"] = "20120329" + self.perform_test_next_activity_date(["delete", "20120925"]) + + def test_next_activity_date_scenario_28(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0006" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["dispenseWindowHighDate"] = "20120728" + self.nad_status["lastDispenseDate"] = "20110831" + self.nad_status["completionDate"] = "20110831" + self.perform_test_next_activity_date(["createNoClaim", "20120227"]) + + def test_next_activity_date_scenario_28b(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute R1 - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0006" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["dispenseWindowHighDate"] = "20120728" + self.nad_status["lastDispenseDate"] = "20110831" + self.nad_status["completionDate"] = "20110831" + self.nad_status["releaseVersion"] = "R1" + self.perform_test_next_activity_date(["delete", "20120227"]) + + def test_next_activity_date_scenario_29(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 31st -> 1st + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0006" + self.nad_status["prescriptionDate"] = "20110331" + self.nad_status["dispenseWindowHighDate"] = "20120330" + self.nad_status["lastDispenseDate"] = "20110831" + self.nad_status["completionDate"] = "20110831" + self.perform_test_next_activity_date(["createNoClaim", "20120227"]) + + def test_next_activity_date_scenario_30(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0006" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["dispenseWindowHighDate"] = "20120728" + self.nad_status["lastDispenseDate"] = "20110831" + self.nad_status["completionDate"] = "20110831" + self.perform_test_next_activity_date(["createNoClaim", "20120227"]) + + def test_next_activity_date_scenario_31(self): + """ + Unit test 
for Next Activity and Next Activity Date Generator: + Acute - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0007" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["completionDate"] = "20120130" + self.perform_test_next_activity_date(["delete", "20120229"]) + + def test_next_activity_date_scenario_32(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0007" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["completionDate"] = "20120130" + self.perform_test_next_activity_date(["delete", "20120229"]) + + def test_next_activity_date_scenario_33(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0007" + self.nad_status["prescriptionDate"] = "20110729" + self.nad_status["completionDate"] = "20120130" + self.perform_test_next_activity_date(["delete", "20120229"]) + + def test_next_activity_date_scenario_34(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0008" + self.nad_status["prescriptionDate"] = "20110731" + self.nad_status["completionDate"] = "20111231" + self.nad_status["claimSentDate"] = "20120101" + self.perform_test_next_activity_date(["delete", "20120110"]) + + def test_next_activity_date_scenario_37(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Acute - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0001" + self.nad_status["prescriptionStatus"] = "0009" + self.nad_status["prescriptionDate"] = "20110731" + self.nad_status["completionDate"] = "20111231" + self.nad_status["claimSentDate"] = "20120101" + self.perform_test_next_activity_date(["delete", "20120110"]) + + def test_next_activity_date_scenario_38(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0009" + self.nad_status["prescriptionDate"] = "20110731" + self.nad_status["completionDate"] = "20111231" + self.nad_status["claimSentDate"] = "20120101" + self.perform_test_next_activity_date(["delete", "20120110"]) + + def test_next_activity_date_scenario_39(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - expiry falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0009" + self.nad_status["prescriptionDate"] = "20110731" + self.nad_status["completionDate"] = "20111231" + self.nad_status["claimSentDate"] = "20120101" + self.perform_test_next_activity_date(["delete", "20120110"]) + + def test_next_activity_date_scenario_40(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - Nominated Release before Expiry + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0000" + self.nad_status["prescriptionDate"] = "20120731" + 
self.nad_status["nominatedDownloadDate"] = "20121101" + self.perform_test_next_activity_date(["ready", "20121101"]) + + def test_next_activity_date_scenario_41(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Prescribe - Expiry before Nominated Release + """ + + self.nad_status["prescriptionTreatmentType"] = "0002" + self.nad_status["prescriptionStatus"] = "0000" + self.nad_status["prescriptionDate"] = "20110731" + self.nad_status["nominatedDownloadDate"] = "20120301" + self.perform_test_next_activity_date(["expire", "20120131"]) + + def test_next_activity_date_scenario_42(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - Nominated Release falls 29th Feb 2012 + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0000" + self.nad_status["prescriptionDate"] = "20111101" + self.nad_status["nominatedDownloadDate"] = "20120229" + self.perform_test_next_activity_date(["ready", "20120229"]) + + def test_next_activity_date_scenario_43(self): + """ + Unit test for Next Activity and Next Activity Date Generator: + Repeat Dispense - Expiry falls 30th Sep 2011 + """ + + self.nad_status["prescriptionTreatmentType"] = "0003" + self.nad_status["prescriptionStatus"] = "0000" + self.nad_status["prescriptionDate"] = "20110331" + self.nad_status["nominatedDownloadDate"] = "20120130" + self.perform_test_next_activity_date(["expire", "20110930"]) diff --git a/tests/common/prescription/record_test.py b/tests/common/prescription/record_test.py new file mode 100644 index 0000000..436bd38 --- /dev/null +++ b/tests/common/prescription/record_test.py @@ -0,0 +1,410 @@ +import json +import os.path +from datetime import datetime, timedelta +from unittest.case import TestCase +from unittest.mock import MagicMock + +from eps_spine_shared.common.prescription import fields +from eps_spine_shared.common.prescription.record import PrescriptionRecord +from eps_spine_shared.common.prescription.repeat_dispense import RepeatDispenseRecord +from eps_spine_shared.common.prescription.repeat_prescribe import RepeatPrescribeRecord +from eps_spine_shared.common.prescription.single_prescribe import SinglePrescribeRecord +from eps_spine_shared.common.prescription.types import PrescriptionTreatmentType +from eps_spine_shared.errors import EpsSystemError +from eps_spine_shared.nhsfundamentals.timeutilities import TimeFormats +from tests.mock_logger import MockLogObject + + +def load_test_example_json(mock_log_object, filename): + """ + Load prescription data from JSON files in the test resources directory. 
+ + :type filename: str + :rtype: PrescriptionRecord + """ + # load the JSON dict + test_dir_path = os.path.dirname(__file__) + full_path = os.path.join(test_dir_path, "resources", filename) + with open(full_path) as json_file: + prescription_dict = json.load(json_file) + json_file.close() + + # wrap it in a PrescriptionRecord - need to create the + # appropriate subclass based on treatment type + treatment_type = prescription_dict["prescription"]["prescriptionTreatmentType"] + if treatment_type == PrescriptionTreatmentType.ACUTE_PRESCRIBING: + prescription = SinglePrescribeRecord(mock_log_object, "test") + elif treatment_type == PrescriptionTreatmentType.REPEAT_PRESCRIBING: + prescription = RepeatPrescribeRecord(mock_log_object, "test") + elif treatment_type == PrescriptionTreatmentType.REPEAT_DISPENSING: + prescription = RepeatDispenseRecord(mock_log_object, "test") + else: + raise ValueError("Unknown treatment type %s" % str(treatment_type)) + + prescription.create_record_from_store(prescription_dict) + + return prescription + + +class PrescriptionRecordTest(TestCase): + """ + Test Case for PrescriptionRecord class + """ + + def setUp(self): + self.mock_log_object = MagicMock() + + def test_basic_properties(self): + """ + Test basic property access of a record loaded from JSON + """ + prescription = load_test_example_json(self.mock_log_object, "7D9625-Z72BF2-11E3A.json") + + self.assertEqual(prescription.id, "7D9625-Z72BF2-11E3AC") + self.assertEqual(prescription.max_repeats, 3) + + def test_current_issue(self): + """ + Test that we can access the current issue + """ + prescription = load_test_example_json(self.mock_log_object, "7D9625-Z72BF2-11E3A.json") + + self.assertEqual(prescription.current_issue_number, 3) + self.assertEqual(prescription.current_issue.number, 3) + self.assertEqual(prescription.current_issue.status, "0006") + + # try changing the current issue number and make sure that this is picked up + prescription.current_issue_number = 1 + self.assertEqual(prescription.current_issue_number, 1) + self.assertEqual(prescription.current_issue.number, 1) + self.assertEqual(prescription.current_issue.status, "0009") + + def test_issues(self): + """ + Test that we can access the prescription issues + """ + prescription = load_test_example_json(self.mock_log_object, "7D9625-Z72BF2-11E3A.json") + + self.assertEqual(prescription.issue_numbers, [1, 2, 3]) + + issues = prescription.issues + self.assertEqual(len(issues), 3) + + issue_numbers = [issue.number for issue in issues] + self.assertEqual(issue_numbers, [1, 2, 3]) + + def test_claims(self): + """ + Test that we can access the prescription issue claims + """ + prescription = load_test_example_json(self.mock_log_object, "7D9625-Z72BF2-11E3A.json") + + issue = prescription.get_issue(1) + claim = issue.claim + + self.assertEqual(claim.received_date_str, "20140408") + + # make sure we can also update the received date + claim.received_date_str = "20131225" + self.assertEqual(claim.received_date_str, "20131225") + + def test_find_next_future_issue_number_future_issue_available(self): + """ + Test that a future issue can be found in a prescription. 
+ """ + prescription = load_test_example_json(self.mock_log_object, "DD0180-ZBED5C-11E3A.json") + + # check the future issue can be found + self.assertEqual(prescription._find_next_future_issue_number("1"), "2") + + # check that there are no more beyond the last issue + self.assertEqual(prescription.max_repeats, 2) + self.assertEqual(prescription._find_next_future_issue_number("2"), None) + + def test_find_next_future_issue_number_issues_already_dispensed(self): + """ + Test that no future issues can be found if they're all dispensed. + """ + prescription = load_test_example_json(self.mock_log_object, "7D9625-Z72BF2-11E3A.json") + + # chekc that dispensed issues can not be found + self.assertEqual(prescription._find_next_future_issue_number("1"), None) + self.assertEqual(prescription._find_next_future_issue_number("2"), None) + + # check that there are no more beyond the last issue + self.assertEqual(prescription.max_repeats, 3) + self.assertEqual(prescription._find_next_future_issue_number("3"), None) + + def test_get_issue_numbers_in_range(self): + """ + Test that we can correctly retrieve ranges of issue numbers. + """ + prescription = load_test_example_json(self.mock_log_object, "7D9625-Z72BF2-11E3A.json") + + self.assertEqual(prescription.issue_numbers, [1, 2, 3]) + + # test lower bound only + self.assertEqual(prescription.get_issue_numbers_in_range(0, None), [1, 2, 3]) + self.assertEqual(prescription.get_issue_numbers_in_range(1, None), [1, 2, 3]) + self.assertEqual(prescription.get_issue_numbers_in_range(2, None), [2, 3]) + self.assertEqual(prescription.get_issue_numbers_in_range(3, None), [3]) + self.assertEqual(prescription.get_issue_numbers_in_range(4, None), []) + + # test upper bound only + self.assertEqual(prescription.get_issue_numbers_in_range(None, 4), [1, 2, 3]) + self.assertEqual(prescription.get_issue_numbers_in_range(None, 3), [1, 2, 3]) + self.assertEqual(prescription.get_issue_numbers_in_range(None, 2), [1, 2]) + self.assertEqual(prescription.get_issue_numbers_in_range(None, 1), [1]) + self.assertEqual(prescription.get_issue_numbers_in_range(None, 0), []) + + # test both bounds + self.assertEqual(prescription.get_issue_numbers_in_range(0, 4), [1, 2, 3]) + self.assertEqual(prescription.get_issue_numbers_in_range(1, 3), [1, 2, 3]) + self.assertEqual(prescription.get_issue_numbers_in_range(2, 3), [2, 3]) + self.assertEqual(prescription.get_issue_numbers_in_range(2, 2), [2]) + self.assertEqual(prescription.get_issue_numbers_in_range(2, 1), []) + + # test no bounds + self.assertEqual(prescription.get_issue_numbers_in_range(None, None), [1, 2, 3]) + self.assertEqual(prescription.get_issue_numbers_in_range(), [1, 2, 3]) + + def test_missing_issue_numbers(self): + """ + Test that we can deal correctly with prescriptions with missing instances. 
+ """ + # this 12-issue prescription has issues 1 and 2 missing because of migration + prescription = load_test_example_json(self.mock_log_object, "50EE48-B83002-490F7.json") + + self.assertEqual(prescription.issue_numbers, [3, 4, 5, 6, 7, 8, 9, 10, 11, 12]) + self.assertEqual(prescription.missing_issue_numbers, [1, 2]) + + # make sure the range fetches work as well + self.assertEqual( + prescription.get_issue_numbers_in_range(None, None), [3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + ) + self.assertEqual( + prescription.get_issue_numbers_in_range(2, None), [3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + ) + self.assertEqual( + prescription.get_issue_numbers_in_range(3, None), [3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + ) + self.assertEqual( + prescription.get_issue_numbers_in_range(4, None), [4, 5, 6, 7, 8, 9, 10, 11, 12] + ) + self.assertEqual( + prescription.get_issue_numbers_in_range(None, 13), [3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + ) + self.assertEqual( + prescription.get_issue_numbers_in_range(None, 12), [3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + ) + self.assertEqual( + prescription.get_issue_numbers_in_range(None, 11), [3, 4, 5, 6, 7, 8, 9, 10, 11] + ) + self.assertEqual(prescription.get_issue_numbers_in_range(5, 8), [5, 6, 7, 8]) + self.assertEqual(prescription.get_issue_numbers_in_range(10, 7), []) + + def _assert_find_instances_to_action_update( + self, prescription: PrescriptionRecord, handle_time, action, expected_issue_number_strs + ): + """ + Helper to test that find_instances_to_action_update() returns expected instances + """ + mock_context = MagicMock() + mock_context.handleTime = handle_time + mock_context.instancesToUpdate = None + prescription.find_instances_to_action_update(mock_context, action) + self.assertEqual(mock_context.instancesToUpdate, expected_issue_number_strs) + + def test_find_instances_to_action_update(self): + """ + Test that we can find instances that need updating at a particular time. + """ + prescription = load_test_example_json(self.mock_log_object, "7D9625-Z72BF2-11E3A.json") + + # first, try a date that will pick up all next actions + handle_time = datetime(year=2050, month=1, day=1) + + action = fields.NEXTACTIVITY_DELETE + self._assert_find_instances_to_action_update(prescription, handle_time, action, ["1"]) + + action = fields.NEXTACTIVITY_CREATENOCLAIM + self._assert_find_instances_to_action_update(prescription, handle_time, action, ["2", "3"]) + + action = fields.NEXTACTIVITY_EXPIRE + self._assert_find_instances_to_action_update(prescription, handle_time, action, None) + + # then try a date in the past that won't pick up actions + handle_time = datetime(year=2010, month=1, day=1) + action = fields.NEXTACTIVITY_CREATENOCLAIM + self._assert_find_instances_to_action_update(prescription, handle_time, action, None) + + # first, try a date that will pick up all next actions + handle_time = datetime(year=2050, month=1, day=1) + # same as above json but with nextActivityNAD_bin and instance 1 nextActivity set to purge + prescription = load_test_example_json(self.mock_log_object, "7D9625-Z72BF2-11E3B.json") + action = fields.NEXTACTIVITY_PURGE + self._assert_find_instances_to_action_update(prescription, handle_time, action, ["1"]) + + def test_find_instances_to_action_update_missing_instances(self): + """ + SPII-10492 - Test that we can find instances that need updating in a migrated + prescription with missing instances. 
+ """ + # this 12-issue prescription has issues 1 and 2 missing because of migration + prescription = load_test_example_json(self.mock_log_object, "50EE48-B83002-490F7.json") + + # first, try a date that will pick up all next actions + handle_time = datetime(year=2050, month=1, day=1) + + action = fields.NEXTACTIVITY_DELETE + self._assert_find_instances_to_action_update(prescription, handle_time, action, ["3"]) + + action = fields.NEXTACTIVITY_EXPIRE + self._assert_find_instances_to_action_update( + prescription, handle_time, action, ["5", "6", "7", "8", "9", "10", "11", "12"] + ) + + def test_reset_current_instance(self): + """ + Test that resetting the current instance chooses the correct instance. + """ + + prescription = load_test_example_json(self.mock_log_object, "50EE48-B83002-490F7.json") + self.assertEqual(prescription.current_issue_number, 4) + (old, new) = prescription.reset_current_instance() + self.assertEqual((old, new), (4, 4)) + self.assertEqual(prescription.current_issue_number, 4) + + prescription = load_test_example_json(self.mock_log_object, "DD0180-ZBED5C-11E3A.json") + self.assertEqual(prescription.current_issue_number, 1) + (old, new) = prescription.reset_current_instance() + self.assertEqual((old, new), (1, 1)) + self.assertEqual(prescription.current_issue_number, 1) + + prescription = load_test_example_json(self.mock_log_object, "7D9625-Z72BF2-11E3A.json") + self.assertEqual(prescription.current_issue_number, 3) + (old, new) = prescription.reset_current_instance() + self.assertEqual((old, new), (3, 3)) + self.assertEqual(prescription.current_issue_number, 3) + + def test_handle_overdue_expiry_none(self): + """ + SPII-31379 due to old prescrptions the NAD index is set to None + """ + nad = [None] + self.assertFalse(PrescriptionRecord._is_expiry_overdue(nad)) + + def test_handle_overdue_expiry_empty(self): + """ + SPII-31379 due to old prescrptions the NAD index is empty + """ + nad = [] + self.assertFalse(PrescriptionRecord._is_expiry_overdue(nad)) + + def test_handle_overdue_expiry_not_expired(self): + """ + Expiry is set to tomorrow + """ + nad = [ + "expire:{}".format( + (datetime.now() + timedelta(days=1)).strftime(TimeFormats.STANDARD_DATE_FORMAT) + ) + ] + self.assertFalse(PrescriptionRecord._is_expiry_overdue(nad)) + + def test_handle_overdue_expiry_expired(self): + """ + Expiry is set to yesterday + """ + nad = [ + "expire:{}".format( + (datetime.now() - timedelta(days=1)).strftime(TimeFormats.STANDARD_DATE_FORMAT) + ) + ] + self.assertTrue(PrescriptionRecord._is_expiry_overdue(nad)) + + def test_get_line_item_cancellations(self): + """ + Test that we can get the line item cancellations for a prescription + """ + prescription = load_test_example_json(self.mock_log_object, "23C1BC-Z75FB1-11EE84.json") + current_issue = prescription.current_issue + + cancelled_line_item_id = "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF" + cancellations = current_issue.get_line_item_cancellations(cancelled_line_item_id) + self.assertEqual(len(cancellations), 1) + + not_cancelled_line_item_id = "45D5FB11-D793-4D51-9ADD-95E0F54D2786" + cancellations = current_issue.get_line_item_cancellations(not_cancelled_line_item_id) + self.assertEqual(len(cancellations), 0) + + def test_get_line_item_first_cancellation_time(self): + prescription = load_test_example_json(self.mock_log_object, "23C1BC-Z75FB1-11EE84.json") + current_issue = prescription.current_issue + + cancelled_line_item_id = "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF" + first_cancellation_time = 
current_issue.get_line_item_first_cancellation_time( + cancelled_line_item_id + ) + self.assertEqual(first_cancellation_time, "20240415101553") + + not_cancelled_line_item_id = "45D5FB11-D793-4D51-9ADD-95E0F54D2786" + first_cancellation_time = current_issue.get_line_item_first_cancellation_time( + not_cancelled_line_item_id + ) + self.assertEqual(first_cancellation_time, None) + + def test_set_initial_prescription_status_active_prescription(self): + """ + Test that a prescription with a start date of today or earlier is marked as TO_BE_DISPENSED. + """ + prescription = load_test_example_json(self.mock_log_object, "7D9625-Z72BF2-11E3A.json") + + current_time = datetime.now() + prescription.set_initial_prescription_status(current_time) + + self.assertEqual(prescription.get_issue(1).status, "0001") + + def test_set_initial_prescription_status_future_dated(self): + """ + Test that a prescription with a future start date is marked as FUTURE_DATED_PRESCRIPTION. + """ + + prescription = load_test_example_json(self.mock_log_object, "0DA698-A83008-F50593.json") + + future_time = datetime.now() + timedelta(days=10) + prescription.set_initial_prescription_status(future_time) + + self.assertEqual(prescription.get_issue(1).status, "9001") + + +class PrescriptionRecordChangeLogTest(TestCase): + """ + For testing aspects of the change log in the prescription record. + """ + + def setUp(self): + self.log_object = MockLogObject() + self.mock_record = PrescriptionRecord(self.log_object, "test") + + def test_error_log_change_log_too_big(self): + """ + When a change log cannot be pruned small enough an error is raised. + """ + self.mock_record.prescription_record = { + "prescription": {fields.FIELD_PRESCRIPTION_ID: "testID"}, + "SCN": 10, + "changeLog": { + "438eb94f-9da7-46ca-ba2a-72c4f83b2a06": {"SCN": 10}, + "438eb94f-9da7-46ca-ba2a-72c4f83b2a46": {"SCN": 10}, + }, + } + self.mock_record.SCN_MAX = 1 + self.assertRaises( + EpsSystemError, + self.mock_record.add_event_to_change_log, + "ce6c4a39-e239-44c5-81e2-adf3612a7391", + {}, + ) + self.assertTrue(self.log_object.was_logged("EPS0336")) + self.assertTrue(self.log_object.was_value_logged("EPS0336", "prescriptionID", "testID")) diff --git a/tests/common/prescription/resources/0DA698-A83008-F50593.json b/tests/common/prescription/resources/0DA698-A83008-F50593.json new file mode 100644 index 0000000..498ab86 --- /dev/null +++ b/tests/common/prescription/resources/0DA698-A83008-F50593.json @@ -0,0 +1,736 @@ +{ + + "documents": [ + "20140408144130355815_6BB2F0_2", + "20140408144130582188_F41F3F_2", + "20140408144130854644_0CC025_2", + "20140408144131797294_419E3E_2", + "20140408144132021702_28EA27_2", + "20140408144132365276_126328_2", + "20140408144132603745_6BF1FB_2" + ], + "patient": { + "lowerAgeLimit": "19960419", + "birthTime": "19800420", + "nhsNumber": "9990406707", + "higherAgeLimit": "20400420" + }, + "changeLog": { + "7E952DF6-BF23-11E3-A209-080027D20F8F": { + "SCN": 8, + "InternalID": "20140408144132021702_28EA27_2", + "Response Parameters": { + "responseText": "\"Dispense notification successful\"", + "timeStampAck": "\"20140408T144132.021\"", + "refToMessageID": "\"7E952DF6-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144132\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"94AD2116-C8AD-4D89-B642-762681816266\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": 
"\"TESTGEN-201324\"", + "prescriptionID": "\"0DA698-A83008-F50593\"", + "refToEventID": "\"7D962AEA-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144132", + "agentSystemSDS": "230811201324", + "fromStatus": "0002", + "toStatus": "0006", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN080101UK31", + "timePreparedForUpdate": "20140408144132" + }, + "7DE2730A-BF23-11E3-A209-080027D20F8F": { + "SCN": 4, + "InternalID": "20140408144130854644_0CC025_2", + "Response Parameters": { + "responseText": "\"Dispense notification successful\"", + "timeStampAck": "\"20140408T144130.854\"", + "refToMessageID": "\"7DE2730A-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144130\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"8BA82B48-70FE-4679-ADA5-BEB7DA94D3FC\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"0DA698-A83008-F50593\"", + "refToEventID": "\"7D96291E-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144130", + "agentSystemSDS": "230811201324", + "fromStatus": "0002", + "toStatus": "0006", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN080101UK31", + "timePreparedForUpdate": "20140408144130" + }, + "BE63F8AC-7A37-404A-B470-74BEAE2C52EC": { + "SCN": 6, + "InternalID": "20140408144131368328_FED07C_BATCH", + "Response Parameters": { + "responseText": "\"Updated by Routine Admin Batch worker\"", + "timeStampAck": "\"20140408T144131.368\"", + "refToMessageID": "\"BE63F8AC-7A37-404A-B470-74BEAE2C52EC\"", + "toASID": "\"SpineInternalSource\"", + "fromASID": "\"338068513039\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144131\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"EFAD8454-E60D-40E4-AC42-2E4846E5D4D2\"", + "toPartyKey": "\"None\"", + "fromPartyKey": "\"None\"", + "prescriptionID": "\"0DA698-A83008-F50593\"", + "refToEventID": "\"BE63F8AC-7A37-404A-B470-74BEAE2C52EC\"" + }, + "Timestamp": "20140408144131", + "agentSystemSDS": "SpineInternalSource", + "fromStatus": "9000", + "toStatus": "0000", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN999002UK01", + "timePreparedForUpdate": "20140408144131" + }, + "7DBA3520-BF23-11E3-A209-080027D20F8F": { + "SCN": 3, + "InternalID": "20140408144130582188_F41F3F_2", + "Response Parameters": { + "responseText": "\"Release Request successful\"", + "lowerAgeLimit": "\"19960419\"", + "prescriptionMaxRepeats": "\"3\"", + "lineItem1MaxRepeats": "\"3\"", + "responseAction": "\"PORX_IN070103UK31\"", + "toPartyKey": "\"YEA-0000806\"", + "refToEventID": "\"7DBA3840-BF23-11E3-A209-080027D20F8F\"", + "timeStampAck": "\"20140408T144130.582\"", + "refToMessageID": "\"7DBA3520-BF23-11E3-A209-080027D20F8F\"", + "lineItem2StatusDisplayName": "\"To Be Dispensed\"", + "lineItem2MaxRepeats": "\"3\"", + "prescriptionCurrentInstance": "\"1\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "fromASID": "\"990101234567\"", + "toASID": "\"230811201324\"", + "higherAgeLimit": "\"20400420\"", + "serviceASID": "\"618729461037\"", + 
"lineItem2Status": "\"0007\"", + "messageID": "\"3135A4B1-2BFE-43A1-8C11-D4374E7BA902\"", + "prescriptionID": "\"0DA698-A83008-F50593\"", + "lineItem1CurrentInstance": "\"1\"", + "lineItem2CurrentInstance": "\"1\"", + "prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "timeStampSent": "\"20140408144130\"" + }, + "Timestamp": "20140408144130", + "agentSystemSDS": "230811201324", + "fromStatus": "0001", + "toStatus": "0002", + "Source XSLT": "createResponseFramework_PORX_RM122002UK31.xsl", + "agentPerson": null, + "interactionID": "PORX_IN132004UK30", + "timePreparedForUpdate": "20140408144130" + }, + "7E73C9FE-BF23-11E3-A209-080027D20F8F": { + "SCN": 7, + "InternalID": "20140408144131797294_419E3E_2", + "Response Parameters": { + "responseText": "\"Release Request successful\"", + "lowerAgeLimit": "\"19960419\"", + "prescriptionMaxRepeats": "\"3\"", + "lineItem1MaxRepeats": "\"3\"", + "responseAction": "\"PORX_IN070103UK31\"", + "toPartyKey": "\"YEA-0000806\"", + "refToEventID": "\"7E73CD00-BF23-11E3-A209-080027D20F8F\"", + "timeStampAck": "\"20140408T144131.797\"", + "refToMessageID": "\"7E73C9FE-BF23-11E3-A209-080027D20F8F\"", + "lineItem2StatusDisplayName": "\"To Be Dispensed\"", + "lineItem2MaxRepeats": "\"3\"", + "prescriptionCurrentInstance": "\"2\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "fromASID": "\"990101234567\"", + "toASID": "\"230811201324\"", + "higherAgeLimit": "\"20400420\"", + "serviceASID": "\"618729461037\"", + "lineItem2Status": "\"0007\"", + "messageID": "\"DDDC4D00-6211-42ED-9DCE-06A841881916\"", + "prescriptionID": "\"0DA698-A83008-F50593\"", + "lineItem1CurrentInstance": "\"2\"", + "lineItem2CurrentInstance": "\"2\"", + "priorPreviousIssueDate": "\"20140408144130\"", + "prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "timeStampSent": "\"20140408144131\"" + }, + "Timestamp": "20140408144131", + "agentSystemSDS": "230811201324", + "fromStatus": "0000", + "toStatus": "0002", + "Source XSLT": "createResponseFramework_PORX_RM122002UK31.xsl", + "agentPerson": null, + "interactionID": "PORX_IN132004UK30", + "timePreparedForUpdate": "20140408144131" + }, + "7ECA9216-BF23-11E3-A209-080027D20F8F": { + "SCN": 9, + "InternalID": "20140408144132365276_126328_2", + "Response Parameters": { + "responseText": "\"Release Request successful\"", + "lowerAgeLimit": "\"19960419\"", + "prescriptionMaxRepeats": "\"3\"", + "lineItem1MaxRepeats": "\"3\"", + "responseAction": "\"PORX_IN070103UK31\"", + "toPartyKey": "\"YEA-0000806\"", + "refToEventID": "\"7ECA9450-BF23-11E3-A209-080027D20F8F\"", + "timeStampAck": "\"20140408T144132.365\"", + "refToMessageID": "\"7ECA9216-BF23-11E3-A209-080027D20F8F\"", + "lineItem2StatusDisplayName": "\"To Be Dispensed\"", + "lineItem2MaxRepeats": "\"3\"", + "prescriptionCurrentInstance": "\"3\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "fromASID": "\"990101234567\"", + "toASID": "\"230811201324\"", + "higherAgeLimit": "\"20400420\"", + "serviceASID": "\"618729461037\"", + "lineItem2Status": "\"0007\"", + "messageID": "\"BA9E58F7-09F0-4D99-8859-2CCE768349C4\"", + "prescriptionID": "\"0DA698-A83008-F50593\"", + "lineItem1CurrentInstance": "\"3\"", + "lineItem2CurrentInstance": "\"3\"", + "priorPreviousIssueDate": 
"\"20140408144130\"", + "prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "timeStampSent": "\"20140408144132\"" + }, + "Timestamp": "20140408144132", + "agentSystemSDS": "230811201324", + "fromStatus": "0000", + "toStatus": "0002", + "Source XSLT": "createResponseFramework_PORX_RM122002UK31.xsl", + "agentPerson": null, + "interactionID": "PORX_IN132004UK30", + "timePreparedForUpdate": "20140408144132" + }, + "7D96F876-BF23-11E3-A209-080027D20F8F": { + "SCN": 2, + "InternalID": "20140408144130355815_6BB2F0_2", + "Response Parameters": { + "responseText": "\"Prescription upload successful\"", + "timeStampAck": "\"20140408T144130.355\"", + "refToMessageID": "\"7D96F876-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144130\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"F82CF38D-0E5B-4ECA-836B-EA3079C2162A\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"0DA698-A83008-F50593\"", + "refToEventID": "\"7D96FB00-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144130", + "agentSystemSDS": "230811201324", + "fromStatus": false, + "toStatus": "0001", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN020101UK31", + "timePreparedForUpdate": "20140408144130" + }, + "7E0C360E-BF23-11E3-A209-080027D20F8F": { + "SCN": 5, + "InternalID": "20140408144131121178_3263F7_2", + "Response Parameters": { + "responseText": "\"Administrative update successful\"", + "timeStampAck": "\"20140408T144131.121\"", + "refToMessageID": "\"7E0C360E-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144131\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"EBA6ED27-F7DD-4940-BAB2-C39EA0737138\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"0DA698-A83008-F50593\"", + "refToEventID": "\"7E0C360E-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144131", + "agentSystemSDS": "230811201324", + "fromStatus": "9000", + "toStatus": "0000", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN999000UK01", + "timePreparedForUpdate": "20140408144131" + }, + "7EEDA49A-BF23-11E3-A209-080027D20F8F": { + "SCN": 10, + "InternalID": "20140408144132603745_6BF1FB_2", + "Response Parameters": { + "responseText": "\"Dispense notification successful\"", + "timeStampAck": "\"20140408T144132.603\"", + "refToMessageID": "\"7EEDA49A-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144132\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"EF7C7D0B-9686-4B25-B143-F780F4E5B6CB\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"0DA698-A83008-F50593\"", + "refToEventID": "\"7D963530-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144132", + "agentSystemSDS": "230811201324", + "fromStatus": "0002", + "toStatus": "0006", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": 
"PORX_IN080101UK31", + "timePreparedForUpdate": "20140408144132" + } + }, + "indexes": { + "prescribingSiteStatus_bin": [ + "Z99901_0006", + "Z99901_0009" + ], + "nextActivityNAD_bin": [ + "purge_20160417" + ], + "prescDispDate_bin": [ + "Z99901|F001M|20140408144130|R2|0006", + "Z99901|F001M|20140408144130|R2|0009" + ], + "nhsNumberPrescDispDate_bin": [ + "9990406707|Z99901|F001M|20140408144130|R2|0006", + "9990406707|Z99901|F001M|20140408144130|R2|0009" + ], + "dispensingSiteStatus_bin": [ + "F001M_0009", + "F001M_0006" + ], + "nomPharmStatus_bin": [ + "F001M_0006", + "F001M_0009" + ], + "prescriberDate_bin": [ + "Z99901|20140408144130|R2|0006", + "Z99901|20140408144130|R2|0009" + ], + "nhsNumber_bin": [ + "9990406707" + ], + "delta_bin": [ + "20140408144132|9" + ], + "nhsNumberDate_bin": [ + "9990406707|20140408144130|R2|0006", + "9990406707|20140408144130|R2|0009" + ], + "nhsNumberPrescriberDate_bin": [ + "9990406707|Z99901|20140408144130|R2|0006", + "9990406707|Z99901|20140408144130|R2|0009" + ], + "dispenserDate_bin": [ + "F001M|20140408144130|R2|0006", + "F001M|20140408144130|R2|0009" + ], + "nhsNumberDispenserDate_bin": [ + "9990406707|F001M|20140408144130|R2|0006", + "9990406707|F001M|20140408144130|R2|0009" + ] + }, + "instances": { + "1": { + "completionDate": "20140408", + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": "20140408" + }, + "nominatedDownloadDate": false, + "previousStatus": "0006", + "lastDispenseStatus": "0006", + "dispense": { + "lastDispenseNotificationMsgRef": "20140408144130854644_0CC025_2", + "dispensingOrganization": "F001M", + "lastDispenseDate": "20130930", + "lastDispenseNotificationGuid": "7D96291E-BF23-11E3-A209-080027D20F8F" + }, + "nextActivity": { + "date": "20160417", + "activity": "purge" + }, + "cancellations": [ ], + "previousIssueDate": false, + "dispenseHistory": { + "release": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0001", + "lastDispenseStatus": false + }, + "7D96291E-BF23-11E3-A209-080027D20F8F": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0002", + "lastDispenseStatus": false + } + }, + "instanceNumber": "1", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": "20140408", + "dispenseWindowHighDate": "20140418", + "dispenseWindowLowDate": "20140408", + "lineItems": [ + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": 
"02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": "20140408144130582188_F41F3F_2", + "expiryDate": null, + "prescriptionStatus": "0009" + }, + "2": { + "completionDate": "20140408", + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": false + }, + "nominatedDownloadDate": "20140501", + "previousStatus": "0002", + "lastDispenseStatus": "0006", + "dispense": { + "lastDispenseNotificationMsgRef": "20140408144132021702_28EA27_2", + "dispensingOrganization": "F001M", + "lastDispenseDate": "20140408", + "lastDispenseNotificationGuid": "7D962AEA-BF23-11E3-A209-080027D20F8F" + }, + "nextActivity": { + "date": "20141005", + "activity": "createNoClaim" + }, + "cancellations": [ ], + "previousIssueDate": "20140408144130", + "dispenseHistory": { + "release": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0000", + "lastDispenseStatus": false + }, + "7D962AEA-BF23-11E3-A209-080027D20F8F": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0002", + "lastDispenseStatus": false + } + }, + "instanceNumber": "2", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": "20140408", + "dispenseWindowHighDate": "20140418", + "dispenseWindowLowDate": "20140408", + "lineItems": [ + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": "20140408144131797294_419E3E_2", + "expiryDate": null, + "prescriptionStatus": "0006" + }, + "3": { + "completionDate": "20140408", + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": false + }, + "nominatedDownloadDate": "20140501", + "previousStatus": "0002", + "lastDispenseStatus": "0006", + "dispense": { + "lastDispenseNotificationMsgRef": "20140408144132603745_6BF1FB_2", + "dispensingOrganization": "F001M", + "lastDispenseDate": "20140408", + "lastDispenseNotificationGuid": "7D963530-BF23-11E3-A209-080027D20F8F" + }, + 
"nextActivity": { + "date": "20141005", + "activity": "createNoClaim" + }, + "cancellations": [ ], + "previousIssueDate": "20140408144130", + "dispenseHistory": { + "release": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0000", + "lastDispenseStatus": false + }, + "7D963530-BF23-11E3-A209-080027D20F8F": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0002", + "lastDispenseStatus": false + } + }, + "instanceNumber": "3", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": "20140408", + "dispenseWindowHighDate": "20140418", + "dispenseWindowLowDate": "20140408", + "lineItems": [ + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": "20140408144132365276_126328_2", + "expiryDate": null, + "prescriptionStatus": "0006" + } + }, + "SCN": 10, + "prescription": { + "prescriptionType": "0001", + "SCN": false, + "prescriptionTreatmentType": "0003", + "signedTime": "20140408144130", + "prescriptionMsgRef": "20140408144130355815_6BB2F0_2", + "prescribingOrganization": "Z99901", + "prescriptionTime": "21140408144130", + "pendingCancellations": false, + "unsuccessfulCancellations": false, + "hl7": { + "eventID": "7D96FB00-BF23-11E3-A209-080027D20F8F", + "agentPersonSDSRole": null, + "wsaMessageID": "", + "agentSystemSDS1CodeSystem": "1.2.826.0.1285.0.2.0.107", + "fromASID": "230811201324", + "toASID": "990101234567", + "agentSystemSDS2": null, + "agentSystemSDS1": "230811201324", + "agentPersonSDSPerson": null, + "messageID": "7D96F876-BF23-11E3-A209-080027D20F8F", + "interactionID": "PORX_IN020101UK31", + "agentPersonSDSTargetRole": null, + "agentPersonSDSCodeSystem": null, + "messageVersion": "V3NPfIT4.2.00", + "agentPersonOrgCode": null + }, + "prescriptionPresent": true, + "prescriptionID": "0DA698-A83008-F50593", + "maxRepeats": 3, + "currentInstance": "3", + "daysSupply": 28 + }, + "nomination": { + "nominatedPerformerType": "P1", + "nominatedPerformer": "F001M", + "nominated": true, + "nominationHistory": [ ] + } + +} diff --git a/tests/common/prescription/resources/23C1BC-Z75FB1-11EE84.json b/tests/common/prescription/resources/23C1BC-Z75FB1-11EE84.json new file mode 100644 index 0000000..24e5641 --- /dev/null +++ b/tests/common/prescription/resources/23C1BC-Z75FB1-11EE84.json @@ -0,0 +1,311 @@ +{ + "documents": [ + "20240415101552901994_B4D632_208005743390136970170184304764716375459", + 
"20240415101553071705_C24486_208005743390136970170184304764716375459" + ], + "patient": { + "nhsNumber": "9912003489", + "birthTime": "19800420", + "lowerAgeLimit": false, + "higherAgeLimit": false, + "prefix": "MR", + "suffix": "", + "given": "DONOTUSE", + "family": "XXTESTPATIENTTRCEONE" + }, + "changeLog": { + "23C1BC7A-FB11-11EE-8CEB-000C297C24E7": { + "Timestamp": "20240415101552", + "SCN": 2, + "InternalID": "20240415101552901994_B4D632_208005743390136970170184304764716375459", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "Response Parameters": { + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"5B808FF1-B331-48AD-B7EF-853F2C424A3E\"", + "refToEventID": "\"23C1BC7A-FB11-11EE-8CEB-000C297C24E7\"", + "refToMessageID": "\"23C1BC7A-FB11-11EE-8CEB-000C297C24E7\"", + "timeStampAck": "\"20240415T101552.901\"", + "timeStampSent": "\"20240415101552\"", + "fromASID": "\"990101234567\"", + "toASID": "\"992101234567\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "toPartyKey": "\"YEA-0000806\"", + "serviceASID": "\"618729461037\"", + "responseText": "\"Prescription upload successful\"", + "prescriptionID": "\"23C1BC-Z75FB1-11EE84\"" + }, + "interactionID": "PORX_IN020101UK31", + "agentSystemSDS": "992101234567", + "agentPerson": "G8448879", + "agentPersonOrgCode": "Z99901", + "timePreparedForUpdate": "20240415101552", + "fromStatus": false, + "toStatus": "0001", + "instance": "1", + "instanceFromStatus": false, + "instanceToStatus": "0001", + "agentRoleProfileCodeId": "103142999989", + "agentPersonRole": "NotProvided", + "preChangeStatusDict": {}, + "postChangeStatusDict": { + "issue1": { + "prescription": "0001", + "lineItems": { + "1": "0007", + "2": "0007", + "3": "0007" + } + } + }, + "issuesAlteredByChange": [ + "1" + ], + "preChangeCurrentIssue": null, + "postChangeCurrentIssue": 1 + }, + "23C1BC7F-FB11-11EE-8CEB-000C297C24E7": { + "Timestamp": "20240415101553", + "SCN": 3, + "InternalID": "20240415101553071705_C24486_208005743390136970170184304764716375459", + "Source XSLT": [ + "generateHL7MCCIDetectedIssue.xsl", + "cancellationRequest_to_cancellationResponse.xsl" + ], + "Response Parameters": { + "cancellationResponseText": "\"Prescription/item was cancelled\"", + "cancellationResponseCode": "\"0001\"", + "cancellationResponseCodeSystem": "\"2.16.840.1.113883.2.1.3.2.4.17.19\"", + "messageID": "\"899757E4-AE44-4B41-B086-A2BA1AB941CB\"", + "refToEventID": "\"23C1BC7F-FB11-11EE-8CEB-000C297C24E7\"", + "refToMessageID": "\"23C1BC7F-FB11-11EE-8CEB-000C297C24E7\"", + "timeStampAck": "\"20240415T101553.071\"", + "timeStampSent": "\"20240415101553\"", + "fromASID": "\"990101234567\"", + "toASID": "\"992101234567\"", + "serviceASID": "\"618729461037\"" + }, + "interactionID": "PORX_IN050102UK32", + "agentSystemSDS": "992101234567", + "agentPerson": null, + "agentPersonOrgCode": "Z99901", + "timePreparedForUpdate": "20240415101553", + "fromStatus": false, + "toStatus": "0001", + "instance": "1", + "instanceFromStatus": false, + "instanceToStatus": "0001", + "agentRoleProfileCodeId": "103142999989", + "agentPersonRole": "NotProvided", + "preChangeStatusDict": { + "issue1": { + "prescription": "0001", + "lineItems": { + "1": "0007", + "2": "0007", + "3": "0007" + } + } + }, + "postChangeStatusDict": { + "issue1": { + "prescription": "0001", + "lineItems": { + "1": "0005", + "2": "0007", + "3": "0007" + } + } + }, + "issuesAlteredByChange": [ + "1" + ], + "preChangeCurrentIssue": "1", + "postChangeCurrentIssue": 1 + } + }, + 
"indexes": { + "prescribingSiteStatus_bin": [ + "Z99901_0001" + ], + "dispensingSiteStatus_bin": [ + "FA666_0001" + ], + "nomPharmStatus_bin": [ + "FA666_0001" + ], + "nextActivityNAD_bin": [ + "expire_20241015" + ], + "nhsNumber_bin": [ + "9912003489" + ], + "nhsNumberDate_bin": [ + "9912003489|20240415101552|R2|0001" + ], + "nhsNumberPrescriberDate_bin": [ + "9912003489|Z99901|20240415101552|R2|0001" + ], + "nhsNumberPrescDispDate_bin": [ + "9912003489|Z99901|FA666|20240415101552|R2|0001" + ], + "nhsNumberDispenserDate_bin": [ + "9912003489|FA666|20240415101552|R2|0001" + ], + "prescriberDate_bin": [ + "Z99901|20240415101552|R2|0001" + ], + "prescDispDate_bin": [ + "Z99901|FA666|20240415101552|R2|0001" + ], + "dispenserDate_bin": [ + "FA666|20240415101552|R2|0001" + ], + "delta_bin": [ + "20240415101553|2" + ] + }, + "instances": { + "1": { + "nextActivity": { + "activity": "expire", + "date": "20241015" + }, + "instanceNumber": "1", + "dispenseWindowLowDate": "20240415", + "dispenseWindowHighDate": "20250415", + "priorPreviousIssueDate": false, + "completionDate": false, + "nominatedDownloadDate": false, + "releaseDate": false, + "releaseRequestMsgRef": false, + "expiryDate": "20241015", + "dispenseHistory": {}, + "prescriptionStatus": "0001", + "previousStatus": false, + "lastDispenseStatus": false, + "lineItems": [ + { + "status": "0005", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "previousStatus": "0007", + "order": 1, + "maxRepeats": false + }, + { + "status": "0007", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "previousStatus": false, + "order": 2, + "maxRepeats": false + }, + { + "status": "0007", + "ID": "BC7A2174-72D6-4C95-8B2D-3E1B63DF90BD", + "previousStatus": false, + "order": 3, + "maxRepeats": false + } + ], + "dispense": { + "dispensingOrganization": false, + "lastDispenseNotificationGuid": false, + "lastDispenseNotificationMsgRef": false, + "lastDispenseDate": false + }, + "claim": { + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "historicDispenseClaimMsgRef": false, + "claimReceivedDate": false, + "claimStatus": false, + "claimRebuild": false, + "historicClaimGUIDs": false + }, + "cancellations": [ + { + "cancellationID": "23C1BC7F-FB11-11EE-8CEB-000C297C24E7", + "agentOrganization": "Z99901", + "cancellationTarget": "LineItem", + "cancellationTime": "20240415101553", + "cancellationMsgRef": "20240415101553071705_C24486_208005743390136970170184304764716375459", + "cancelLineItemRef": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "Reasons": [ + "0002: Clinical contra-indication \u00e9" + ], + "hl7": { + "interactionID": "PORX_IN050102UK32", + "messageID": "23C1BC7F-FB11-11EE-8CEB-000C297C24E7", + "eventID": "23C1BC7F-FB11-11EE-8CEB-000C297C24E7", + "messageVersion": "V3NPfIT4.2.00", + "toASID": "990101234567", + "fromASID": "992101234567", + "agentPersonSDSRole": "S0080:G0450:R5080", + "agentPersonSDSPerson": null, + "agentPersonSDSTargetRole": null, + "agentPersonSDSCodeSystem": null, + "agentPersonSDSRoleCodeSystem": null, + "agentSystemSDS1": "992101234567", + "agentSystemSDS1CodeSystem": "1.2.826.0.1285.0.2.0.107", + "agentSystemSDS2": null, + "agentPersonOrgCode": null, + "querySize": null, + "fileName": null, + "originalFileName": null, + "orgId": null, + "issueWarning": null, + "agentPersonSDSRoleProfileId": "103142999989" + } + } + ] + } + }, + "SCN": 3, + "prescription": { + "prescriptionID": "23C1BC-Z75FB1-11EE84", + "prescriptionMsgRef": 
"20240415101552901994_B4D632_208005743390136970170184304764716375459", + "prescriptionTreatmentType": "0001", + "prescriptionType": "0001", + "prescriptionTime": "20240415101552", + "prescribingOrganization": "Z99901", + "signedTime": "20120108210751", + "daysSupply": 28, + "maxRepeats": null, + "pendingCancellations": false, + "unsuccessfulCancellations": false, + "currentInstance": "1", + "prescriptionPresent": true, + "hl7": { + "interactionID": "PORX_IN020101UK31", + "messageID": "23C1BC7A-FB11-11EE-8CEB-000C297C24E7", + "eventID": "23C1BC7A-FB11-11EE-8CEB-000C297C24E7", + "messageVersion": "V3NPfIT4.2.00", + "toASID": "990101234567", + "fromASID": "992101234567", + "agentPersonSDSRole": "S0080:G0450:R5080", + "agentPersonSDSPerson": "G8448879", + "agentPersonSDSTargetRole": null, + "agentPersonSDSCodeSystem": null, + "agentPersonSDSRoleCodeSystem": null, + "agentSystemSDS1": "992101234567", + "agentSystemSDS1CodeSystem": "1.2.826.0.1285.0.2.0.107", + "agentSystemSDS2": null, + "agentPersonOrgCode": null, + "querySize": null, + "fileName": null, + "originalFileName": null, + "orgId": null, + "issueWarning": null, + "agentPersonSDSRoleProfileId": "103142999989" + }, + "SCN": false + }, + "nomination": { + "nominated": true, + "nominatedPerformer": "FA666", + "nominatedPerformerType": "P1", + "nominationHistory": [] + } +} diff --git a/tests/common/prescription/resources/50EE48-B83002-490F7.json b/tests/common/prescription/resources/50EE48-B83002-490F7.json new file mode 100644 index 0000000..afad314 --- /dev/null +++ b/tests/common/prescription/resources/50EE48-B83002-490F7.json @@ -0,0 +1,775 @@ +{ + "documents": [ + "201311290117241541325_A247DB_14", + "20131204131437137297_674307_1081759534", + "20140107172529139127_718E63_1081759534", + "Notification_20140107172529139127_718E63_1081759534" + ], + "patient": { + "lowerAgeLimit": "20041228", + "birthTime": "19881229", + "nhsNumber": "9446370914", + "higherAgeLimit": "20481229" + }, + "changeLog": { + "07D09C01-441D-4D55-9386-CF790F225885": { + "Response Parameters": { + "responseText": "\"Release Request Successful\"", + "lowerAgeLimit": "\"20041228\"", + "prescriptionMaxRepeats": "\"12\"", + "lineItem1MaxRepeats": "\"12\"", + "responseAction": "\"not-provided-migrated\"", + "refToMessageID": "\"07D09C01-441D-4D55-9386-CF790F225885\"", + "refToEventID": "\"07D09C01-441D-4D55-9386-CF790F225885\"", + "timeStampAck": "\"20130517084623\"", + "prescriptionCurrentInstance": "\"3\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "fromASID": "\"not-provided-migrated\"", + "toASID": "\"935002902515\"", + "higherAgeLimit": "\"20481229\"", + "serviceASID": "\"not-provided-migrated\"", + "messageID": "\"07D09C01-441D-4D55-9386-CF790F225885\"", + "prescriptionID": "\"50EE48-B83002-490F7Q\"", + "lineItem1CurrentInstance": "\"3\"", + "prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "timeStampSent": "\"20130517084623\"", + "messageMigratedFromSpine1": "\"true\"" + }, + "agentSystemSDS": "805017560014", + "fromStatus": "0001", + "toStatus": "0002", + "Source XSLT": "not-provided-migrated", + "agentPerson": "not-provided-migratedsetAgentPersonSDSPerson", + "interactionID": "PORX_IN060102UK30", + "timePreparedForUpdate": "20130517084623" + }, + "A247DB87-FF0E-3715-B59E-2090CCF61E32": { + "Response Parameters": { + "responseText": "\"Prescription upload successful\"", + "timeStampAck": "\"20130128120100\"", + "toASID": 
"\"276827251543\"", + "fromASID": "\"not-provided-migrated\"", + "serviceASID": "\"not-provided-migrated\"", + "timeStampSent": "\"20130128120100\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"not-provided-migrated\"", + "messageID": "\"A247DB87-FF0E-3715-B59E-2090CCF61E32\"", + "refToEventID": "\"A247DB87-FF0E-3715-B59E-2090CCF61E32\"", + "refToMessageID": "\"A247DB87-FF0E-3715-B59E-2090CCF61E32\"", + "prescriptionID": "\"50EE48-B83002-490F7Q\"", + "messageMigratedFromSpine1": "\"true\"" + }, + "agentSystemSDS": "805017560014", + "fromStatus": false, + "toStatus": "0001", + "Source XSLT": "not-provided-migrated", + "agentPerson": "not-provided-migratedsetAgentPersonSDSPerson", + "interactionID": "PORX_IN020101UK31", + "timePreparedForUpdate": "20130128142413" + }, + "B6D0F8C5-A656-4289-95A0-AECA9EB18459": { + "Response Parameters": { + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "priorPreviousIssueDate": "\"20130424143714\"", + "lowerAgeLimit": "\"20041228\"", + "prescriptionMaxRepeats": "\"12\"", + "prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "higherAgeLimit": "\"20481229\"", + "lineItem1MaxRepeats": "\"12\"", + "prescriptionCurrentInstance": "\"3\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "lineItem1CurrentInstance": "\"12\"" + }, + "agentSystemSDS": "935002902515", + "fromStatus": "0001", + "toStatus": "0002", + "Source XSLT": "parentPrescription2_Release.xsl", + "agentPerson": "330051032515", + "interactionID": "PORX_IN060102UK30", + "timePreparedForUpdate": "20131202095154" + }, + "F5A95426-F05A-464F-B493-489DBDAD3564": { + "Response Parameters": { + "responseText": "\"Dispense notification successful\"", + "timeStampAck": "\"20131204T131437.137\"", + "refToMessageID": "\"F5A95426-F05A-464F-B493-489DBDAD3564\"", + "toASID": "\"935002902515\"", + "fromASID": "\"428081423512\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20131204131437\"", + "versionCode": "\"V4NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"9DD01822-C76E-4DC4-86B6-F5890DABA4A3\"", + "toPartyKey": "\"YEA-801248\"", + "fromPartyKey": "\"YGM94-810145\"", + "prescriptionID": "\"50EE48-B83002-490F7Q\"", + "refToEventID": "\"B92512E4-7E22-4D70-B27B-8FE88E6BE5E9\"" + }, + "agentSystemSDS": "935002902515", + "fromStatus": "0002", + "toStatus": "0006", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": "330051032515", + "interactionID": "PORX_IN080101UK31", + "timePreparedForUpdate": "20131204131437" + }, + "D3975194-1074-4030-91EE-1B405AA1C73B": { + "Response Parameters": { + "responseText": "\"Dispense claim successful\"", + "timeStampAck": "\"20140107T172529.139\"", + "refToMessageID": "\"D3975194-1074-4030-91EE-1B405AA1C73B\"", + "toASID": "\"935002902515\"", + "fromASID": "\"428081423512\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140107172529\"", + "versionCode": "\"V4NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"666281BE-9C6A-4C10-BDAC-BADEC55F29B8\"", + "toPartyKey": "\"YEA-801248\"", + "fromPartyKey": "\"YGM94-810145\"", + "prescriptionID": "\"50EE48-B83002-490F7Q\"", + "refToEventID": "\"F80E196A-7B56-4A7C-BAFE-7693215DEA1B\"" + }, + "agentSystemSDS": "935002902515", + "fromStatus": "9000", + "toStatus": "0000", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": "330051032515", + "interactionID": "PORX_IN090101UK31", + "timePreparedForUpdate": "20140107172529" + }, + 
"0B9177AB-C0D0-400F-8583-D7C37FAA2F10": { + "Response Parameters": { + "responseText": "\"Dispense Proposal Return Successful\"", + "timeStampAck": "\"20130517123119\"", + "toASID": "\"935002902515\"", + "fromASID": "\"not-provided-migrated\"", + "serviceASID": "\"not-provided-migrated\"", + "timeStampSent": "\"20130517123119\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"not-provided-migrated\"", + "messageID": "\"0B9177AB-C0D0-400F-8583-D7C37FAA2F10\"", + "refToEventID": "\"0B9177AB-C0D0-400F-8583-D7C37FAA2F10\"", + "refToMessageID": "\"0B9177AB-C0D0-400F-8583-D7C37FAA2F10\"", + "prescriptionID": "\"50EE48-B83002-490F7Q\"", + "messageMigratedFromSpine1": "\"true\"" + }, + "agentSystemSDS": "805017560014", + "fromStatus": "0002", + "toStatus": "0001", + "Source XSLT": "not-provided-migrated", + "agentPerson": "not-provided-migratedsetAgentPersonSDSPerson", + "interactionID": "PORX_IN100101UK31", + "timePreparedForUpdate": "20130517123119" + } + }, + "indexes": { + "prescribingSiteStatus_bin": [ + "B83002_0000", + "B83002_0008", + "B83002_9000" + ], + "nextActivityNAD_bin": [ + "ready_20131227" + ], + "prescDispDate_bin": [ + "B83002|FA740|20130128120100|R2|0000", + "B83002|FA740|20130128120100|R2|0008", + "B83002|FA740|20130128120100|R2|9000" + ], + "nhsNumberPrescDispDate_bin": [ + "9446370914|B83002|FA740|20130128120100|R2|0000", + "9446370914|B83002|FA740|20130128120100|R2|0008", + "9446370914|B83002|FA740|20130128120100|R2|9000" + ], + "dispensingSiteStatus_bin": [ + "FA740_0008" + ], + "nomPharmStatus_bin": [ + "FA740_0000", + "FA740_0008", + "FA740_9000" + ], + "prescriberDate_bin": [ + "B83002|20130128120100|R2|0000", + "B83002|20130128120100|R2|0008", + "B83002|20130128120100|R2|9000" + ], + "nhsNumber_bin": [ + "9446370914" + ], + "nhsNumberDate_bin": [ + "9446370914|20130128120100|R2|0000", + "9446370914|20130128120100|R2|0008", + "9446370914|20130128120100|R2|9000" + ], + "nhsNumberPrescriberDate_bin": [ + "9446370914|B83002|20130128120100|R2|0000", + "9446370914|B83002|20130128120100|R2|0008", + "9446370914|B83002|20130128120100|R2|9000" + ], + "dispenserDate_bin": [ + "FA740|20130128120100|R2|0000", + "FA740|20130128120100|R2|0008", + "FA740|20130128120100|R2|9000" + ], + "nhsNumberDispenserDate_bin": [ + "9446370914|FA740|20130128120100|R2|0000", + "9446370914|FA740|20130128120100|R2|0008", + "9446370914|FA740|20130128120100|R2|9000" + ] + }, + "instances": { + "3": { + "completionDate": "20131204", + "claim": { + "historicClaimGUIDs": false, + "claimStatus": "claimed", + "historicDispenseClaimMsgRef": false, + "dispenseClaimMsgRef": "20140107172529139127_718E63_1081759534", + "batchID": false, + "claimReceivedDate": "20140107", + "claimGUID": "F80E196A-7B56-4A7C-BAFE-7693215DEA1B", + "claimRebuild": false, + "batchNumber": false + }, + "nominatedDownloadDate": "20130517", + "lineItems": [ + { + "maxRepeats": "12", + "status": "0001", + "previousStatus": "0008", + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "lastDispenseStatus": "0006", + "dispense": { + "lastDispenseNotificationMsgRef": "20131204131437137297_674307_1081759534", + "lastDispenseNotificationGuid": "B92512E4-7E22-4D70-B27B-8FE88E6BE5E9", + "lastDispenseDate": "20131204", + "dispensingOrganization": "FA740" + }, + "nextActivity": { + "date": "20140116", + "activity": "delete" + }, + "previousIssueDate": "20130424143714", + "cancellations": [], + "dispenseHistory": { + "release": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + 
"dispensingOrganization": "FA740", + "lastDispenseDate": "20131202", + "lastDispenseNotificationGuid": false + }, + "lineItems": [ + { + "maxRepeats": "12", + "status": "0008", + "previousStatus": "0008", + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "prescriptionStatus": "0001", + "lastDispenseStatus": false + }, + "B92512E4-7E22-4D70-B27B-8FE88E6BE5E9": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "dispensingOrganization": "FA740", + "lastDispenseDate": "20131202", + "lastDispenseNotificationGuid": false + }, + "lineItems": [ + { + "maxRepeats": "12", + "status": "0008", + "previousStatus": "0007", + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "prescriptionStatus": "0002", + "lastDispenseStatus": false + } + }, + "instanceNumber": "3", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": "20131202", + "dispenseWindowHighDate": "20140127", + "dispenseWindowLowDate": "20130128", + "previousStatus": "0006", + "releaseRequestMsgRef": "20131202095154407105_877E02_1081759534", + "expiryDate": null, + "prescriptionStatus": "0008" + }, + "4": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "dispenseClaimMsgRef": false, + "batchID": false, + "claimReceivedDate": false, + "claimGUID": false, + "claimRebuild": false, + "batchNumber": false + }, + "nominatedDownloadDate": "20131227", + "lineItems": [ + { + "maxRepeats": "12", + "status": "0007", + "previousStatus": false, + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": false, + "dispensingOrganization": false + }, + "nextActivity": { + "date": "20131227", + "activity": "ready" + }, + "previousIssueDate": "20131204131440", + "cancellations": [], + "dispenseHistory": {}, + "instanceNumber": "4", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20130128", + "dispenseWindowLowDate": "20131204", + "previousStatus": "9000", + "releaseRequestMsgRef": false, + "expiryDate": "20140128", + "prescriptionStatus": "0000" + }, + "5": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "dispenseClaimMsgRef": false, + "batchID": false, + "claimReceivedDate": false, + "claimGUID": false, + "claimRebuild": false, + "batchNumber": false + }, + "nominatedDownloadDate": false, + "lineItems": [ + { + "maxRepeats": "12", + "status": "0007", + "previousStatus": false, + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": false, + "dispensingOrganization": false + }, + "nextActivity": { + "date": "20140128", + "activity": "expire" + }, + "previousIssueDate": false, + "cancellations": [], + "dispenseHistory": {}, + "instanceNumber": "5", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20130128", + "dispenseWindowLowDate": "20130128", + "previousStatus": false, + "releaseRequestMsgRef": false, + "expiryDate": "20140128", + "prescriptionStatus": "9000" + }, + "6": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + 
"historicDispenseClaimMsgRef": false, + "dispenseClaimMsgRef": false, + "batchID": false, + "claimReceivedDate": false, + "claimGUID": false, + "claimRebuild": false, + "batchNumber": false + }, + "nominatedDownloadDate": false, + "lineItems": [ + { + "maxRepeats": "12", + "status": "0007", + "previousStatus": false, + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": false, + "dispensingOrganization": false + }, + "nextActivity": { + "date": "20140128", + "activity": "expire" + }, + "previousIssueDate": false, + "cancellations": [], + "dispenseHistory": {}, + "instanceNumber": "6", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20130128", + "dispenseWindowLowDate": "20130128", + "previousStatus": false, + "releaseRequestMsgRef": false, + "expiryDate": "20140128", + "prescriptionStatus": "9000" + }, + "7": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "dispenseClaimMsgRef": false, + "batchID": false, + "claimReceivedDate": false, + "claimGUID": false, + "claimRebuild": false, + "batchNumber": false + }, + "nominatedDownloadDate": false, + "lineItems": [ + { + "maxRepeats": "12", + "status": "0007", + "previousStatus": false, + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": false, + "dispensingOrganization": false + }, + "nextActivity": { + "date": "20140128", + "activity": "expire" + }, + "previousIssueDate": false, + "cancellations": [], + "dispenseHistory": {}, + "instanceNumber": "7", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20130128", + "dispenseWindowLowDate": "20130128", + "previousStatus": false, + "releaseRequestMsgRef": false, + "expiryDate": "20140128", + "prescriptionStatus": "9000" + }, + "8": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "dispenseClaimMsgRef": false, + "batchID": false, + "claimReceivedDate": false, + "claimGUID": false, + "claimRebuild": false, + "batchNumber": false + }, + "nominatedDownloadDate": false, + "lineItems": [ + { + "maxRepeats": "12", + "status": "0007", + "previousStatus": false, + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": false, + "dispensingOrganization": false + }, + "nextActivity": { + "date": "20140128", + "activity": "expire" + }, + "previousIssueDate": false, + "cancellations": [], + "dispenseHistory": {}, + "instanceNumber": "8", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20130128", + "dispenseWindowLowDate": "20130128", + "previousStatus": false, + "releaseRequestMsgRef": false, + "expiryDate": "20140128", + "prescriptionStatus": "9000" + }, + "9": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "dispenseClaimMsgRef": false, + "batchID": false, + "claimReceivedDate": false, + "claimGUID": 
false, + "claimRebuild": false, + "batchNumber": false + }, + "nominatedDownloadDate": false, + "lineItems": [ + { + "maxRepeats": "12", + "status": "0007", + "previousStatus": false, + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": false, + "dispensingOrganization": false + }, + "nextActivity": { + "date": "20140128", + "activity": "expire" + }, + "previousIssueDate": false, + "cancellations": [], + "dispenseHistory": {}, + "instanceNumber": "9", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20130128", + "dispenseWindowLowDate": "20130128", + "previousStatus": false, + "releaseRequestMsgRef": false, + "expiryDate": "20140128", + "prescriptionStatus": "9000" + }, + "10": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "dispenseClaimMsgRef": false, + "batchID": false, + "claimReceivedDate": false, + "claimGUID": false, + "claimRebuild": false, + "batchNumber": false + }, + "nominatedDownloadDate": false, + "lineItems": [ + { + "maxRepeats": "12", + "status": "0007", + "previousStatus": false, + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": false, + "dispensingOrganization": false + }, + "nextActivity": { + "date": "20140128", + "activity": "expire" + }, + "previousIssueDate": false, + "cancellations": [], + "dispenseHistory": {}, + "instanceNumber": "10", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20130128", + "dispenseWindowLowDate": "20130128", + "previousStatus": false, + "releaseRequestMsgRef": false, + "expiryDate": "20140128", + "prescriptionStatus": "9000" + }, + "11": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "dispenseClaimMsgRef": false, + "batchID": false, + "claimReceivedDate": false, + "claimGUID": false, + "claimRebuild": false, + "batchNumber": false + }, + "nominatedDownloadDate": false, + "lineItems": [ + { + "maxRepeats": "12", + "status": "0007", + "previousStatus": false, + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": false, + "dispensingOrganization": false + }, + "nextActivity": { + "date": "20140128", + "activity": "expire" + }, + "previousIssueDate": false, + "cancellations": [], + "dispenseHistory": {}, + "instanceNumber": "11", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20130128", + "dispenseWindowLowDate": "20130128", + "previousStatus": false, + "releaseRequestMsgRef": false, + "expiryDate": "20140128", + "prescriptionStatus": "9000" + }, + "12": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "dispenseClaimMsgRef": false, + "batchID": false, + "claimReceivedDate": false, + "claimGUID": false, + "claimRebuild": false, + "batchNumber": false + }, + "nominatedDownloadDate": false, + "lineItems": [ + { + 
"maxRepeats": "12", + "status": "0007", + "previousStatus": false, + "order": 1, + "ID": "D459547E-3C98-403B-E040-950AE073319B" + } + ], + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": false, + "dispensingOrganization": false + }, + "nextActivity": { + "date": "20140128", + "activity": "expire" + }, + "previousIssueDate": false, + "cancellations": [], + "dispenseHistory": {}, + "instanceNumber": "12", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20130128", + "dispenseWindowLowDate": "20130128", + "previousStatus": false, + "releaseRequestMsgRef": false, + "expiryDate": "20140128", + "prescriptionStatus": "9000" + } + }, + "prescription": { + "prescriptionTreatmentType": "0003", + "unsuccessfulCancellations": false, + "daysSupply": 28, + "prescriptionTime": "20130128120100", + "hl7": { + "eventID": "A247DB87-FF0E-3715-B59E-2090CCF61E32", + "agentPersonSDSRole": "not-provided-migratedsetAgentPersonSDSRole", + "agentPersonSDSPerson": "not-provided-migratedsetAgentPersonSDSPerson", + "agentSystemSDS1CodeSystem": "1.2.826.0.1285.0.2.0.107", + "fromASID": "276827251543", + "toASID": "not-provided-migrated", + "messageID": "A247DB87-FF0E-3715-B59E-2090CCF61E32", + "agentSystemSDS2": "not-provided-migratedsetAgentSystemSDS2", + "agentSystemSDS1": "805017560014", + "agentPersonSDSTarget": "not-provided-migratedsetAgentPersonSDSTarget", + "interactionID": "PORX_IN020101UK31", + "agentPersonSDSCodeSystem": "not-provided-migratedsetAgentPersonSDSCodeSystem", + "messageVersion": "V3NPfIT4.2.00" + }, + "pendingCancellations": false, + "signedTime": "20130128120100", + "prescriptionPresent": true, + "prescriptionID": "50EE48-B83002-490F7Q", + "maxRepeats": 12, + "currentInstance": "4", + "prescriptionMsgRef": "201311290117241541325_A247DB_14", + "prescribingOrganization": "B83002", + "migratedFromSpine1": true + }, + "nomination": { + "nominatedPerformerType": "P1", + "nominatedPerformer": "FA740", + "nominated": true, + "nominationHistory": [] + } +} diff --git a/tests/common/prescription/resources/7D9625-Z72BF2-11E3A.json b/tests/common/prescription/resources/7D9625-Z72BF2-11E3A.json new file mode 100644 index 0000000..b7799fc --- /dev/null +++ b/tests/common/prescription/resources/7D9625-Z72BF2-11E3A.json @@ -0,0 +1,736 @@ +{ + + "documents": [ + "20140408144130355815_6BB2F0_2", + "20140408144130582188_F41F3F_2", + "20140408144130854644_0CC025_2", + "20140408144131797294_419E3E_2", + "20140408144132021702_28EA27_2", + "20140408144132365276_126328_2", + "20140408144132603745_6BF1FB_2" + ], + "patient": { + "lowerAgeLimit": "19960419", + "birthTime": "19800420", + "nhsNumber": "9990406707", + "higherAgeLimit": "20400420" + }, + "changeLog": { + "7E952DF6-BF23-11E3-A209-080027D20F8F": { + "SCN": 8, + "InternalID": "20140408144132021702_28EA27_2", + "Response Parameters": { + "responseText": "\"Dispense notification successful\"", + "timeStampAck": "\"20140408T144132.021\"", + "refToMessageID": "\"7E952DF6-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144132\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"94AD2116-C8AD-4D89-B642-762681816266\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + 
"refToEventID": "\"7D962AEA-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144132", + "agentSystemSDS": "230811201324", + "fromStatus": "0002", + "toStatus": "0006", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN080101UK31", + "timePreparedForUpdate": "20140408144132" + }, + "7DE2730A-BF23-11E3-A209-080027D20F8F": { + "SCN": 4, + "InternalID": "20140408144130854644_0CC025_2", + "Response Parameters": { + "responseText": "\"Dispense notification successful\"", + "timeStampAck": "\"20140408T144130.854\"", + "refToMessageID": "\"7DE2730A-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144130\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"8BA82B48-70FE-4679-ADA5-BEB7DA94D3FC\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"7D96291E-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144130", + "agentSystemSDS": "230811201324", + "fromStatus": "0002", + "toStatus": "0006", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN080101UK31", + "timePreparedForUpdate": "20140408144130" + }, + "BE63F8AC-7A37-404A-B470-74BEAE2C52EC": { + "SCN": 6, + "InternalID": "20140408144131368328_FED07C_BATCH", + "Response Parameters": { + "responseText": "\"Updated by Routine Admin Batch worker\"", + "timeStampAck": "\"20140408T144131.368\"", + "refToMessageID": "\"BE63F8AC-7A37-404A-B470-74BEAE2C52EC\"", + "toASID": "\"SpineInternalSource\"", + "fromASID": "\"338068513039\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144131\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"EFAD8454-E60D-40E4-AC42-2E4846E5D4D2\"", + "toPartyKey": "\"None\"", + "fromPartyKey": "\"None\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"BE63F8AC-7A37-404A-B470-74BEAE2C52EC\"" + }, + "Timestamp": "20140408144131", + "agentSystemSDS": "SpineInternalSource", + "fromStatus": "9000", + "toStatus": "0000", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN999002UK01", + "timePreparedForUpdate": "20140408144131" + }, + "7DBA3520-BF23-11E3-A209-080027D20F8F": { + "SCN": 3, + "InternalID": "20140408144130582188_F41F3F_2", + "Response Parameters": { + "responseText": "\"Release Request successful\"", + "lowerAgeLimit": "\"19960419\"", + "prescriptionMaxRepeats": "\"3\"", + "lineItem1MaxRepeats": "\"3\"", + "responseAction": "\"PORX_IN070103UK31\"", + "toPartyKey": "\"YEA-0000806\"", + "refToEventID": "\"7DBA3840-BF23-11E3-A209-080027D20F8F\"", + "timeStampAck": "\"20140408T144130.582\"", + "refToMessageID": "\"7DBA3520-BF23-11E3-A209-080027D20F8F\"", + "lineItem2StatusDisplayName": "\"To Be Dispensed\"", + "lineItem2MaxRepeats": "\"3\"", + "prescriptionCurrentInstance": "\"1\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "fromASID": "\"990101234567\"", + "toASID": "\"230811201324\"", + "higherAgeLimit": "\"20400420\"", + "serviceASID": "\"618729461037\"", + "lineItem2Status": "\"0007\"", + "messageID": 
"\"3135A4B1-2BFE-43A1-8C11-D4374E7BA902\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "lineItem1CurrentInstance": "\"1\"", + "lineItem2CurrentInstance": "\"1\"", + "prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "timeStampSent": "\"20140408144130\"" + }, + "Timestamp": "20140408144130", + "agentSystemSDS": "230811201324", + "fromStatus": "0001", + "toStatus": "0002", + "Source XSLT": "createResponseFramework_PORX_RM122002UK31.xsl", + "agentPerson": null, + "interactionID": "PORX_IN132004UK30", + "timePreparedForUpdate": "20140408144130" + }, + "7E73C9FE-BF23-11E3-A209-080027D20F8F": { + "SCN": 7, + "InternalID": "20140408144131797294_419E3E_2", + "Response Parameters": { + "responseText": "\"Release Request successful\"", + "lowerAgeLimit": "\"19960419\"", + "prescriptionMaxRepeats": "\"3\"", + "lineItem1MaxRepeats": "\"3\"", + "responseAction": "\"PORX_IN070103UK31\"", + "toPartyKey": "\"YEA-0000806\"", + "refToEventID": "\"7E73CD00-BF23-11E3-A209-080027D20F8F\"", + "timeStampAck": "\"20140408T144131.797\"", + "refToMessageID": "\"7E73C9FE-BF23-11E3-A209-080027D20F8F\"", + "lineItem2StatusDisplayName": "\"To Be Dispensed\"", + "lineItem2MaxRepeats": "\"3\"", + "prescriptionCurrentInstance": "\"2\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "fromASID": "\"990101234567\"", + "toASID": "\"230811201324\"", + "higherAgeLimit": "\"20400420\"", + "serviceASID": "\"618729461037\"", + "lineItem2Status": "\"0007\"", + "messageID": "\"DDDC4D00-6211-42ED-9DCE-06A841881916\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "lineItem1CurrentInstance": "\"2\"", + "lineItem2CurrentInstance": "\"2\"", + "priorPreviousIssueDate": "\"20140408144130\"", + "prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "timeStampSent": "\"20140408144131\"" + }, + "Timestamp": "20140408144131", + "agentSystemSDS": "230811201324", + "fromStatus": "0000", + "toStatus": "0002", + "Source XSLT": "createResponseFramework_PORX_RM122002UK31.xsl", + "agentPerson": null, + "interactionID": "PORX_IN132004UK30", + "timePreparedForUpdate": "20140408144131" + }, + "7ECA9216-BF23-11E3-A209-080027D20F8F": { + "SCN": 9, + "InternalID": "20140408144132365276_126328_2", + "Response Parameters": { + "responseText": "\"Release Request successful\"", + "lowerAgeLimit": "\"19960419\"", + "prescriptionMaxRepeats": "\"3\"", + "lineItem1MaxRepeats": "\"3\"", + "responseAction": "\"PORX_IN070103UK31\"", + "toPartyKey": "\"YEA-0000806\"", + "refToEventID": "\"7ECA9450-BF23-11E3-A209-080027D20F8F\"", + "timeStampAck": "\"20140408T144132.365\"", + "refToMessageID": "\"7ECA9216-BF23-11E3-A209-080027D20F8F\"", + "lineItem2StatusDisplayName": "\"To Be Dispensed\"", + "lineItem2MaxRepeats": "\"3\"", + "prescriptionCurrentInstance": "\"3\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "fromASID": "\"990101234567\"", + "toASID": "\"230811201324\"", + "higherAgeLimit": "\"20400420\"", + "serviceASID": "\"618729461037\"", + "lineItem2Status": "\"0007\"", + "messageID": "\"BA9E58F7-09F0-4D99-8859-2CCE768349C4\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "lineItem1CurrentInstance": "\"3\"", + "lineItem2CurrentInstance": "\"3\"", + "priorPreviousIssueDate": "\"20140408144130\"", + 
"prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "timeStampSent": "\"20140408144132\"" + }, + "Timestamp": "20140408144132", + "agentSystemSDS": "230811201324", + "fromStatus": "0000", + "toStatus": "0002", + "Source XSLT": "createResponseFramework_PORX_RM122002UK31.xsl", + "agentPerson": null, + "interactionID": "PORX_IN132004UK30", + "timePreparedForUpdate": "20140408144132" + }, + "7D96F876-BF23-11E3-A209-080027D20F8F": { + "SCN": 2, + "InternalID": "20140408144130355815_6BB2F0_2", + "Response Parameters": { + "responseText": "\"Prescription upload successful\"", + "timeStampAck": "\"20140408T144130.355\"", + "refToMessageID": "\"7D96F876-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144130\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"F82CF38D-0E5B-4ECA-836B-EA3079C2162A\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"7D96FB00-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144130", + "agentSystemSDS": "230811201324", + "fromStatus": false, + "toStatus": "0001", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN020101UK31", + "timePreparedForUpdate": "20140408144130" + }, + "7E0C360E-BF23-11E3-A209-080027D20F8F": { + "SCN": 5, + "InternalID": "20140408144131121178_3263F7_2", + "Response Parameters": { + "responseText": "\"Administrative update successful\"", + "timeStampAck": "\"20140408T144131.121\"", + "refToMessageID": "\"7E0C360E-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144131\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"EBA6ED27-F7DD-4940-BAB2-C39EA0737138\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"7E0C360E-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144131", + "agentSystemSDS": "230811201324", + "fromStatus": "9000", + "toStatus": "0000", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN999000UK01", + "timePreparedForUpdate": "20140408144131" + }, + "7EEDA49A-BF23-11E3-A209-080027D20F8F": { + "SCN": 10, + "InternalID": "20140408144132603745_6BF1FB_2", + "Response Parameters": { + "responseText": "\"Dispense notification successful\"", + "timeStampAck": "\"20140408T144132.603\"", + "refToMessageID": "\"7EEDA49A-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144132\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"EF7C7D0B-9686-4B25-B143-F780F4E5B6CB\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"7D963530-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144132", + "agentSystemSDS": "230811201324", + "fromStatus": "0002", + "toStatus": "0006", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN080101UK31", + 
"timePreparedForUpdate": "20140408144132" + } + }, + "indexes": { + "prescribingSiteStatus_bin": [ + "Z99901_0006", + "Z99901_0009" + ], + "nextActivityNAD_bin": [ + "delete_20140417" + ], + "prescDispDate_bin": [ + "Z99901|F001M|20140408144130|R2|0006", + "Z99901|F001M|20140408144130|R2|0009" + ], + "nhsNumberPrescDispDate_bin": [ + "9990406707|Z99901|F001M|20140408144130|R2|0006", + "9990406707|Z99901|F001M|20140408144130|R2|0009" + ], + "dispensingSiteStatus_bin": [ + "F001M_0009", + "F001M_0006" + ], + "nomPharmStatus_bin": [ + "F001M_0006", + "F001M_0009" + ], + "prescriberDate_bin": [ + "Z99901|20140408144130|R2|0006", + "Z99901|20140408144130|R2|0009" + ], + "nhsNumber_bin": [ + "9990406707" + ], + "delta_bin": [ + "20140408144132|9" + ], + "nhsNumberDate_bin": [ + "9990406707|20140408144130|R2|0006", + "9990406707|20140408144130|R2|0009" + ], + "nhsNumberPrescriberDate_bin": [ + "9990406707|Z99901|20140408144130|R2|0006", + "9990406707|Z99901|20140408144130|R2|0009" + ], + "dispenserDate_bin": [ + "F001M|20140408144130|R2|0006", + "F001M|20140408144130|R2|0009" + ], + "nhsNumberDispenserDate_bin": [ + "9990406707|F001M|20140408144130|R2|0006", + "9990406707|F001M|20140408144130|R2|0009" + ] + }, + "instances": { + "1": { + "completionDate": "20140408", + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": "20140408" + }, + "nominatedDownloadDate": false, + "previousStatus": "0006", + "lastDispenseStatus": "0006", + "dispense": { + "lastDispenseNotificationMsgRef": "20140408144130854644_0CC025_2", + "dispensingOrganization": "F001M", + "lastDispenseDate": "20130930", + "lastDispenseNotificationGuid": "7D96291E-BF23-11E3-A209-080027D20F8F" + }, + "nextActivity": { + "date": "20140417", + "activity": "delete" + }, + "cancellations": [ ], + "previousIssueDate": false, + "dispenseHistory": { + "release": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0001", + "lastDispenseStatus": false + }, + "7D96291E-BF23-11E3-A209-080027D20F8F": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0002", + "lastDispenseStatus": false + } + }, + "instanceNumber": "1", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": "20140408", + "dispenseWindowHighDate": "20140418", + "dispenseWindowLowDate": "20140408", + "lineItems": [ + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + 
}, + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": "20140408144130582188_F41F3F_2", + "expiryDate": null, + "prescriptionStatus": "0009" + }, + "2": { + "completionDate": "20140408", + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": false + }, + "nominatedDownloadDate": "20140501", + "previousStatus": "0002", + "lastDispenseStatus": "0006", + "dispense": { + "lastDispenseNotificationMsgRef": "20140408144132021702_28EA27_2", + "dispensingOrganization": "F001M", + "lastDispenseDate": "20140408", + "lastDispenseNotificationGuid": "7D962AEA-BF23-11E3-A209-080027D20F8F" + }, + "nextActivity": { + "date": "20141005", + "activity": "createNoClaim" + }, + "cancellations": [ ], + "previousIssueDate": "20140408144130", + "dispenseHistory": { + "release": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0000", + "lastDispenseStatus": false + }, + "7D962AEA-BF23-11E3-A209-080027D20F8F": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0002", + "lastDispenseStatus": false + } + }, + "instanceNumber": "2", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": "20140408", + "dispenseWindowHighDate": "20140418", + "dispenseWindowLowDate": "20140408", + "lineItems": [ + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": "20140408144131797294_419E3E_2", + "expiryDate": null, + "prescriptionStatus": "0006" + }, + "3": { + "completionDate": "20140408", + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": false + }, + "nominatedDownloadDate": "20140501", + "previousStatus": "0002", + "lastDispenseStatus": "0006", + "dispense": { + "lastDispenseNotificationMsgRef": "20140408144132603745_6BF1FB_2", + "dispensingOrganization": "F001M", + "lastDispenseDate": "20140408", + "lastDispenseNotificationGuid": "7D963530-BF23-11E3-A209-080027D20F8F" + }, + "nextActivity": { + "date": "20141005", + "activity": 
"createNoClaim" + }, + "cancellations": [ ], + "previousIssueDate": "20140408144130", + "dispenseHistory": { + "release": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0000", + "lastDispenseStatus": false + }, + "7D963530-BF23-11E3-A209-080027D20F8F": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0002", + "lastDispenseStatus": false + } + }, + "instanceNumber": "3", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": "20140408", + "dispenseWindowHighDate": "20140418", + "dispenseWindowLowDate": "20140408", + "lineItems": [ + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": "20140408144132365276_126328_2", + "expiryDate": null, + "prescriptionStatus": "0006" + } + }, + "SCN": 10, + "prescription": { + "prescriptionType": "0001", + "SCN": false, + "prescriptionTreatmentType": "0003", + "signedTime": "20140408144130", + "prescriptionMsgRef": "20140408144130355815_6BB2F0_2", + "prescribingOrganization": "Z99901", + "prescriptionTime": "20140408144130", + "pendingCancellations": false, + "unsuccessfulCancellations": false, + "hl7": { + "eventID": "7D96FB00-BF23-11E3-A209-080027D20F8F", + "agentPersonSDSRole": null, + "wsaMessageID": "", + "agentSystemSDS1CodeSystem": "1.2.826.0.1285.0.2.0.107", + "fromASID": "230811201324", + "toASID": "990101234567", + "agentSystemSDS2": null, + "agentSystemSDS1": "230811201324", + "agentPersonSDSPerson": null, + "messageID": "7D96F876-BF23-11E3-A209-080027D20F8F", + "interactionID": "PORX_IN020101UK31", + "agentPersonSDSTargetRole": null, + "agentPersonSDSCodeSystem": null, + "messageVersion": "V3NPfIT4.2.00", + "agentPersonOrgCode": null + }, + "prescriptionPresent": true, + "prescriptionID": "7D9625-Z72BF2-11E3AC", + "maxRepeats": 3, + "currentInstance": "3", + "daysSupply": 28 + }, + "nomination": { + "nominatedPerformerType": "P1", + "nominatedPerformer": "F001M", + "nominated": true, + "nominationHistory": [ ] + } + +} diff --git a/tests/common/prescription/resources/7D9625-Z72BF2-11E3B.json b/tests/common/prescription/resources/7D9625-Z72BF2-11E3B.json new file mode 100644 index 0000000..22185be --- /dev/null +++ b/tests/common/prescription/resources/7D9625-Z72BF2-11E3B.json @@ -0,0 +1,736 @@ +{ + + "documents": [ + "20140408144130355815_6BB2F0_2", + "20140408144130582188_F41F3F_2", + "20140408144130854644_0CC025_2", + 
"20140408144131797294_419E3E_2", + "20140408144132021702_28EA27_2", + "20140408144132365276_126328_2", + "20140408144132603745_6BF1FB_2" + ], + "patient": { + "lowerAgeLimit": "19960419", + "birthTime": "19800420", + "nhsNumber": "9990406707", + "higherAgeLimit": "20400420" + }, + "changeLog": { + "7E952DF6-BF23-11E3-A209-080027D20F8F": { + "SCN": 8, + "InternalID": "20140408144132021702_28EA27_2", + "Response Parameters": { + "responseText": "\"Dispense notification successful\"", + "timeStampAck": "\"20140408T144132.021\"", + "refToMessageID": "\"7E952DF6-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144132\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"94AD2116-C8AD-4D89-B642-762681816266\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"7D962AEA-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144132", + "agentSystemSDS": "230811201324", + "fromStatus": "0002", + "toStatus": "0006", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN080101UK31", + "timePreparedForUpdate": "20140408144132" + }, + "7DE2730A-BF23-11E3-A209-080027D20F8F": { + "SCN": 4, + "InternalID": "20140408144130854644_0CC025_2", + "Response Parameters": { + "responseText": "\"Dispense notification successful\"", + "timeStampAck": "\"20140408T144130.854\"", + "refToMessageID": "\"7DE2730A-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144130\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"8BA82B48-70FE-4679-ADA5-BEB7DA94D3FC\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"7D96291E-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144130", + "agentSystemSDS": "230811201324", + "fromStatus": "0002", + "toStatus": "0006", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN080101UK31", + "timePreparedForUpdate": "20140408144130" + }, + "BE63F8AC-7A37-404A-B470-74BEAE2C52EC": { + "SCN": 6, + "InternalID": "20140408144131368328_FED07C_BATCH", + "Response Parameters": { + "responseText": "\"Updated by Routine Admin Batch worker\"", + "timeStampAck": "\"20140408T144131.368\"", + "refToMessageID": "\"BE63F8AC-7A37-404A-B470-74BEAE2C52EC\"", + "toASID": "\"SpineInternalSource\"", + "fromASID": "\"338068513039\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144131\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"EFAD8454-E60D-40E4-AC42-2E4846E5D4D2\"", + "toPartyKey": "\"None\"", + "fromPartyKey": "\"None\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"BE63F8AC-7A37-404A-B470-74BEAE2C52EC\"" + }, + "Timestamp": "20140408144131", + "agentSystemSDS": "SpineInternalSource", + "fromStatus": "9000", + "toStatus": "0000", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN999002UK01", + "timePreparedForUpdate": "20140408144131" + }, + "7DBA3520-BF23-11E3-A209-080027D20F8F": { + "SCN": 3, + "InternalID": 
"20140408144130582188_F41F3F_2", + "Response Parameters": { + "responseText": "\"Release Request successful\"", + "lowerAgeLimit": "\"19960419\"", + "prescriptionMaxRepeats": "\"3\"", + "lineItem1MaxRepeats": "\"3\"", + "responseAction": "\"PORX_IN070103UK31\"", + "toPartyKey": "\"YEA-0000806\"", + "refToEventID": "\"7DBA3840-BF23-11E3-A209-080027D20F8F\"", + "timeStampAck": "\"20140408T144130.582\"", + "refToMessageID": "\"7DBA3520-BF23-11E3-A209-080027D20F8F\"", + "lineItem2StatusDisplayName": "\"To Be Dispensed\"", + "lineItem2MaxRepeats": "\"3\"", + "prescriptionCurrentInstance": "\"1\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "fromASID": "\"990101234567\"", + "toASID": "\"230811201324\"", + "higherAgeLimit": "\"20400420\"", + "serviceASID": "\"618729461037\"", + "lineItem2Status": "\"0007\"", + "messageID": "\"3135A4B1-2BFE-43A1-8C11-D4374E7BA902\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "lineItem1CurrentInstance": "\"1\"", + "lineItem2CurrentInstance": "\"1\"", + "prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "timeStampSent": "\"20140408144130\"" + }, + "Timestamp": "20140408144130", + "agentSystemSDS": "230811201324", + "fromStatus": "0001", + "toStatus": "0002", + "Source XSLT": "createResponseFramework_PORX_RM122002UK31.xsl", + "agentPerson": null, + "interactionID": "PORX_IN132004UK30", + "timePreparedForUpdate": "20140408144130" + }, + "7E73C9FE-BF23-11E3-A209-080027D20F8F": { + "SCN": 7, + "InternalID": "20140408144131797294_419E3E_2", + "Response Parameters": { + "responseText": "\"Release Request successful\"", + "lowerAgeLimit": "\"19960419\"", + "prescriptionMaxRepeats": "\"3\"", + "lineItem1MaxRepeats": "\"3\"", + "responseAction": "\"PORX_IN070103UK31\"", + "toPartyKey": "\"YEA-0000806\"", + "refToEventID": "\"7E73CD00-BF23-11E3-A209-080027D20F8F\"", + "timeStampAck": "\"20140408T144131.797\"", + "refToMessageID": "\"7E73C9FE-BF23-11E3-A209-080027D20F8F\"", + "lineItem2StatusDisplayName": "\"To Be Dispensed\"", + "lineItem2MaxRepeats": "\"3\"", + "prescriptionCurrentInstance": "\"2\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "fromASID": "\"990101234567\"", + "toASID": "\"230811201324\"", + "higherAgeLimit": "\"20400420\"", + "serviceASID": "\"618729461037\"", + "lineItem2Status": "\"0007\"", + "messageID": "\"DDDC4D00-6211-42ED-9DCE-06A841881916\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "lineItem1CurrentInstance": "\"2\"", + "lineItem2CurrentInstance": "\"2\"", + "priorPreviousIssueDate": "\"20140408144130\"", + "prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "timeStampSent": "\"20140408144131\"" + }, + "Timestamp": "20140408144131", + "agentSystemSDS": "230811201324", + "fromStatus": "0000", + "toStatus": "0002", + "Source XSLT": "createResponseFramework_PORX_RM122002UK31.xsl", + "agentPerson": null, + "interactionID": "PORX_IN132004UK30", + "timePreparedForUpdate": "20140408144131" + }, + "7ECA9216-BF23-11E3-A209-080027D20F8F": { + "SCN": 9, + "InternalID": "20140408144132365276_126328_2", + "Response Parameters": { + "responseText": "\"Release Request successful\"", + "lowerAgeLimit": "\"19960419\"", + "prescriptionMaxRepeats": "\"3\"", + "lineItem1MaxRepeats": "\"3\"", + "responseAction": 
"\"PORX_IN070103UK31\"", + "toPartyKey": "\"YEA-0000806\"", + "refToEventID": "\"7ECA9450-BF23-11E3-A209-080027D20F8F\"", + "timeStampAck": "\"20140408T144132.365\"", + "refToMessageID": "\"7ECA9216-BF23-11E3-A209-080027D20F8F\"", + "lineItem2StatusDisplayName": "\"To Be Dispensed\"", + "lineItem2MaxRepeats": "\"3\"", + "prescriptionCurrentInstance": "\"3\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "lineItem1Status": "\"0007\"", + "prescriptionStatus": "\"0001\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "lineItem1StatusDisplayName": "\"To Be Dispensed\"", + "fromASID": "\"990101234567\"", + "toASID": "\"230811201324\"", + "higherAgeLimit": "\"20400420\"", + "serviceASID": "\"618729461037\"", + "lineItem2Status": "\"0007\"", + "messageID": "\"BA9E58F7-09F0-4D99-8859-2CCE768349C4\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "lineItem1CurrentInstance": "\"3\"", + "lineItem2CurrentInstance": "\"3\"", + "priorPreviousIssueDate": "\"20140408144130\"", + "prescriptionStatusDisplayName": "\"To Be Dispensed\"", + "timeStampSent": "\"20140408144132\"" + }, + "Timestamp": "20140408144132", + "agentSystemSDS": "230811201324", + "fromStatus": "0000", + "toStatus": "0002", + "Source XSLT": "createResponseFramework_PORX_RM122002UK31.xsl", + "agentPerson": null, + "interactionID": "PORX_IN132004UK30", + "timePreparedForUpdate": "20140408144132" + }, + "7D96F876-BF23-11E3-A209-080027D20F8F": { + "SCN": 2, + "InternalID": "20140408144130355815_6BB2F0_2", + "Response Parameters": { + "responseText": "\"Prescription upload successful\"", + "timeStampAck": "\"20140408T144130.355\"", + "refToMessageID": "\"7D96F876-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144130\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"F82CF38D-0E5B-4ECA-836B-EA3079C2162A\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"7D96FB00-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144130", + "agentSystemSDS": "230811201324", + "fromStatus": false, + "toStatus": "0001", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN020101UK31", + "timePreparedForUpdate": "20140408144130" + }, + "7E0C360E-BF23-11E3-A209-080027D20F8F": { + "SCN": 5, + "InternalID": "20140408144131121178_3263F7_2", + "Response Parameters": { + "responseText": "\"Administrative update successful\"", + "timeStampAck": "\"20140408T144131.121\"", + "refToMessageID": "\"7E0C360E-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144131\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"EBA6ED27-F7DD-4940-BAB2-C39EA0737138\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"7E0C360E-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144131", + "agentSystemSDS": "230811201324", + "fromStatus": "9000", + "toStatus": "0000", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN999000UK01", + "timePreparedForUpdate": "20140408144131" + }, + "7EEDA49A-BF23-11E3-A209-080027D20F8F": { + "SCN": 10, + 
"InternalID": "20140408144132603745_6BF1FB_2", + "Response Parameters": { + "responseText": "\"Dispense notification successful\"", + "timeStampAck": "\"20140408T144132.603\"", + "refToMessageID": "\"7EEDA49A-BF23-11E3-A209-080027D20F8F\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140408144132\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"EF7C7D0B-9686-4B25-B143-F780F4E5B6CB\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"7D9625-Z72BF2-11E3AC\"", + "refToEventID": "\"7D963530-BF23-11E3-A209-080027D20F8F\"" + }, + "Timestamp": "20140408144132", + "agentSystemSDS": "230811201324", + "fromStatus": "0002", + "toStatus": "0006", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN080101UK31", + "timePreparedForUpdate": "20140408144132" + } + }, + "indexes": { + "prescribingSiteStatus_bin": [ + "Z99901_0006", + "Z99901_0009" + ], + "nextActivityNAD_bin": [ + "purge_20160417" + ], + "prescDispDate_bin": [ + "Z99901|F001M|20140408144130|R2|0006", + "Z99901|F001M|20140408144130|R2|0009" + ], + "nhsNumberPrescDispDate_bin": [ + "9990406707|Z99901|F001M|20140408144130|R2|0006", + "9990406707|Z99901|F001M|20140408144130|R2|0009" + ], + "dispensingSiteStatus_bin": [ + "F001M_0009", + "F001M_0006" + ], + "nomPharmStatus_bin": [ + "F001M_0006", + "F001M_0009" + ], + "prescriberDate_bin": [ + "Z99901|20140408144130|R2|0006", + "Z99901|20140408144130|R2|0009" + ], + "nhsNumber_bin": [ + "9990406707" + ], + "delta_bin": [ + "20140408144132|9" + ], + "nhsNumberDate_bin": [ + "9990406707|20140408144130|R2|0006", + "9990406707|20140408144130|R2|0009" + ], + "nhsNumberPrescriberDate_bin": [ + "9990406707|Z99901|20140408144130|R2|0006", + "9990406707|Z99901|20140408144130|R2|0009" + ], + "dispenserDate_bin": [ + "F001M|20140408144130|R2|0006", + "F001M|20140408144130|R2|0009" + ], + "nhsNumberDispenserDate_bin": [ + "9990406707|F001M|20140408144130|R2|0006", + "9990406707|F001M|20140408144130|R2|0009" + ] + }, + "instances": { + "1": { + "completionDate": "20140408", + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": "20140408" + }, + "nominatedDownloadDate": false, + "previousStatus": "0006", + "lastDispenseStatus": "0006", + "dispense": { + "lastDispenseNotificationMsgRef": "20140408144130854644_0CC025_2", + "dispensingOrganization": "F001M", + "lastDispenseDate": "20130930", + "lastDispenseNotificationGuid": "7D96291E-BF23-11E3-A209-080027D20F8F" + }, + "nextActivity": { + "date": "20160417", + "activity": "purge" + }, + "cancellations": [ ], + "previousIssueDate": false, + "dispenseHistory": { + "release": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0001", + "lastDispenseStatus": 
false + }, + "7D96291E-BF23-11E3-A209-080027D20F8F": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0002", + "lastDispenseStatus": false + } + }, + "instanceNumber": "1", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": "20140408", + "dispenseWindowHighDate": "20140418", + "dispenseWindowLowDate": "20140408", + "lineItems": [ + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": "20140408144130582188_F41F3F_2", + "expiryDate": null, + "prescriptionStatus": "0009" + }, + "2": { + "completionDate": "20140408", + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": false + }, + "nominatedDownloadDate": "20140501", + "previousStatus": "0002", + "lastDispenseStatus": "0006", + "dispense": { + "lastDispenseNotificationMsgRef": "20140408144132021702_28EA27_2", + "dispensingOrganization": "F001M", + "lastDispenseDate": "20140408", + "lastDispenseNotificationGuid": "7D962AEA-BF23-11E3-A209-080027D20F8F" + }, + "nextActivity": { + "date": "20141005", + "activity": "createNoClaim" + }, + "cancellations": [ ], + "previousIssueDate": "20140408144130", + "dispenseHistory": { + "release": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0000", + "lastDispenseStatus": false + }, + "7D962AEA-BF23-11E3-A209-080027D20F8F": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0002", + "lastDispenseStatus": false + } + }, + "instanceNumber": "2", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": "20140408", + "dispenseWindowHighDate": "20140418", + "dispenseWindowLowDate": "20140408", + "lineItems": [ + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + 
}, + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": "20140408144131797294_419E3E_2", + "expiryDate": null, + "prescriptionStatus": "0006" + }, + "3": { + "completionDate": "20140408", + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": false + }, + "nominatedDownloadDate": "20140501", + "previousStatus": "0002", + "lastDispenseStatus": "0006", + "dispense": { + "lastDispenseNotificationMsgRef": "20140408144132603745_6BF1FB_2", + "dispensingOrganization": "F001M", + "lastDispenseDate": "20140408", + "lastDispenseNotificationGuid": "7D963530-BF23-11E3-A209-080027D20F8F" + }, + "nextActivity": { + "date": "20141005", + "activity": "createNoClaim" + }, + "cancellations": [ ], + "previousIssueDate": "20140408144130", + "dispenseHistory": { + "release": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0000", + "lastDispenseStatus": false + }, + "7D963530-BF23-11E3-A209-080027D20F8F": { + "completionDate": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "lastDispenseNotificationGuid": false, + "lastDispenseDate": "20140408", + "dispensingOrganization": "F001M" + }, + "lineItems": [ + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0008", + "maxRepeats": "3", + "previousStatus": "0007", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "prescriptionStatus": "0002", + "lastDispenseStatus": false + } + }, + "instanceNumber": "3", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": "20140408", + "dispenseWindowHighDate": "20140418", + "dispenseWindowLowDate": "20140408", + "lineItems": [ + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0001", + "maxRepeats": "3", + "previousStatus": "0008", + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": "20140408144132365276_126328_2", + "expiryDate": null, + "prescriptionStatus": "0006" + } + }, + "SCN": 10, + "prescription": { + "prescriptionType": "0001", + "SCN": false, + "prescriptionTreatmentType": "0003", + "signedTime": "20140408144130", + "prescriptionMsgRef": "20140408144130355815_6BB2F0_2", + "prescribingOrganization": "Z99901", + "prescriptionTime": "20140408144130", + "pendingCancellations": false, + "unsuccessfulCancellations": false, + "hl7": { + "eventID": "7D96FB00-BF23-11E3-A209-080027D20F8F", + "agentPersonSDSRole": null, + "wsaMessageID": "", + "agentSystemSDS1CodeSystem": "1.2.826.0.1285.0.2.0.107", + "fromASID": "230811201324", + "toASID": "990101234567", + "agentSystemSDS2": null, + "agentSystemSDS1": "230811201324", + "agentPersonSDSPerson": null, + 
"messageID": "7D96F876-BF23-11E3-A209-080027D20F8F", + "interactionID": "PORX_IN020101UK31", + "agentPersonSDSTargetRole": null, + "agentPersonSDSCodeSystem": null, + "messageVersion": "V3NPfIT4.2.00", + "agentPersonOrgCode": null + }, + "prescriptionPresent": true, + "prescriptionID": "7D9625-Z72BF2-11E3AC", + "maxRepeats": 3, + "currentInstance": "3", + "daysSupply": 28 + }, + "nomination": { + "nominatedPerformerType": "P1", + "nominatedPerformer": "F001M", + "nominated": true, + "nominationHistory": [ ] + } + +} diff --git a/tests/common/prescription/resources/DD0180-ZBED5C-11E3A.json b/tests/common/prescription/resources/DD0180-ZBED5C-11E3A.json new file mode 100644 index 0000000..5acbcb9 --- /dev/null +++ b/tests/common/prescription/resources/DD0180-ZBED5C-11E3A.json @@ -0,0 +1,278 @@ +{ + "documents": [ + "20140507101333516493_E240C8_2" + ], + "patient": { + "lowerAgeLimit": false, + "birthTime": "19800420", + "nhsNumber": "9990406707", + "higherAgeLimit": false + }, + "changeLog": { + "DD282232-D5C7-11E3-A026-080027786234": { + "SCN": 3, + "InternalID": "20140507101333731886_5A2123_2", + "Response Parameters": { + "responseText": "\"Administrative update successful\"", + "timeStampAck": "\"20140507T101333.731\"", + "refToMessageID": "\"DD282232-D5C7-11E3-A026-080027786234\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140507101333\"", + "versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"025BDFE4-D025-4E68-98EB-7C8A03D6E5DF\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"DD0180-ZBED5C-11E3AV\"", + "refToEventID": "\"DD282232-D5C7-11E3-A026-080027786234\"" + }, + "Timestamp": "20140507101333", + "agentSystemSDS": "230811201324", + "fromStatus": "0001", + "toStatus": "0000", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN999000UK01", + "timePreparedForUpdate": "20140507101333" + }, + "B4A3BBF4-2E5A-47F7-BAE5-9C4A000427CA": { + "SCN": 4, + "InternalID": "20140507101334085165_1E0E39_BATCH", + "Response Parameters": { + "responseText": "\"Updated by Urgent Admin Batch worker\"", + "timeStampAck": "\"20140507T101334.085\"", + "refToMessageID": "\"B4A3BBF4-2E5A-47F7-BAE5-9C4A000427CA\"", + "toASID": "\"SpineInternalSource\"", + "fromASID": "\"338068513039\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140507101334\"", + "messageID": "\"9FD1C198-3FD9-4378-AA41-9FCDBE8EAF6B\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "toPartyKey": "\"None\"", + "fromPartyKey": "\"None\"", + "prescriptionID": "\"DD0180-ZBED5C-11E3AV\"", + "refToEventID": "\"B4A3BBF4-2E5A-47F7-BAE5-9C4A000427CA\"" + }, + "Timestamp": "20140507101334", + "agentSystemSDS": "SpineInternalSource", + "fromStatus": "0000", + "toStatus": "0001", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN999001UK01", + "timePreparedForUpdate": "20140507101334" + }, + "DD062588-D5C7-11E3-A026-080027786234": { + "SCN": 2, + "InternalID": "20140507101333516493_E240C8_2", + "Response Parameters": { + "responseText": "\"Prescription upload successful\"", + "timeStampAck": "\"20140507T101333.516\"", + "refToMessageID": "\"DD062588-D5C7-11E3-A026-080027786234\"", + "toASID": "\"230811201324\"", + "fromASID": "\"990101234567\"", + "serviceASID": "\"618729461037\"", + "timeStampSent": "\"20140507101333\"", + 
"versionCode": "\"V3NPfIT4.2.00\"", + "responseAction": "\"MCCI_IN010000UK13\"", + "messageID": "\"40FB7E41-01FF-46CD-94CE-3A4F6F04239F\"", + "toPartyKey": "\"YEA-0000806\"", + "fromPartyKey": "\"TESTGEN-201324\"", + "prescriptionID": "\"DD0180-ZBED5C-11E3AV\"", + "refToEventID": "\"DD06298E-D5C7-11E3-A026-080027786234\"" + }, + "Timestamp": "20140507101333", + "agentSystemSDS": "230811201324", + "fromStatus": false, + "toStatus": "0001", + "Source XSLT": "requestSuccessResponse_MCAI_MT040101UK03.xsl", + "agentPerson": null, + "interactionID": "PORX_IN020101UK31", + "timePreparedForUpdate": "20140507101333" + } + }, + "indexes": { + "prescribingSiteStatus_bin": [ + "Z99901_0001", + "Z99901_9000" + ], + "nextActivityNAD_bin": [ + "expire_20150507" + ], + "delta_bin": [ + "20140507101334|3" + ], + "dispensingSiteStatus_bin": [], + "nomPharmStatus_bin": [ + "F001M_0001", + "F001M_9000" + ], + "prescriberDate_bin": [ + "Z99901|20140507101333|R2|0001", + "Z99901|20140507101333|R2|9000" + ], + "nhsNumber_bin": [ + "9990406707" + ], + "nhsNumberDate_bin": [ + "9990406707|20140507101333|R2|0001", + "9990406707|20140507101333|R2|9000" + ], + "nhsNumberPrescriberDate_bin": [ + "9990406707|Z99901|20140507101333|R2|0001", + "9990406707|Z99901|20140507101333|R2|9000" + ] + }, + "instances": { + "1": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": false + }, + "nominatedDownloadDate": "20140507", + "previousStatus": "0000", + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "dispensingOrganization": false, + "lastDispenseDate": false, + "lastDispenseNotificationGuid": false + }, + "nextActivity": { + "date": "20150507", + "activity": "expire" + }, + "cancellations": [], + "previousIssueDate": false, + "dispenseHistory": {}, + "instanceNumber": "1", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20140517", + "dispenseWindowLowDate": "20140507", + "lineItems": [ + { + "status": "0007", + "maxRepeats": "2", + "previousStatus": false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0007", + "maxRepeats": "2", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": false, + "expiryDate": "20150507", + "prescriptionStatus": "0001" + }, + "2": { + "completionDate": false, + "claim": { + "historicClaimGUIDs": false, + "claimStatus": false, + "historicDispenseClaimMsgRef": false, + "claimGUID": false, + "batchID": false, + "batchNumber": false, + "dispenseClaimMsgRef": false, + "claimRebuild": false, + "claimReceivedDate": false + }, + "nominatedDownloadDate": false, + "previousStatus": false, + "lastDispenseStatus": false, + "dispense": { + "lastDispenseNotificationMsgRef": false, + "dispensingOrganization": false, + "lastDispenseDate": false, + "lastDispenseNotificationGuid": false + }, + "nextActivity": { + "date": "20150507", + "activity": "expire" + }, + "cancellations": [], + "previousIssueDate": false, + "dispenseHistory": {}, + "instanceNumber": "2", + "dispenseHistoryprescriptionStatus": false, + "releaseDate": false, + "dispenseWindowHighDate": "20140517", + "dispenseWindowLowDate": "20140507", + "lineItems": [ + { + "status": "0007", + "maxRepeats": "2", + "previousStatus": 
false, + "ID": "02ED7776-21CD-4E7B-AC9D-D1DBFEE7B8CF", + "order": 1 + }, + { + "status": "0007", + "maxRepeats": "2", + "previousStatus": false, + "ID": "45D5FB11-D793-4D51-9ADD-95E0F54D2786", + "order": 2 + } + ], + "releaseRequestMsgRef": false, + "expiryDate": "20150507", + "prescriptionStatus": "9000" + } + }, + "SCN": 4, + "prescription": { + "prescriptionType": "0001", + "SCN": false, + "prescriptionTreatmentType": "0003", + "signedTime": "20140507101333", + "prescriptionMsgRef": "20140507101333516493_E240C8_2", + "prescribingOrganization": "Z99901", + "prescriptionTime": "20140507101333", + "pendingCancellations": false, + "unsuccessfulCancellations": false, + "hl7": { + "eventID": "DD06298E-D5C7-11E3-A026-080027786234", + "agentPersonSDSRole": null, + "wsaMessageID": "", + "agentSystemSDS1CodeSystem": "1.2.826.0.1285.0.2.0.107", + "fromASID": "230811201324", + "toASID": "990101234567", + "agentSystemSDS2": null, + "agentSystemSDS1": "230811201324", + "agentPersonSDSPerson": null, + "messageID": "DD062588-D5C7-11E3-A026-080027786234", + "interactionID": "PORX_IN020101UK31", + "agentPersonSDSTargetRole": null, + "agentPersonSDSCodeSystem": null, + "messageVersion": "V3NPfIT4.2.00", + "agentPersonOrgCode": null + }, + "prescriptionPresent": true, + "prescriptionID": "DD0180-ZBED5C-11E3AV", + "maxRepeats": 2, + "currentInstance": "1", + "daysSupply": 28 + }, + "nomination": { + "nominatedPerformerType": "P1", + "nominatedPerformer": "F001M", + "nominated": true, + "nominationHistory": [] + } +} diff --git a/tests/common/prescription/return_changed_issue_list_test.py b/tests/common/prescription/return_changed_issue_list_test.py new file mode 100644 index 0000000..f9015fa --- /dev/null +++ b/tests/common/prescription/return_changed_issue_list_test.py @@ -0,0 +1,106 @@ +from unittest.case import TestCase +from unittest.mock import Mock + +from eps_spine_shared.common.prescription.repeat_dispense import RepeatDispenseRecord + + +class ReturnChangedIssueListTest(TestCase): + """ + Returns the list of changed issues. + """ + + def setUp(self): + """ + Set up all valid values - tests will overwrite these where required. 
+ """ + + mock = Mock() + attrs = {"writeLog.return_value": None} + mock.configure_mock(**attrs) + log_object = mock + internal_id = "test" + + self.mock_record = RepeatDispenseRecord(log_object, internal_id) + self.pre_change_dict = { + "issue1": {"lineItems": {"1": "0001", "2": "0001"}, "prescription": "0006"}, + "issue2": {"lineItems": {"1": "0008", "2": "0008"}, "prescription": "0002"}, + "issue3": {"lineItems": {"1": "0007", "2": "0007"}, "prescription": "9000"}, + } + self.post_change_dict = { + "issue1": {"lineItems": {"1": "0001", "2": "0001"}, "prescription": "0006"}, + "issue2": {"lineItems": {"1": "0008", "2": "0008"}, "prescription": "0002"}, + "issue3": {"lineItems": {"1": "0007", "2": "0007"}, "prescription": "9000"}, + } + self.max_repeats = 3 + self.expected_result = None + + def run_return_changed_issue_list_test(self): + """ + Execute the test + """ + result_set = self.mock_record.return_changed_issue_list( + self.pre_change_dict, self.post_change_dict, self.max_repeats + ) + self.assertEqual(result_set, self.expected_result) + + def test_identical_dicts(self): + """ + No difference in content + """ + self.expected_result = [] + self.run_return_changed_issue_list_test() + + def test_identical_dicts_out_of_order(self): + """ + Out of order elements, but key:value pairs unchanged + """ + self.post_change_dict = { + "issue1": {"lineItems": {"1": "0001", "2": "0001"}, "prescription": "0006"}, + "issue3": {"prescription": "9000", "lineItems": {"2": "0007", "1": "0007"}}, + "issue2": {"lineItems": {"2": "0008", "1": "0008"}, "prescription": "0002"}, + } + self.expected_result = [] + self.run_return_changed_issue_list_test() + + def test_missing_issue_from_pre_change_dict(self): + """ + Issue missing from pre change dict + """ + del self.pre_change_dict["issue2"] + self.expected_result = ["2"] + self.run_return_changed_issue_list_test() + + def test_missing_issue_from_post_change_dict(self): + """ + Issue missing from pre change dict + """ + del self.post_change_dict["issue2"] + self.expected_result = ["2"] + self.run_return_changed_issue_list_test() + + def test_single_item_status_change(self): + """ + Test that a single line item difference is identified + """ + self.post_change_dict["issue1"]["lineItems"]["1"] = "0002" + self.expected_result = ["1"] + self.run_return_changed_issue_list_test() + + def test_single_prescription_status_change(self): + """ + Test that a single prescription difference is identified + """ + self.post_change_dict["issue1"]["prescription"] = "0007" + self.expected_result = ["1"] + self.run_return_changed_issue_list_test() + + def test_multiple_combination_status_change(self): + """ + Test that a multiple line item and prescription differences are identified + """ + self.post_change_dict["issue1"]["lineItems"]["1"] = "0002" + self.post_change_dict["issue1"]["lineItems"]["2"] = "0003" + self.post_change_dict["issue3"]["prescription"] = "0006" + self.post_change_dict["issue3"]["prescription"] = "0007" + self.expected_result = ["1", "3"] + self.run_return_changed_issue_list_test() diff --git a/tests/dynamodb_test.py b/tests/dynamodb_test.py new file mode 100644 index 0000000..b12a679 --- /dev/null +++ b/tests/dynamodb_test.py @@ -0,0 +1,277 @@ +import base64 +import os +import random +import string +import zlib +from unittest import TestCase +from uuid import uuid4 + +import boto3 +import simplejson +from moto import mock_aws + +from eps_spine_shared.common.dynamodb_common import SortKey +from eps_spine_shared.common.dynamodb_datastore import 
EpsDynamoDbDataStore +from eps_spine_shared.common.prescription.record import PrescriptionStatus +from tests.mock_logger import MockLogObject + +PRESC_ORG = "X26" +DISP_ORG = "X27" +NOM_ORG = "X28" +CREATION_TIME = "20230911101112" + + +def set_aws_credentials(): + """Mocked AWS Credentials for moto.""" + os.environ["AWS_ACCESS_KEY_ID"] = "testing" + os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" + os.environ["AWS_SECURITY_TOKEN"] = "testing" + os.environ["AWS_SESSION_TOKEN"] = "testing" + os.environ["AWS_DEFAULT_REGION"] = "eu-west-2" + + +def create_gsi(name: str, hash_key: str, range_key: str = None, projection_attributes: list = None): + """ + Create a GSI definition for table creation. + """ + gsi = { + "IndexName": name, + "KeySchema": [ + {"AttributeName": hash_key, "KeyType": "HASH"}, + ], + } + + if range_key: + gsi["KeySchema"].append( + {"AttributeName": range_key, "KeyType": "RANGE"}, + ) + + projection = ( + {"Projection": {"ProjectionType": "INCLUDE", "NonKeyAttributes": projection_attributes}} + if projection_attributes + else {"Projection": {"ProjectionType": "KEYS_ONLY"}} + ) + gsi.update(projection) + + return gsi + + +def create_dynamodb_table(): + """ + Create the DynamoDB table with all required indexes. + """ + dynamodb = boto3.client("dynamodb", region_name="eu-west-2") + + dynamodb.create_table( + TableName="spine-eps-datastore", + KeySchema=[ + {"AttributeName": name, "KeyType": key_type} + for name, key_type in [("pk", "HASH"), ("sk", "RANGE")] + ], + AttributeDefinitions=[ + {"AttributeName": name, "AttributeType": "S"} + for name in [ + "pk", + "sk", + "batchClaimId", + "claimNotificationStoreDate", + "creationDatetime", + "dispenserOrg", + "nextActivity", + "nextActivityDate", + "nhsNumber", + "nominatedPharmacy", + "prescriberOrg", + "storeTime", + "_lm_day", + ] + ] + + [ + {"AttributeName": name, "AttributeType": "N"} + for name in [ + "isReady", + "sequenceNumber", + "sequenceNumberNwssp", + "_riak_lm", + ] + ], + GlobalSecondaryIndexes=[ + create_gsi( + "nhsNumberDate", + "nhsNumber", + "creationDatetime", + ["indexes", "prescriberOrg", "dispenserOrg"], + ), + create_gsi( + "prescriberDate", + "prescriberOrg", + "creationDatetime", + ["indexes", "dispenserOrg"], + ), + create_gsi( + "dispenserDate", + "dispenserOrg", + "creationDatetime", + ["indexes"], + ), + create_gsi( + "nominatedPharmacyStatus", + "nominatedPharmacy", + "isReady", + ["status", "indexes"], + ), + create_gsi( + "claimId", + "sk", + "batchClaimId", + ["claimIds"], + ), + create_gsi( + "nextActivityDate", + "nextActivity", + "nextActivityDate", + ), + create_gsi( + "claimIdSequenceNumber", + "sequenceNumber", + ), + create_gsi( + "claimIdSequenceNumberNwssp", + "sequenceNumberNwssp", + ), + create_gsi( + "lastModified", + "_lm_day", + "_riak_lm", + ), + create_gsi( + "claimNotificationStoreTime", + "claimNotificationStoreDate", + "storeTime", + ), + ], + BillingMode="PAY_PER_REQUEST", + ) + + dynamodb.update_time_to_live( + TableName="spine-eps-datastore", + TimeToLiveSpecification={"Enabled": True, "AttributeName": "expireAt"}, + ) + + +class DynamoDbTest(TestCase): + """ + Parent class for DynamoDB tests. + """ + + def setUp(self) -> None: + """ + Instantiate class to be tested. 
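+ moto's mock_aws() patches boto3 in-process, so the table created below exists only in memory and the dummy credentials from set_aws_credentials() are never used against real AWS.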
+ """ + set_aws_credentials() + self.mock_aws = mock_aws() + self.mock_aws.start() + + create_dynamodb_table() + + self.logger: MockLogObject = MockLogObject() + + self.datastore: EpsDynamoDbDataStore = EpsDynamoDbDataStore( + self.logger, None, "spine-eps-datastore" + ) + self.keys = [] + self.internal_id = str(uuid4()) + + def tearDown(self) -> None: + """ + Stop moto mocking and clean up resources. + """ + self.mock_aws.stop() + + def generate_prescription_id(self): + """ + Create a random id with the format of a prescription id. + """ + parts = [random.choices(string.ascii_uppercase + string.digits, k=6) for _ in range(3)] + return "-".join(["".join(part) for part in parts]) + + def generate_document_key(self): + """ + Create a placeholder document key and queue it for cleanup + """ + document_key = str(uuid4()) + self.keys.append((document_key, SortKey.DOCUMENT.value)) + return document_key + + def generate_record_key(self): + """ + Returns a prescription id excluding the check digit + """ + return self.generate_prescription_id()[:-1] + + def generate_nhs_number(self): + """ + Create a random number in the range of test NHS numbers and return it as a string. + """ + return str(random.randrange(9000000000, 9999999999)) + + def get_new_record_keys(self, prescription_id=None): + """ + Gives unique primary/secondary keys to use on a record item. + Adds to the list of keys to be deleted in tearDown. + """ + record_key = prescription_id[:19] if prescription_id else self.generate_record_key() + nhs_number = self.generate_nhs_number() + + self.keys.append((record_key, SortKey.RECORD.value)) + return record_key, nhs_number + + def get_record(self, nhs_number, creation_time=CREATION_TIME): + return { + "patient": {"nhsNumber": nhs_number}, + "prescription": { + "prescriptionTime": creation_time, + "daysSupply": 28, + "prescribingOrganization": PRESC_ORG, + }, + "instances": { + "1": { + "prescriptionStatus": PrescriptionStatus.TO_BE_DISPENSED, + "dispense": {"dispensingOrganization": DISP_ORG}, + } + }, + "indexes": { + "nextActivityNAD_bin": ["createNoClaim_20250104"], + "nhsNumberDate_bin": [ + f"{nhs_number}|{creation_time}|R2|{PrescriptionStatus.TO_BE_DISPENSED}" + ], + "nhsNumber_bin": [nhs_number], + "nhsNumberPrescDispDate_bin": [ + f"{nhs_number}|{PRESC_ORG}|{DISP_ORG}|{creation_time}|R2|{PrescriptionStatus.TO_BE_DISPENSED}" + ], + "nhsNumberPrescriberDate_bin": [ + f"{nhs_number}|{PRESC_ORG}|{creation_time}|R2|{PrescriptionStatus.TO_BE_DISPENSED}" + ], + "nhsNumberDispenserDate_bin": [ + f"{nhs_number}|{DISP_ORG}|{creation_time}|R2|{PrescriptionStatus.TO_BE_DISPENSED}" + ], + "prescDispDate_bin": [ + f"{PRESC_ORG}|{DISP_ORG}|{creation_time}|R2|{PrescriptionStatus.TO_BE_DISPENSED}" + ], + "prescriberDate_bin": [ + f"{PRESC_ORG}|{creation_time}|R2|{PrescriptionStatus.TO_BE_DISPENSED}" + ], + "dispenserDate_bin": [ + f"{DISP_ORG}|{creation_time}|R2|{PrescriptionStatus.TO_BE_DISPENSED}" + ], + }, + "SCN": 1, + } + + def get_document_content(self, content={"a": 1, "b": True}): # noqa: B006 + """ + Gets base64 encoded compressed string of document content. 
+ """ + return base64.b64encode(zlib.compress(simplejson.dumps(content).encode("utf-8"))).decode( + "utf-8" + ) diff --git a/tests/mock_logger.py b/tests/mock_logger.py new file mode 100644 index 0000000..d018d5e --- /dev/null +++ b/tests/mock_logger.py @@ -0,0 +1,179 @@ +from typing import Dict + + +class MockLogObject(object): + """ + Mock log object + """ + + def __init__(self, severity_threshold="INFO"): + self.__expectations = set([]) + self._called_references = [] + self._log_records = [] + self._severity_threshold = severity_threshold + self.logged_messages = [] + + def write_log( + self, + log_reference="UTI9999", + error_list=None, + log_row_dict=None, + severity_threshold_override=None, + process_name=None, + ): + """ + Dummy write log just keeps a list of the logReferences + """ + log_row_dict = dict(log_row_dict) if log_row_dict else {} + self.logged_messages.append((log_reference, log_row_dict)) + log_record = { + "logReference": log_reference, + "errorList": error_list, + "logRowDict": log_row_dict, + "severityThresholdOverride": severity_threshold_override, + "processName": process_name, + } + self._log_records.append(log_record) + + self._called_references.append(log_reference) + + if log_reference in self.__expectations: + self.__expectations.remove(log_reference) + + # Alias for Spine compatibility + writeLog = write_log + + def was_logged(self, log_reference): + """ + Was a particular log reference logged? + """ + return log_reference in self._called_references + + def was_value_logged(self, log_reference, key, expected_value): + """ + Was a particular log key supplied as expected + """ + for log_record in self.log_records: + if log_record["logReference"] == log_reference: + # Deliberately done like this so that if there are multiple logReferences that are the same with + # different value. + match = expected_value == log_record["logRowDict"][key] + if match: + return True + return False + + def was_value_not_logged(self, log_reference, key, expected_value): + """ + Was a particular log key not supplied as expected + """ + for log_record in self.log_records: + if log_record["logReference"] == log_reference: + # Deliberately done like this so that if there are multiple logReferences that are the same with + # different value. + match = expected_value == log_record["logRowDict"][key] + if match: + return False + return True + + def logged_value_occurrences(self, log_reference, key, expected_value): + """ + Return the number of occurrences of a particular key and value + """ + occurrences = 0 + for log_record in self.log_records: + if log_record["logReference"] == log_reference: + # Deliberately done like this so that if there are multiple logReferences that are the same with + # different value. 
+ match = expected_value == log_record["logRowDict"][key] + if match: + occurrences += 1 + return occurrences + + def was_multiple_value_logged(self, log_reference: str, key_values: Dict): + """ + Was a particular log key supplied as expected with the expected values + """ + for log_record in self.log_records: + found_count = 0 + if log_record["logReference"] == log_reference: + for key in key_values: + if log_record["logRowDict"][key] == key_values[key]: + found_count += 1 + if found_count == len(key_values): + return True + return False + + def get_logged_value(self, log_reference, key): + """ + Get logged value for given reference and key + """ + for log_record in self.log_records: + if log_record["logReference"] == log_reference: + return log_record["logRowDict"][key] + + def get_log_occurrences(self, log_reference): + """ + Gets a list of the args that were passed each time a specified message was logged + """ + return [args for reference, args in self.logged_messages if reference == log_reference] + + def log_occurrence_count(self, log_reference): + """ + Returns the number of times a logReference was logged. + """ + return len(self.get_log_occurrences(log_reference)) + + def add_expected_reference(self, expected_reference): + """ + set the expected reference + """ + self.__expectations.add(expected_reference) + + def expectations_satisfied(self): + """ + has the expected log line been written + """ + return len(self.__expectations) == 0 + + def clear_expectations(self): + """ + clear expectations + """ + self.__expectations = set([]) + + def clear(self): + """ + Clear everything + """ + self.__expectations = set([]) + self._called_references = [] + self._log_records = [] + self.logged_messages = [] + + @property + def called_references(self): + """ + The called references + """ + return self._called_references + + @called_references.setter + def called_references(self, new_called_references): + """ + Setter + """ + self._called_references = new_called_references + + @property + def log_records(self): + """ + The logRecords + """ + return self._log_records + + @property + def severity_threshold(self): + """ + The severity threshold (ref logging.py _SEVERITY_INPUT_MAP) + """ + return self._severity_threshold diff --git a/tests/nhsfundamentals/timeutilities_test.py b/tests/nhsfundamentals/timeutilities_test.py new file mode 100644 index 0000000..07c0380 --- /dev/null +++ b/tests/nhsfundamentals/timeutilities_test.py @@ -0,0 +1,164 @@ +from datetime import datetime +from unittest import mock +from unittest.case import TestCase + +from parameterized.parameterized import parameterized + +from eps_spine_shared.nhsfundamentals.timeutilities import ( + TimeFormats, + _guessCommonDateTimeFormat, + convertSpineDate, + timeNowAsString, +) + + +class TimeUtilitiesTests(TestCase): + """ + Time Utility Testing + """ + + @parameterized.expand( + [ + ("gmt_end", "2021-03-28 01:59:59", "20210328015959"), + ("bst_start", "2021-03-28 02:00:00", "20210328020000"), + ("bst_end", "2021-10-31 01:59:59", "20211031015959"), + ("gmt_start", "2021-10-31 02:00:00", "20211031020000"), + ] + ) + def testTimeNowAsString(self, _, utcNow, expected): + """ + Check timeNowAsString returns standard spine format by default matching UTC time. 
+ """ + with mock.patch("eps_spine_shared.nhsfundamentals.timeutilities.now") as mockNow: + mockNow.return_value = datetime.strptime(utcNow, "%Y-%m-%d %H:%M:%S") + result = timeNowAsString() + self.assertEqual(expected, result) + + @parameterized.expand( + [ + ("length_4", "2022", TimeFormats.STANDARD_DATE_FORMAT_YEAR_ONLY), + ("length_6", "202201", TimeFormats.STANDARD_DATE_FORMAT_YEAR_MONTH), + ("length_8", "20220113", TimeFormats.STANDARD_DATE_FORMAT), + ("length_12", "202201131234", TimeFormats.DATE_TIME_WITHOUT_SECONDS_FORMAT), + ("length_14", "20220113123456", TimeFormats.STANDARD_DATE_TIME_FORMAT), + ("length_19_EBXML", "2022-01-13T12:34:56", TimeFormats.EBXML_FORMAT), + ( + "length_19_OTHER", + "20220113123456+0000", + TimeFormats.STANDARD_DATE_TIME_UTC_ZONE_FORMAT, + ), + ("length_20", "2022-01-13T12:34:56Z", TimeFormats.SMSP_FORMAT), + ("length_21", "20220113123456.123456", TimeFormats.SPINE_DATETIME_MS_FORMAT), + ("length_22", "20220113T123456.123456", TimeFormats.HL7_DATETIME_FORMAT), + ("length_23", "2022-01-13T12:34:56.123456", TimeFormats.EXTENDED_SMSP_FORMAT), + ("length_24", "2022-01-13T12:34:56.123456Z", TimeFormats.EXTENDED_SMSP_PLUS_Z_FORMAT), + ("other", "202", None), + ] + ) + def testGuessCommonDateTimeFormat_Default(self, _, timeString, expected): + """ + Check time format determined from date time string using default settings + """ + result = _guessCommonDateTimeFormat(timeString) + self.assertEqual(expected, result) + + def testGuessCommonDateTimeFormat_NoneIfUnknown(self): + """ + Check time format determined from date time string specifying to return none if could not be determined + """ + result = _guessCommonDateTimeFormat("202", False) + self.assertIsNone(result) + + def testGuessCommonDateTimeFormat_ErrorIfUnknown_FormatUnknown(self): + """ + Check time format determined from date time string with an unknown format, with raise error true + """ + with self.assertRaises(ValueError): + _ = _guessCommonDateTimeFormat("202", True) + + def testGuessCommonDateTimeFormat_ErrorIfUnknown_FormatKnown(self): + """ + Check time format determined from date time string with a known format, with raise error true + """ + result = _guessCommonDateTimeFormat("2020", True) + self.assertEqual(TimeFormats.STANDARD_DATE_FORMAT_YEAR_ONLY, result) + + +class DateFormatTest(TestCase): + """ + There is a safety method called convertSpineDate which will convert a date string if + there is doubt over the actual format being used + """ + + def _formatTester(self, dateFormat, withFormat=False): + """ + Test the format of a date + """ + _now = datetime.now() + _nowAsString = _now.strftime(dateFormat) + if withFormat: + _newNow = convertSpineDate(_nowAsString, dateFormat) + else: + _newNow = convertSpineDate(_nowAsString) + + if _newNow > _now: + return _newNow - _now + return _now - _newNow + + def testEbxml(self): + """ + TimeFormats.EBXML_FORMAT + """ + delta = self._formatTester(TimeFormats.EBXML_FORMAT) + self.assertLessEqual(delta.seconds, 1) + + def testStandardUTC(self): + """ + STANDARD_DATE_TIME_UTC_ZONE_FORMAT = '%Y%m%d%H%M%S+0000' + STANDARD_DATE_TIME_FORMAT = '%Y%m%d%H%M%S' + STANDARD_DATE_FORMAT = '%Y%m%d' + HL7_DATETIME_FORMAT = '%Y%m%dT%H%M%S.%f' + SPINE_DATETIME_MS_FORMAT = '%Y%m%d%H%M%S.%f' + SPINE_DATE_FORMAT = '%Y%m%d' + DAY_MONTH_YEAR_FORMAT = '%d%m%Y' + DAY_MONTH_TWO_DIGIT_YEAR_FORMAT = '%d%m%y' + DAY_MONTH_YEAR_WITH_SLASHES_FORMAT = '%d/%m/%Y' + TWO_DIGIT_YEAR_AND_WEEK_FORMAT = '%y%W' + """ + delta = self._formatTester(TimeFormats.STANDARD_DATE_TIME_UTC_ZONE_FORMAT) 
+ self.assertLessEqual(delta.seconds, 1) + + def testStandardDT(self): + """ + The value of STANDARD_DATE_TIME_FORMAT = '%Y%m%d%H%M%S' + """ + delta = self._formatTester(TimeFormats.STANDARD_DATE_TIME_FORMAT) + self.assertLessEqual(delta.seconds, 1) + + def testStandardDTMS(self): + """ + The value of SPINE_DATETIME_MS_FORMAT = '%Y%m%d%H%M%S.%f' + """ + delta = self._formatTester(TimeFormats.SPINE_DATETIME_MS_FORMAT) + self.assertLessEqual(delta.seconds, 1) + + def testStandardHL7(self): + """ + The value of HL7_DATETIME_FORMAT = '%Y%m%dT%H%M%S.%f' + """ + delta = self._formatTester(TimeFormats.HL7_DATETIME_FORMAT) + self.assertLessEqual(delta.seconds, 1) + + def testStandardDate(self): + """ + The value of SPINE_DATE_FORMAT = '%Y%m%d' + """ + delta = self._formatTester(TimeFormats.SPINE_DATE_FORMAT) + self.assertLessEqual(delta.days, 1) + + def testStandardDT_withFormat(self): + """ + The value of STANDARD_DATE_TIME_FORMAT = '%Y%m%d%H%M%S' + """ + delta = self._formatTester(TimeFormats.STANDARD_DATE_TIME_FORMAT, True) + self.assertLessEqual(delta.seconds, 1) diff --git a/tests/spinecore/changelog_test.py b/tests/spinecore/changelog_test.py new file mode 100644 index 0000000..c93be6f --- /dev/null +++ b/tests/spinecore/changelog_test.py @@ -0,0 +1,320 @@ +""" +Created on 11 Feb 2014 +""" + +import copy +import sys +import unittest + +from eps_spine_shared.errors import EpsSystemError +from eps_spine_shared.spinecore.changelog import ChangeLogProcessor, PrescriptionsChangeLogProcessor + +CHANGE_LOG_TO_PRUNE = { + "GUID1": {"SCN": 1, "InternalID": "INTERNALID"}, + "GUID2": {"SCN": 4, "InternalID": "INTERNALID"}, + "GUID3": {"SCN": 5, "InternalID": "INTERNALID"}, + "GUID4": {"SCN": 6, "InternalID": "INTERNALID"}, + "GUID5": {"SCN": 3, "InternalID": "INTERNALID"}, + "GUID6": {"SCN": 8, "InternalID": "INTERNALID"}, + "GUID7": {"SCN": 9, "InternalID": "INTERNALID"}, + "GUID8": {"SCN": "10", "InternalID": "INTERNALID"}, +} + + +class ChangeLogProcessorTest(unittest.TestCase): + """ + Tests for the ChangeLogProcessor + """ + + def testGeneralLogEntry_Empty(self): + """ + test producing a general log with empty inputs + """ + logOfChange = ChangeLogProcessor.logForGeneralUpdate(1) + del logOfChange["Timestamp"] + + _expectedLog = {} + _expectedLog["SCN"] = 1 + _expectedLog["InternalID"] = None + _expectedLog["Source XSLT"] = None + _expectedLog["Response Parameters"] = {} + self.assertEqual(logOfChange, _expectedLog) + + def testPruningOfChangeLog(self): + """ + Add a new entry into change log and show it is correctly pruned + """ + _expectedChangeLog = copy.copy(CHANGE_LOG_TO_PRUNE) + + record = {} + record["changeLog"] = copy.copy(CHANGE_LOG_TO_PRUNE) + + newLog = {"SCN": 12, "InternalID": "INTERNALID"} + _newRecord = ChangeLogProcessor.updateChangeLog(record, newLog, "GUID9", 6) + _newChangeLog = _newRecord["changeLog"] + + del _expectedChangeLog["GUID1"] + del _expectedChangeLog["GUID2"] + del _expectedChangeLog["GUID3"] + del _expectedChangeLog["GUID5"] + _expectedChangeLog["GUID9"] = newLog + + self.assertDictEqual(_newChangeLog, _expectedChangeLog) + + def testNotPruningOfChangeLog(self): + """ + Add a new entry into change log and show that when DO_NOT_PRUNE is used it does not prune + """ + _expectedChangeLog = copy.copy(CHANGE_LOG_TO_PRUNE) + + record = {} + record["changeLog"] = copy.copy(CHANGE_LOG_TO_PRUNE) + + newLog = {"SCN": 12, "InternalID": "INTERNALID"} + _newRecord = ChangeLogProcessor.updateChangeLog( + record, newLog, "GUID9", ChangeLogProcessor.DO_NOT_PRUNE + ) + 
_newChangeLog = _newRecord["changeLog"] + + _expectedChangeLog["GUID9"] = newLog + + self.assertDictEqual(_newChangeLog, _expectedChangeLog) + + def testHighestSCN(self): + """ + test highest guid and scn returned + """ + (guid, scn) = ChangeLogProcessor.getHighestSCN(CHANGE_LOG_TO_PRUNE) + self.assertEqual(guid, "GUID8") + self.assertEqual(scn, 10) + + record = {} + record["changeLog"] = copy.copy(CHANGE_LOG_TO_PRUNE) + + newLog = {"SCN": 12, "InternalID": "INTERNALID"} + _newRecord = ChangeLogProcessor.updateChangeLog(record, newLog, "GUID9", 6) + _newChangeLog = _newRecord["changeLog"] + + (guid, scn) = ChangeLogProcessor.getHighestSCN(_newChangeLog) + self.assertEqual(guid, "GUID9") + self.assertEqual(scn, 12) + + def testGetSCN(self): + """ + test return of SCN from changeLog entry + """ + changeLogEntry = {"SCN": 1} + scn = ChangeLogProcessor.getSCN(changeLogEntry) + self.assertEqual(scn, 1) + changeLogEntry = {"SCN": "1"} + scn = ChangeLogProcessor.getSCN(changeLogEntry) + self.assertEqual(scn, 1) + changeLogEntry = {} + scn = ChangeLogProcessor.getSCN(changeLogEntry) + self.assertEqual(scn, ChangeLogProcessor.INVALID_SCN) + changeLogEntry = {"SCN": sys.maxsize} + scn = ChangeLogProcessor.getSCN(changeLogEntry) + self.assertEqual(scn, sys.maxsize) + + def testListSCNs(self): + """ + test the return of the list of SCNs present in a changeLog + """ + changeLog = {"ABCD": {"SCN": 1}, "EFGH": {"SCN": 2}, "IJKL": {"SCN": 3}} + scnList = sorted(ChangeLogProcessor.listSCNs(changeLog)) + self.assertEqual(scnList, [1, 2, 3]) + + changeLog = {} + scnList = ChangeLogProcessor.listSCNs(changeLog) + scnList.sort() + self.assertEqual(scnList, []) + + changeLog = {"ABCD": {}} + scnList = ChangeLogProcessor.listSCNs(changeLog) + scnList.sort() + self.assertEqual(scnList, [ChangeLogProcessor.INVALID_SCN]) + + def testGetMaxSCN(self): + """ + Test retrieval of the highest SCN from changeLog + """ + changeLog = {"ABCD": {"SCN": 1}, "IJKL": {"SCN": 3}, "EFGH": {"SCN": 2}} + highestSCN = ChangeLogProcessor.getMaxSCN(changeLog) + self.assertEqual(highestSCN, 3) + + changeLog = {"ABCD": {"SCN": 1}, "EFGH": {"SCN": 2}, "IJKL": {"SCN": 3}, "ZZZZ": {"SCN": 3}} + highestSCN = ChangeLogProcessor.getMaxSCN(changeLog) + self.assertEqual(highestSCN, 3) + + changeLog = {"ABCD": {}} + highestSCN = ChangeLogProcessor.getMaxSCN(changeLog) + self.assertEqual(highestSCN, ChangeLogProcessor.INVALID_SCN) + + def testGetAllGuidsForSCN(self): + """ + test retrieval of list of GUIDS that are keys for changelog entries which have a particular SCN + """ + changeLog = {"ABCD": {"SCN": 1}, "EFGH": {"SCN": 2}, "IJKL": {"SCN": 3}, "ZZZZ": {"SCN": 3}} + guidList = sorted(ChangeLogProcessor.getAllGuidsForSCN(changeLog, 1)) + self.assertEqual(guidList, ["ABCD"]) + + guidList = ChangeLogProcessor.getAllGuidsForSCN(changeLog, 3) + guidList.sort() + self.assertEqual(guidList, ["IJKL", "ZZZZ"]) + + guidList = ChangeLogProcessor.getAllGuidsForSCN(changeLog, "3") + guidList.sort() + self.assertEqual(guidList, ["IJKL", "ZZZZ"]) + + guidList = ChangeLogProcessor.getAllGuidsForSCN(changeLog, "7") + guidList.sort() + self.assertEqual(guidList, []) + + def testGetMaxSCNGuids(self): + """ + test retrieval of all GUIDS that have the highest SCN in the changeLog entry + """ + changeLog = {"ABCD": {"SCN": 1}, "IJKL": {"SCN": 3}, "EFGH": {"SCN": 2}} + guidList = sorted(ChangeLogProcessor.getMaxSCNGuids(changeLog)) + self.assertEqual(guidList, ["IJKL"]) + + changeLog = {"ABCD": {"SCN": 1}, "EFGH": {"SCN": 2}, "IJKL": {"SCN": 3}, "ZZZZ": {"SCN": 3}} 
+ guidList = ChangeLogProcessor.getMaxSCNGuids(changeLog) + guidList.sort() + self.assertEqual(guidList, ["IJKL", "ZZZZ"]) + + changeLog = {"ABCD": {}, "EFGH": {}} + guidList = ChangeLogProcessor.getMaxSCNGuids(changeLog) + guidList.sort() + self.assertEqual(guidList, ["ABCD", "EFGH"]) + + changeLog = {"ABCD": {}, "EFGH": {}, "IJKL": {"SCN": 3}} + guidList = ChangeLogProcessor.getMaxSCNGuids(changeLog) + guidList.sort() + self.assertEqual(guidList, ["IJKL"]) + + changeLog = {} + guidList = ChangeLogProcessor.getMaxSCNGuids(changeLog) + self.assertEqual(guidList, []) + + def testGetAllGuids(self): + """ + test getting the list of all GUID keys for a changeLog + """ + changeLog = {"ABCD": {"SCN": 1}, "EFGH": {"SCN": 2}, "IJKL": {"SCN": 3}, "ZZZZ": {"SCN": 3}} + guidList = sorted(ChangeLogProcessor.getAllGuids(changeLog)) + self.assertEqual(guidList, ["ABCD", "EFGH", "IJKL", "ZZZZ"]) + + changeLog = {"ABCD": {}, "EFGH": {}} + guidList = ChangeLogProcessor.getAllGuids(changeLog) + guidList.sort() + self.assertEqual(guidList, ["ABCD", "EFGH"]) + + changeLog = {} + guidList = ChangeLogProcessor.getAllGuids(changeLog) + self.assertEqual(guidList, []) + + def testSettingInitialChangeLogOnDataMigration(self): + """ + Set an initial change log onto a record which does not have one + """ + record = {} + internalID = "INTERNALID" + reasonGUID = "DataMigration" + ChangeLogProcessor.setInitialChangeLog(record, internalID, reasonGUID) + + _changeLog = record[ChangeLogProcessor.RECORD_CHANGELOG_REF] + del _changeLog["DataMigration"]["Timestamp"] + + self.assertDictEqual( + _changeLog, + { + "DataMigration": { + "SCN": 1, + "InternalID": "INTERNALID", + "Source XSLT": None, + "Response Parameters": {}, + } + }, + ) + + +PR_CHANGE_LOG_TO_PRUNE = { + "GUID1": {"SCN": 1, "interactionID": "PORX_IN090101UK01"}, + "GUID2": {"SCN": 4, "interactionID": "PORX_IN090101UK04"}, + "GUID3": {"SCN": 5, "interactionID": "PORX_IN090101UK05"}, + "GUID4": {"SCN": 6, "interactionID": "PORX_IN090101UK05"}, + "GUID5": {"SCN": 3, "interactionID": "PORX_IN060102UK29"}, + "GUID6": {"SCN": 8, "interactionID": "PORX_IN060102UK30"}, + "GUID7": {"SCN": 9, "interactionID": "PORX_IN060102UK30"}, + "GUID8": {"SCN": 33, "interactionID": "PORX_IN060102UK30"}, + "GUID9": {"SCN": 34, "interactionID": "PORX_IN060102UK30"}, + "GUIDA": {"SCN": 35, "interactionID": "PORX_IN060102UK30"}, + "GUIDB": {"SCN": 36, "interactionID": "PORX_IN060102UK30"}, + "GUIDC": {"SCN": 37, "interactionID": "PORX_IN060102UK30"}, + "GUIDD": {"SCN": 40, "interactionID": "PORX_IN060102UK30"}, + "GUIDE": {"SCN": 41, "interactionID": "PORX_IN060102UK30"}, + "GUIDF": {"SCN": 42, "interactionID": "PORX_IN060102UK30"}, + "GUIDX": {"SCN": 43, "interactionID": "PORX_IN090101UK09"}, + "GUIDZ": {"SCN": 82, "interactionID": "PORX_IN060102UK30"}, +} + +# Should not delete GUID 1-7 (initial history) +# Should not delete GUID 8, C, D - not piggy in middle +# Should not delete GUID F, X is different InteractionID +# Should not delete GUID z (recent history) + + +class PrescriptionChangeLogProcessorTest(unittest.TestCase): + """ + Tests for the ChangeLogProcessor + """ + + def testPrunePrescriptionChangeLog(self): + """ + Prune the record as expected + """ + _changeLog = copy.copy(PR_CHANGE_LOG_TO_PRUNE) + + PrescriptionsChangeLogProcessor.pruneChangeLog(_changeLog, 80) + + _presentGUIDs = [ + "GUID1", + "GUID2", + "GUID3", + "GUID4", + "GUID5", + "GUID6", + "GUID7", + "GUID8", + "GUIDC", + "GUIDD", + "GUIDF", + "GUIDX", + "GUIDZ", + ] + + for guid in _presentGUIDs: + 
self.assertIn(guid, list(_changeLog.keys()))
+
+        self.assertEqual(len(_presentGUIDs), len(list(_changeLog.keys())))
+
+    def testPrunePrescriptionChangeLog_HighPrunePoint(self):
+        """
+        Increase the prune point and confirm that no pruning now occurs
+        """
+        _changeLog = copy.copy(PR_CHANGE_LOG_TO_PRUNE)
+
+        PrescriptionsChangeLogProcessor.pruneChangeLog(_changeLog, 180)
+        self.assertDictEqual(_changeLog, PR_CHANGE_LOG_TO_PRUNE)
+
+    def testUnprunableChangeLog(self):
+        """
+        Make the change log unprunable below the prune point
+        """
+        _changeLog = copy.copy(PR_CHANGE_LOG_TO_PRUNE)
+        for scn in range(100, 200):
+            _changeLog["GUID" + str(scn)] = {"SCN": scn, "interactionID": "PORX_IN090101UK09"}
+
+        with self.assertRaises(EpsSystemError):
+            PrescriptionsChangeLogProcessor.pruneChangeLog(_changeLog, 50)
diff --git a/tests/test_hello.py b/tests/test_hello.py
deleted file mode 100644
index a33fd72..0000000
--- a/tests/test_hello.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import pytest
-
-from eps_spine_shared.hello import hello
-
-
-@pytest.mark.parametrize(
-    "name,expected",
-    [
-        ("World", "Hello, World!"),
-        ("", "Hello, !"),
-    ],
-)
-def test_hello(name, expected):
-    assert hello(name) == expected
-
-
-def test_hello_default():
-    assert hello() == "Hello, World!"