diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 5eaf84b..93882cf 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.13-slim-bookworm +FROM python:3.10-slim-bookworm # Update the certificates RUN apt-get update && \ @@ -32,10 +32,9 @@ ENV PATH="$PATH:$POETRY_HOME/bin" RUN curl -sSL https://install.python-poetry.org | python3 - RUN poetry self update -# Configure poetry to not create virtual environments. -# This is done to force package installation to the global Python install so that users -# other than the Docker root user have access to the installed packages. -RUN poetry config virtualenvs.create false +# Configure poetry to create virtual environments in the project. +# This is done to be compatible with how uv and PDM set up the venvs. +RUN poetry config virtualenvs.in-project true EXPOSE 8888 ENTRYPOINT ["/bin/sh"] diff --git a/.vscode/example.launch.json b/.vscode/example.launch.json new file mode 100644 index 0000000..e156bcc --- /dev/null +++ b/.vscode/example.launch.json @@ -0,0 +1,20 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python Debugger: testserver", + "type": "debugpy", + "request": "launch", + "module": "uvicorn", + "args": [ + "testserver:app", + "--app-dir=./tests/server", + "--host=0.0.0.0", + "--port=8000", + ] + } + ] +} \ No newline at end of file diff --git a/docs/advanced_use.md b/docs/advanced_use.md index 710ed74..cee3ada 100644 --- a/docs/advanced_use.md +++ b/docs/advanced_use.md @@ -4,7 +4,7 @@ When working with APIs, there are often relations between resources or constraints on values. The property on one resource may refer to the `id` of another resource. The value for a certain property may have to be unique within a certain scope. 
-Perhaps an endpoint path contains parameters that must match values that are defined outside the API itself. +Perhaps a URL contains parameters that must match values that are defined outside the API itself. These types of relations and limitations cannot be described / modeled within the openapi document. To support automatic validation of API endpoints where such relations apply, OpenApiLibCore supports the usage of a custom mappings file. @@ -42,7 +42,7 @@ from OpenApiLibCore import ( ID_MAPPING = { - "/myspecialendpoint", "special_thing_id", + "/myspecialpath", "special_thing_id", } @@ -54,7 +54,7 @@ class MyDtoThatDoesNothing(Dto): DTO_MAPPING = { - ("/myspecialendpoint", "post"): MyDtoThatDoesNothing + ("/myspecialpath", "post"): MyDtoThatDoesNothing } ``` @@ -88,16 +88,16 @@ def my_transformer(identifier_name: str) -> str: ID_MAPPING = { - "/myspecialendpoint": ("special_thing_id", my_transformer), + "/myspecialpath": ("special_thing_id", my_transformer), } ``` ### The DTO_MAPPING The `DTO_MAPPING` is a dictionary with a tuple as its key and a mappings Dto as its value. -The tuple must be in the form `("endpoint_from_the_paths_section", "method_supported_by_the_endpoint")`. -The `endpoint_from_the_paths_section` must be exactly as found in the openapi document. -The `method_supported_by_the_endpoint` must be one of the methods supported by the endpoint and must be in lowercase. +The tuple must be in the form `("path_from_the_paths_section", "method_supported_by_the_path")`. +The `path_from_the_paths_section` must be exactly as found in the openapi document. +The `method_supported_by_the_path` must be one of the methods supported by the path and must be in lowercase. 
## Dto mapping classes @@ -108,10 +108,10 @@ Each of these classes is designed to handle a relation or constraint commonly se To explain the different mapping classes, we'll use the following example: -Imagine we have an API endpoint `/employees` where we can create (`post`) a new Employee resource. +Imagine we have an API path `/employees` where we can create (`post`) a new Employee resource. The Employee has a number of required properties; name, employee_number, wagegroup_id, and date_of_birth. -There is also the the `/wagegroups` endpoint where a Wagegroup resource can be created. +There is also the `/wagegroups` path where a Wagegroup resource can be created. This Wagegroup also has a number of required properties: name and hourly rate. --- @@ -159,7 +159,7 @@ This `error_code` should be described as one of the `responses` in the openapi d If an Employee has been added to the system, this Employee refers to the `id` of a Wagegroup for its required `employee_number` property. -Now let's say there is also the `/wagegroups/${wagegroup_id}` endpoint that supports the `delete` operation. +Now let's say there is also the `/wagegroups/${wagegroup_id}` path that supports the `delete` operation. If the Wagegroup refered to the Employee would be deleted, the Employee would be left with an invalid reference for one of its required properties. To prevent this, an API typically returns an `error_code` when such a `delete` operation is attempted on a resource that is refered to in this fashion. This `error_code` should be described as one of the `responses` in the openapi document for the `delete` operation of the `/wagegroups/${wagegroup_id}` path. @@ -314,9 +314,9 @@ To be able to automatically perform endpoint validations, the OpenApiLibCore has Often, such a `path` contains a reference to a resource id, e.g. `/employees/${employee_id}`. When such an `id` is needed, the OpenApiLibCore tries to obtain a valid `id` by taking these steps: -1. 
Attempt a `post` on the "parent endpoint" and extract the `id` from the response. -In our example: perform a `post` request on the `/employees` endpoint and get the `id` from the response. -2. If 1. fails, perform a `get` request on the `/employees` endpoint. It is assumed that this will return a list of Employee objects with an `id`. +1. Attempt a `post` on the "parent path" and extract the `id` from the response. +In our example: perform a `post` request on the `/employees` path and get the `id` from the response. +2. If 1. fails, perform a `get` request on the `/employees` path. It is assumed that this will return a list of Employee objects with an `id`. One item from the returned list is picked at rondom and its `id` is used. This mechanism relies on the standard REST structure and patterns. @@ -339,6 +339,7 @@ DTO_MAPPING = { ("/birthdays/{month}/{date}", "get"): BirthdaysDto } ``` +> Note: To take a `PathPropertiesConstraint` into use, the related Dto must be mapped to the `get` operation for the `path` in the `DTO_MAPPING` even if no such endpoint exists in the API. --- @@ -346,7 +347,7 @@ DTO_MAPPING = { > *Never send this query parameter as part of a request* Some optional query parameters have a range of valid values that depend on one or more path parameters. -Since path parameters are part of a url, they cannot be optional or empty so to extend the path parameters with optional parameters, query parameters can be used. +Since path parameters are part of a URL, they cannot be optional or empty so to extend the path parameters with optional parameters, query parameters can be used. To illustrate this, let's imagine an API where the energy label for a building can be requested: `/energylabel/${zipcode}/${home_number}`. Some addresses however have an address extension, e.g. 1234AB 42 2.C. 
diff --git a/docs/driver.md b/docs/driver.md index d489c1a..07b934b 100644 --- a/docs/driver.md +++ b/docs/driver.md @@ -37,7 +37,7 @@ The OpenAPI Specification (OAS) defines a standard, language-agnostic interface to RESTful APIs, see https://swagger.io/specification/ The OpenApiDriver module implements a reader class that generates a test case for -each endpoint, method and response that is defined in an OpenAPI document, typically +each path, method and response that is defined in an OpenAPI document, typically an openapi.json or openapi.yaml file. > Note: OpenApiDriver is designed for APIs based on the OAS v3 @@ -88,13 +88,13 @@ Library OpenApiDriver Test Template Validate Using Test Endpoint Keyword *** Test Cases *** -Test Endpoint for ${method} on ${endpoint} where ${status_code} is expected +Test Endpoint for ${method} on ${path} where ${status_code} is expected *** Keywords *** Validate Using Test Endpoint Keyword - [Arguments] ${endpoint} ${method} ${status_code} + [Arguments] ${path} ${method} ${status_code} Test Endpoint - ... endpoint=${endpoint} method=${method} status_code=${status_code} + ... path=${path} method=${method} status_code=${status_code} ``` diff --git a/docs/libcore.md b/docs/libcore.md index a8beed4..4fce562 100644 --- a/docs/libcore.md +++ b/docs/libcore.md @@ -76,7 +76,7 @@ recursion in them. See the `recursion_limit` and `recursion_default` parameters. If the openapi document passes this validation, the next step is trying to do a test run with a minimal test suite. -The example below can be used, with `source`, `origin` and 'endpoint' altered to +The example below can be used, with `source`, `origin` and `path` altered to fit your situation. 
``` robotframework @@ -87,7 +87,7 @@ Library OpenApiLibCore *** Test Cases *** Getting Started - ${url}= Get Valid Url endpoint=/employees/{employee_id} method=get + ${url}= Get Valid Url path=/employees/{employee_id} ``` diff --git a/docs/openapi_libcore.html b/docs/openapi_libcore.html index 6cceb4a..7dd5bb7 100644 --- a/docs/openapi_libcore.html +++ b/docs/openapi_libcore.html @@ -1,1876 +1,410 @@ - + - - - - - - - - - - - - - + + + + + + + - - - - - - - - - -
-

Opening library documentation failed

- -
- - - - - modalBackground.classList.remove('visible'); - modal.classList.remove('visible'); - document.body.style.overflow = 'auto'; - if (window.location.hash.indexOf('#type-') == 0) - history.pushState("", document.title, window.location.pathname); - // modal is hidden with a fading transition, timeout prevents premature emptying of modal - setTimeout(() => { - modalContent.innerHTML = ''; - }, 200); - } + + + + - // http://stackoverflow.com/a/18484799 - var delay = (function () { - var timer = 0; - return function(callback, ms) { - clearTimeout(timer); - timer = setTimeout(callback, ms); - }; - })(); - - - - - + + + + + + - - - - - - - + + + + - - + + - - - - - - - - - - - - + {{#if usages.length}} +
+

{{t "usages"}}

+ +
+ {{/if}} + + + + + + diff --git a/docs/openapidriver.html b/docs/openapidriver.html index 6d2a2c5..875c7be 100644 --- a/docs/openapidriver.html +++ b/docs/openapidriver.html @@ -1,1876 +1,410 @@ - + - - - - - - - - - - - - - + + + + + + + - - - - - - - - - -
-

Opening library documentation failed

- -
- - - - - modalBackground.classList.remove('visible'); - modal.classList.remove('visible'); - document.body.style.overflow = 'auto'; - if (window.location.hash.indexOf('#type-') == 0) - history.pushState("", document.title, window.location.pathname); - // modal is hidden with a fading transition, timeout prevents premature emptying of modal - setTimeout(() => { - modalContent.innerHTML = ''; - }, 200); - } + + + + - // http://stackoverflow.com/a/18484799 - var delay = (function () { - var timer = 0; - return function(callback, ms) { - clearTimeout(timer); - timer = setTimeout(callback, ms); - }; - })(); - - - - - + + + + + + - - - - - - - + + + + - - + + - - - - - - - - - - - - + {{#if usages.length}} +
+

{{t "usages"}}

+ +
+ {{/if}} + + + + + + diff --git a/poetry.lock b/poetry.lock index 9e36cd6..cfc59bf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "annotated-types" @@ -14,14 +14,14 @@ files = [ [[package]] name = "anyio" -version = "4.7.0" +version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, - {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, ] [package.dependencies] @@ -32,19 +32,19 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] name = "astroid" -version = "3.3.8" +version = "3.3.9" description 
= "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.9.0" groups = ["lint-and-format"] files = [ - {file = "astroid-3.3.8-py3-none-any.whl", hash = "sha256:187ccc0c248bfbba564826c26f070494f7bc964fd286b6d9fff4420e55de828c"}, - {file = "astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b"}, + {file = "astroid-3.3.9-py3-none-any.whl", hash = "sha256:d05bfd0acba96a7bd43e222828b7d9bc1e138aaeb0649707908d3702a9831248"}, + {file = "astroid-3.3.9.tar.gz", hash = "sha256:622cc8e3048684aa42c820d9d218978021c3c3d174fb03a9f0d615921744f550"}, ] [package.dependencies] @@ -52,54 +52,54 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "attrs" -version = "24.2.0" +version = "25.1.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, + {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; 
platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "black" -version = "24.10.0" +version = "25.1.0" 
description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, - {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, - {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, - {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, - {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, - {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, - {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, - {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, - {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, - {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, - {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, - {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, - {file = 
"black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, - {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, - {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, - {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, - {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, - {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, - {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, - {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, - {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, - {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, ] [package.dependencies] @@ -119,14 +119,14 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2024.8.30" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" groups = ["main"] files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] @@ -143,129 +143,116 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.7" groups = ["main"] files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, 
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = 
"charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = 
"charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = 
"charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", 
hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash 
= "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["main", "dev", "lint-and-format"] +groups = ["main", "dev", "lint-and-format", "type-checking"] files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -277,90 +264,91 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "dev", "lint-and-format"] +groups = ["main", "dev", "lint-and-format", "type-checking"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "platform_system == \"Windows\"", dev = "platform_system == \"Windows\""} +markers = {main = "platform_system == \"Windows\""} [[package]] name = "coverage" -version = "7.6.9" +version = "7.6.12" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"}, - {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"}, - {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"}, - {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"}, - {file = 
"coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"}, - {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"}, - {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"}, - {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"}, - {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"}, - {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"}, - {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"}, - 
{file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"}, - {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"}, - {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"}, - {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"}, - {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"}, - {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"}, - {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"}, - {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"}, - {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"}, - {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"}, - {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"}, - {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"}, - {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"}, - {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"}, - {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"}, - {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"}, - {file = 
"coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"}, - {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"}, - {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"}, - {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"}, - {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, + {file = 
"coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, + {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, + {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, + {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, + {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, + {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, + {file 
= "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, + {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, + {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, + {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, + {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, + {file = 
"coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, + {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, + {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, + {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, + {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, + {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "dill" @@ -408,40 +396,39 @@ test = ["pytest (>=6)"] [[package]] name = "faker" -version = "33.1.0" +version = "37.0.0" description = "Faker is a Python package that generates fake data for you." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "Faker-33.1.0-py3-none-any.whl", hash = "sha256:d30c5f0e2796b8970de68978365247657486eb0311c5abe88d0b895b68dff05d"}, - {file = "faker-33.1.0.tar.gz", hash = "sha256:1c925fc0e86a51fc46648b504078c88d0cd48da1da2595c4e712841cab43a1e4"}, + {file = "faker-37.0.0-py3-none-any.whl", hash = "sha256:2598f78b76710a4ed05e197dda5235be409b4c291ba5c9c7514989cfbc7a5144"}, + {file = "faker-37.0.0.tar.gz", hash = "sha256:d2e4e2a30d459a8ec0ae52a552aa51c48973cb32cf51107dee90f58a8322a880"}, ] [package.dependencies] -python-dateutil = ">=2.4" -typing-extensions = "*" +tzdata = "*" [[package]] name = "fastapi" -version = "0.115.6" +version = "0.115.11" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "fastapi-0.115.6-py3-none-any.whl", hash = "sha256:e9240b29e36fa8f4bb7290316988e90c381e5092e0cbe84e7818cc3713bcf305"}, - {file = "fastapi-0.115.6.tar.gz", hash = "sha256:9ec46f7addc14ea472958a96aae5b5de65f39721a46aaf5705c480d9a8b76654"}, + {file = "fastapi-0.115.11-py3-none-any.whl", hash = "sha256:32e1541b7b74602e4ef4a0260ecaf3aadf9d4f19590bba3e1bf2ac4666aa2c64"}, + {file = "fastapi-0.115.11.tar.gz", hash = "sha256:cc81f03f688678b92600a65a5e618b93592c65005db37157147204d8924bf94f"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.40.0,<0.42.0" +starlette = ">=0.40.0,<0.47.0" typing-extensions = ">=4.8.0" [package.extras] -all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson 
(>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] -standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "h11" @@ -496,29 +483,30 @@ files = [ [[package]] name = "isort" -version = "5.13.2" +version = "6.0.1" description = "A Python utility / library to sort Python imports." optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" groups = ["lint-and-format"] files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, + {file = "isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615"}, + {file = "isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450"}, ] [package.extras] -colors = ["colorama (>=0.4.6)"] +colors = ["colorama"] +plugins = ["setuptools"] [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.6" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" groups = ["main", "lint-and-format"] files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -551,32 +539,32 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-path" -version = "0.3.3" +version = "0.3.4" description = "JSONSchema Spec with object-oriented paths" optional = false python-versions = "<4.0.0,>=3.8.0" groups = ["main"] files = [ - {file = "jsonschema_path-0.3.3-py3-none-any.whl", hash = "sha256:203aff257f8038cd3c67be614fe6b2001043408cb1b4e36576bc4921e09d83c4"}, - {file = "jsonschema_path-0.3.3.tar.gz", hash = "sha256:f02e5481a4288ec062f8e68c808569e427d905bedfecb7f2e4c69ef77957c382"}, + {file = "jsonschema_path-0.3.4-py3-none-any.whl", hash = "sha256:f502191fdc2b22050f9a81c9237be9d27145b9001c55842bece5e94e382e52f8"}, + {file = "jsonschema_path-0.3.4.tar.gz", hash = "sha256:8365356039f16cc65fddffafda5f58766e34bebab7d6d105616ab52bc4297001"}, ] [package.dependencies] pathable = ">=0.4.1,<0.5.0" PyYAML = ">=5.1" -referencing = ">=0.28.0,<0.36.0" +referencing = "<0.37.0" requests = ">=2.31.0,<3.0.0" [[package]] name = "jsonschema-specifications" -version = "2023.12.1" +version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = 
"sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, ] [package.dependencies] @@ -751,14 +739,14 @@ files = [ [[package]] name = "more-itertools" -version = "10.5.0" +version = "10.6.0" description = "More routines for operating on iterables, beyond itertools" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"}, - {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"}, + {file = "more-itertools-10.6.0.tar.gz", hash = "sha256:2cd7fad1009c31cc9fb6a035108509e6547547a7a738374f10bd49a09eb3ee3b"}, + {file = "more_itertools-10.6.0-py3-none-any.whl", hash = "sha256:6eb054cb4b6db1473f6e15fcc676a08e4732548acd47c708f0e179c2c7c01e89"}, ] [[package]] @@ -872,19 +860,19 @@ starlette = ["aioitertools (>=0.11.0,<0.12.0)", "starlette (>=0.26.1,<0.39.0)"] [[package]] name = "openapi-schema-validator" -version = "0.6.2" +version = "0.6.3" description = "OpenAPI schema validation for Python" optional = false -python-versions = ">=3.8.0,<4.0.0" +python-versions = "<4.0.0,>=3.8.0" groups = ["main"] files = [ - {file = "openapi_schema_validator-0.6.2-py3-none-any.whl", hash = "sha256:c4887c1347c669eb7cded9090f4438b710845cd0f90d1fb9e1b3303fb37339f8"}, - {file = "openapi_schema_validator-0.6.2.tar.gz", hash = "sha256:11a95c9c9017912964e3e5f2545a5b11c3814880681fcacfb73b1759bb4f2804"}, + {file = 
"openapi_schema_validator-0.6.3-py3-none-any.whl", hash = "sha256:f3b9870f4e556b5a62a1c39da72a6b4b16f3ad9c73dc80084b1b11e74ba148a3"}, + {file = "openapi_schema_validator-0.6.3.tar.gz", hash = "sha256:f37bace4fc2a5d96692f4f8b31dc0f8d7400fd04f3a937798eaf880d425de6ee"}, ] [package.dependencies] jsonschema = ">=4.19.1,<5.0.0" -jsonschema-specifications = ">=2023.5.2,<2024.0.0" +jsonschema-specifications = ">=2023.5.2" rfc3339-validator = "*" [[package]] @@ -931,14 +919,14 @@ files = [ [[package]] name = "pathable" -version = "0.4.3" +version = "0.4.4" description = "Object-oriented paths" optional = false -python-versions = ">=3.7.0,<4.0.0" +python-versions = "<4.0.0,>=3.7.0" groups = ["main"] files = [ - {file = "pathable-0.4.3-py3-none-any.whl", hash = "sha256:cdd7b1f9d7d5c8b8d3315dbf5a86b2596053ae845f056f57d97c0eefff84da14"}, - {file = "pathable-0.4.3.tar.gz", hash = "sha256:5c869d315be50776cc8a993f3af43e0c60dc01506b399643f919034ebf4cdcab"}, + {file = "pathable-0.4.4-py3-none-any.whl", hash = "sha256:5ae9e94793b6ef5a4cbe0a7ce9dbbefc1eec38df253763fd0aeeacf2762dbbc2"}, + {file = "pathable-0.4.4.tar.gz", hash = "sha256:6905a3cd17804edfac7875b5f6c9142a218c7caef78693c2dbbbfbac186d88b2"}, ] [[package]] @@ -959,7 +947,7 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" -groups = ["main", "lint-and-format"] +groups = ["main", "dev", "lint-and-format", "type-checking"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -970,6 +958,22 @@ docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-a test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] type = ["mypy (>=1.11.2)"] +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +groups = ["dev", "type-checking"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "prance" version = "23.6.21.0" @@ -1000,133 +1004,133 @@ ssv = ["swagger-spec-validator (>=2.4,<3.0)"] [[package]] name = "pydantic" -version = "2.10.3" +version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, - {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = 
"sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.1" +pydantic-core = "2.27.2" typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" -version = "2.27.1" +version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = 
"pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = 
"pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = 
"pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, - {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, - {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = 
"pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", 
hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = 
"pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = 
"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", 
hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = 
"sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] @@ -1134,14 +1138,14 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygments" -version = "2.18.0" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" groups = ["main", "lint-and-format"] files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] @@ -1149,14 +1153,14 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pylint" -version = "3.3.4" +version = "3.3.5" description = "python code static checker" optional = false python-versions = ">=3.9.0" groups = ["lint-and-format"] files = [ - {file = "pylint-3.3.4-py3-none-any.whl", hash = "sha256:289e6a1eb27b453b08436478391a48cd53bb0efb824873f949e709350f3de018"}, - {file = "pylint-3.3.4.tar.gz", hash = "sha256:74ae7a38b177e69a9b525d0794bd8183820bfa7eb68cc1bee6e8ed22a42be4ce"}, + {file = "pylint-3.3.5-py3-none-any.whl", hash = "sha256:7cb170929a371238530b2eeea09f5f28236d106b70308c3d46a9c0cf11634633"}, + {file = "pylint-3.3.5.tar.gz", hash = "sha256:38d0f784644ed493d91f76b5333a0e370a1c1bc97c22068a77523b4bf1e82c31"}, ] [package.dependencies] @@ -1165,7 +1169,7 @@ colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, + {version = ">=0.3.6", markers = "python_version == \"3.11\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<7" mccabe = ">=0.6,<0.8" @@ -1179,14 +1183,14 @@ testutils = ["gitpython (>3)"] [[package]] name = "pyright" -version = "1.1.390" +version = "1.1.396" description = "Command line wrapper for pyright" 
optional = false python-versions = ">=3.7" groups = ["type-checking"] files = [ - {file = "pyright-1.1.390-py3-none-any.whl", hash = "sha256:ecebfba5b6b50af7c1a44c2ba144ba2ab542c227eb49bc1f16984ff714e0e110"}, - {file = "pyright-1.1.390.tar.gz", hash = "sha256:aad7f160c49e0fbf8209507a15e17b781f63a86a1facb69ca877c71ef2e9538d"}, + {file = "pyright-1.1.396-py3-none-any.whl", hash = "sha256:c635e473095b9138c471abccca22b9fedbe63858e0b40d4fc4b67da041891844"}, + {file = "pyright-1.1.396.tar.gz", hash = "sha256:142901f5908f5a0895be3d3befcc18bedcdb8cc1798deecaec86ef7233a29b03"}, ] [package.dependencies] @@ -1204,7 +1208,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "lint-and-format"] +groups = ["lint-and-format"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1215,14 +1219,14 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2024.2" +version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" groups = ["lint-and-format"] files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, ] [[package]] @@ -1290,19 +1294,20 @@ files = [ [[package]] name = "referencing" -version = "0.35.1" +version = "0.36.2" description 
= "JSON Referencing + Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, + {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, + {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, ] [package.dependencies] attrs = ">=22.2.0" rpds-py = ">=0.7.0" +typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} [[package]] name = "requests" @@ -1363,35 +1368,169 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rich-click" -version = "1.8.3" +version = "1.8.5" description = "Format click help output nicely with rich" optional = false python-versions = ">=3.7" groups = ["main", "lint-and-format"] files = [ - {file = "rich_click-1.8.3-py3-none-any.whl", hash = "sha256:636d9c040d31c5eee242201b5bf4f2d358bfae4db14bb22ec1cafa717cfd02cd"}, - {file = "rich_click-1.8.3.tar.gz", hash = "sha256:6d75bdfa7aa9ed2c467789a0688bc6da23fbe3a143e19aa6ad3f8bac113d2ab3"}, + {file = "rich_click-1.8.5-py3-none-any.whl", hash = "sha256:0fab7bb5b66c15da17c210b4104277cd45f3653a7322e0098820a169880baee0"}, + {file = "rich_click-1.8.5.tar.gz", hash = "sha256:a3eebe81da1c9da3c32f3810017c79bd687ff1b3fa35bfc9d8a3338797f1d1a1"}, ] [package.dependencies] click = ">=7" rich = ">=10.7" -typing-extensions = "*" +typing_extensions = ">=4" [package.extras] dev = ["mypy", "packaging", "pre-commit", "pytest", "pytest-cov", "rich-codex", "ruff", "types-setuptools"] -docs = ["markdown-include", "mkdocs", "mkdocs-glightbox", "mkdocs-material-extensions", "mkdocs-material[imaging] (>=9.5.18,<9.6.0)", "mkdocs-rss-plugin", 
"mkdocstrings[python]", "rich-codex"] +docs = ["markdown_include", "mkdocs", "mkdocs-glightbox", "mkdocs-material-extensions", "mkdocs-material[imaging] (>=9.5.18,<9.6.0)", "mkdocs-rss-plugin", "mkdocstrings[python]", "rich-codex"] + +[[package]] +name = "robotcode" +version = "1.0.3" +description = "Command line interface for RobotCode" +optional = false +python-versions = ">=3.8" +groups = ["dev", "type-checking"] +files = [ + {file = "robotcode-1.0.3-py3-none-any.whl", hash = "sha256:91226b78da1d8ce81921ae0caf4e4d5fc4adcbc76e2bdf805fa8a8ccb2a0dfd9"}, + {file = "robotcode-1.0.3.tar.gz", hash = "sha256:4981cca799eef7763c1bd392a4af10c15aa345bc0b3a6f1260da529faa08c614"}, +] + +[package.dependencies] +robotcode-core = "1.0.3" +robotcode-plugin = "1.0.3" +robotcode-robot = "1.0.3" + +[package.extras] +all = ["docutils", "pyyaml (>=5.4)", "rich", "robotcode-analyze (==1.0.3)", "robotcode-debugger (==1.0.3)", "robotcode-language-server (==1.0.3)", "robotcode-repl (==1.0.3)", "robotcode-repl-server (==1.0.3)", "robotcode-runner (==1.0.3)", "robotframework-robocop (>=2.0.0)", "robotframework-tidy (>=2.0.0)"] +analyze = ["robotcode-analyze (==1.0.3)"] +colored = ["rich"] +debugger = ["robotcode-debugger (==1.0.3)"] +languageserver = ["robotcode-language-server (==1.0.3)"] +lint = ["robotframework-robocop (>=2.0.0)"] +repl = ["robotcode-repl (==1.0.3)"] +replserver = ["robotcode-repl-server (==1.0.3)"] +rest = ["docutils"] +runner = ["robotcode-runner (==1.0.3)"] +tidy = ["robotframework-tidy (>=2.0.0)"] +yaml = ["pyyaml (>=5.4)"] + +[[package]] +name = "robotcode-analyze" +version = "1.0.3" +description = "RobotCode analyze plugin for Robot Framework" +optional = false +python-versions = ">=3.8" +groups = ["type-checking"] +files = [ + {file = "robotcode_analyze-1.0.3-py3-none-any.whl", hash = "sha256:d8083a75c1329721816c48e51c98f7f17e0dc9d43b9f0eebf2e9c6a9bad3a21f"}, + {file = "robotcode_analyze-1.0.3.tar.gz", hash = 
"sha256:fa3f2e329c521a3ed3d9e2ea05f91c6e552196a3abbc157a7f051c66563639c8"}, +] + +[package.dependencies] +robotcode = "1.0.3" +robotcode-plugin = "1.0.3" +robotcode-robot = "1.0.3" +robotframework = ">=4.1.0" + +[[package]] +name = "robotcode-core" +version = "1.0.3" +description = "Some core classes for RobotCode" +optional = false +python-versions = ">=3.8" +groups = ["dev", "type-checking"] +files = [ + {file = "robotcode_core-1.0.3-py3-none-any.whl", hash = "sha256:920da47efd9e4d1f7756156d672c9fa4232cc1fd6c92f2bd0f847689b93e2d59"}, + {file = "robotcode_core-1.0.3.tar.gz", hash = "sha256:0fc96582806f7488506db25142c3adbb6771dacfdc3f2dab6ed1b5835129a0d1"}, +] + +[package.dependencies] +typing-extensions = ">=4.4.0" + +[[package]] +name = "robotcode-modifiers" +version = "1.0.3" +description = "Some Robot Framework Modifiers for RobotCode" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "robotcode_modifiers-1.0.3-py3-none-any.whl", hash = "sha256:871a5b1affe56a818571c50426320f046e55b9e795219f9d411f1c28e6228b10"}, + {file = "robotcode_modifiers-1.0.3.tar.gz", hash = "sha256:755edb9b3b97131d4bd12bb8edf85bca33981ab46ff03852302a5f846e35a240"}, +] + +[package.dependencies] +robotframework = ">=4.1.0" + +[[package]] +name = "robotcode-plugin" +version = "1.0.3" +description = "Some classes for RobotCode plugin management" +optional = false +python-versions = ">=3.8" +groups = ["dev", "type-checking"] +files = [ + {file = "robotcode_plugin-1.0.3-py3-none-any.whl", hash = "sha256:ebb3c7acf98e3c697d4026fabd585fd8354f1164b202bcf7ed6a5e3ed4d3e982"}, + {file = "robotcode_plugin-1.0.3.tar.gz", hash = "sha256:ba9441268016684f9e028b32d85cd6ff82f8754555bcbb410c6d9da6b721e515"}, +] + +[package.dependencies] +click = ">=8.1.0" +colorama = ">=0.4.6" +pluggy = ">=1.0.0" +tomli-w = ">=1.0.0" + +[[package]] +name = "robotcode-robot" +version = "1.0.3" +description = "Support classes for RobotCode for handling Robot Framework projects." 
+optional = false +python-versions = ">=3.8" +groups = ["dev", "type-checking"] +files = [ + {file = "robotcode_robot-1.0.3-py3-none-any.whl", hash = "sha256:3bf953be1fed1e1669182b10d29e211a7a6b6ce4417e3e462bf1e44fc0db56da"}, + {file = "robotcode_robot-1.0.3.tar.gz", hash = "sha256:f00846374194ba6f30df893fff9c2e5ac8fb10a4af9da74e0c5f9d72af40c0c1"}, +] + +[package.dependencies] +platformdirs = ">=3.2.0,<4.4.0" +robotcode-core = "1.0.3" +robotframework = ">=4.1.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "robotcode-runner" +version = "1.0.3" +description = "RobotCode runner for Robot Framework" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "robotcode_runner-1.0.3-py3-none-any.whl", hash = "sha256:7d64c5bfbec7ec8de6ad0b5bfa7b691613d57e1536a418fd63bb2adda20cff15"}, + {file = "robotcode_runner-1.0.3.tar.gz", hash = "sha256:c2128e876bd7e33822641630f3811141a574514dbc352a24adabaadbd2315656"}, +] + +[package.dependencies] +robotcode = "1.0.3" +robotcode-modifiers = "1.0.3" +robotcode-plugin = "1.0.3" +robotcode-robot = "1.0.3" +robotframework = ">=4.1.0" [[package]] name = "robotframework" -version = "7.1.1" +version = "7.2.2" description = "Generic automation framework for acceptance testing and robotic process automation (RPA)" optional = false python-versions = ">=3.8" -groups = ["main", "dev", "lint-and-format"] +groups = ["main", "dev", "lint-and-format", "type-checking"] files = [ - {file = "robotframework-7.1.1-py3-none-any.whl", hash = "sha256:0461360be00dfb8ce1ab3f42370fa6eea3779e41c0b8d79a1f8ddcd2ec8e3679"}, - {file = "robotframework-7.1.1.zip", hash = "sha256:f85919c68c4d0837006e5f09dde1ef689f082eba2e7e64d5758753f9ee8bfea9"}, + {file = "robotframework-7.2.2-py3-none-any.whl", hash = "sha256:1cb4ec69d52aae515bf6037cee66a2a2d8dc3256368081c0f4b3d4578d40904e"}, + {file = "robotframework-7.2.2.tar.gz", hash = 
"sha256:9c420f6d35e9c8cd4b75b77cc78e36407604534ec4ab0cbddf699d7c0b2fc435"}, ] [[package]] @@ -1482,115 +1621,115 @@ generate-config = ["tomli_w (>=1.0,<1.2)"] [[package]] name = "rpds-py" -version = "0.22.3" +version = "0.23.1" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, - {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, - {file = 
"rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, - {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, - {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, - {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, - {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, - {file = 
"rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, - {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, - {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, - {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, - {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, - {file = 
"rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, - 
{file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, - {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, - {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, - {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, + {file = "rpds_py-0.23.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2a54027554ce9b129fc3d633c92fa33b30de9f08bc61b32c053dc9b537266fed"}, + {file = "rpds_py-0.23.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5ef909a37e9738d146519657a1aab4584018746a18f71c692f2f22168ece40c"}, + {file = "rpds_py-0.23.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ee9d6f0b38efb22ad94c3b68ffebe4c47865cdf4b17f6806d6c674e1feb4246"}, + {file = "rpds_py-0.23.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7356a6da0562190558c4fcc14f0281db191cdf4cb96e7604c06acfcee96df15"}, + {file = "rpds_py-0.23.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9441af1d25aed96901f97ad83d5c3e35e6cd21a25ca5e4916c82d7dd0490a4fa"}, + {file = "rpds_py-0.23.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d8abf7896a91fb97e7977d1aadfcc2c80415d6dc2f1d0fca5b8d0df247248f3"}, + {file = "rpds_py-0.23.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b08027489ba8fedde72ddd233a5ea411b85a6ed78175f40285bd401bde7466d"}, + {file = 
"rpds_py-0.23.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fee513135b5a58f3bb6d89e48326cd5aa308e4bcdf2f7d59f67c861ada482bf8"}, + {file = "rpds_py-0.23.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:35d5631ce0af26318dba0ae0ac941c534453e42f569011585cb323b7774502a5"}, + {file = "rpds_py-0.23.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a20cb698c4a59c534c6701b1c24a968ff2768b18ea2991f886bd8985ce17a89f"}, + {file = "rpds_py-0.23.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e9c206a1abc27e0588cf8b7c8246e51f1a16a103734f7750830a1ccb63f557a"}, + {file = "rpds_py-0.23.1-cp310-cp310-win32.whl", hash = "sha256:d9f75a06ecc68f159d5d7603b734e1ff6daa9497a929150f794013aa9f6e3f12"}, + {file = "rpds_py-0.23.1-cp310-cp310-win_amd64.whl", hash = "sha256:f35eff113ad430b5272bbfc18ba111c66ff525828f24898b4e146eb479a2cdda"}, + {file = "rpds_py-0.23.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b79f5ced71efd70414a9a80bbbfaa7160da307723166f09b69773153bf17c590"}, + {file = "rpds_py-0.23.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c9e799dac1ffbe7b10c1fd42fe4cd51371a549c6e108249bde9cd1200e8f59b4"}, + {file = "rpds_py-0.23.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:721f9c4011b443b6e84505fc00cc7aadc9d1743f1c988e4c89353e19c4a968ee"}, + {file = "rpds_py-0.23.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f88626e3f5e57432e6191cd0c5d6d6b319b635e70b40be2ffba713053e5147dd"}, + {file = "rpds_py-0.23.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:285019078537949cecd0190f3690a0b0125ff743d6a53dfeb7a4e6787af154f5"}, + {file = "rpds_py-0.23.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b92f5654157de1379c509b15acec9d12ecf6e3bc1996571b6cb82a4302060447"}, + {file = "rpds_py-0.23.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e768267cbe051dd8d1c5305ba690bb153204a09bf2e3de3ae530de955f5b5580"}, + {file = "rpds_py-0.23.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c5334a71f7dc1160382d45997e29f2637c02f8a26af41073189d79b95d3321f1"}, + {file = "rpds_py-0.23.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6adb81564af0cd428910f83fa7da46ce9ad47c56c0b22b50872bc4515d91966"}, + {file = "rpds_py-0.23.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cafa48f2133d4daa028473ede7d81cd1b9f9e6925e9e4003ebdf77010ee02f35"}, + {file = "rpds_py-0.23.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fced9fd4a07a1ded1bac7e961ddd9753dd5d8b755ba8e05acba54a21f5f1522"}, + {file = "rpds_py-0.23.1-cp311-cp311-win32.whl", hash = "sha256:243241c95174b5fb7204c04595852fe3943cc41f47aa14c3828bc18cd9d3b2d6"}, + {file = "rpds_py-0.23.1-cp311-cp311-win_amd64.whl", hash = "sha256:11dd60b2ffddba85715d8a66bb39b95ddbe389ad2cfcf42c833f1bcde0878eaf"}, + {file = "rpds_py-0.23.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3902df19540e9af4cc0c3ae75974c65d2c156b9257e91f5101a51f99136d834c"}, + {file = "rpds_py-0.23.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66f8d2a17e5838dd6fb9be6baaba8e75ae2f5fa6b6b755d597184bfcd3cb0eba"}, + {file = "rpds_py-0.23.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:112b8774b0b4ee22368fec42749b94366bd9b536f8f74c3d4175d4395f5cbd31"}, + {file = "rpds_py-0.23.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0df046f2266e8586cf09d00588302a32923eb6386ced0ca5c9deade6af9a149"}, + {file = "rpds_py-0.23.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3288930b947cbebe767f84cf618d2cbe0b13be476e749da0e6a009f986248c"}, + {file = "rpds_py-0.23.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce473a2351c018b06dd8d30d5da8ab5a0831056cc53b2006e2a8028172c37ce5"}, + {file = 
"rpds_py-0.23.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d550d7e9e7d8676b183b37d65b5cd8de13676a738973d330b59dc8312df9c5dc"}, + {file = "rpds_py-0.23.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e14f86b871ea74c3fddc9a40e947d6a5d09def5adc2076ee61fb910a9014fb35"}, + {file = "rpds_py-0.23.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1bf5be5ba34e19be579ae873da515a2836a2166d8d7ee43be6ff909eda42b72b"}, + {file = "rpds_py-0.23.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7031d493c4465dbc8d40bd6cafefef4bd472b17db0ab94c53e7909ee781b9ef"}, + {file = "rpds_py-0.23.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:55ff4151cfd4bc635e51cfb1c59ac9f7196b256b12e3a57deb9e5742e65941ad"}, + {file = "rpds_py-0.23.1-cp312-cp312-win32.whl", hash = "sha256:a9d3b728f5a5873d84cba997b9d617c6090ca5721caaa691f3b1a78c60adc057"}, + {file = "rpds_py-0.23.1-cp312-cp312-win_amd64.whl", hash = "sha256:b03a8d50b137ee758e4c73638b10747b7c39988eb8e6cd11abb7084266455165"}, + {file = "rpds_py-0.23.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4caafd1a22e5eaa3732acb7672a497123354bef79a9d7ceed43387d25025e935"}, + {file = "rpds_py-0.23.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:178f8a60fc24511c0eb756af741c476b87b610dba83270fce1e5a430204566a4"}, + {file = "rpds_py-0.23.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c632419c3870507ca20a37c8f8f5352317aca097639e524ad129f58c125c61c6"}, + {file = "rpds_py-0.23.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:698a79d295626ee292d1730bc2ef6e70a3ab135b1d79ada8fde3ed0047b65a10"}, + {file = "rpds_py-0.23.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271fa2184cf28bdded86bb6217c8e08d3a169fe0bbe9be5e8d96e8476b707122"}, + {file = "rpds_py-0.23.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b91cceb5add79ee563bd1f70b30896bd63bc5f78a11c1f00a1e931729ca4f1f4"}, + {file = "rpds_py-0.23.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a6cb95074777f1ecda2ca4fa7717caa9ee6e534f42b7575a8f0d4cb0c24013"}, + {file = "rpds_py-0.23.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50fb62f8d8364978478b12d5f03bf028c6bc2af04082479299139dc26edf4c64"}, + {file = "rpds_py-0.23.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c8f7e90b948dc9dcfff8003f1ea3af08b29c062f681c05fd798e36daa3f7e3e8"}, + {file = "rpds_py-0.23.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5b98b6c953e5c2bda51ab4d5b4f172617d462eebc7f4bfdc7c7e6b423f6da957"}, + {file = "rpds_py-0.23.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2893d778d4671ee627bac4037a075168b2673c57186fb1a57e993465dbd79a93"}, + {file = "rpds_py-0.23.1-cp313-cp313-win32.whl", hash = "sha256:2cfa07c346a7ad07019c33fb9a63cf3acb1f5363c33bc73014e20d9fe8b01cdd"}, + {file = "rpds_py-0.23.1-cp313-cp313-win_amd64.whl", hash = "sha256:3aaf141d39f45322e44fc2c742e4b8b4098ead5317e5f884770c8df0c332da70"}, + {file = "rpds_py-0.23.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:759462b2d0aa5a04be5b3e37fb8183615f47014ae6b116e17036b131985cb731"}, + {file = "rpds_py-0.23.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3e9212f52074fc9d72cf242a84063787ab8e21e0950d4d6709886fb62bcb91d5"}, + {file = "rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e9f3a3ac919406bc0414bbbd76c6af99253c507150191ea79fab42fdb35982a"}, + {file = "rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c04ca91dda8a61584165825907f5c967ca09e9c65fe8966ee753a3f2b019fe1e"}, + {file = "rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab923167cfd945abb9b51a407407cf19f5bee35001221f2911dc85ffd35ff4f"}, + {file = 
"rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed6f011bedca8585787e5082cce081bac3d30f54520097b2411351b3574e1219"}, + {file = "rpds_py-0.23.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959bb9928c5c999aba4a3f5a6799d571ddc2c59ff49917ecf55be2bbb4e3722"}, + {file = "rpds_py-0.23.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ed7de3c86721b4e83ac440751329ec6a1102229aa18163f84c75b06b525ad7e"}, + {file = "rpds_py-0.23.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5fb89edee2fa237584e532fbf78f0ddd1e49a47c7c8cfa153ab4849dc72a35e6"}, + {file = "rpds_py-0.23.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7e5413d2e2d86025e73f05510ad23dad5950ab8417b7fc6beaad99be8077138b"}, + {file = "rpds_py-0.23.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d31ed4987d72aabdf521eddfb6a72988703c091cfc0064330b9e5f8d6a042ff5"}, + {file = "rpds_py-0.23.1-cp313-cp313t-win32.whl", hash = "sha256:f3429fb8e15b20961efca8c8b21432623d85db2228cc73fe22756c6637aa39e7"}, + {file = "rpds_py-0.23.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d6f6512a90bd5cd9030a6237f5346f046c6f0e40af98657568fa45695d4de59d"}, + {file = "rpds_py-0.23.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:09cd7dbcb673eb60518231e02874df66ec1296c01a4fcd733875755c02014b19"}, + {file = "rpds_py-0.23.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c6760211eee3a76316cf328f5a8bd695b47b1626d21c8a27fb3b2473a884d597"}, + {file = "rpds_py-0.23.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72e680c1518733b73c994361e4b06441b92e973ef7d9449feec72e8ee4f713da"}, + {file = "rpds_py-0.23.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae28144c1daa61366205d32abd8c90372790ff79fc60c1a8ad7fd3c8553a600e"}, + {file = "rpds_py-0.23.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c698d123ce5d8f2d0cd17f73336615f6a2e3bdcedac07a1291bb4d8e7d82a05a"}, + {file = "rpds_py-0.23.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98b257ae1e83f81fb947a363a274c4eb66640212516becaff7bef09a5dceacaa"}, + {file = "rpds_py-0.23.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c9ff044eb07c8468594d12602291c635da292308c8c619244e30698e7fc455a"}, + {file = "rpds_py-0.23.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7938c7b0599a05246d704b3f5e01be91a93b411d0d6cc62275f025293b8a11ce"}, + {file = "rpds_py-0.23.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e9cb79ecedfc156c0692257ac7ed415243b6c35dd969baa461a6888fc79f2f07"}, + {file = "rpds_py-0.23.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7b77e07233925bd33fc0022b8537774423e4c6680b6436316c5075e79b6384f4"}, + {file = "rpds_py-0.23.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a970bfaf130c29a679b1d0a6e0f867483cea455ab1535fb427566a475078f27f"}, + {file = "rpds_py-0.23.1-cp39-cp39-win32.whl", hash = "sha256:4233df01a250b3984465faed12ad472f035b7cd5240ea3f7c76b7a7016084495"}, + {file = "rpds_py-0.23.1-cp39-cp39-win_amd64.whl", hash = "sha256:c617d7453a80e29d9973b926983b1e700a9377dbe021faa36041c78537d7b08c"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c1f8afa346ccd59e4e5630d5abb67aba6a9812fddf764fd7eb11f382a345f8cc"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fad784a31869747df4ac968a351e070c06ca377549e4ace94775aaa3ab33ee06"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5a96fcac2f18e5a0a23a75cd27ce2656c66c11c127b0318e508aab436b77428"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3e77febf227a1dc3220159355dba68faa13f8dca9335d97504abf428469fb18b"}, + {file = 
"rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26bb3e8de93443d55e2e748e9fd87deb5f8075ca7bc0502cfc8be8687d69a2ec"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db7707dde9143a67b8812c7e66aeb2d843fe33cc8e374170f4d2c50bd8f2472d"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eedaaccc9bb66581d4ae7c50e15856e335e57ef2734dbc5fd8ba3e2a4ab3cb6"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28358c54fffadf0ae893f6c1050e8f8853e45df22483b7fff2f6ab6152f5d8bf"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:633462ef7e61d839171bf206551d5ab42b30b71cac8f10a64a662536e057fdef"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a98f510d86f689fcb486dc59e6e363af04151e5260ad1bdddb5625c10f1e95f8"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e0397dd0b3955c61ef9b22838144aa4bef6f0796ba5cc8edfc64d468b93798b4"}, + {file = "rpds_py-0.23.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:75307599f0d25bf6937248e5ac4e3bde5ea72ae6618623b86146ccc7845ed00b"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3614d280bf7aab0d3721b5ce0e73434acb90a2c993121b6e81a1c15c665298ac"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e5963ea87f88bddf7edd59644a35a0feecf75f8985430124c253612d4f7d27ae"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad76f44f70aac3a54ceb1813ca630c53415da3a24fd93c570b2dfb4856591017"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2c6ae11e6e93728d86aafc51ced98b1658a0080a7dd9417d24bfb955bb09c3c2"}, + {file = 
"rpds_py-0.23.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc869af5cba24d45fb0399b0cfdbcefcf6910bf4dee5d74036a57cf5264b3ff4"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c76b32eb2ab650a29e423525e84eb197c45504b1c1e6e17b6cc91fcfeb1a4b1d"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4263320ed887ed843f85beba67f8b2d1483b5947f2dc73a8b068924558bfeace"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7f9682a8f71acdf59fd554b82b1c12f517118ee72c0f3944eda461606dfe7eb9"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:754fba3084b70162a6b91efceee8a3f06b19e43dac3f71841662053c0584209a"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:a1c66e71ecfd2a4acf0e4bd75e7a3605afa8f9b28a3b497e4ba962719df2be57"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:8d67beb6002441faef8251c45e24994de32c4c8686f7356a1f601ad7c466f7c3"}, + {file = "rpds_py-0.23.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a1e17d8dc8e57d8e0fd21f8f0f0a5211b3fa258b2e444c2053471ef93fe25a00"}, + {file = "rpds_py-0.23.1.tar.gz", hash = "sha256:7f3240dcfa14d198dba24b8b9cb3b108c06b68d45b7babd9eefc1038fdf7e707"}, ] [[package]] @@ -1607,14 +1746,14 @@ files = [ [[package]] name = "ruamel-yaml" -version = "0.18.6" +version = "0.18.10" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, - {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, + {file = 
"ruamel.yaml-0.18.10-py3-none-any.whl", hash = "sha256:30f22513ab2301b3d2b577adc121c6471f28734d3d9728581245f1e76468b4f1"}, + {file = "ruamel.yaml-0.18.10.tar.gz", hash = "sha256:20c86ab29ac2153f80a428e1254a8adf686d3383df04490514ca3b79a362db58"}, ] [package.dependencies] @@ -1683,30 +1822,30 @@ files = [ [[package]] name = "ruff" -version = "0.9.6" +version = "0.9.10" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" groups = ["lint-and-format"] files = [ - {file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"}, - {file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"}, - {file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"}, - {file = 
"ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"}, - {file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"}, - {file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"}, - {file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"}, - {file = "ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"}, + {file = "ruff-0.9.10-py3-none-linux_armv6l.whl", hash = "sha256:eb4d25532cfd9fe461acc83498361ec2e2252795b4f40b17e80692814329e42d"}, + {file = "ruff-0.9.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:188a6638dab1aa9bb6228a7302387b2c9954e455fb25d6b4470cb0641d16759d"}, + {file = "ruff-0.9.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5284dcac6b9dbc2fcb71fdfc26a217b2ca4ede6ccd57476f52a587451ebe450d"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47678f39fa2a3da62724851107f438c8229a3470f533894b5568a39b40029c0c"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99713a6e2766b7a17147b309e8c915b32b07a25c9efd12ada79f217c9c778b3e"}, + {file = 
"ruff-0.9.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524ee184d92f7c7304aa568e2db20f50c32d1d0caa235d8ddf10497566ea1a12"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:df92aeac30af821f9acf819fc01b4afc3dfb829d2782884f8739fb52a8119a16"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de42e4edc296f520bb84954eb992a07a0ec5a02fecb834498415908469854a52"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d257f95b65806104b6b1ffca0ea53f4ef98454036df65b1eda3693534813ecd1"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60dec7201c0b10d6d11be00e8f2dbb6f40ef1828ee75ed739923799513db24c"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d838b60007da7a39c046fcdd317293d10b845001f38bcb55ba766c3875b01e43"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ccaf903108b899beb8e09a63ffae5869057ab649c1e9231c05ae354ebc62066c"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f9567d135265d46e59d62dc60c0bfad10e9a6822e231f5b24032dba5a55be6b5"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5f202f0d93738c28a89f8ed9eaba01b7be339e5d8d642c994347eaa81c6d75b8"}, + {file = "ruff-0.9.10-py3-none-win32.whl", hash = "sha256:bfb834e87c916521ce46b1788fbb8484966e5113c02df216680102e9eb960029"}, + {file = "ruff-0.9.10-py3-none-win_amd64.whl", hash = "sha256:f2160eeef3031bf4b17df74e307d4c5fb689a6f3a26a2de3f7ef4044e3c484f1"}, + {file = "ruff-0.9.10-py3-none-win_arm64.whl", hash = "sha256:5fd804c0327a5e5ea26615550e706942f348b197d5475ff34c19733aee4b2e69"}, + {file = "ruff-0.9.10.tar.gz", hash = "sha256:9bacb735d7bada9cfb0f2c227d3658fc443d90a727b47f206fb33f52f3c0eac7"}, ] [[package]] @@ -1735,34 +1874,76 @@ files = [ [[package]] name = "starlette" -version = "0.41.3" +version = "0.46.1" 
description = "The little ASGI library that shines." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7"}, - {file = "starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835"}, + {file = "starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227"}, + {file = "starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230"}, ] [package.dependencies] -anyio = ">=3.4.0,<5" +anyio = ">=3.6.2,<5" [package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] [[package]] name = "tomli" -version = "2.0.2" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["main", "dev", "lint-and-format", "type-checking"] files = [ - {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, - {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", 
hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] +markers = {main = "python_version < \"3.11\"", dev = "python_version < \"3.11\"", type-checking = "python_version < \"3.11\""} + +[[package]] +name = "tomli-w" +version = "1.2.0" +description = "A lil' TOML writer" +optional = false +python-versions = ">=3.9" +groups = ["dev", "type-checking"] +files = [ + {file = "tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90"}, + {file = "tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021"}, ] -markers = {main = "python_version < \"3.11\"", dev = "python_full_version <= \"3.11.0a6\"", type-checking = "python_version < \"3.11\""} [[package]] name = "tomlkit" @@ -1790,14 +1971,14 @@ files = [ [[package]] name = "types-requests" -version = "2.32.0.20241016" +version = "2.32.0.20250306" description = "Typing stubs for requests" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["type-checking"] files = [ - {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, - {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, + {file = "types_requests-2.32.0.20250306-py3-none-any.whl", hash = "sha256:25f2cbb5c8710b2022f8bbee7b2b66f319ef14aeea2f35d80f18c9dbf3b60a0b"}, + {file = "types_requests-2.32.0.20250306.tar.gz", hash = "sha256:0962352694ec5b2f95fda877ee60a159abdf84a0fc6fdace599f20acb41a03d1"}, ] [package.dependencies] @@ -1815,34 +1996,46 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = 
"sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "tzdata" +version = "2025.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main"] +files = [ + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, +] + [[package]] name = "urllib3" -version = "2.2.3" +version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "type-checking"] files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.32.1" +version = "0.34.0" description = "The lightning-fast ASGI server." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e"}, - {file = "uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175"}, + {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, + {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, ] [package.dependencies] @@ -1851,7 +2044,7 @@ h11 = ">=0.8" typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "werkzeug" @@ -1874,4 +2067,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.1" python-versions = ">=3.10, <4" -content-hash = "07b84eae8d5a6441f603a59e17970264a4351213cd2810a63100a61c30fa0ebd" +content-hash = "0c8743f6a32b246adb4dc963ac43b3e12ec34089ece6b1f384b731ef25d33298" diff --git a/pyproject.toml b/pyproject.toml index 129aeec..b58bef3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,12 +42,14 @@ dev = [ "uvicorn >= 0.27.0", "fastapi >= 0.109.0", "coverage[toml] >= 7.2.0", + "robotcode-runner >= 1.0.3", ] type-checking = [ "mypy >= 1.14.1", "types-requests >= 2.31.0", "types-invoke >= 2.0.0.0", "pyright >= 1.1.350", + "robotcode-analyze >= 1.0.3", ] lint-and-format = [ "ruff >= 0.9.0", @@ 
-74,6 +76,7 @@ robotframework-stacktrace = ">=0.4.0" uvicorn = ">=0.27.0" fastapi = ">=0.109.0" coverage = {version = ">=7.2.0", extras = ["toml"]} +robotcode-runner = ">=1.0.3" [tool.poetry.group.lint-and-format.dependencies] ruff = ">=0.9.0" @@ -86,6 +89,7 @@ mypy = ">=1.14.1" pyright = ">=1.1.350" types-requests = ">=2.31.0" types-invoke = ">=2.0.0.0" +robotcode-analyze = ">=1.0.3" [build-system] requires = ["poetry-core>=1.0.0"] @@ -146,13 +150,26 @@ known-third-party = [] "__init__.py" = ["F401", "PLC0414"] [tool.pylint.'MESSAGES CONTROL'] -disable = ["logging-fstring-interpolation", "missing-class-docstring"] +disable = [ + "logging-fstring-interpolation", + "missing-class-docstring", + "missing-function-docstring", + "too-few-public-methods", + "too-many-arguments", + "too-many-branches", + "too-many-instance-attributes", + "too-many-locals", + "too-many-positional-arguments", + "too-many-return-statements", + "too-many-statements", +] [tool.pylint.'FORMAT CHECKER'] -max-line-length=120 +max-line-length=140 [tool.pylint.'SIMILARITIES CHECKER'] ignore-imports="yes" +min-similarity-lines=10 [tool.robotidy] line_length = 120 @@ -162,11 +179,12 @@ spacecount = 4 filetypes = [".robot", ".resource"] configure = [ "line-too-long:line_length:120", - "too-many-calls-in-test-case:max_calls:15" + "too-long-test-case:max_len:30", + "too-many-calls-in-test-case:max_calls:25", ] exclude = [ "missing-doc-suite", "missing-doc-test-case", "missing-doc-keyword", - "too-few-calls-in-test-case" + "too-few-calls-in-test-case", ] \ No newline at end of file diff --git a/src/OpenApiDriver/__init__.py b/src/OpenApiDriver/__init__.py index 2efd381..1e9212a 100644 --- a/src/OpenApiDriver/__init__.py +++ b/src/OpenApiDriver/__init__.py @@ -1,3 +1,4 @@ +# pylint: disable=invalid-name """ The OpenApiDriver package is intended to be used as a Robot Framework library. 
The following classes and constants are exposed to be used by the library user: @@ -21,21 +22,23 @@ ResourceRelation, UniquePropertyValueConstraint, ) +from OpenApiLibCore.validation import ValidationLevel from OpenApiLibCore.value_utils import IGNORE try: __version__ = version("robotframework-openapidriver") -except Exception: # pragma: no cover +except Exception: # pragma: no cover pylint: disable=broad-exception-caught pass __all__ = [ + "IGNORE", "Dto", "IdDependency", "IdReference", + "OpenApiDriver", "PathPropertiesConstraint", "PropertyValueConstraint", "ResourceRelation", "UniquePropertyValueConstraint", - "IGNORE", - "OpenApiDriver", + "ValidationLevel", ] diff --git a/src/OpenApiDriver/openapi_executors.py b/src/OpenApiDriver/openapi_executors.py index 549a9d3..8d54290 100644 --- a/src/OpenApiDriver/openapi_executors.py +++ b/src/OpenApiDriver/openapi_executors.py @@ -1,30 +1,31 @@ """Module containing the classes to perform automatic OpenAPI contract validation.""" -from logging import getLogger +from collections.abc import Mapping, MutableMapping +from http import HTTPStatus from pathlib import Path from random import choice -from typing import Any +from types import MappingProxyType from requests import Response from requests.auth import AuthBase from requests.cookies import RequestsCookieJar as CookieJar -from robot.api import SkipExecution +from robot.api import logger from robot.api.deco import keyword, library +from robot.api.exceptions import SkipExecution from robot.libraries.BuiltIn import BuiltIn from OpenApiLibCore import OpenApiLibCore, RequestData, RequestValues, ValidationLevel +from OpenApiLibCore.annotations import JSON run_keyword = BuiltIn().run_keyword - - -logger = getLogger(__name__) +default_str_mapping: Mapping[str, str] = MappingProxyType({}) @library(scope="SUITE", doc_format="ROBOT") -class OpenApiExecutors(OpenApiLibCore): # pylint: disable=too-many-instance-attributes +class OpenApiExecutors(OpenApiLibCore): """Main class 
providing the keywords and core logic to perform endpoint validations.""" - def __init__( # pylint: disable=too-many-arguments + def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value self, source: str, origin: str = "", @@ -34,19 +35,19 @@ def __init__( # pylint: disable=too-many-arguments mappings_path: str | Path = "", invalid_property_default_response: int = 422, default_id_property_name: str = "id", - faker_locale: str | list[str] | None = None, # FIXME: default empty string? + faker_locale: str | list[str] = "", require_body_for_invalid_url: bool = False, recursion_limit: int = 1, - recursion_default: Any = {}, + recursion_default: JSON = {}, username: str = "", password: str = "", security_token: str = "", auth: AuthBase | None = None, - cert: str | tuple[str, str] | None = None, # FIXME: default empty string? + cert: str | tuple[str, str] = "", verify_tls: bool | str = True, - extra_headers: dict[str, str] | None = None, # FIXME: default empty dict? - cookies: dict[str, str] | CookieJar | None = None, # FIXME: default empty dict? - proxies: dict[str, str] | None = None, # FIXME: default empty dict? + extra_headers: Mapping[str, str] = default_str_mapping, + cookies: MutableMapping[str, str] | CookieJar | None = None, + proxies: MutableMapping[str, str] | None = None, ) -> None: super().__init__( source=source, @@ -84,13 +85,13 @@ def test_unauthorized(self, path: str, method: str) -> None: > Note: No headers or (json) body are send with the request. For security reasons, the authorization validation should be checked first. 
""" - url: str = run_keyword("get_valid_url", path, method) + url: str = run_keyword("get_valid_url", path) response = self.session.request( method=method, url=url, verify=False, ) - if response.status_code != 401: + if response.status_code != int(HTTPStatus.UNAUTHORIZED): raise AssertionError(f"Response {response.status_code} was not 401.") @keyword @@ -105,9 +106,9 @@ def test_forbidden(self, path: str, method: str) -> None: > Note: No headers or (json) body are send with the request. For security reasons, the access rights validation should be checked first. """ - url: str = run_keyword("get_valid_url", path, method) + url: str = run_keyword("get_valid_url", path) response: Response = run_keyword("authorized_request", url, method) - if response.status_code != 403: + if response.status_code != int(HTTPStatus.FORBIDDEN): raise AssertionError(f"Response {response.status_code} was not 403.") @keyword @@ -130,11 +131,11 @@ def test_invalid_url( parameters are send with the request. The `require_body_for_invalid_url` parameter can be set to `True` if needed. """ - valid_url: str = run_keyword("get_valid_url", path, method) + valid_url: str = run_keyword("get_valid_url", path) if not ( url := run_keyword( - "get_invalidated_url", valid_url, path, method, expected_status_code + "get_invalidated_url", valid_url, path, expected_status_code ) ): raise SkipExecution( @@ -144,7 +145,7 @@ def test_invalid_url( params, headers, json_data = None, None, None if self.require_body_for_invalid_url: - request_data = self.get_request_data(method=method, endpoint=path) + request_data: RequestData = run_keyword("get_request_data", path, method) params = request_data.params headers = request_data.headers dto = request_data.dto @@ -169,11 +170,11 @@ def test_endpoint(self, path: str, method: str, status_code: int) -> None: The keyword calls other keywords to generate the neccesary data to perform the desired operation and validate the response against the openapi document. 
""" - json_data: dict[str, Any] | None = None + json_data: dict[str, JSON] = {} original_data = {} - url: str = run_keyword("get_valid_url", path, method) - request_data: RequestData = self.get_request_data(method=method, endpoint=path) + url: str = run_keyword("get_valid_url", path) + request_data: RequestData = run_keyword("get_request_data", path, method) params = request_data.params headers = request_data.headers if request_data.has_body: @@ -182,7 +183,7 @@ def test_endpoint(self, path: str, method: str, status_code: int) -> None: if method == "PATCH": original_data = self.get_original_data(url=url) # in case of a status code indicating an error, ensure the error occurs - if status_code >= 400: + if status_code >= int(HTTPStatus.BAD_REQUEST): invalidation_keyword_data = { "get_invalid_json_data": [ "get_invalid_json_data", @@ -253,18 +254,18 @@ def test_endpoint(self, path: str, method: str, status_code: int) -> None: ), original_data, ) - if status_code < 300 and ( + if status_code < int(HTTPStatus.MULTIPLE_CHOICES) and ( request_data.has_optional_properties or request_data.has_optional_params or request_data.has_optional_headers ): logger.info("Performing request without optional properties and parameters") - url = run_keyword("get_valid_url", path, method) - request_data = self.get_request_data(method=method, endpoint=path) + url = run_keyword("get_valid_url", path) + request_data = run_keyword("get_request_data", path, method) params = request_data.get_required_params() headers = request_data.get_required_headers() json_data = ( - request_data.get_minimal_body_dict() if request_data.has_body else None + request_data.get_minimal_body_dict() if request_data.has_body else {} ) original_data = {} if method == "PATCH": @@ -283,15 +284,15 @@ def test_endpoint(self, path: str, method: str, status_code: int) -> None: original_data, ) - def get_original_data(self, url: str) -> dict[str, Any]: + def get_original_data(self, url: str) -> dict[str, JSON]: """ Attempt 
to GET the current data for the given url and return it. If the GET request fails, an empty dict is returned. """ original_data = {} - path = self.get_parameterized_endpoint_from_url(url) - get_request_data = self.get_request_data(endpoint=path, method="GET") + path = self.get_parameterized_path_from_url(url) + get_request_data: RequestData = run_keyword("get_request_data", path, "GET") get_params = get_request_data.params get_headers = get_request_data.headers response: Response = run_keyword( diff --git a/src/OpenApiDriver/openapi_reader.py b/src/OpenApiDriver/openapi_reader.py index e68fea7..0b21753 100644 --- a/src/OpenApiDriver/openapi_reader.py +++ b/src/OpenApiDriver/openapi_reader.py @@ -6,7 +6,6 @@ from DataDriver.ReaderConfig import TestCaseData -# pylint: disable=too-few-public-methods class Test: """ Helper class to support ignoring endpoint responses when generating the test cases. diff --git a/src/OpenApiDriver/openapidriver.libspec b/src/OpenApiDriver/openapidriver.libspec index be0e832..c46caa0 100644 --- a/src/OpenApiDriver/openapidriver.libspec +++ b/src/OpenApiDriver/openapidriver.libspec @@ -1,12 +1,12 @@ - -0.4.0 + +1.0.0 <p>Visit the <a href="https://github.com/MarketSquare/robotframework-openapidriver">library page</a> for an introduction and examples.</p> - - + + source @@ -21,49 +21,37 @@ - + included_paths - - - -None +frozenset() - + ignored_paths - - - -None +frozenset() - + ignored_responses - - - -None +frozenset() - + ignored_testcases - - + - - -None +frozenset() response_validation @@ -93,16 +81,15 @@ id - + faker_locale - + - -None + require_body_for_invalid_url @@ -114,9 +101,23 @@ 1 - + recursion_default - + + + + + + + + + + + + + + + {} @@ -142,42 +143,37 @@ None - + cert - + - -None + - + verify_tls - True - + extra_headers - - + - - -None +{} - + cookies - + @@ -186,10 +182,10 @@ None - + proxies - + @@ -213,7 +209,7 @@ <h4>ignored_responses</h4> <p>A list of responses that will be ignored when generating the test cases.</p> 
<h4>ignored_testcases</h4> -<p>A list of specific test cases that, if it would be generated, will be ignored. Specific test cases to ignore must be specified as a <code>Tuple</code> or <code>List</code> of <code>path</code>, <code>method</code> and <code>response</code>.</p> +<p>A list of specific test cases that, if it would be generated, will be ignored. Specific test cases to ignore must be specified as a <code>tuple</code> or <code>list</code> of <code>path</code>, <code>method</code> and <code>response</code>.</p> <h4>response_validation</h4> <p>By default, a <code>WARN</code> is logged when the Response received after a Request does not comply with the schema as defined in the openapi document for the given operation. The following values are supported:</p> <ul> @@ -265,7 +261,7 @@ - + path @@ -285,7 +281,7 @@ <p>The keyword calls other keywords to generate the neccesary data to perform the desired operation and validate the response against the openapi document.</p> Validate that performing the `method` operation on `path` results in a `status_code` response. - + path @@ -301,7 +297,7 @@ <p>For this keyword to pass, the authorization parameters used to initialize the library should grant insufficient access rights to the target endpoint. &gt; Note: No headers or (json) body are send with the request. For security reasons, the access rights validation should be checked first.</p> Perform a request for `method` on the `path`, with the provided authorization. - + path @@ -323,7 +319,7 @@ <p>&gt; Note: Depending on API design, the url may be validated before or after validation of headers, query parameters and / or (json) body. By default, no parameters are send with the request. The <span class="name">require_body_for_invalid_url</span> parameter can be set to <span class="name">True</span> if needed.</p> Perform a request for the provided 'path' and 'method' where the url for the `path` is invalidated. - + path @@ -341,15 +337,6 @@ - -<p>Any value is accepted. 
No conversion is done.</p> - -Any - - -__init__ - - <p>Strings <code>TRUE</code>, <code>YES</code>, <code>ON</code> and <code>1</code> are converted to Boolean <code>True</code>, the empty string as well as strings <code>FALSE</code>, <code>NO</code>, <code>OFF</code> and <code>0</code> are converted to Boolean <code>False</code>, and the string <code>NONE</code> is converted to the Python <code>None</code> object. Other strings and other accepted values are passed as-is, allowing keywords to handle them specially if needed. All string comparisons are case-insensitive.</p> <p>Examples: <code>TRUE</code> (converted to <code>True</code>), <code>off</code> (converted to <code>False</code>), <code>example</code> (used as-is)</p> @@ -363,6 +350,17 @@ __init__ + +<p>Strings are converted to bytes so that each Unicode code point below 256 is directly mapped to a matching byte. Higher code points are not allowed. Robot Framework's <code>\xHH</code> escape syntax is convenient with bytes having non-printable values.</p> +<p>Examples: <code>good</code>, <code>hyvรค</code> (same as <code>hyv\xE4</code>), <code>\x00</code> (the null byte)</p> + +string +bytearray + + +__init__ + + <p>Strings must be Python <a href="https://docs.python.org/library/stdtypes.html#dict">dictionary</a> literals. They are converted to actual dictionaries using the <a href="https://docs.python.org/library/ast.html#ast.literal_eval">ast.literal_eval</a> function. They can contain any values <code>ast.literal_eval</code> supports, including dictionaries and other containers.</p> <p>If the type has nested types like <code>dict[str, int]</code>, items are converted to those types automatically. 
This in new in Robot Framework 6.0.</p> @@ -375,6 +373,18 @@ __init__ + +<p>Conversion is done using Python's <a href="https://docs.python.org/library/functions.html#float">float</a> built-in function.</p> +<p>Starting from RF 4.1, spaces and underscores can be used as visual separators for digit grouping purposes.</p> +<p>Examples: <code>3.14</code>, <code>2.9979e8</code>, <code>10 000.000 01</code></p> + +string +Real + + +__init__ + + <p>Conversion is done using Python's <a href="https://docs.python.org/library/functions.html#int">int</a> built-in function. Floating point numbers are accepted only if they can be represented as integers exactly. For example, <code>1.0</code> is accepted and <code>1.1</code> is not.</p> <p>Starting from RF 4.1, it is possible to use hexadecimal, octal and binary numbers by prefixing values with <code>0x</code>, <code>0o</code> and <code>0b</code>, respectively.</p> diff --git a/src/OpenApiDriver/openapidriver.py b/src/OpenApiDriver/openapidriver.py index 4ca1356..c202085 100644 --- a/src/OpenApiDriver/openapidriver.py +++ b/src/OpenApiDriver/openapidriver.py @@ -122,16 +122,22 @@ """ +from collections.abc import Mapping, MutableMapping from pathlib import Path -from typing import Any, Iterable +from types import MappingProxyType +from typing import Iterable -from DataDriver import DataDriver +from DataDriver.DataDriver import DataDriver from requests.auth import AuthBase from requests.cookies import RequestsCookieJar as CookieJar from robot.api.deco import library -from OpenApiDriver.openapi_executors import OpenApiExecutors, ValidationLevel +from OpenApiDriver.openapi_executors import OpenApiExecutors from OpenApiDriver.openapi_reader import OpenApiReader +from OpenApiLibCore import ValidationLevel +from OpenApiLibCore.annotations import JSON + +default_str_mapping: Mapping[str, str] = MappingProxyType({}) @library(scope="SUITE", doc_format="ROBOT") @@ -141,35 +147,33 @@ class OpenApiDriver(OpenApiExecutors, DataDriver): for an 
introduction and examples. """ - def __init__( # pylint: disable=too-many-arguments, too-many-locals, dangerous-default-value + def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value self, source: str, origin: str = "", base_path: str = "", - included_paths: Iterable[str] | None = None, # FIXME: default set? - ignored_paths: Iterable[str] | None = None, # FIXME: default set? - ignored_responses: Iterable[int] | None = None, # FIXME: default set? - ignored_testcases: ( - Iterable[tuple[str, str, int]] | None - ) = None, # FIXME: default set? + included_paths: Iterable[str] = frozenset(), + ignored_paths: Iterable[str] = frozenset(), + ignored_responses: Iterable[int] = frozenset(), + ignored_testcases: Iterable[tuple[str, str, int]] = frozenset(), response_validation: ValidationLevel = ValidationLevel.WARN, disable_server_validation: bool = True, mappings_path: str | Path = "", invalid_property_default_response: int = 422, default_id_property_name: str = "id", - faker_locale: str | list[str] | None = None, # FIXME: default empty string? + faker_locale: str | list[str] = "", require_body_for_invalid_url: bool = False, recursion_limit: int = 1, - recursion_default: Any = {}, + recursion_default: JSON = {}, username: str = "", password: str = "", security_token: str = "", auth: AuthBase | None = None, - cert: str | tuple[str, str] | None = None, # FIXME: default empty string? + cert: str | tuple[str, str] = "", verify_tls: bool | str = True, - extra_headers: dict[str, str] | None = None, # FIXME: default empty dict? - cookies: dict[str, str] | CookieJar | None = None, # FIXME: default empty dict? - proxies: dict[str, str] | None = None, # FIXME: default empty dict? 
+ extra_headers: Mapping[str, str] = default_str_mapping, + cookies: MutableMapping[str, str] | CookieJar | None = None, + proxies: MutableMapping[str, str] | None = None, ): """ == Base parameters == diff --git a/src/OpenApiLibCore/__init__.py b/src/OpenApiLibCore/__init__.py index 94bf7f6..6e188ec 100644 --- a/src/OpenApiLibCore/__init__.py +++ b/src/OpenApiLibCore/__init__.py @@ -1,3 +1,4 @@ +# pylint: disable=invalid-name """ The OpenApiLibCore package is intended to be used as a dependency for other Robot Framework libraries that facilitate the testing of OpenAPI / Swagger APIs. @@ -25,30 +26,30 @@ from OpenApiLibCore.dto_utils import DefaultDto from OpenApiLibCore.openapi_libcore import ( OpenApiLibCore, - RequestData, - RequestValues, - ValidationLevel, ) +from OpenApiLibCore.request_data import RequestData, RequestValues +from OpenApiLibCore.validation import ValidationLevel from OpenApiLibCore.value_utils import IGNORE try: __version__ = version("robotframework-openapi-libcore") -except Exception: # pragma: no cover +except Exception: # pragma: no cover pylint: disable=broad-exception-caught pass + __all__ = [ + "IGNORE", + "DefaultDto", "Dto", "IdDependency", "IdReference", + "OpenApiLibCore", "PathPropertiesConstraint", "PropertyValueConstraint", - "ResourceRelation", - "UniquePropertyValueConstraint", - "DefaultDto", - "OpenApiLibCore", "RequestData", "RequestValues", + "ResourceRelation", + "UniquePropertyValueConstraint", "ValidationLevel", "resolve_schema", - "IGNORE", ] diff --git a/src/OpenApiLibCore/annotations.py b/src/OpenApiLibCore/annotations.py new file mode 100644 index 0000000..170c17f --- /dev/null +++ b/src/OpenApiLibCore/annotations.py @@ -0,0 +1,3 @@ +"""Module holding reusable compound annotations.""" + +JSON = dict[str, "JSON"] | list["JSON"] | str | bytes | int | float | bool | None diff --git a/src/OpenApiLibCore/data_generation/__init__.py b/src/OpenApiLibCore/data_generation/__init__.py new file mode 100644 index 0000000..ea66e00 
--- /dev/null
+++ b/src/OpenApiLibCore/data_generation/__init__.py
@@ -0,0 +1,12 @@
+"""
+Module holding the functions related to data generation
+for the requests made as part of keyword execution.
+"""
+
+from .body_data_generation import get_json_data_for_dto_class
+from .data_generation_core import get_request_data
+
+__all__ = [
+    "get_json_data_for_dto_class",
+    "get_request_data",
+]
diff --git a/src/OpenApiLibCore/data_generation/body_data_generation.py b/src/OpenApiLibCore/data_generation/body_data_generation.py
new file mode 100644
index 0000000..74ed586
--- /dev/null
+++ b/src/OpenApiLibCore/data_generation/body_data_generation.py
@@ -0,0 +1,266 @@
+"""
+Module holding the functions related to (json) data generation
+for the body of requests made as part of keyword execution.
+"""
+
+from random import choice, randint, sample
+from typing import Any
+
+from robot.api import logger
+
+import OpenApiLibCore.path_functions as pf
+from OpenApiLibCore.annotations import JSON
+from OpenApiLibCore.dto_base import (
+    Dto,
+    IdDependency,
+    PropertyValueConstraint,
+)
+from OpenApiLibCore.dto_utils import DefaultDto
+from OpenApiLibCore.protocols import GetIdPropertyNameType
+from OpenApiLibCore.value_utils import IGNORE, get_valid_value
+
+
+def get_json_data_for_dto_class(
+    schema: dict[str, Any],
+    dto_class: type[Dto],
+    get_id_property_name: GetIdPropertyNameType,
+    operation_id: str = "",
+) -> JSON:
+    match schema.get("type"):
+        case "object":
+            return get_dict_data_for_dto_class(
+                schema=schema,
+                dto_class=dto_class,
+                get_id_property_name=get_id_property_name,
+                operation_id=operation_id,
+            )
+        case "array":
+            return get_list_data_for_dto_class(
+                schema=schema,
+                dto_class=dto_class,
+                get_id_property_name=get_id_property_name,
+                operation_id=operation_id,
+            )
+        case _:
+            return get_valid_value(value_schema=schema)
+
+
+def get_dict_data_for_dto_class(
+    schema: dict[str, Any],
+    dto_class: type[Dto],
+    get_id_property_name: GetIdPropertyNameType,
+    
operation_id: str = "", +) -> dict[str, Any]: + json_data: dict[str, Any] = {} + + property_names = get_property_names_to_process(schema=schema, dto_class=dto_class) + + for property_name in property_names: + property_schema = schema["properties"][property_name] + if property_schema.get("readOnly", False): + continue + + json_data[property_name] = get_data_for_property( + property_name=property_name, + property_schema=property_schema, + get_id_property_name=get_id_property_name, + dto_class=dto_class, + operation_id=operation_id, + ) + + return json_data + + +def get_list_data_for_dto_class( + schema: dict[str, Any], + dto_class: type[Dto], + get_id_property_name: GetIdPropertyNameType, + operation_id: str = "", +) -> list[Any]: + json_data: list[Any] = [] + list_item_schema = schema.get("items", {}) + min_items = schema.get("minItems", 0) + max_items = schema.get("maxItems", 1) + number_of_items_to_generate = randint(min_items, max_items) + for _ in range(number_of_items_to_generate): + list_item_data = get_json_data_for_dto_class( + schema=list_item_schema, + dto_class=dto_class, + get_id_property_name=get_id_property_name, + operation_id=operation_id, + ) + json_data.append(list_item_data) + return json_data + + +def get_data_for_property( + property_name: str, + property_schema: dict[str, Any], + get_id_property_name: GetIdPropertyNameType, + dto_class: type[Dto], + operation_id: str, +) -> JSON: + property_type = property_schema.get("type") + if property_type is None: + property_types = property_schema.get("types") + if property_types is None: + if property_schema.get("properties") is None: + raise NotImplementedError + + nested_data = get_json_data_for_dto_class( + schema=property_schema, + dto_class=DefaultDto, + get_id_property_name=get_id_property_name, + ) + return nested_data + + selected_type_schema = choice(property_types) + property_type = selected_type_schema["type"] + property_schema = selected_type_schema + + if constrained_values := 
get_constrained_values( + dto_class=dto_class, property_name=property_name + ): + constrained_value = choice(constrained_values) + # Check if the chosen value is a nested Dto; since a Dto is never + # instantiated, we can use isinstance(..., type) for this. + if isinstance(constrained_value, type): + return get_value_constrained_by_nested_dto( + property_schema=property_schema, + nested_dto_class=constrained_value, + get_id_property_name=get_id_property_name, + operation_id=operation_id, + ) + return constrained_value + + if ( + dependent_id := get_dependent_id( + dto_class=dto_class, + property_name=property_name, + operation_id=operation_id, + get_id_property_name=get_id_property_name, + ) + ) is not None: + return dependent_id + + if property_type == "object": + object_data = get_json_data_for_dto_class( + schema=property_schema, + dto_class=DefaultDto, + get_id_property_name=get_id_property_name, + operation_id="", + ) + return object_data + + if property_type == "array": + array_data = get_json_data_for_dto_class( + schema=property_schema["items"], + dto_class=DefaultDto, + get_id_property_name=get_id_property_name, + operation_id=operation_id, + ) + return [array_data] + + return get_valid_value(property_schema) + + +def get_value_constrained_by_nested_dto( + property_schema: dict[str, Any], + nested_dto_class: type[Dto], + get_id_property_name: GetIdPropertyNameType, + operation_id: str, +) -> JSON: + nested_schema = get_schema_for_nested_dto(property_schema=property_schema) + nested_value = get_json_data_for_dto_class( + schema=nested_schema, + dto_class=nested_dto_class, + get_id_property_name=get_id_property_name, + operation_id=operation_id, + ) + return nested_value + + +def get_schema_for_nested_dto(property_schema: dict[str, Any]) -> dict[str, Any]: + if property_schema.get("type"): + return property_schema + + if possible_types := property_schema.get("types"): + return choice(possible_types) + + raise NotImplementedError + + +def 
get_property_names_to_process( + schema: dict[str, Any], + dto_class: type[Dto], +) -> list[str]: + property_names = [] + + for property_name in schema.get("properties", []): + if constrained_values := get_constrained_values( + dto_class=dto_class, property_name=property_name + ): + # do not add properties that are configured to be ignored + if IGNORE in constrained_values: # type: ignore[comparison-overlap] + continue + property_names.append(property_name) + + max_properties = schema.get("maxProperties") + if max_properties and len(property_names) > max_properties: + required_properties = schema.get("required", []) + number_of_optional_properties = max_properties - len(required_properties) + optional_properties = [ + name for name in property_names if name not in required_properties + ] + selected_optional_properties = sample( + optional_properties, number_of_optional_properties + ) + property_names = required_properties + selected_optional_properties + + return property_names + + +def get_constrained_values( + dto_class: type[Dto], property_name: str +) -> list[JSON | type[Dto]]: + relations = dto_class.get_relations() + values_list = [ + c.values + for c in relations + if (isinstance(c, PropertyValueConstraint) and c.property_name == property_name) + ] + # values should be empty or contain 1 list of allowed values + return values_list.pop() if values_list else [] + + +def get_dependent_id( + dto_class: type[Dto], + property_name: str, + operation_id: str, + get_id_property_name: GetIdPropertyNameType, +) -> str | int | float | None: + relations = dto_class.get_relations() + # multiple get paths are possible based on the operation being performed + id_get_paths = [ + (d.get_path, d.operation_id) + for d in relations + if (isinstance(d, IdDependency) and d.property_name == property_name) + ] + if not id_get_paths: + return None + if len(id_get_paths) == 1: + id_get_path, _ = id_get_paths.pop() + else: + try: + [id_get_path] = [ + path for path, operation in 
id_get_paths if operation == operation_id
+            ]
+        # There could be multiple get_paths, but not one for the current operation
+        except ValueError:
+            return None
+
+    valid_id = pf.get_valid_id_for_path(
+        path=id_get_path, get_id_property_name=get_id_property_name
+    )
+    logger.debug(f"get_dependent_id for {id_get_path} returned {valid_id}")
+    return valid_id
diff --git a/src/OpenApiLibCore/data_generation/data_generation_core.py b/src/OpenApiLibCore/data_generation/data_generation_core.py
new file mode 100644
index 0000000..ba8d9b9
--- /dev/null
+++ b/src/OpenApiLibCore/data_generation/data_generation_core.py
@@ -0,0 +1,237 @@
+"""
+Module holding the main functions related to data generation
+for the requests made as part of keyword execution.
+"""
+
+import re
+from dataclasses import Field, field, make_dataclass
+from random import choice
+from typing import Any, cast
+
+from robot.api import logger
+
+import OpenApiLibCore.path_functions as pf
+from OpenApiLibCore.annotations import JSON
+from OpenApiLibCore.dto_base import (
+    Dto,
+    PropertyValueConstraint,
+    ResourceRelation,
+    resolve_schema,
+)
+from OpenApiLibCore.dto_utils import DefaultDto
+from OpenApiLibCore.protocols import GetDtoClassType, GetIdPropertyNameType
+from OpenApiLibCore.request_data import RequestData
+from OpenApiLibCore.value_utils import IGNORE, get_valid_value
+
+from .body_data_generation import (
+    get_json_data_for_dto_class as _get_json_data_for_dto_class,
+)
+
+
+def get_request_data(
+    path: str,
+    method: str,
+    get_dto_class: GetDtoClassType,
+    get_id_property_name: GetIdPropertyNameType,
+    openapi_spec: dict[str, Any],
+) -> RequestData:
+    method = method.lower()
+    dto_cls_name = get_dto_cls_name(path=path, method=method)
+    # The path can contain already resolved Ids that have to be matched
+    # against the parametrized paths in the paths section. 
+ spec_path = pf.get_parametrized_path(path=path, openapi_spec=openapi_spec) + dto_class = get_dto_class(path=spec_path, method=method) + try: + method_spec = openapi_spec["paths"][spec_path][method] + except KeyError: + logger.info( + f"method '{method}' not supported on '{spec_path}, using empty spec." + ) + method_spec = {} + + parameters, params, headers = get_request_parameters( + dto_class=dto_class, method_spec=method_spec + ) + if (body_spec := method_spec.get("requestBody", None)) is None: + dto_instance = _get_dto_instance_for_empty_body( + dto_class=dto_class, + dto_cls_name=dto_cls_name, + method_spec=method_spec, + ) + return RequestData( + dto=dto_instance, + parameters=parameters, + params=params, + headers=headers, + has_body=False, + ) + + headers.update({"content-type": get_content_type(body_spec)}) + + content_schema = resolve_schema(get_content_schema(body_spec)) + dto_data = _get_json_data_for_dto_class( + schema=content_schema, + dto_class=dto_class, + get_id_property_name=get_id_property_name, + operation_id=method_spec.get("operationId", ""), + ) + dto_instance = _get_dto_instance_from_dto_data( + content_schema=content_schema, + dto_class=dto_class, + dto_data=dto_data, + method_spec=method_spec, + dto_cls_name=dto_cls_name, + ) + return RequestData( + dto=dto_instance, + dto_schema=content_schema, + parameters=parameters, + params=params, + headers=headers, + ) + + +def _get_dto_instance_for_empty_body( + dto_class: type[Dto], + dto_cls_name: str, + method_spec: dict[str, Any], +) -> Dto: + if dto_class == DefaultDto: + dto_instance: Dto = DefaultDto() + else: + dto_class = make_dataclass( + cls_name=method_spec.get("operationId", dto_cls_name), + fields=[], + bases=(dto_class,), + ) + dto_instance = dto_class() + return dto_instance + + +def _get_dto_instance_from_dto_data( + content_schema: dict[str, Any], + dto_class: type[Dto], + dto_data: JSON, + method_spec: dict[str, Any], + dto_cls_name: str, +) -> Dto: + if not 
isinstance(dto_data, (dict, list)): + return DefaultDto() + + if isinstance(dto_data, list): + raise NotImplementedError + + fields = get_fields_from_dto_data(content_schema, dto_data) + dto_class_ = make_dataclass( + cls_name=method_spec.get("operationId", dto_cls_name), + fields=fields, + bases=(dto_class,), + ) + dto_data = {get_safe_key(key): value for key, value in dto_data.items()} + return cast(Dto, dto_class_(**dto_data)) + + +def get_fields_from_dto_data( + content_schema: dict[str, Any], dto_data: dict[str, JSON] +) -> list[tuple[str, type[Any], Field[Any]]]: + """Get a dataclasses fields list based on the content_schema and dto_data.""" + fields: list[tuple[str, type[Any], Field[Any]]] = [] + for key, value in dto_data.items(): + required_properties = content_schema.get("required", []) + safe_key = get_safe_key(key) + metadata = {"original_property_name": key} + if key in required_properties: + # The fields list is used to create a dataclass, so non-default fields + # must go before fields with a default + field_ = cast(Field[Any], field(metadata=metadata)) # pylint: disable=invalid-field-call + fields.insert(0, (safe_key, type(value), field_)) + else: + field_ = cast(Field[Any], field(default=None, metadata=metadata)) # pylint: disable=invalid-field-call + fields.append((safe_key, type(value), field_)) + return fields + + +def get_safe_key(key: str) -> str: + """ + Helper function to convert a valid JSON property name to a string that can be used + as a Python variable or function / method name. 
+    """
+    key = key.replace("-", "_")
+    key = key.replace("@", "_")
+    if key[0].isdigit():
+        key = f"_{key}"
+    return key
+
+
+def get_dto_cls_name(path: str, method: str) -> str:
+    method = method.capitalize()
+    path = path.translate({ord(i): None for i in "{}"})
+    path_parts = path.split("/")
+    path_parts = [p.capitalize() for p in path_parts]
+    result = "".join([method, *path_parts])
+    return result
+
+
+def get_content_schema(body_spec: dict[str, Any]) -> dict[str, Any]:
+    """Get the content schema from the requestBody spec."""
+    content_type = get_content_type(body_spec)
+    content_schema = body_spec["content"][content_type]["schema"]
+    return resolve_schema(content_schema)
+
+
+def get_content_type(body_spec: dict[str, Any]) -> str:
+    """Get and validate the first supported content type from the requested body spec
+
+    Should be application/json like content type,
+    e.g "application/json;charset=utf-8" or "application/merge-patch+json"
+    """
+    content_types: list[str] = body_spec["content"].keys()
+    json_regex = r"application/([a-z\-]+\+)?json(;\s?charset=(.+))?"
+    for content_type in content_types:
+        if re.search(json_regex, content_type):
+            return content_type
+
+    # At present not supported for other types.
+    raise NotImplementedError(
+        f"Only content types like 'application/json' are supported. "
+        f"Content types defined in the spec are '{content_types}'."
+ ) + + +def get_request_parameters( + dto_class: Dto | type[Dto], method_spec: dict[str, Any] +) -> tuple[list[dict[str, Any]], dict[str, Any], dict[str, str]]: + """Get the methods parameter spec and params and headers with valid data.""" + parameters = method_spec.get("parameters", []) + parameter_relations = dto_class.get_parameter_relations() + query_params = [p for p in parameters if p.get("in") == "query"] + header_params = [p for p in parameters if p.get("in") == "header"] + params = get_parameter_data(query_params, parameter_relations) + headers = get_parameter_data(header_params, parameter_relations) + return parameters, params, headers + + +def get_parameter_data( + parameters: list[dict[str, Any]], + parameter_relations: list[ResourceRelation], +) -> dict[str, str]: + """Generate a valid list of key-value pairs for all parameters.""" + result: dict[str, str] = {} + value: Any = None + for parameter in parameters: + parameter_name = parameter["name"] + parameter_schema = resolve_schema(parameter["schema"]) + relations = [ + r for r in parameter_relations if r.property_name == parameter_name + ] + if constrained_values := [ + r.values for r in relations if isinstance(r, PropertyValueConstraint) + ]: + value = choice(*constrained_values) + if value is IGNORE: + continue + result[parameter_name] = value + continue + value = get_valid_value(parameter_schema) + result[parameter_name] = value + return result diff --git a/src/OpenApiLibCore/data_invalidation.py b/src/OpenApiLibCore/data_invalidation.py new file mode 100644 index 0000000..94fc373 --- /dev/null +++ b/src/OpenApiLibCore/data_invalidation.py @@ -0,0 +1,281 @@ +""" +Module holding the functions related to invalidation of valid data (generated +to make 2xx requests) to support testing for 4xx responses. 
+""" + +from copy import deepcopy +from random import choice +from typing import Any + +from requests import Response +from robot.api import logger +from robot.libraries.BuiltIn import BuiltIn + +from OpenApiLibCore.dto_base import ( + NOT_SET, + Dto, + IdReference, + PathPropertiesConstraint, + PropertyValueConstraint, + UniquePropertyValueConstraint, + resolve_schema, +) +from OpenApiLibCore.request_data import RequestData +from OpenApiLibCore.value_utils import IGNORE, get_invalid_value, get_valid_value + +run_keyword = BuiltIn().run_keyword + + +def get_invalid_json_data( + url: str, + method: str, + status_code: int, + request_data: RequestData, + invalid_property_default_response: int, +) -> dict[str, Any]: + method = method.lower() + data_relations = request_data.dto.get_relations_for_error_code(status_code) + data_relations = [ + r for r in data_relations if not isinstance(r, PathPropertiesConstraint) + ] + if not data_relations: + if not request_data.dto_schema: + raise ValueError( + "Failed to invalidate: no data_relations and empty schema." 
+ ) + json_data = request_data.dto.get_invalidated_data( + schema=request_data.dto_schema, + status_code=status_code, + invalid_property_default_code=invalid_property_default_response, + ) + return json_data + resource_relation = choice(data_relations) + if isinstance(resource_relation, UniquePropertyValueConstraint): + json_data = run_keyword( + "get_json_data_with_conflict", + url, + method, + request_data.dto, + status_code, + ) + elif isinstance(resource_relation, IdReference): + run_keyword("ensure_in_use", url, resource_relation) + json_data = request_data.dto.as_dict() + else: + json_data = request_data.dto.get_invalidated_data( + schema=request_data.dto_schema, + status_code=status_code, + invalid_property_default_code=invalid_property_default_response, + ) + return json_data + + +def get_invalidated_parameters( + status_code: int, request_data: RequestData, invalid_property_default_response: int +) -> tuple[dict[str, Any], dict[str, str]]: + if not request_data.parameters: + raise ValueError("No params or headers to invalidate.") + + # ensure the status_code can be triggered + relations = request_data.dto.get_parameter_relations_for_error_code(status_code) + relations_for_status_code = [ + r + for r in relations + if isinstance(r, PropertyValueConstraint) + and (status_code in (r.error_code, r.invalid_value_error_code)) + ] + parameters_to_ignore = { + r.property_name + for r in relations_for_status_code + if r.invalid_value_error_code == status_code and r.invalid_value == IGNORE + } + relation_property_names = {r.property_name for r in relations_for_status_code} + if not relation_property_names: + if status_code != invalid_property_default_response: + raise ValueError(f"No relations to cause status_code {status_code} found.") + + # ensure we're not modifying mutable properties + params = deepcopy(request_data.params) + headers = deepcopy(request_data.headers) + + if status_code == invalid_property_default_response: + # take the params and headers that can 
be invalidated based on data type + # and expand the set with properties that can be invalided by relations + parameter_names = set(request_data.params_that_can_be_invalidated).union( + request_data.headers_that_can_be_invalidated + ) + parameter_names.update(relation_property_names) + if not parameter_names: + raise ValueError( + "None of the query parameters and headers can be invalidated." + ) + else: + # non-default status_codes can only be the result of a Relation + parameter_names = relation_property_names + + # Dto mappings may contain generic mappings for properties that are not present + # in this specific schema + request_data_parameter_names = [p.get("name") for p in request_data.parameters] + additional_relation_property_names = { + n for n in relation_property_names if n not in request_data_parameter_names + } + if additional_relation_property_names: + logger.warn( + f"get_parameter_relations_for_error_code yielded properties that are " + f"not defined in the schema: {additional_relation_property_names}\n" + f"These properties will be ignored for parameter invalidation." + ) + parameter_names = parameter_names - additional_relation_property_names + + if not parameter_names: + raise ValueError( + f"No parameter can be changed to cause status_code {status_code}." + ) + + parameter_names = parameter_names - parameters_to_ignore + parameter_to_invalidate = choice(tuple(parameter_names)) + + # check for invalid parameters in the provided request_data + try: + [parameter_data] = [ + data + for data in request_data.parameters + if data["name"] == parameter_to_invalidate + ] + except Exception: + raise ValueError( + f"{parameter_to_invalidate} not found in provided parameters." 
+ ) from None + + # get the invalid_value for the chosen parameter + try: + [invalid_value_for_error_code] = [ + r.invalid_value + for r in relations_for_status_code + if r.property_name == parameter_to_invalidate + and r.invalid_value_error_code == status_code + ] + except ValueError: + invalid_value_for_error_code = NOT_SET + + # get the constraint values if available for the chosen parameter + try: + [values_from_constraint] = [ + r.values + for r in relations_for_status_code + if r.property_name == parameter_to_invalidate + ] + except ValueError: + values_from_constraint = [] + + # if the parameter was not provided, add it to params / headers + params, headers = ensure_parameter_in_parameters( + parameter_to_invalidate=parameter_to_invalidate, + params=params, + headers=headers, + parameter_data=parameter_data, + values_from_constraint=values_from_constraint, + ) + + # determine the invalid_value + if invalid_value_for_error_code != NOT_SET: + invalid_value = invalid_value_for_error_code + else: + if parameter_to_invalidate in params.keys(): + valid_value = params[parameter_to_invalidate] + else: + valid_value = headers[parameter_to_invalidate] + + value_schema = resolve_schema(parameter_data["schema"]) + invalid_value = get_invalid_value( + value_schema=value_schema, + current_value=valid_value, + values_from_constraint=values_from_constraint, + ) + logger.debug(f"{parameter_to_invalidate} changed to {invalid_value}") + + # update the params / headers and return + if parameter_to_invalidate in params.keys(): + params[parameter_to_invalidate] = invalid_value + else: + headers[parameter_to_invalidate] = str(invalid_value) + return params, headers + + +def ensure_parameter_in_parameters( + parameter_to_invalidate: str, + params: dict[str, Any], + headers: dict[str, str], + parameter_data: dict[str, Any], + values_from_constraint: list[Any], +) -> tuple[dict[str, Any], dict[str, str]]: + """ + Returns the params, headers tuple with parameter_to_invalidate with a 
valid + value to params or headers if not originally present. + """ + if ( + parameter_to_invalidate not in params.keys() + and parameter_to_invalidate not in headers.keys() + ): + if values_from_constraint: + valid_value = choice(values_from_constraint) + else: + parameter_schema = resolve_schema(parameter_data["schema"]) + valid_value = get_valid_value(parameter_schema) + if ( + parameter_data["in"] == "query" + and parameter_to_invalidate not in params.keys() + ): + params[parameter_to_invalidate] = valid_value + if ( + parameter_data["in"] == "header" + and parameter_to_invalidate not in headers.keys() + ): + headers[parameter_to_invalidate] = str(valid_value) + return params, headers + + +def get_json_data_with_conflict( + url: str, base_url: str, method: str, dto: Dto, conflict_status_code: int +) -> dict[str, Any]: + method = method.lower() + json_data = dto.as_dict() + unique_property_value_constraints = [ + r for r in dto.get_relations() if isinstance(r, UniquePropertyValueConstraint) + ] + for relation in unique_property_value_constraints: + json_data[relation.property_name] = relation.value + # create a new resource that the original request will conflict with + if method in ["patch", "put"]: + post_url_parts = url.split("/")[:-1] + post_url = "/".join(post_url_parts) + # the PATCH or PUT may use a different dto than required for POST + # so a valid POST dto must be constructed + path = post_url.replace(base_url, "") + request_data: RequestData = run_keyword("get_request_data", path, "post") + post_json = request_data.dto.as_dict() + for key in post_json.keys(): + if key in json_data: + post_json[key] = json_data.get(key) + else: + post_url = url + post_json = json_data + path = post_url.replace(base_url, "") + request_data = run_keyword("get_request_data", path, "post") + + response: Response = run_keyword( + "authorized_request", + post_url, + "post", + request_data.params, + request_data.headers, + post_json, + ) + # conflicting resource may already 
exist + assert response.ok or response.status_code == conflict_status_code, ( + f"get_json_data_with_conflict received {response.status_code}: {response.json()}" + ) + return json_data + raise ValueError( + f"No UniquePropertyValueConstraint in the get_relations list on dto {dto}." + ) diff --git a/src/OpenApiLibCore/dto_base.py b/src/OpenApiLibCore/dto_base.py index a617ded..814ed80 100644 --- a/src/OpenApiLibCore/dto_base.py +++ b/src/OpenApiLibCore/dto_base.py @@ -7,14 +7,13 @@ from abc import ABC from copy import deepcopy from dataclasses import dataclass, fields -from logging import getLogger from random import choice, shuffle from typing import Any from uuid import uuid4 -from OpenApiLibCore import value_utils +from robot.api import logger -logger = getLogger(__name__) +from OpenApiLibCore import value_utils NOT_SET = object() SENTINEL = object() @@ -86,7 +85,7 @@ def merge_schemas(first: dict[str, Any], second: dict[str, Any]) -> dict[str, An return merged_schema -class ResourceRelation(ABC): # pylint: disable=too-few-public-methods +class ResourceRelation(ABC): """ABC for all resource relations or restrictions within the API.""" property_name: str @@ -95,7 +94,7 @@ class ResourceRelation(ABC): # pylint: disable=too-few-public-methods @dataclass class PathPropertiesConstraint(ResourceRelation): - """The resolved path for the endpoint.""" + """The value to be used as the ``path`` for related requests.""" path: str property_name: str = "id" @@ -231,12 +230,12 @@ def get_invalidated_data( if isinstance(r, IdDependency) and r.property_name == property_name ] if id_dependencies: - invalid_value = uuid4().hex + invalid_id = uuid4().hex logger.debug( f"Breaking IdDependency for status_code {status_code}: replacing " - f"{properties[property_name]} with {invalid_value}" + f"{properties[property_name]} with {invalid_id}" ) - properties[property_name] = invalid_value + properties[property_name] = invalid_id return properties invalid_value_from_constraint = [ @@ 
-297,11 +296,11 @@ def get_invalidated_data( ) properties[property_name] = invalid_value logger.debug( - f"Property {property_name} changed to {invalid_value} (received from " + f"Property {property_name} changed to {invalid_value!r} (received from " f"get_invalid_value)" ) return properties - logger.warning("get_invalidated_data returned unchanged properties") + logger.warn("get_invalidated_data returned unchanged properties") return properties # pragma: no cover def as_dict(self) -> dict[Any, Any]: diff --git a/src/OpenApiLibCore/dto_utils.py b/src/OpenApiLibCore/dto_utils.py index 84e5b7d..3f8368e 100644 --- a/src/OpenApiLibCore/dto_utils.py +++ b/src/OpenApiLibCore/dto_utils.py @@ -2,12 +2,12 @@ from dataclasses import dataclass from importlib import import_module -from logging import getLogger -from typing import Callable, Type +from typing import Any, Callable, Type, overload -from OpenApiLibCore.dto_base import Dto +from robot.api import logger -logger = getLogger(__name__) +from OpenApiLibCore.dto_base import Dto +from OpenApiLibCore.protocols import GetDtoClassType, GetIdPropertyNameType @dataclass @@ -23,8 +23,11 @@ class DefaultDto(Dto): """A default Dto that can be instantiated.""" -# pylint: disable=invalid-name, too-few-public-methods -class get_dto_class: +def get_dto_class(mappings_module_name: str) -> GetDtoClassType: + return GetDtoClass(mappings_module_name=mappings_module_name) + + +class GetDtoClass: """Callable class to return Dtos from user-implemented mappings file.""" def __init__(self, mappings_module_name: str) -> None: @@ -38,16 +41,19 @@ def __init__(self, mappings_module_name: str) -> None: logger.error(f"DTO_MAPPING was not imported: {exception}") self.dto_mapping = {} - def __call__(self, endpoint: str, method: str) -> Type[Dto]: + def __call__(self, path: str, method: str) -> Type[Dto]: try: - return self.dto_mapping[(endpoint, method.lower())] + return self.dto_mapping[(path, method.lower())] except KeyError: - logger.debug(f"No 
Dto mapping for {endpoint} {method}.") + logger.debug(f"No Dto mapping for {path} {method}.") return DefaultDto -# pylint: disable=invalid-name, too-few-public-methods -class get_id_property_name: +def get_id_property_name(mappings_module_name: str) -> GetIdPropertyNameType: + return GetIdPropertyName(mappings_module_name=mappings_module_name) + + +class GetIdPropertyName: """ Callable class to return the name of the property that uniquely identifies the resource from user-implemented mappings file. @@ -58,8 +64,7 @@ def __init__(self, mappings_module_name: str) -> None: mappings_module = import_module(mappings_module_name) self.id_mapping: dict[ str, - str - | tuple[str, tuple[Callable[[str | int | float], str | int | float]]], + str | tuple[str, Callable[[str], str] | Callable[[int], int]], ] = mappings_module.ID_MAPPING except (ImportError, AttributeError, ValueError) as exception: if mappings_module_name != "no mapping": @@ -67,13 +72,26 @@ def __init__(self, mappings_module_name: str) -> None: self.id_mapping = {} def __call__( - self, endpoint: str - ) -> str | tuple[str, tuple[Callable[[str | int | float], str | int | float]]]: + self, path: str + ) -> tuple[str, Callable[[str], str] | Callable[[int], int]]: try: - return self.id_mapping[endpoint] + value_or_mapping = self.id_mapping[path] + if isinstance(value_or_mapping, str): + return (value_or_mapping, dummy_transformer) + return value_or_mapping except KeyError: default_id_name = DEFAULT_ID_PROPERTY_NAME.id_property_name - logger.debug( - f"No id mapping for {endpoint} ('{default_id_name}' will be used)" - ) - return default_id_name + logger.debug(f"No id mapping for {path} ('{default_id_name}' will be used)") + return (default_id_name, dummy_transformer) + + +@overload +def dummy_transformer(valid_id: str) -> str: ... + + +@overload +def dummy_transformer(valid_id: int) -> int: ... 
+ + +def dummy_transformer(valid_id: Any) -> Any: + return valid_id diff --git a/src/OpenApiLibCore/oas_cache.py b/src/OpenApiLibCore/oas_cache.py index 474bd40..a9c4d24 100644 --- a/src/OpenApiLibCore/oas_cache.py +++ b/src/OpenApiLibCore/oas_cache.py @@ -1,17 +1,18 @@ -from typing import Callable +"""Module holding the (global) parser cache.""" + +from dataclasses import dataclass from openapi_core import Spec -from openapi_core.contrib.requests import ( - RequestsOpenAPIRequest, - RequestsOpenAPIResponse, -) from prance import ResolvingParser -PARSER_CACHE: dict[ - str, - tuple[ - ResolvingParser, - Spec, - Callable[[RequestsOpenAPIRequest, RequestsOpenAPIResponse], None], - ], -] = {} +from OpenApiLibCore.protocols import ResponseValidatorType + + +@dataclass +class CachedParser: + parser: ResolvingParser + validation_spec: Spec + response_validator: ResponseValidatorType + + +PARSER_CACHE: dict[str, CachedParser] = {} diff --git a/src/OpenApiLibCore/openapi_libcore.libspec b/src/OpenApiLibCore/openapi_libcore.libspec index 5db5ae2..5601f88 100644 --- a/src/OpenApiLibCore/openapi_libcore.libspec +++ b/src/OpenApiLibCore/openapi_libcore.libspec @@ -1,13 +1,13 @@ - -0.4.0 + +1.0.0 <p>Main class providing the keywords and core logic to interact with an OpenAPI server.</p> <p>Visit the <a href="https://github.com/MarketSquare/robotframework-openapi-libcore">library page</a> for an introduction.</p> - - + + source @@ -50,16 +50,15 @@ id - + faker_locale - + - -None + require_body_for_invalid_url @@ -71,9 +70,23 @@ 1 - + recursion_default - + + + + + + + + + + + + + + + {} @@ -99,42 +112,37 @@ None - + cert - + - -None + - + verify_tls - True - + extra_headers - - + - - -None +{} - + cookies - + @@ -143,10 +151,10 @@ None - + proxies - + @@ -161,7 +169,7 @@ <h4>origin</h4> <p>The server (and port) of the target server. E.g. 
<code>https://localhost:8000</code></p> <h4>base_path</h4> -<p>The routing between <code>origin</code> and the endpoints as found in the <code>paths</code> section in the openapi document. E.g. <code>/petshop/v2</code>.</p> +<p>The routing between <code>origin</code> and the paths as found in the <code>paths</code> section in the openapi document. E.g. <code>/petshop/v2</code>.</p> <h3>Test case execution</h3> <h4>response_validation</h4> <p>By default, a <code>WARN</code> is logged when the Response received after a Request does not comply with the schema as defined in the openapi document for the given operation. The following values are supported:</p> @@ -214,8 +222,39 @@ - - + + + +href + + + +referenced_resource + + + + + + + + + + + + + + + + + + + + +<p>Attempt to GET the resource referenced by the <span class="name">href</span> and validate it's equal to the provided <span class="name">referenced_resource</span> object / dictionary.</p> +Attempt to GET the resource referenced by the `href` and validate it's equal to the provided `referenced_resource` object / dictionary. + + + url @@ -224,10 +263,10 @@ method - + params - + @@ -235,10 +274,10 @@ None - + headers - + @@ -246,43 +285,18 @@ None - + json_data - - - - - - - - - - - - - - - - - - - - + - + - - - - - - - + @@ -290,14 +304,20 @@ None - + data + + + None - + files + + + None @@ -307,7 +327,7 @@ <p>&gt; Note: provided username / password or auth objects take precedence over token based security</p> Perform a request using the security token or authentication set in the library. - + url @@ -321,20 +341,20 @@ <p>Ensure that the (right-most) <span class="name">id</span> of the resource referenced by the <span class="name">url</span> is used by the resource defined by the <span class="name">resource_relation</span>.</p> Ensure that the (right-most) `id` of the resource referenced by the `url` is used by the resource defined by the `resource_relation`. 
- + url - + <p>Perform a GET request on the <span class="name">url</span> and return the list of resource <span class="name">ids</span> from the response.</p> Perform a GET request on the `url` and return the list of resource `ids` from the response. - + url @@ -353,15 +373,29 @@ - + - + + + + + + + + + + + + + + + <p>Return <span class="name">json_data</span> based on the <span class="name">dto</span> on the <span class="name">request_data</span> that will cause the provided <span class="name">status_code</span> for the <span class="name">method</span> operation on the <span class="name">url</span>.</p> <p>&gt; Note: applicable UniquePropertyValueConstraint and IdReference Relations are considered before changes to <span class="name">json_data</span> are made.</p> Return `json_data` based on the `dto` on the `request_data` that will cause the provided `status_code` for the `method` operation on the `url`. - + status_code @@ -372,12 +406,26 @@ - - + + - + + + + + + + + + + + + + + + - + @@ -385,8 +433,8 @@ <p>Returns a version of <span class="name">params, headers</span> as present on <span class="name">request_data</span> that has been modified to cause the provided <span class="name">status_code</span>.</p> Returns a version of `params, headers` as present on `request_data` that has been modified to cause the provided `status_code`. - - + + valid_url @@ -396,42 +444,45 @@ - -method - - - expected_status_code 404 - - - - -<p>Return an url with all the path parameters in the <span class="name">valid_url</span> replaced by a random UUID if no PathPropertiesConstraint is mapped for the <a href="#type-Path" class="name">path</a>, <span class="name">method</span> and <span class="name">expected_status_code</span>. 
If a PathPropertiesConstraint is mapped, the <span class="name">invalid_value</span> is returned.</p> + +<p>Return an url with all the path parameters in the <span class="name">valid_url</span> replaced by a random UUID if no PathPropertiesConstraint is mapped for the <span class="name">"get"</span> operation on the mapped <a href="#type-Path" class="name">path</a> and <span class="name">expected_status_code</span>. If a PathPropertiesConstraint is mapped, the <span class="name">invalid_value</span> is returned.</p> <p>Raises ValueError if the valid_url cannot be invalidated.</p> -Return an url with all the path parameters in the `valid_url` replaced by a random UUID if no PathPropertiesConstraint is mapped for the `path`, `method` and `expected_status_code`. If a PathPropertiesConstraint is mapped, the `invalid_value` is returned. +Return an url with all the path parameters in the `valid_url` replaced by a random UUID if no PathPropertiesConstraint is mapped for the `"get"` operation on the mapped `path` and `expected_status_code`. If a PathPropertiesConstraint is mapped, the `invalid_value` is returned. - - - + + + schema - + - + + + + + + + + + + + + + + + - + dto_class - - - + - operation_id @@ -440,16 +491,24 @@ - + - + + + + + + + + + -<p>Generate a valid (json-compatible) dict for all the <span class="name">dto_class</span> properties.</p> -Generate a valid (json-compatible) dict for all the `dto_class` properties. +<p>Generate valid (json-compatible) data for the <span class="name">dto_class</span>.</p> +Generate valid (json-compatible) data for the `dto_class`. 
- + url @@ -468,14 +527,28 @@ - + - + + + + + + + + + + + + + + + <p>Return <span class="name">json_data</span> based on the <span class="name">UniquePropertyValueConstraint</span> that must be returned by the <span class="name">get_relations</span> implementation on the <span class="name">dto</span> for the given <span class="name">conflict_status_code</span>.</p> Return `json_data` based on the `UniquePropertyValueConstraint` that must be returned by the `get_relations` implementation on the `dto` for the given `conflict_status_code`. - + url @@ -483,13 +556,13 @@ -<p>Return the endpoint as found in the <span class="name">paths</span> section based on the given <span class="name">url</span>.</p> -Return the endpoint as found in the `paths` section based on the given `url`. +<p>Return the path as found in the <span class="name">paths</span> section based on the given <span class="name">url</span>.</p> +Return the path as found in the `paths` section based on the given `url`. - - - -endpoint + + + +path @@ -501,14 +574,10 @@ <p>Return an object with valid request data for body, headers and query params.</p> Return an object with valid request data for body, headers and query params. - - - -endpoint - - - -method + + + +path @@ -517,29 +586,25 @@ -<p>Support keyword that returns the <span class="name">id</span> for an existing resource at <span class="name">endpoint</span>.</p> -<p>To prevent resource conflicts with other test cases, a new resource is created (POST) if possible.</p> -Support keyword that returns the `id` for an existing resource at `endpoint`. +<p>Support keyword that returns the <span class="name">id</span> for an existing resource at <a href="#type-Path" class="name">path</a>.</p> +<p>To prevent resource conflicts with other test cases, a new resource is created (by a POST operation) if possible.</p> +Support keyword that returns the `id` for an existing resource at `path`. 
- - - -endpoint - - - -method + + + +path -<p>This keyword returns a valid url for the given <span class="name">endpoint</span> and <span class="name">method</span>.</p> -<p>If the <span class="name">endpoint</span> contains path parameters the Get Valid Id For Endpoint keyword will be executed to retrieve valid ids for the path parameters.</p> -<p>&gt; Note: if valid ids cannot be retrieved within the scope of the API, the <span class="name">PathPropertiesConstraint</span> Relation can be used. More information can be found <a href="https://marketsquare.github.io/robotframework-openapi-libcore/advanced_use.html">here</a>.</p> -This keyword returns a valid url for the given `endpoint` and `method`. +<p>This keyword returns a valid url for the given <a href="#type-Path" class="name">path</a>.</p> +<p>If the <a href="#type-Path" class="name">path</a> contains path parameters the Get Valid Id For Path keyword will be executed to retrieve valid ids for the path parameters.</p> +<p>&gt; Note: if valid ids cannot be retrieved within the scope of the API, the <span class="name">PathPropertiesConstraint</span> Relation can be used. More information can be found <a href="https://marketsquare.github.io/robotframework-openapitools/advanced_use.html">here</a>.</p> +This keyword returns a valid url for the given `path`. - - + + path @@ -552,22 +617,19 @@ request_values - + original_data - - + - + - - -None +{} <p>This keyword first calls the Authorized Request keyword, then the Validate Response keyword and finally validates, for <span class="name">DELETE</span> operations, whether the target resource was indeed deleted (OK response) or not (error responses).</p> This keyword first calls the Authorized Request keyword, then the Validate Response keyword and finally validates, for `DELETE` operations, whether the target resource was indeed deleted (OK response) or not (error responses). 
- + auth @@ -578,7 +640,7 @@ <p>After calling this keyword, subsequent requests will use the provided <span class="name">auth</span> instance.</p> Set the `auth` used for authentication after the library is imported. - + username @@ -593,11 +655,11 @@ <p>After calling this keyword, subsequent requests will use the provided credentials.</p> Set the `username` and `password` used for basic authentication after the library is imported. - - - + + + extra_headers - + @@ -607,7 +669,7 @@ <p>After calling this keyword, subsequent requests will use the provided <span class="name">extra_headers</span>.</p> Set the `extra_headers` used in requests after the library is imported. - + origin @@ -619,7 +681,7 @@ <p>In combination with OpenApiLibCore, the <span class="name">origin</span> can be used at any point to target another server that hosts an API that complies to the same OAS.</p> Set the `origin` after the library is imported. - + security_token @@ -630,28 +692,56 @@ <p>After calling this keyword, subsequent requests will use the provided token.</p> Set the `security_token` after the library is imported. - - - + + + resource - + - + + + + + + + + + + + + + + + - + schema - + - + + + + + + + + + + + + + + + <p>Validate that the <span class="name">resource</span> does not contain any properties that are not defined in the <span class="name">schema_properties</span>.</p> Validate that the `resource` does not contain any properties that are not defined in the `schema_properties`. 
- - + + path @@ -660,45 +750,53 @@ response - + original_data - - + - + - - -None +{} <p>Validate the <span class="name">response</span> by performing the following validations:</p> <ul> -<li>validate the <span class="name">response</span> against the openapi schema for the <span class="name">endpoint</span></li> +<li>validate the <span class="name">response</span> against the openapi schema for the <a href="#type-Path" class="name">path</a></li> <li>validate that the response does not contain extra properties</li> <li>validate that a href, if present, refers to the correct resource</li> <li>validate that the value for a property that is in the response is equal to the property value that was send</li> <li>validate that no <span class="name">original_data</span> is preserved when performing a PUT operation</li> <li>validate that a PATCH operation only updates the provided properties</li> </ul> -Validate the `response` by performing the following validations: - validate the `response` against the openapi schema for the `endpoint` - validate that the response does not contain extra properties - validate that a href, if present, refers to the correct resource - validate that the value for a property that is in the response is equal to the property value that was send - validate that no `original_data` is preserved when performing a PUT operation - validate that a PATCH operation only updates the provided properties +Validate the `response` by performing the following validations: - validate the `response` against the openapi schema for the `path` - validate that the response does not contain extra properties - validate that a href, if present, refers to the correct resource - validate that the value for a property that is in the response is equal to the property value that was send - validate that no `original_data` is preserved when performing a PUT operation - validate that a PATCH operation only updates the provided properties + + + + +request + + + +response + + + 
+<p>Validate the <span class="name">response</span> for a given <span class="name">request</span> against the OpenAPI Spec that is loaded during library initialization.</p> +Validate the `response` for a given `request` against the OpenAPI Spec that is loaded during library initialization. - - + + response - + original_data - - + - - - + -None +{} <p>Validate that each property that was send that is in the response has the value that was send. In case a PATCH request, validate that only the properties that were patched have changed and that other properties are still at their pre-patch values.</p> @@ -712,16 +810,7 @@ Any -__init__ Authorized Request -Get Invalid Json Data -Get Invalidated Parameters -Get Json Data For Dto Class -Get Json Data With Conflict -Perform Validated Request -Validate Resource Properties -Validate Response -Validate Send Response @@ -735,7 +824,31 @@ __init__ +Assert Href To Resource Is Valid Authorized Request +Get Invalid Json Data +Get Invalidated Parameters +Get Json Data For Dto Class +Get Json Data With Conflict +Validate Resource Properties + + + +<p>Strings are converted to bytes so that each Unicode code point below 256 is directly mapped to a matching byte. Higher code points are not allowed. 
Robot Framework's <code>\xHH</code> escape syntax is convenient with bytes having non-printable values.</p> +<p>Examples: <code>good</code>, <code>hyvรค</code> (same as <code>hyv\xE4</code>), <code>\x00</code> (the null byte)</p> + +string +bytearray + + +__init__ +Assert Href To Resource Is Valid +Authorized Request +Get Invalid Json Data +Get Invalidated Parameters +Get Json Data For Dto Class +Get Json Data With Conflict +Validate Resource Properties @@ -748,6 +861,7 @@ __init__ +Assert Href To Resource Is Valid Authorized Request Get Invalid Json Data Get Invalidated Parameters @@ -769,8 +883,15 @@ Real +__init__ +Assert Href To Resource Is Valid Authorized Request -Get Valid Id For Endpoint +Get Invalid Json Data +Get Invalidated Parameters +Get Json Data For Dto Class +Get Json Data With Conflict +Get Valid Id For Path +Validate Resource Properties @@ -784,13 +905,16 @@ __init__ +Assert Href To Resource Is Valid Authorized Request Get Invalid Json Data Get Invalidated Parameters Get Invalidated Url +Get Json Data For Dto Class Get Json Data With Conflict -Get Valid Id For Endpoint +Get Valid Id For Path Perform Validated Request +Validate Resource Properties @@ -803,8 +927,14 @@ __init__ +Assert Href To Resource Is Valid Authorized Request Get Ids From Url +Get Invalid Json Data +Get Invalidated Parameters +Get Json Data For Dto Class +Get Json Data With Conflict +Validate Resource Properties @@ -814,12 +944,13 @@ __init__ +Assert Href To Resource Is Valid Authorized Request -Get Invalidated Url +Get Invalid Json Data +Get Invalidated Parameters Get Json Data For Dto Class -Perform Validated Request -Validate Response -Validate Send Response +Get Json Data With Conflict +Validate Resource Properties @@ -840,6 +971,7 @@ __init__ +Assert Href To Resource Is Valid Authorized Request Ensure In Use Get Ids From Url @@ -848,9 +980,9 @@ Get Invalidated Url Get Json Data For Dto Class Get Json Data With Conflict -Get Parameterized Endpoint From Url +Get Parameterized 
Path From Url Get Request Data -Get Valid Id For Endpoint +Get Valid Id For Path Get Valid Url Perform Validated Request Set Basic Auth diff --git a/src/OpenApiLibCore/openapi_libcore.py b/src/OpenApiLibCore/openapi_libcore.py index eedd25c..0a29d07 100644 --- a/src/OpenApiLibCore/openapi_libcore.py +++ b/src/OpenApiLibCore/openapi_libcore.py @@ -75,7 +75,7 @@ If the openapi document passes this validation, the next step is trying to do a test run with a minimal test suite. -The example below can be used, with `source`, `origin` and 'endpoint' altered to +The example below can be used, with `source`, `origin` and `path` altered to fit your situation. ``` robotframework @@ -86,7 +86,7 @@ *** Test Cases *** Getting Started - ${url}= Get Valid Url endpoint=/employees/{employee_id} method=get + ${url}= Get Valid Url path=/employees/{employee_id} ``` @@ -119,331 +119,55 @@ """ import json as _json -import re import sys +from collections.abc import Mapping, MutableMapping from copy import deepcopy -from dataclasses import Field, dataclass, field, make_dataclass -from enum import Enum from functools import cached_property -from itertools import zip_longest -from logging import getLogger from pathlib import Path -from random import choice, sample -from typing import Any, Callable, Generator -from uuid import uuid4 +from types import MappingProxyType +from typing import Any, Generator from openapi_core import Config, OpenAPI, Spec from openapi_core.contrib.requests import ( RequestsOpenAPIRequest, RequestsOpenAPIResponse, ) -from openapi_core.exceptions import OpenAPIError -from openapi_core.templating.paths.exceptions import ServerNotFound from openapi_core.validation.exceptions import ValidationError -from openapi_core.validation.response.exceptions import ResponseValidationError -from openapi_core.validation.schemas.exceptions import InvalidSchemaValue from prance import ResolvingParser from prance.util.url import ResolutionError from requests import Response, Session 
from requests.auth import AuthBase, HTTPBasicAuth from requests.cookies import RequestsCookieJar as CookieJar +from robot.api import logger from robot.api.deco import keyword, library -from robot.api.exceptions import Failure +from robot.api.exceptions import FatalError from robot.libraries.BuiltIn import BuiltIn -from OpenApiLibCore import value_utils -from OpenApiLibCore.dto_base import ( - NOT_SET, - Dto, - IdDependency, - IdReference, - PathPropertiesConstraint, - PropertyValueConstraint, - ResourceRelation, - UniquePropertyValueConstraint, - resolve_schema, -) +import OpenApiLibCore.data_generation as _data_generation +import OpenApiLibCore.data_invalidation as di +import OpenApiLibCore.path_functions as pf +import OpenApiLibCore.path_invalidation as pi +import OpenApiLibCore.resource_relations as rr +import OpenApiLibCore.validation as val +from OpenApiLibCore.annotations import JSON +from OpenApiLibCore.dto_base import Dto, IdReference from OpenApiLibCore.dto_utils import ( DEFAULT_ID_PROPERTY_NAME, - DefaultDto, get_dto_class, get_id_property_name, ) -from OpenApiLibCore.oas_cache import PARSER_CACHE -from OpenApiLibCore.value_utils import FAKE, IGNORE, JSON +from OpenApiLibCore.oas_cache import PARSER_CACHE, CachedParser +from OpenApiLibCore.protocols import ResponseValidatorType +from OpenApiLibCore.request_data import RequestData, RequestValues +from OpenApiLibCore.value_utils import FAKE run_keyword = BuiltIn().run_keyword - -logger = getLogger(__name__) - - -class ValidationLevel(str, Enum): - """The available levels for the response_validation parameter.""" - - DISABLED = "DISABLED" - INFO = "INFO" - WARN = "WARN" - STRICT = "STRICT" - - -def get_safe_key(key: str) -> str: - """ - Helper function to convert a valid JSON property name to a string that can be used - as a Python variable or function / method name. 
- """ - key = key.replace("-", "_") - key = key.replace("@", "_") - if key[0].isdigit(): - key = f"_{key}" - return key - - -@dataclass -class RequestValues: - """Helper class to hold parameter values needed to make a request.""" - - url: str - method: str - params: dict[str, Any] | None # FIXME: default empty dict? - headers: dict[str, str] | None # FIXME: default empty dict? - json_data: dict[str, Any] | None # FIXME: default empty dict? - - -@dataclass -class RequestData: - """Helper class to manage parameters used when making requests.""" - - dto: Dto | DefaultDto = field(default_factory=DefaultDto) - dto_schema: dict[str, Any] = field(default_factory=dict) - parameters: list[dict[str, Any]] = field(default_factory=list) - params: dict[str, Any] = field(default_factory=dict) - headers: dict[str, Any] = field(default_factory=dict) - has_body: bool = True - - def __post_init__(self) -> None: - # prevent modification by reference - self.dto_schema = deepcopy(self.dto_schema) - self.parameters = deepcopy(self.parameters) - self.params = deepcopy(self.params) - self.headers = deepcopy(self.headers) - - @property - def has_optional_properties(self) -> bool: - """Whether or not the dto data (json data) contains optional properties.""" - - def is_required_property(property_name: str) -> bool: - return property_name in self.dto_schema.get("required", []) - - properties = (self.dto.as_dict()).keys() - return not all(map(is_required_property, properties)) - - @property - def has_optional_params(self) -> bool: - """Whether or not any of the query parameters are optional.""" - - def is_optional_param(query_param: str) -> bool: - optional_params = [ - p.get("name") - for p in self.parameters - if p.get("in") == "query" and not p.get("required") - ] - return query_param in optional_params - - return any(map(is_optional_param, self.params)) - - @cached_property - def params_that_can_be_invalidated(self) -> set[str]: - """ - The query parameters that can be invalidated by 
violating data - restrictions, data type or by not providing them in a request. - """ - result = set() - params = [h for h in self.parameters if h.get("in") == "query"] - for param in params: - # required params can be omitted to invalidate a request - if param["required"]: - result.add(param["name"]) - continue - - schema = resolve_schema(param["schema"]) - if schema.get("type", None): - param_types = [schema] - else: - param_types = schema["types"] - for param_type in param_types: - # any basic non-string type except "null" can be invalidated by - # replacing it with a string - if param_type["type"] not in ["string", "array", "object", "null"]: - result.add(param["name"]) - continue - # enums, strings and arrays with boundaries can be invalidated - if set(param_type.keys()).intersection( - { - "enum", - "minLength", - "maxLength", - "minItems", - "maxItems", - } - ): - result.add(param["name"]) - continue - # an array of basic non-string type can be invalidated by replacing the - # items in the array with strings - if param_type["type"] == "array" and param_type["items"][ - "type" - ] not in [ - "string", - "array", - "object", - "null", - ]: - result.add(param["name"]) - return result - - @property - def has_optional_headers(self) -> bool: - """Whether or not any of the headers are optional.""" - - def is_optional_header(header: str) -> bool: - optional_headers = [ - p.get("name") - for p in self.parameters - if p.get("in") == "header" and not p.get("required") - ] - return header in optional_headers - - return any(map(is_optional_header, self.headers)) - - @cached_property - def headers_that_can_be_invalidated(self) -> set[str]: - """ - The header parameters that can be invalidated by violating data - restrictions or by not providing them in a request. 
- """ - result = set() - headers = [h for h in self.parameters if h.get("in") == "header"] - for header in headers: - # required headers can be omitted to invalidate a request - if header["required"]: - result.add(header["name"]) - continue - - schema = resolve_schema(header["schema"]) - if schema.get("type", None): - header_types = [schema] - else: - header_types = schema["types"] - for header_type in header_types: - # any basic non-string type except "null" can be invalidated by - # replacing it with a string - if header_type["type"] not in ["string", "array", "object", "null"]: - result.add(header["name"]) - continue - # enums, strings and arrays with boundaries can be invalidated - if set(header_type.keys()).intersection( - { - "enum", - "minLength", - "maxLength", - "minItems", - "maxItems", - } - ): - result.add(header["name"]) - continue - # an array of basic non-string type can be invalidated by replacing the - # items in the array with strings - if header_type["type"] == "array" and header_type["items"][ - "type" - ] not in [ - "string", - "array", - "object", - "null", - ]: - result.add(header["name"]) - return result - - def get_required_properties_dict(self) -> dict[str, Any]: - """Get the json-compatible dto data containing only the required properties.""" - relations = self.dto.get_relations() - mandatory_properties = [ - relation.property_name - for relation in relations - if getattr(relation, "treat_as_mandatory", False) - ] - required_properties: list[str] = self.dto_schema.get("required", []) - required_properties.extend(mandatory_properties) - - required_properties_dict: dict[str, Any] = {} - for key, value in (self.dto.as_dict()).items(): - if key in required_properties: - required_properties_dict[key] = value - return required_properties_dict - - def get_minimal_body_dict(self) -> dict[str, Any]: - required_properties_dict = self.get_required_properties_dict() - - min_properties = self.dto_schema.get("minProperties", 0) - 
number_of_optional_properties_to_add = min_properties - len( - required_properties_dict - ) - - if number_of_optional_properties_to_add < 1: - return required_properties_dict - - optional_properties_dict = { - k: v - for k, v in self.dto.as_dict().items() - if k not in required_properties_dict - } - optional_properties_to_keep = sample( - sorted(optional_properties_dict), number_of_optional_properties_to_add - ) - optional_properties_dict = { - k: v - for k, v in optional_properties_dict.items() - if k in optional_properties_to_keep - } - - return {**required_properties_dict, **optional_properties_dict} - - def get_required_params(self) -> dict[str, str]: - """Get the params dict containing only the required query parameters.""" - relations = self.dto.get_parameter_relations() - mandatory_properties = [ - relation.property_name - for relation in relations - if getattr(relation, "treat_as_mandatory", False) - ] - mandatory_parameters = [p for p in mandatory_properties if p in self.parameters] - - required_parameters = [ - p.get("name") for p in self.parameters if p.get("required") - ] - required_parameters.extend(mandatory_parameters) - return {k: v for k, v in self.params.items() if k in required_parameters} - - def get_required_headers(self) -> dict[str, str]: - """Get the headers dict containing only the required headers.""" - relations = self.dto.get_parameter_relations() - mandatory_properties = [ - relation.property_name - for relation in relations - if getattr(relation, "treat_as_mandatory", False) - ] - mandatory_parameters = [p for p in mandatory_properties if p in self.parameters] - - required_parameters = [ - p.get("name") for p in self.parameters if p.get("required") - ] - required_parameters.extend(mandatory_parameters) - return {k: v for k, v in self.headers.items() if k in required_parameters} +default_str_mapping: Mapping[str, str] = MappingProxyType({}) +default_any_mapping: Mapping[str, object] = MappingProxyType({}) @library(scope="SUITE", 
doc_format="ROBOT") -class OpenApiLibCore: # pylint: disable=too-many-instance-attributes +class OpenApiLibCore: # pylint: disable=too-many-public-methods """ Main class providing the keywords and core logic to interact with an OpenAPI server. @@ -451,12 +175,12 @@ class OpenApiLibCore: # pylint: disable=too-many-instance-attributes for an introduction. """ - def __init__( # pylint: disable=too-many-arguments, too-many-locals, dangerous-default-value + def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value self, source: str, origin: str = "", base_path: str = "", - response_validation: ValidationLevel = ValidationLevel.WARN, + response_validation: val.ValidationLevel = val.ValidationLevel.WARN, disable_server_validation: bool = True, mappings_path: str | Path = "", invalid_property_default_response: int = 422, @@ -464,16 +188,16 @@ def __init__( # pylint: disable=too-many-arguments, too-many-locals, dangerous- faker_locale: str | list[str] = "", require_body_for_invalid_url: bool = False, recursion_limit: int = 1, - recursion_default: Any = {}, + recursion_default: JSON = {}, username: str = "", password: str = "", security_token: str = "", auth: AuthBase | None = None, - cert: str | tuple[str, str] | None = None, + cert: str | tuple[str, str] = "", verify_tls: bool | str = True, - extra_headers: dict[str, str] = {}, - cookies: dict[str, str] | CookieJar = {}, - proxies: dict[str, str] = {}, + extra_headers: Mapping[str, str] = default_str_mapping, + cookies: MutableMapping[str, str] | CookieJar | None = None, + proxies: MutableMapping[str, str] | None = None, ) -> None: """ == Base parameters == @@ -485,7 +209,7 @@ def __init__( # pylint: disable=too-many-arguments, too-many-locals, dangerous- The server (and port) of the target server. E.g. 
``https://localhost:8000`` === base_path === - The routing between ``origin`` and the endpoints as found in the ``paths`` + The routing between ``origin`` and the paths as found in the ``paths`` section in the openapi document. E.g. ``/petshop/v2``. @@ -601,18 +325,18 @@ def __init__( # pylint: disable=too-many-arguments, too-many-locals, dangerous- self.response_validation = response_validation self.disable_server_validation = disable_server_validation self._recursion_limit = recursion_limit - self._recursion_default = recursion_default + self._recursion_default = deepcopy(recursion_default) self.session = Session() - # only username and password, security_token or auth object should be provided + # Only username and password, security_token or auth object should be provided # if multiple are provided, username and password take precedence self.security_token = security_token self.auth = auth if username: self.auth = HTTPBasicAuth(username, password) - # Robot Framework does not allow users to create tuples and requests - # does not accept lists, so perform the conversion here - if isinstance(cert, list): - cert = tuple(cert) + # Requests only allows a string or a tuple[str, str], so ensure cert is a tuple + # if the passed argument is not a string. + if not isinstance(cert, str): + cert = (cert[0], cert[1]) self.cert = cert self.verify = verify_tls self.extra_headers = extra_headers @@ -622,11 +346,9 @@ def __init__( # pylint: disable=too-many-arguments, too-many-locals, dangerous- if mappings_path and str(mappings_path) != ".": mappings_path = Path(mappings_path) if not mappings_path.is_file(): - logger.warning( - f"mappings_path '{mappings_path}' is not a Python module." 
- ) - # intermediate variable to ensure path.append is possible so we'll never - # path.pop a location that we didn't append + logger.warn(f"mappings_path '{mappings_path}' is not a Python module.") + # Intermediate variable to ensure path.append is possible so we'll never + # path.pop a location that we didn't append. mappings_folder = str(mappings_path.parent) sys.path.append(mappings_folder) mappings_module_name = mappings_path.stem @@ -649,10 +371,7 @@ def __init__( # pylint: disable=too-many-arguments, too-many-locals, dangerous- DEFAULT_ID_PROPERTY_NAME.id_property_name = default_id_property_name self._server_validation_warning_logged = False - @property - def origin(self) -> str: - return self._origin - + # region: library configuration keywords @keyword def set_origin(self, origin: str) -> None: """ @@ -708,1018 +427,197 @@ def set_extra_headers(self, extra_headers: dict[str, str]) -> None: """ self.extra_headers = extra_headers - @property - def base_url(self) -> str: - return f"{self.origin}{self._base_path}" - - @cached_property - def validation_spec(self) -> Spec: - _, validation_spec, _ = self._load_specs_and_validator() - return validation_spec - - @property - def openapi_spec(self) -> dict[str, Any]: - """Return a deepcopy of the parsed openapi document.""" - # protect the parsed openapi spec from being mutated by reference - return deepcopy(self._openapi_spec) - - @cached_property - def _openapi_spec(self) -> dict[str, Any]: - parser, _, _ = self._load_specs_and_validator() - return parser.specification + # endregion + # region: data generation keywords + @keyword + def get_request_data(self, path: str, method: str) -> RequestData: + """Return an object with valid request data for body, headers and query params.""" + return _data_generation.get_request_data( + path=path, + method=method, + get_dto_class=self.get_dto_class, + get_id_property_name=self.get_id_property_name, + openapi_spec=self.openapi_spec, + ) - @cached_property - def 
response_validator( + @keyword + def get_json_data_for_dto_class( self, - ) -> Callable[[RequestsOpenAPIRequest, RequestsOpenAPIResponse], None]: - _, _, response_validator = self._load_specs_and_validator() - return response_validator - - def _get_json_types_from_spec(self, spec: dict[str, Any]) -> set[str]: - json_types: set[str] = set(self._get_json_types(spec)) - return {json_type for json_type in json_types if json_type is not None} - - def _get_json_types(self, item: Any) -> Generator[str, None, None]: - if isinstance(item, dict): - content_dict = item.get("content") - if content_dict is None: - for value in item.values(): - yield from self._get_json_types(value) - - else: - for content_type in content_dict: - if "json" in content_type: - content_type_without_charset, _, _ = content_type.partition(";") - yield content_type_without_charset - - if isinstance(item, list): - for list_item in item: - yield from self._get_json_types(list_item) + schema: dict[str, JSON], + dto_class: type[Dto], + operation_id: str = "", + ) -> JSON: + """ + Generate valid (json-compatible) data for the `dto_class`. + """ + return _data_generation.get_json_data_for_dto_class( + schema=schema, + dto_class=dto_class, + get_id_property_name=self.get_id_property_name, + operation_id=operation_id, + ) - def _load_specs_and_validator( + @keyword + def get_invalid_json_data( self, - ) -> tuple[ - ResolvingParser, - Spec, - Callable[[RequestsOpenAPIRequest, RequestsOpenAPIResponse], None], - ]: - try: - - def recursion_limit_handler( - limit: int, refstring: str, recursions: Any - ) -> Any: - return self._recursion_default - - # Since parsing of the OAS and creating the Spec can take a long time, - # they are cached. This is done by storing them in an imported module that - # will have a global scope due to how the Python import system works. This - # ensures that in a Suite of Suites where multiple Suites use the same - # `source`, that OAS is only parsed / loaded once. 
- parser, validation_spec, response_validator = PARSER_CACHE.get( - self._source, (None, None, None) - ) - - if parser is None: - parser = ResolvingParser( - self._source, - backend="openapi-spec-validator", - recursion_limit=self._recursion_limit, - recursion_limit_handler=recursion_limit_handler, - ) - - if parser.specification is None: # pragma: no cover - BuiltIn().fatal_error( - "Source was loaded, but no specification was present after parsing." - ) - - validation_spec = Spec.from_dict(parser.specification) - - json_types_from_spec: set[str] = self._get_json_types_from_spec( - parser.specification - ) - extra_deserializers = { - json_type: _json.loads for json_type in json_types_from_spec - } - config = Config(extra_media_type_deserializers=extra_deserializers) - openapi = OpenAPI(spec=validation_spec, config=config) - response_validator = openapi.validate_response - - PARSER_CACHE[self._source] = ( - parser, - validation_spec, - response_validator, - ) - - return parser, validation_spec, response_validator + url: str, + method: str, + status_code: int, + request_data: RequestData, + ) -> dict[str, JSON]: + """ + Return `json_data` based on the `dto` on the `request_data` that will cause + the provided `status_code` for the `method` operation on the `url`. - except ResolutionError as exception: - BuiltIn().fatal_error( - f"ResolutionError while trying to load openapi spec: {exception}" - ) - except ValidationError as exception: - BuiltIn().fatal_error( - f"ValidationError while trying to load openapi spec: {exception}" - ) + > Note: applicable UniquePropertyValueConstraint and IdReference Relations are + considered before changes to `json_data` are made. 
+ """ + return di.get_invalid_json_data( + url=url, + method=method, + status_code=status_code, + request_data=request_data, + invalid_property_default_response=self.invalid_property_default_response, + ) - def validate_response_vs_spec( - self, request: RequestsOpenAPIRequest, response: RequestsOpenAPIResponse - ) -> None: + @keyword + def get_invalidated_parameters( + self, + status_code: int, + request_data: RequestData, + ) -> tuple[dict[str, JSON], dict[str, str]]: """ - Validate the reponse for a given request against the OpenAPI Spec that is - loaded during library initialization. + Returns a version of `params, headers` as present on `request_data` that has + been modified to cause the provided `status_code`. """ - self.response_validator(request=request, response=response) + return di.get_invalidated_parameters( + status_code=status_code, + request_data=request_data, + invalid_property_default_response=self.invalid_property_default_response, + ) - def read_paths(self) -> dict[str, Any]: - return self.openapi_spec["paths"] + @keyword + def get_json_data_with_conflict( + self, url: str, method: str, dto: Dto, conflict_status_code: int + ) -> dict[str, JSON]: + """ + Return `json_data` based on the `UniquePropertyValueConstraint` that must be + returned by the `get_relations` implementation on the `dto` for the given + `conflict_status_code`. + """ + return di.get_json_data_with_conflict( + url=url, + base_url=self.base_url, + method=method, + dto=dto, + conflict_status_code=conflict_status_code, + ) + # endregion + # region: path-related keywords @keyword - def get_valid_url(self, endpoint: str, method: str) -> str: + def get_valid_url(self, path: str) -> str: """ - This keyword returns a valid url for the given `endpoint` and `method`. + This keyword returns a valid url for the given `path`. 
- If the `endpoint` contains path parameters the Get Valid Id For Endpoint + If the `path` contains path parameters the Get Valid Id For Path keyword will be executed to retrieve valid ids for the path parameters. > Note: if valid ids cannot be retrieved within the scope of the API, the `PathPropertiesConstraint` Relation can be used. More information can be found - [https://marketsquare.github.io/robotframework-openapi-libcore/advanced_use.html | here]. + [https://marketsquare.github.io/robotframework-openapitools/advanced_use.html | here]. """ - method = method.lower() - try: - # endpoint can be partially resolved or provided by a PathPropertiesConstraint - parametrized_endpoint = self.get_parametrized_endpoint(endpoint=endpoint) - _ = self.openapi_spec["paths"][parametrized_endpoint] - except KeyError: - raise ValueError( - f"{endpoint} not found in paths section of the OpenAPI document." - ) from None - dto_class = self.get_dto_class(endpoint=endpoint, method=method) - relations = dto_class.get_relations() - paths = [p.path for p in relations if isinstance(p, PathPropertiesConstraint)] - if paths: - url = f"{self.base_url}{choice(paths)}" - return url - endpoint_parts = list(endpoint.split("/")) - for index, part in enumerate(endpoint_parts): - if part.startswith("{") and part.endswith("}"): - type_endpoint_parts = endpoint_parts[slice(index)] - type_endpoint = "/".join(type_endpoint_parts) - existing_id: str | int | float = run_keyword( - "get_valid_id_for_endpoint", type_endpoint, method - ) - endpoint_parts[index] = str(existing_id) - resolved_endpoint = "/".join(endpoint_parts) - url = f"{self.base_url}{resolved_endpoint}" - return url + return pf.get_valid_url( + path=path, + base_url=self.base_url, + get_dto_class=self.get_dto_class, + openapi_spec=self.openapi_spec, + ) @keyword - def get_valid_id_for_endpoint( - self, endpoint: str, method: str - ) -> str | int | float: + def get_valid_id_for_path(self, path: str) -> str | int | float: """ - Support 
keyword that returns the `id` for an existing resource at `endpoint`. + Support keyword that returns the `id` for an existing resource at `path`. To prevent resource conflicts with other test cases, a new resource is created - (POST) if possible. + (by a POST operation) if possible. """ - - def dummy_transformer(valid_id: str | int | float) -> str | int | float: - return valid_id - - method = method.lower() - url: str = run_keyword("get_valid_url", endpoint, method) - # Try to create a new resource to prevent conflicts caused by - # operations performed on the same resource by other test cases - request_data = self.get_request_data(endpoint=endpoint, method="post") - - response: Response = run_keyword( - "authorized_request", - url, - "post", - request_data.get_required_params(), - request_data.get_required_headers(), - request_data.get_required_properties_dict(), + return pf.get_valid_id_for_path( + path=path, get_id_property_name=self.get_id_property_name ) - # determine the id property name for this path and whether or not a transformer is used - mapping = self.get_id_property_name(endpoint=endpoint) - if isinstance(mapping, str): - id_property = mapping - # set the transformer to a dummy callable that returns the original value so - # the transformer can be applied on any returned id - id_transformer = dummy_transformer - else: - id_property, id_transformer = mapping - - if not response.ok: - # If a new resource cannot be created using POST, try to retrieve a - # valid id using a GET request. 
- try: - valid_id = choice(run_keyword("get_ids_from_url", url)) - return id_transformer(valid_id) - except Exception as exception: - raise AssertionError( - f"Failed to get a valid id using GET on {url}" - ) from exception - - response_data = response.json() - if prepared_body := response.request.body: - if isinstance(prepared_body, bytes): - send_json = _json.loads(prepared_body.decode("UTF-8")) - else: - send_json = _json.loads(prepared_body) - else: - send_json = None - - # no support for retrieving an id from an array returned on a POST request - if isinstance(response_data, list): - raise NotImplementedError( - f"Unexpected response body for POST request: expected an object but " - f"received an array ({response_data})" - ) - - # POST on /resource_type/{id}/array_item/ will return the updated {id} resource - # instead of a newly created resource. In this case, the send_json must be - # in the array of the 'array_item' property on {id} - send_path: str = response.request.path_url - response_href: str = response_data.get("href", "") - if response_href and (send_path not in response_href) and send_json: - try: - property_to_check = send_path.replace(response_href, "")[1:] - item_list: list[dict[str, Any]] = response_data[property_to_check] - # Use the (mandatory) id to get the POSTed resource from the list - [valid_id] = [ - item[id_property] - for item in item_list - if item[id_property] == send_json[id_property] - ] - except Exception as exception: - raise AssertionError( - f"Failed to get a valid id from {response_href}" - ) from exception - else: - try: - valid_id = response_data[id_property] - except KeyError: - raise AssertionError( - f"Failed to get a valid id from {response_data}" - ) from None - return id_transformer(valid_id) - @keyword - def get_ids_from_url(self, url: str) -> list[str]: + def get_parameterized_path_from_url(self, url: str) -> str: """ - Perform a GET request on the `url` and return the list of resource - `ids` from the response. 
+ Return the path as found in the `paths` section based on the given `url`. """ - endpoint = self.get_parameterized_endpoint_from_url(url) - request_data = self.get_request_data(endpoint=endpoint, method="get") - response = run_keyword( - "authorized_request", - url, - "get", - request_data.get_required_params(), - request_data.get_required_headers(), - ) - response.raise_for_status() - response_data: dict[str, Any] | list[dict[str, Any]] = response.json() - - # determine the property name to use - mapping = self.get_id_property_name(endpoint=endpoint) - if isinstance(mapping, str): - id_property = mapping - else: - id_property, _ = mapping - - if isinstance(response_data, list): - valid_ids: list[str] = [item[id_property] for item in response_data] - return valid_ids - # if the response is an object (dict), check if it's hal+json - if embedded := response_data.get("_embedded"): - # there should be 1 item in the dict that has a value that's a list - for value in embedded.values(): - if isinstance(value, list): - valid_ids = [item[id_property] for item in value] - return valid_ids - if (valid_id := response_data.get(id_property)) is not None: - return [valid_id] - valid_ids = [item[id_property] for item in response_data["items"]] - return valid_ids - - @keyword - def get_request_data(self, endpoint: str, method: str) -> RequestData: - """Return an object with valid request data for body, headers and query params.""" - method = method.lower() - dto_cls_name = self._get_dto_cls_name(endpoint=endpoint, method=method) - # The endpoint can contain already resolved Ids that have to be matched - # against the parametrized endpoints in the paths section. - spec_endpoint = self.get_parametrized_endpoint(endpoint) - dto_class = self.get_dto_class(endpoint=spec_endpoint, method=method) - try: - method_spec = self.openapi_spec["paths"][spec_endpoint][method] - except KeyError: - logger.info( - f"method '{method}' not supported on '{spec_endpoint}, using empty spec." 
- ) - method_spec = {} - - parameters, params, headers = self.get_request_parameters( - dto_class=dto_class, method_spec=method_spec - ) - if (body_spec := method_spec.get("requestBody", None)) is None: - if dto_class == DefaultDto: - dto_instance: Dto = DefaultDto() - else: - dto_class = make_dataclass( - cls_name=method_spec.get("operationId", dto_cls_name), - fields=[], - bases=(dto_class,), - ) - dto_instance = dto_class() - return RequestData( - dto=dto_instance, - parameters=parameters, - params=params, - headers=headers, - has_body=False, - ) - content_schema = resolve_schema(self.get_content_schema(body_spec)) - headers.update({"content-type": self.get_content_type(body_spec)}) - dto_data = self.get_json_data_for_dto_class( - schema=content_schema, - dto_class=dto_class, - operation_id=method_spec.get("operationId", ""), - ) - if dto_data is None: - dto_instance = DefaultDto() - else: - fields = self.get_fields_from_dto_data(content_schema, dto_data) - dto_class = make_dataclass( - cls_name=method_spec.get("operationId", dto_cls_name), - fields=fields, - bases=(dto_class,), - ) - dto_data = {get_safe_key(key): value for key, value in dto_data.items()} - dto_instance = dto_class(**dto_data) - return RequestData( - dto=dto_instance, - dto_schema=content_schema, - parameters=parameters, - params=params, - headers=headers, - ) - - @staticmethod - def _get_dto_cls_name(endpoint: str, method: str) -> str: - method = method.capitalize() - path = endpoint.translate({ord(i): None for i in "{}"}) + path = url.replace(self.base_url, "") path_parts = path.split("/") - path_parts = [p.capitalize() for p in path_parts] - result = "".join([method, *path_parts]) - return result - - @staticmethod - def get_fields_from_dto_data( - content_schema: dict[str, Any], dto_data: dict[str, Any] - ) -> list[str | tuple[str, type[Any]] | tuple[str, type[Any], Field[Any]]]: - """Get a dataclasses fields list based on the content_schema and dto_data.""" - fields: list[ - str | 
tuple[str, type[Any]] | tuple[str, type[Any], Field[Any]] - ] = [] - for key, value in dto_data.items(): - required_properties = content_schema.get("required", []) - safe_key = get_safe_key(key) - metadata = {"original_property_name": key} - if key in required_properties: - # The fields list is used to create a dataclass, so non-default fields - # must go before fields with a default - fields.insert(0, (safe_key, type(value), field(metadata=metadata))) - else: - fields.append((safe_key, type(value), field(default=None, metadata=metadata))) # type: ignore[arg-type] - return fields - - def get_request_parameters( - self, dto_class: Dto | type[Dto], method_spec: dict[str, Any] - ) -> tuple[list[dict[str, Any]], dict[str, Any], dict[str, str]]: - """Get the methods parameter spec and params and headers with valid data.""" - parameters = method_spec.get("parameters", []) - parameter_relations = dto_class.get_parameter_relations() - query_params = [p for p in parameters if p.get("in") == "query"] - header_params = [p for p in parameters if p.get("in") == "header"] - params = self.get_parameter_data(query_params, parameter_relations) - headers = self.get_parameter_data(header_params, parameter_relations) - return parameters, params, headers - - @classmethod - def get_content_schema(cls, body_spec: dict[str, Any]) -> dict[str, Any]: - """Get the content schema from the requestBody spec.""" - content_type = cls.get_content_type(body_spec) - content_schema = body_spec["content"][content_type]["schema"] - return resolve_schema(content_schema) - - @staticmethod - def get_content_type(body_spec: dict[str, Any]) -> str: - """Get and validate the first supported content type from the requested body spec - - Should be application/json like content type, - e.g "application/json;charset=utf-8" or "application/merge-patch+json" - """ - content_types: list[str] = body_spec["content"].keys() - json_regex = r"application/([a-z\-]+\+)?json(;\s?charset=(.+))?" 
- for content_type in content_types: - if re.search(json_regex, content_type): - return content_type - - # At present no supported for other types. - raise NotImplementedError( - f"Only content types like 'application/json' are supported. " - f"Content types definded in the spec are '{content_types}'." + # first part will be '' since a path starts with / + path_parts.pop(0) + parameterized_path = pf.get_parametrized_path( + path=path, openapi_spec=self.openapi_spec ) - - def get_parametrized_endpoint(self, endpoint: str) -> str: - """ - Get the parametrized endpoint as found in the `paths` section of the openapi - document from a (partially) resolved endpoint. - """ - - def match_parts(parts: list[str], spec_parts: list[str]) -> bool: - for part, spec_part in zip_longest(parts, spec_parts, fillvalue="Filler"): - if part == "Filler" or spec_part == "Filler": - return False - if part != spec_part and not spec_part.startswith("{"): - return False - return True - - endpoint_parts = endpoint.split("/") - # if the last part is empty, the path has a trailing `/` that - # should be ignored during matching - if endpoint_parts[-1] == "": - _ = endpoint_parts.pop(-1) - - spec_endpoints: list[str] = {**self.openapi_spec}["paths"].keys() - - candidates: list[str] = [] - - for spec_endpoint in spec_endpoints: - spec_endpoint_parts = spec_endpoint.split("/") - # ignore trailing `/` the same way as for endpoint_parts - if spec_endpoint_parts[-1] == "": - _ = spec_endpoint_parts.pop(-1) - if match_parts(endpoint_parts, spec_endpoint_parts): - candidates.append(spec_endpoint) - - if not candidates: - raise ValueError( - f"{endpoint} not found in paths section of the OpenAPI document." - ) - - if len(candidates) == 1: - return candidates[0] - # Multiple matches can happen in APIs with overloaded endpoints, e.g. 
- # /users/me - # /users/${user_id} - # In this case, find the closest (or exact) match - exact_match = [c for c in candidates if c == endpoint] - if exact_match: - return exact_match[0] - # TODO: Implement a decision mechanism when real-world examples become available - # In the face of ambiguity, refuse the temptation to guess. - raise ValueError(f"{endpoint} matched to multiple paths: {candidates}") - - @staticmethod - def get_parameter_data( - parameters: list[dict[str, Any]], - parameter_relations: list[ResourceRelation], - ) -> dict[str, str]: - """Generate a valid list of key-value pairs for all parameters.""" - result: dict[str, str] = {} - value: Any = None - for parameter in parameters: - parameter_name = parameter["name"] - parameter_schema = resolve_schema(parameter["schema"]) - relations = [ - r for r in parameter_relations if r.property_name == parameter_name - ] - if constrained_values := [ - r.values for r in relations if isinstance(r, PropertyValueConstraint) - ]: - value = choice(*constrained_values) - if value is IGNORE: - continue - result[parameter_name] = value - continue - value = value_utils.get_valid_value(parameter_schema) - result[parameter_name] = value - return result + return parameterized_path @keyword - def get_json_data_for_dto_class( - self, - schema: dict[str, Any], - dto_class: Dto | type[Dto], - operation_id: str = "", - ) -> dict[str, Any]: + def get_ids_from_url(self, url: str) -> list[str]: """ - Generate a valid (json-compatible) dict for all the `dto_class` properties. + Perform a GET request on the `url` and return the list of resource + `ids` from the response. 
""" - - def get_constrained_values(property_name: str) -> list[Any]: - relations = dto_class.get_relations() - values_list = [ - c.values - for c in relations - if ( - isinstance(c, PropertyValueConstraint) - and c.property_name == property_name - ) - ] - # values should be empty or contain 1 list of allowed values - return values_list.pop() if values_list else [] - - def get_dependent_id( - property_name: str, operation_id: str - ) -> str | int | float | None: - relations = dto_class.get_relations() - # multiple get paths are possible based on the operation being performed - id_get_paths = [ - (d.get_path, d.operation_id) - for d in relations - if (isinstance(d, IdDependency) and d.property_name == property_name) - ] - if not id_get_paths: - return None - if len(id_get_paths) == 1: - id_get_path, _ = id_get_paths.pop() - else: - try: - [id_get_path] = [ - path - for path, operation in id_get_paths - if operation == operation_id - ] - # There could be multiple get_paths, but not one for the current operation - except ValueError: - return None - valid_id = self.get_valid_id_for_endpoint( - endpoint=id_get_path, method="get" - ) - logger.debug(f"get_dependent_id for {id_get_path} returned {valid_id}") - return valid_id - - json_data: dict[str, Any] = {} - - property_names = [] - for property_name in schema.get("properties", []): - if constrained_values := get_constrained_values(property_name): - # do not add properties that are configured to be ignored - if IGNORE in constrained_values: - continue - property_names.append(property_name) - - max_properties = schema.get("maxProperties") - if max_properties and len(property_names) > max_properties: - required_properties = schema.get("required", []) - number_of_optional_properties = max_properties - len(required_properties) - optional_properties = [ - name for name in property_names if name not in required_properties - ] - selected_optional_properties = sample( - optional_properties, number_of_optional_properties - ) - 
property_names = required_properties + selected_optional_properties - - for property_name in property_names: - properties_schema = schema["properties"][property_name] - - property_type = properties_schema.get("type") - if property_type is None: - property_types = properties_schema.get("types") - if property_types is None: - if properties_schema.get("properties") is not None: - nested_data = self.get_json_data_for_dto_class( - schema=properties_schema, - dto_class=DefaultDto, - ) - json_data[property_name] = nested_data - continue - selected_type_schema = choice(property_types) - property_type = selected_type_schema["type"] - if properties_schema.get("readOnly", False): - continue - if constrained_values := get_constrained_values(property_name): - json_data[property_name] = choice(constrained_values) - continue - if ( - dependent_id := get_dependent_id( - property_name=property_name, operation_id=operation_id - ) - ) is not None: - json_data[property_name] = dependent_id - continue - if property_type == "object": - object_data = self.get_json_data_for_dto_class( - schema=properties_schema, - dto_class=DefaultDto, - operation_id="", - ) - json_data[property_name] = object_data - continue - if property_type == "array": - array_data = self.get_json_data_for_dto_class( - schema=properties_schema["items"], - dto_class=DefaultDto, - operation_id=operation_id, - ) - json_data[property_name] = [array_data] - continue - json_data[property_name] = value_utils.get_valid_value(properties_schema) - - return json_data + return pf.get_ids_from_url( + url=url, get_id_property_name=self.get_id_property_name + ) @keyword def get_invalidated_url( self, valid_url: str, path: str = "", - method: str = "", expected_status_code: int = 404, ) -> str: """ Return an url with all the path parameters in the `valid_url` replaced by a - random UUID if no PathPropertiesConstraint is mapped for the `path`, `method` - and `expected_status_code`. 
+ random UUID if no PathPropertiesConstraint is mapped for the `"get"` operation + on the mapped `path` and `expected_status_code`. If a PathPropertiesConstraint is mapped, the `invalid_value` is returned. Raises ValueError if the valid_url cannot be invalidated. """ - dto_class = self.get_dto_class(endpoint=path, method=method) - relations = dto_class.get_relations() - paths = [ - p.invalid_value - for p in relations - if isinstance(p, PathPropertiesConstraint) - and p.invalid_value_error_code == expected_status_code - ] - if paths: - url = f"{self.base_url}{choice(paths)}" - return url - parameterized_endpoint = self.get_parameterized_endpoint_from_url(valid_url) - parameterized_url = self.base_url + parameterized_endpoint - valid_url_parts = list(reversed(valid_url.split("/"))) - parameterized_parts = reversed(parameterized_url.split("/")) - for index, (parameterized_part, _) in enumerate( - zip(parameterized_parts, valid_url_parts) - ): - if parameterized_part.startswith("{") and parameterized_part.endswith("}"): - valid_url_parts[index] = uuid4().hex - valid_url_parts.reverse() - invalid_url = "/".join(valid_url_parts) - return invalid_url - raise ValueError(f"{parameterized_endpoint} could not be invalidated.") - - @keyword - def get_parameterized_endpoint_from_url(self, url: str) -> str: - """ - Return the endpoint as found in the `paths` section based on the given `url`. - """ - endpoint = url.replace(self.base_url, "") - endpoint_parts = endpoint.split("/") - # first part will be '' since an endpoint starts with / - endpoint_parts.pop(0) - parameterized_endpoint = self.get_parametrized_endpoint(endpoint=endpoint) - return parameterized_endpoint - - @keyword - def get_invalid_json_data( - self, - url: str, - method: str, - status_code: int, - request_data: RequestData, - ) -> dict[str, Any]: - """ - Return `json_data` based on the `dto` on the `request_data` that will cause - the provided `status_code` for the `method` operation on the `url`. 
- - > Note: applicable UniquePropertyValueConstraint and IdReference Relations are - considered before changes to `json_data` are made. - """ - method = method.lower() - data_relations = request_data.dto.get_relations_for_error_code(status_code) - data_relations = [ - r for r in data_relations if not isinstance(r, PathPropertiesConstraint) - ] - if not data_relations: - if not request_data.dto_schema: - raise ValueError( - "Failed to invalidate: no data_relations and empty schema." - ) - json_data = request_data.dto.get_invalidated_data( - schema=request_data.dto_schema, - status_code=status_code, - invalid_property_default_code=self.invalid_property_default_response, - ) - return json_data - resource_relation = choice(data_relations) - if isinstance(resource_relation, UniquePropertyValueConstraint): - json_data = run_keyword( - "get_json_data_with_conflict", - url, - method, - request_data.dto, - status_code, - ) - elif isinstance(resource_relation, IdReference): - run_keyword("ensure_in_use", url, resource_relation) - json_data = request_data.dto.as_dict() - else: - json_data = request_data.dto.get_invalidated_data( - schema=request_data.dto_schema, - status_code=status_code, - invalid_property_default_code=self.invalid_property_default_response, - ) - return json_data - - @keyword - def get_invalidated_parameters( - self, - status_code: int, - request_data: RequestData, - ) -> tuple[dict[str, Any], dict[str, str]]: - """ - Returns a version of `params, headers` as present on `request_data` that has - been modified to cause the provided `status_code`. 
- """ - if not request_data.parameters: - raise ValueError("No params or headers to invalidate.") - - # ensure the status_code can be triggered - relations = request_data.dto.get_parameter_relations_for_error_code(status_code) - relations_for_status_code = [ - r - for r in relations - if isinstance(r, PropertyValueConstraint) - and ( - r.error_code == status_code or r.invalid_value_error_code == status_code - ) - ] - parameters_to_ignore = { - r.property_name - for r in relations_for_status_code - if r.invalid_value_error_code == status_code and r.invalid_value == IGNORE - } - relation_property_names = {r.property_name for r in relations_for_status_code} - if not relation_property_names: - if status_code != self.invalid_property_default_response: - raise ValueError( - f"No relations to cause status_code {status_code} found." - ) - - # ensure we're not modifying mutable properties - params = deepcopy(request_data.params) - headers = deepcopy(request_data.headers) - - if status_code == self.invalid_property_default_response: - # take the params and headers that can be invalidated based on data type - # and expand the set with properties that can be invalided by relations - parameter_names = set(request_data.params_that_can_be_invalidated).union( - request_data.headers_that_can_be_invalidated - ) - parameter_names.update(relation_property_names) - if not parameter_names: - raise ValueError( - "None of the query parameters and headers can be invalidated." 
- ) - else: - # non-default status_codes can only be the result of a Relation - parameter_names = relation_property_names - - # Dto mappings may contain generic mappings for properties that are not present - # in this specific schema - request_data_parameter_names = [p.get("name") for p in request_data.parameters] - additional_relation_property_names = { - n for n in relation_property_names if n not in request_data_parameter_names - } - if additional_relation_property_names: - logger.warning( - f"get_parameter_relations_for_error_code yielded properties that are " - f"not defined in the schema: {additional_relation_property_names}\n" - f"These properties will be ignored for parameter invalidation." - ) - parameter_names = parameter_names - additional_relation_property_names - - if not parameter_names: - raise ValueError( - f"No parameter can be changed to cause status_code {status_code}." - ) - - parameter_names = parameter_names - parameters_to_ignore - parameter_to_invalidate = choice(tuple(parameter_names)) - - # check for invalid parameters in the provided request_data - try: - [parameter_data] = [ - data - for data in request_data.parameters - if data["name"] == parameter_to_invalidate - ] - except Exception: - raise ValueError( - f"{parameter_to_invalidate} not found in provided parameters." 
- ) from None - - # get the invalid_value for the chosen parameter - try: - [invalid_value_for_error_code] = [ - r.invalid_value - for r in relations_for_status_code - if r.property_name == parameter_to_invalidate - and r.invalid_value_error_code == status_code - ] - except ValueError: - invalid_value_for_error_code = NOT_SET - - # get the constraint values if available for the chosen parameter - try: - [values_from_constraint] = [ - r.values - for r in relations_for_status_code - if r.property_name == parameter_to_invalidate - ] - except ValueError: - values_from_constraint = [] - - # if the parameter was not provided, add it to params / headers - params, headers = self.ensure_parameter_in_parameters( - parameter_to_invalidate=parameter_to_invalidate, - params=params, - headers=headers, - parameter_data=parameter_data, - values_from_constraint=values_from_constraint, + return pi.get_invalidated_url( + valid_url=valid_url, + path=path, + base_url=self.base_url, + get_dto_class=self.get_dto_class, + expected_status_code=expected_status_code, ) - # determine the invalid_value - if invalid_value_for_error_code != NOT_SET: - invalid_value = invalid_value_for_error_code - else: - if parameter_to_invalidate in params.keys(): - valid_value = params[parameter_to_invalidate] - else: - valid_value = headers[parameter_to_invalidate] - - value_schema = resolve_schema(parameter_data["schema"]) - invalid_value = value_utils.get_invalid_value( - value_schema=value_schema, - current_value=valid_value, - values_from_constraint=values_from_constraint, - ) - logger.debug(f"{parameter_to_invalidate} changed to {invalid_value}") - - # update the params / headers and return - if parameter_to_invalidate in params.keys(): - params[parameter_to_invalidate] = invalid_value - else: - headers[parameter_to_invalidate] = invalid_value - return params, headers - - @staticmethod - def ensure_parameter_in_parameters( - parameter_to_invalidate: str, - params: dict[str, Any], - headers: dict[str, 
str], - parameter_data: dict[str, Any], - values_from_constraint: list[Any], - ) -> tuple[dict[str, Any], dict[str, str]]: - """ - Returns the params, headers tuple with parameter_to_invalidate with a valid - value to params or headers if not originally present. - """ - if ( - parameter_to_invalidate not in params.keys() - and parameter_to_invalidate not in headers.keys() - ): - if values_from_constraint: - valid_value = choice(values_from_constraint) - else: - parameter_schema = resolve_schema(parameter_data["schema"]) - valid_value = value_utils.get_valid_value(parameter_schema) - if ( - parameter_data["in"] == "query" - and parameter_to_invalidate not in params.keys() - ): - params[parameter_to_invalidate] = valid_value - if ( - parameter_data["in"] == "header" - and parameter_to_invalidate not in headers.keys() - ): - headers[parameter_to_invalidate] = valid_value - return params, headers - + # endregion + # region: resource relations keywords @keyword def ensure_in_use(self, url: str, resource_relation: IdReference) -> None: """ Ensure that the (right-most) `id` of the resource referenced by the `url` is used by the resource defined by the `resource_relation`. 
""" - resource_id = "" - - endpoint = url.replace(self.base_url, "") - endpoint_parts = endpoint.split("/") - parameterized_endpoint = self.get_parametrized_endpoint(endpoint=endpoint) - parameterized_endpoint_parts = parameterized_endpoint.split("/") - for part, param_part in zip( - reversed(endpoint_parts), reversed(parameterized_endpoint_parts) - ): - if param_part.endswith("}"): - resource_id = part - break - if not resource_id: - raise ValueError(f"The provided url ({url}) does not contain an id.") - request_data = self.get_request_data( - method="post", endpoint=resource_relation.post_path - ) - json_data = request_data.dto.as_dict() - json_data[resource_relation.property_name] = resource_id - post_url: str = run_keyword( - "get_valid_url", - resource_relation.post_path, - "post", - ) - response: Response = run_keyword( - "authorized_request", - post_url, - "post", - request_data.params, - request_data.headers, - json_data, - ) - if not response.ok: - logger.debug( - f"POST on {post_url} with json {json_data} failed: {response.json()}" - ) - response.raise_for_status() - - @keyword - def get_json_data_with_conflict( - self, url: str, method: str, dto: Dto, conflict_status_code: int - ) -> dict[str, Any]: - """ - Return `json_data` based on the `UniquePropertyValueConstraint` that must be - returned by the `get_relations` implementation on the `dto` for the given - `conflict_status_code`. 
- """ - method = method.lower() - json_data = dto.as_dict() - unique_property_value_constraints = [ - r - for r in dto.get_relations() - if isinstance(r, UniquePropertyValueConstraint) - ] - for relation in unique_property_value_constraints: - json_data[relation.property_name] = relation.value - # create a new resource that the original request will conflict with - if method in ["patch", "put"]: - post_url_parts = url.split("/")[:-1] - post_url = "/".join(post_url_parts) - # the PATCH or PUT may use a different dto than required for POST - # so a valid POST dto must be constructed - endpoint = post_url.replace(self.base_url, "") - request_data = self.get_request_data(endpoint=endpoint, method="post") - post_json = request_data.dto.as_dict() - for key in post_json.keys(): - if key in json_data: - post_json[key] = json_data.get(key) - else: - post_url = url - post_json = json_data - endpoint = post_url.replace(self.base_url, "") - request_data = self.get_request_data(endpoint=endpoint, method="post") - response: Response = run_keyword( - "authorized_request", - post_url, - "post", - request_data.params, - request_data.headers, - post_json, - ) - # conflicting resource may already exist - assert ( - response.ok or response.status_code == conflict_status_code - ), f"get_json_data_with_conflict received {response.status_code}: {response.json()}" - return json_data - raise ValueError( - f"No UniquePropertyValueConstraint in the get_relations list on dto {dto}." 
+ rr.ensure_in_use( + url=url, + base_url=self.base_url, + openapi_spec=self.openapi_spec, + resource_relation=resource_relation, ) + # endregion + # region: request keywords @keyword def authorized_request( # pylint: disable=too-many-arguments self, url: str, method: str, - params: dict[str, Any] = {}, - headers: dict[str, str] = {}, - json_data: JSON = {}, + params: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, + json_data: JSON = None, data: Any = None, files: Any = None, ) -> Response: @@ -1734,7 +632,7 @@ def authorized_request( # pylint: disable=too-many-arguments > Note: provided username / password or auth objects take precedence over token based security """ - headers = headers if headers else {} + headers = deepcopy(headers) if headers else {} if self.extra_headers: headers.update(self.extra_headers) # if both an auth object and a token are available, auth takes precedence @@ -1759,90 +657,67 @@ def authorized_request( # pylint: disable=too-many-arguments logger.debug(f"Response text: {response.text}") return response + # endregion + # region: validation keywords @keyword def perform_validated_request( self, path: str, status_code: int, request_values: RequestValues, - original_data: dict[str, Any] = {}, + original_data: Mapping[str, object] = default_any_mapping, ) -> None: """ This keyword first calls the Authorized Request keyword, then the Validate Response keyword and finally validates, for `DELETE` operations, whether the target resource was indeed deleted (OK response) or not (error responses). 
""" - response = run_keyword( - "authorized_request", - request_values.url, - request_values.method, - request_values.params, - request_values.headers, - request_values.json_data, + val.perform_validated_request( + path=path, + status_code=status_code, + request_values=request_values, + original_data=original_data, ) - if response.status_code != status_code: - try: - response_json = response.json() - except Exception as _: # pylint: disable=broad-except - logger.info( - f"Failed to get json content from response. " - f"Response text was: {response.text}" - ) - response_json = {} - if not response.ok: - if description := response_json.get("detail"): - pass - else: - description = response_json.get( - "message", "response contains no message or detail." - ) - logger.error(f"{response.reason}: {description}") - - logger.debug( - f"\nSend: {_json.dumps(request_values.json_data, indent=4, sort_keys=True)}" - f"\nGot: {_json.dumps(response_json, indent=4, sort_keys=True)}" - ) - raise AssertionError( - f"Response status_code {response.status_code} was not {status_code}" - ) - run_keyword("validate_response", path, response, original_data) + @keyword + def validate_response_using_validator( + self, request: RequestsOpenAPIRequest, response: RequestsOpenAPIResponse + ) -> None: + """ + Validate the `response` for a given `request` against the OpenAPI Spec that is + loaded during library initialization. + """ + val.validate_response_using_validator( + request=request, + response=response, + response_validator=self.response_validator, + ) - if request_values.method == "DELETE": - get_request_data = self.get_request_data(endpoint=path, method="GET") - get_params = get_request_data.params - get_headers = get_request_data.headers - get_response = run_keyword( - "authorized_request", request_values.url, "GET", get_params, get_headers - ) - if response.ok: - if get_response.ok: - raise AssertionError( - f"Resource still exists after deletion. 
Url was {request_values.url}" - ) - # if the path supports GET, 404 is expected, if not 405 is expected - if get_response.status_code not in [404, 405]: - logger.warning( - f"Unexpected response after deleting resource: Status_code " - f"{get_response.status_code} was received after trying to get {request_values.url} " - f"after sucessfully deleting it." - ) - elif not get_response.ok: - raise AssertionError( - f"Resource could not be retrieved after failed deletion. " - f"Url was {request_values.url}, status_code was {get_response.status_code}" - ) + @keyword + def assert_href_to_resource_is_valid( + self, href: str, referenced_resource: dict[str, JSON] + ) -> None: + """ + Attempt to GET the resource referenced by the `href` and validate it's equal + to the provided `referenced_resource` object / dictionary. + """ + val.assert_href_to_resource_is_valid( + href=href, + origin=self.origin, + base_url=self.base_url, + referenced_resource=referenced_resource, + ) @keyword def validate_response( self, path: str, response: Response, - original_data: dict[str, Any] = {}, + original_data: Mapping[str, object] = default_any_mapping, ) -> None: """ Validate the `response` by performing the following validations: - - validate the `response` against the openapi schema for the `endpoint` + - validate the `response` against the openapi schema for the `path` - validate that the response does not contain extra properties - validate that a href, if present, refers to the correct resource - validate that the value for a property that is in the response is equal to @@ -1850,278 +725,36 @@ def validate_response( - validate that no `original_data` is preserved when performing a PUT operation - validate that a PATCH operation only updates the provided properties """ - if response.status_code == 204: - assert not response.content - return None - - try: - self._validate_response_against_spec(response) - except OpenAPIError as exception: - raise Failure(f"Response did not pass schema 
validation: {exception}") - - request_method = response.request.method - if request_method is None: - logger.warning( - f"Could not validate response for path {path}; no method found " - f"on the request property of the provided response." - ) - return None - - response_spec = self._get_response_spec( + val.validate_response( path=path, - method=request_method, - status_code=response.status_code, - ) - - content_type_from_response = response.headers.get("Content-Type", "unknown") - mime_type_from_response, _, _ = content_type_from_response.partition(";") - - if not response_spec.get("content"): - logger.warning( - "The response cannot be validated: 'content' not specified in the OAS." - ) - return None - - # multiple content types can be specified in the OAS - content_types = list(response_spec["content"].keys()) - supported_types = [ - ct for ct in content_types if ct.partition(";")[0].endswith("json") - ] - if not supported_types: - raise NotImplementedError( - f"The content_types '{content_types}' are not supported. " - f"Only json types are currently supported." - ) - content_type = supported_types[0] - mime_type = content_type.partition(";")[0] - - if mime_type != mime_type_from_response: - raise ValueError( - f"Content-Type '{content_type_from_response}' of the response " - f"does not match '{mime_type}' as specified in the OpenAPI document." 
- ) - - json_response = response.json() - response_schema = resolve_schema( - response_spec["content"][content_type]["schema"] + response=response, + response_validator=self.response_validator, + server_validation_warning_logged=self._server_validation_warning_logged, + disable_server_validation=self.disable_server_validation, + invalid_property_default_response=self.invalid_property_default_response, + response_validation=self.response_validation, + openapi_spec=self.openapi_spec, + original_data=original_data, ) - response_types = response_schema.get("types") - if response_types: - # In case of oneOf / anyOf there can be multiple possible response types - # which makes generic validation too complex - return None - response_type = response_schema.get("type", "undefined") - if response_type not in ["object", "array"]: - self._validate_value_type(value=json_response, expected_type=response_type) - return None - - if list_item_schema := response_schema.get("items"): - if not isinstance(json_response, list): - raise AssertionError( - f"Response schema violation: the schema specifies an array as " - f"response type but the response was of type {type(json_response)}." 
- ) - type_of_list_items = list_item_schema.get("type") - if type_of_list_items == "object": - for resource in json_response: - run_keyword( - "validate_resource_properties", resource, list_item_schema - ) - else: - for item in json_response: - self._validate_value_type( - value=item, expected_type=type_of_list_items - ) - # no further validation; value validation of individual resources should - # be performed on the endpoints for the specific resource - return None - - run_keyword("validate_resource_properties", json_response, response_schema) - # ensure the href is valid if present in the response - if href := json_response.get("href"): - self._assert_href_is_valid(href, json_response) - # every property that was sucessfully send and that is in the response - # schema must have the value that was send - if response.ok and response.request.method in ["POST", "PUT", "PATCH"]: - run_keyword("validate_send_response", response, original_data) - return None - - def _assert_href_is_valid(self, href: str, json_response: dict[str, Any]) -> None: - url = f"{self.origin}{href}" - path = url.replace(self.base_url, "") - request_data = self.get_request_data(endpoint=path, method="GET") - params = request_data.params - headers = request_data.headers - get_response = run_keyword("authorized_request", url, "GET", params, headers) - assert ( - get_response.json() == json_response - ), f"{get_response.json()} not equal to original {json_response}" - - def _validate_response_against_spec(self, response: Response) -> None: - try: - self.validate_response_vs_spec( - request=RequestsOpenAPIRequest(response.request), - response=RequestsOpenAPIResponse(response), - ) - except (ResponseValidationError, ServerNotFound) as exception: - errors: list[InvalidSchemaValue] = exception.__cause__ - validation_errors: list[ValidationError] = getattr( - errors, "schema_errors", [] - ) - if validation_errors: - error_message = "\n".join( - [ - f"{list(error.schema_path)}: {error.message}" - for 
error in validation_errors - ] - ) - else: - error_message = str(exception) - - if isinstance(exception, ServerNotFound): - if not self._server_validation_warning_logged: - logger.warning( - f"ServerNotFound was raised during response validation. " - f"Due to this, no full response validation will be performed." - f"\nThe original error was: {error_message}" - ) - self._server_validation_warning_logged = True - if self.disable_server_validation: - return - if response.status_code == self.invalid_property_default_response: - logger.debug(error_message) - return - if self.response_validation == ValidationLevel.STRICT: - logger.error(error_message) - raise exception - if self.response_validation == ValidationLevel.WARN: - logger.warning(error_message) - elif self.response_validation == ValidationLevel.INFO: - logger.info(error_message) - @keyword def validate_resource_properties( - self, resource: dict[str, Any], schema: dict[str, Any] + self, resource: dict[str, JSON], schema: dict[str, JSON] ) -> None: """ Validate that the `resource` does not contain any properties that are not defined in the `schema_properties`. """ - schema_properties = schema.get("properties", {}) - property_names_from_schema = set(schema_properties.keys()) - property_names_in_resource = set(resource.keys()) - - if property_names_from_schema != property_names_in_resource: - # The additionalProperties property determines whether properties with - # unspecified names are allowed. This property can be boolean or an object - # (dict) that specifies the type of any additional properties. 
- additional_properties = schema.get("additionalProperties", True) - if isinstance(additional_properties, bool): - allow_additional_properties = additional_properties - allowed_additional_properties_type = None - else: - allow_additional_properties = True - allowed_additional_properties_type = additional_properties["type"] - - extra_property_names = property_names_in_resource.difference( - property_names_from_schema - ) - if allow_additional_properties: - # If a type is defined for extra properties, validate them - if allowed_additional_properties_type: - extra_properties = { - key: value - for key, value in resource.items() - if key in extra_property_names - } - self._validate_type_of_extra_properties( - extra_properties=extra_properties, - expected_type=allowed_additional_properties_type, - ) - # If allowed, validation should not fail on extra properties - extra_property_names = set() - - required_properties = set(schema.get("required", [])) - missing_properties = required_properties.difference( - property_names_in_resource - ) - - if extra_property_names or missing_properties: - extra = ( - f"\n\tExtra properties in response: {extra_property_names}" - if extra_property_names - else "" - ) - missing = ( - f"\n\tRequired properties missing in response: {missing_properties}" - if missing_properties - else "" - ) - raise AssertionError( - f"Response schema violation: the response contains properties that are " - f"not specified in the schema or does not contain properties that are " - f"required according to the schema." 
- f"\n\tReceived in the response: {property_names_in_resource}" - f"\n\tDefined in the schema: {property_names_from_schema}" - f"{extra}{missing}" - ) - - @staticmethod - def _validate_value_type(value: Any, expected_type: str) -> None: - type_mapping = { - "string": str, - "number": float, - "integer": int, - "boolean": bool, - "array": list, - "object": dict, - } - python_type = type_mapping.get(expected_type, None) - if python_type is None: - raise AssertionError( - f"Validation of type '{expected_type}' is not supported." - ) - if not isinstance(value, python_type): - raise AssertionError(f"{value} is not of type {expected_type}") - - @staticmethod - def _validate_type_of_extra_properties( - extra_properties: dict[str, Any], expected_type: str - ) -> None: - type_mapping = { - "string": str, - "number": float, - "integer": int, - "boolean": bool, - "array": list, - "object": dict, - } - - python_type = type_mapping.get(expected_type, None) - if python_type is None: - logger.warning( - f"Additonal properties were not validated: " - f"type '{expected_type}' is not supported." - ) - return - - invalid_extra_properties = { - key: value - for key, value in extra_properties.items() - if not isinstance(value, python_type) - } - if invalid_extra_properties: - raise AssertionError( - f"Response contains invalid additionalProperties: " - f"{invalid_extra_properties} are not of type {expected_type}." - ) + val.validate_resource_properties( + resource=resource, + schema=schema, + ) @staticmethod @keyword def validate_send_response( response: Response, - original_data: dict[str, Any] = {}, + original_data: Mapping[str, object] = default_any_mapping, ) -> None: """ Validate that each property that was send that is in the response has the value @@ -2129,102 +762,130 @@ def validate_send_response( In case a PATCH request, validate that only the properties that were patched have changed and that other properties are still at their pre-patch values. 
""" + val.validate_send_response(response=response, original_data=original_data) + + # endregion + + @property + def origin(self) -> str: + return self._origin + + @property + def base_url(self) -> str: + return f"{self.origin}{self._base_path}" + + @cached_property + def validation_spec(self) -> Spec: + _, validation_spec, _ = self._load_specs_and_validator() + return validation_spec + + @property + def openapi_spec(self) -> dict[str, JSON]: + """Return a deepcopy of the parsed openapi document.""" + # protect the parsed openapi spec from being mutated by reference + return deepcopy(self._openapi_spec) + + @cached_property + def _openapi_spec(self) -> dict[str, JSON]: + parser, _, _ = self._load_specs_and_validator() + return parser.specification # type: ignore[no-any-return] + + @cached_property + def response_validator( + self, + ) -> ResponseValidatorType: + _, _, response_validator = self._load_specs_and_validator() + return response_validator + + def _get_json_types_from_spec(self, spec: dict[str, JSON]) -> set[str]: + json_types: set[str] = set(self._get_json_types(spec)) + return {json_type for json_type in json_types if json_type is not None} + + def _get_json_types(self, item: object) -> Generator[str, None, None]: + if isinstance(item, dict): + content_dict = item.get("content") + if content_dict is None: + for value in item.values(): + yield from self._get_json_types(value) - def validate_list_response( - send_list: list[Any], received_list: list[Any] - ) -> None: - for item in send_list: - if item not in received_list: - raise AssertionError( - f"Received value '{received_list}' does " - f"not contain '{item}' in the {response.request.method} request." 
- f"\nSend: {_json.dumps(send_json, indent=4, sort_keys=True)}" - f"\nGot: {_json.dumps(response_data, indent=4, sort_keys=True)}" - ) - - def validate_dict_response( - send_dict: dict[str, Any], received_dict: dict[str, Any] - ) -> None: - for send_property_name, send_property_value in send_dict.items(): - # sometimes, a property in the request is not in the response, e.g. a password - if send_property_name not in received_dict.keys(): - continue - if send_property_value is not None: - # if a None value is send, the target property should be cleared or - # reverted to the default value (which cannot be specified in the - # openapi document) - received_value = received_dict[send_property_name] - # In case of lists / arrays, the send values are often appended to - # existing data - if isinstance(received_value, list): - validate_list_response( - send_list=send_property_value, received_list=received_value - ) - continue - - # when dealing with objects, we'll need to iterate the properties - if isinstance(received_value, dict): - validate_dict_response( - send_dict=send_property_value, received_dict=received_value - ) - continue - - assert received_value == send_property_value, ( - f"Received value for {send_property_name} '{received_value}' does not " - f"match '{send_property_value}' in the {response.request.method} request." - f"\nSend: {_json.dumps(send_json, indent=4, sort_keys=True)}" - f"\nGot: {_json.dumps(response_data, indent=4, sort_keys=True)}" - ) - - if response.request.body is None: - logger.warning( - "Could not validate send response; the body of the request property " - "on the provided response was None." 
+ else: + for content_type in content_dict: + if "json" in content_type: + content_type_without_charset, _, _ = content_type.partition(";") + yield content_type_without_charset + + if isinstance(item, list): + for list_item in item: + yield from self._get_json_types(list_item) + + def _load_specs_and_validator( + self, + ) -> tuple[ + ResolvingParser, + Spec, + ResponseValidatorType, + ]: + def recursion_limit_handler( + limit: int, + refstring: str, + recursions: JSON, # pylint: disable=unused-argument + ) -> JSON: + return self._recursion_default + + try: + # Since parsing of the OAS and creating the Spec can take a long time, + # they are cached. This is done by storing them in an imported module that + # will have a global scope due to how the Python import system works. This + # ensures that in a Suite of Suites where multiple Suites use the same + # `source`, that OAS is only parsed / loaded once. + cached_parser = PARSER_CACHE.get(self._source, None) + if cached_parser: + return ( + cached_parser.parser, + cached_parser.validation_spec, + cached_parser.response_validator, + ) + + parser = ResolvingParser( + self._source, + backend="openapi-spec-validator", + recursion_limit=self._recursion_limit, + recursion_limit_handler=recursion_limit_handler, ) - return None - if isinstance(response.request.body, bytes): - send_json = _json.loads(response.request.body.decode("UTF-8")) - else: - send_json = _json.loads(response.request.body) - - response_data = response.json() - # POST on /resource_type/{id}/array_item/ will return the updated {id} resource - # instead of a newly created resource. 
In this case, the send_json must be - # in the array of the 'array_item' property on {id} - send_path: str = response.request.path_url - response_path = response_data.get("href", None) - if response_path and send_path not in response_path: - property_to_check = send_path.replace(response_path, "")[1:] - if response_data.get(property_to_check) and isinstance( - response_data[property_to_check], list - ): - item_list: list[dict[str, Any]] = response_data[property_to_check] - # Use the (mandatory) id to get the POSTed resource from the list - [response_data] = [ - item for item in item_list if item["id"] == send_json["id"] - ] - - # incoming arguments are dictionaries, so they can be validated as such - validate_dict_response(send_dict=send_json, received_dict=response_data) - - # In case of PATCH requests, ensure that only send properties have changed - if original_data: - for send_property_name, send_value in original_data.items(): - if send_property_name not in send_json.keys(): - assert send_value == response_data[send_property_name], ( - f"Received value for {send_property_name} '{response_data[send_property_name]}' does not " - f"match '{send_value}' in the pre-patch data" - f"\nPre-patch: {_json.dumps(original_data, indent=4, sort_keys=True)}" - f"\nGot: {_json.dumps(response_data, indent=4, sort_keys=True)}" - ) - return None - - def _get_response_spec( - self, path: str, method: str, status_code: int - ) -> dict[str, Any]: - method = method.lower() - status = str(status_code) - spec: dict[str, Any] = {**self.openapi_spec}["paths"][path][method][ - "responses" - ][status] - return spec + + if parser.specification is None: # pragma: no cover + raise FatalError( + "Source was loaded, but no specification was present after parsing." 
+ ) + + validation_spec = Spec.from_dict(parser.specification) # pyright: ignore[reportArgumentType] + + json_types_from_spec: set[str] = self._get_json_types_from_spec( + parser.specification + ) + extra_deserializers = { + json_type: _json.loads for json_type in json_types_from_spec + } + config = Config(extra_media_type_deserializers=extra_deserializers) # type: ignore[arg-type] + openapi = OpenAPI(spec=validation_spec, config=config) + response_validator: ResponseValidatorType = openapi.validate_response # type: ignore[assignment] + + PARSER_CACHE[self._source] = CachedParser( + parser=parser, + validation_spec=validation_spec, + response_validator=response_validator, + ) + + return parser, validation_spec, response_validator + + except ResolutionError as exception: + raise FatalError( + f"ResolutionError while trying to load openapi spec: {exception}" + ) from exception + except ValidationError as exception: + raise FatalError( + f"ValidationError while trying to load openapi spec: {exception}" + ) from exception + + def read_paths(self) -> dict[str, JSON]: + return self.openapi_spec["paths"] # type: ignore[return-value] diff --git a/src/OpenApiLibCore/path_functions.py b/src/OpenApiLibCore/path_functions.py new file mode 100644 index 0000000..f41b838 --- /dev/null +++ b/src/OpenApiLibCore/path_functions.py @@ -0,0 +1,209 @@ +"""Module holding the functions related to paths and urls.""" + +import json as _json +from itertools import zip_longest +from random import choice +from typing import Any + +from requests import Response +from robot.libraries.BuiltIn import BuiltIn + +from OpenApiLibCore.dto_base import PathPropertiesConstraint +from OpenApiLibCore.protocols import GetDtoClassType, GetIdPropertyNameType +from OpenApiLibCore.request_data import RequestData + +run_keyword = BuiltIn().run_keyword + + +def match_parts(parts: list[str], spec_parts: list[str]) -> bool: + for part, spec_part in zip_longest(parts, spec_parts, fillvalue="Filler"): + if part == 
"Filler" or spec_part == "Filler": + return False + if part != spec_part and not spec_part.startswith("{"): + return False + return True + + +def get_parametrized_path(path: str, openapi_spec: dict[str, Any]) -> str: + path_parts = path.split("/") + # if the last part is empty, the path has a trailing `/` that + # should be ignored during matching + if path_parts[-1] == "": + _ = path_parts.pop(-1) + + spec_paths: list[str] = {**openapi_spec}["paths"].keys() + + candidates: list[str] = [] + + for spec_path in spec_paths: + spec_path_parts = spec_path.split("/") + # ignore trailing `/` the same way as for path_parts + if spec_path_parts[-1] == "": + _ = spec_path_parts.pop(-1) + if match_parts(path_parts, spec_path_parts): + candidates.append(spec_path) + + if not candidates: + raise ValueError(f"{path} not found in paths section of the OpenAPI document.") + + if len(candidates) == 1: + return candidates[0] + # Multiple matches can happen in APIs with overloaded paths, e.g. + # /users/me + # /users/${user_id} + # In this case, find the closest (or exact) match + exact_match = [c for c in candidates if c == path] + if exact_match: + return exact_match[0] + # TODO: Implement a decision mechanism when real-world examples become available + # In the face of ambiguity, refuse the temptation to guess. + raise ValueError(f"{path} matched to multiple paths: {candidates}") + + +def get_valid_url( + path: str, + base_url: str, + get_dto_class: GetDtoClassType, + openapi_spec: dict[str, Any], +) -> str: + try: + # path can be partially resolved or provided by a PathPropertiesConstraint + parametrized_path = get_parametrized_path(path=path, openapi_spec=openapi_spec) + _ = openapi_spec["paths"][parametrized_path] + except KeyError: + raise ValueError( + f"{path} not found in paths section of the OpenAPI document." 
+ ) from None + dto_class = get_dto_class(path=path, method="get") + relations = dto_class.get_relations() + paths = [p.path for p in relations if isinstance(p, PathPropertiesConstraint)] + if paths: + url = f"{base_url}{choice(paths)}" + return url + path_parts = list(path.split("/")) + for index, part in enumerate(path_parts): + if part.startswith("{") and part.endswith("}"): + type_path_parts = path_parts[slice(index)] + type_path = "/".join(type_path_parts) + existing_id: str | int | float = run_keyword( + "get_valid_id_for_path", type_path + ) + path_parts[index] = str(existing_id) + resolved_path = "/".join(path_parts) + url = f"{base_url}{resolved_path}" + return url + + +def get_valid_id_for_path( + path: str, + get_id_property_name: GetIdPropertyNameType, +) -> str | int: + url: str = run_keyword("get_valid_url", path) + # Try to create a new resource to prevent conflicts caused by + # operations performed on the same resource by other test cases + request_data: RequestData = run_keyword("get_request_data", path, "post") + + response: Response = run_keyword( + "authorized_request", + url, + "post", + request_data.get_required_params(), + request_data.get_required_headers(), + request_data.get_required_properties_dict(), + ) + + id_property, id_transformer = get_id_property_name(path=path) + + if not response.ok: + # If a new resource cannot be created using POST, try to retrieve a + # valid id using a GET request. 
+ try: + valid_id = choice(run_keyword("get_ids_from_url", url)) + return id_transformer(valid_id) + except Exception as exception: + raise AssertionError( + f"Failed to get a valid id using GET on {url}" + ) from exception + + response_data = response.json() + if prepared_body := response.request.body: + if isinstance(prepared_body, bytes): + send_json = _json.loads(prepared_body.decode("UTF-8")) + else: + send_json = _json.loads(prepared_body) + else: + send_json = None + + # no support for retrieving an id from an array returned on a POST request + if isinstance(response_data, list): + raise NotImplementedError( + f"Unexpected response body for POST request: expected an object but " + f"received an array ({response_data})" + ) + + # POST on /resource_type/{id}/array_item/ will return the updated {id} resource + # instead of a newly created resource. In this case, the send_json must be + # in the array of the 'array_item' property on {id} + send_path: str = response.request.path_url + response_href: str = response_data.get("href", "") + if response_href and (send_path not in response_href) and send_json: + try: + property_to_check = send_path.replace(response_href, "")[1:] + item_list: list[dict[str, Any]] = response_data[property_to_check] + # Use the (mandatory) id to get the POSTed resource from the list + [valid_id] = [ + item[id_property] + for item in item_list + if item[id_property] == send_json[id_property] + ] + except Exception as exception: + raise AssertionError( + f"Failed to get a valid id from {response_href}" + ) from exception + else: + try: + valid_id = response_data[id_property] + except KeyError: + raise AssertionError( + f"Failed to get a valid id from {response_data}" + ) from None + return id_transformer(valid_id) + + +def get_ids_from_url( + url: str, + get_id_property_name: GetIdPropertyNameType, +) -> list[str]: + path: str = run_keyword("get_parameterized_path_from_url", url) + request_data: RequestData = run_keyword("get_request_data", 
path, "get") + response = run_keyword( + "authorized_request", + url, + "get", + request_data.get_required_params(), + request_data.get_required_headers(), + ) + response.raise_for_status() + response_data: dict[str, Any] | list[dict[str, Any]] = response.json() + + # determine the property name to use + mapping = get_id_property_name(path=path) + if isinstance(mapping, str): + id_property = mapping + else: + id_property, _ = mapping + + if isinstance(response_data, list): + valid_ids: list[str] = [item[id_property] for item in response_data] + return valid_ids + # if the response is an object (dict), check if it's hal+json + if embedded := response_data.get("_embedded"): + # there should be 1 item in the dict that has a value that's a list + for value in embedded.values(): + if isinstance(value, list): + valid_ids = [item[id_property] for item in value] + return valid_ids + if (valid_id := response_data.get(id_property)) is not None: + return [valid_id] + valid_ids = [item[id_property] for item in response_data["items"]] + return valid_ids diff --git a/src/OpenApiLibCore/path_invalidation.py b/src/OpenApiLibCore/path_invalidation.py new file mode 100644 index 0000000..29b2283 --- /dev/null +++ b/src/OpenApiLibCore/path_invalidation.py @@ -0,0 +1,44 @@ +"""Module holding functions related to invalidation of paths and urls.""" + +from random import choice +from uuid import uuid4 + +from robot.libraries.BuiltIn import BuiltIn + +from OpenApiLibCore.dto_base import PathPropertiesConstraint +from OpenApiLibCore.protocols import GetDtoClassType + +run_keyword = BuiltIn().run_keyword + + +def get_invalidated_url( + valid_url: str, + path: str, + base_url: str, + get_dto_class: GetDtoClassType, + expected_status_code: int, +) -> str: + dto_class = get_dto_class(path=path, method="get") + relations = dto_class.get_relations() + paths = [ + p.invalid_value + for p in relations + if isinstance(p, PathPropertiesConstraint) + and p.invalid_value_error_code == 
expected_status_code
+    ]
+    if paths:
+        url = f"{base_url}{choice(paths)}"
+        return url
+    parameterized_path: str = run_keyword("get_parameterized_path_from_url", valid_url)
+    parameterized_url = base_url + parameterized_path
+    valid_url_parts = list(reversed(valid_url.split("/")))
+    parameterized_parts = reversed(parameterized_url.split("/"))
+    for index, (parameterized_part, _) in enumerate(
+        zip(parameterized_parts, valid_url_parts)
+    ):
+        if parameterized_part.startswith("{") and parameterized_part.endswith("}"):
+            valid_url_parts[index] = uuid4().hex
+            valid_url_parts.reverse()
+            invalid_url = "/".join(valid_url_parts)
+            return invalid_url
+    raise ValueError(f"{parameterized_path} could not be invalidated.")
diff --git a/src/OpenApiLibCore/protocols.py b/src/OpenApiLibCore/protocols.py
new file mode 100644
index 0000000..5ddeb93
--- /dev/null
+++ b/src/OpenApiLibCore/protocols.py
@@ -0,0 +1,30 @@
+"""A module holding Protocols."""
+
+from typing import Callable, Protocol, Type
+
+from openapi_core.contrib.requests import (
+    RequestsOpenAPIRequest,
+    RequestsOpenAPIResponse,
+)
+
+from OpenApiLibCore.dto_base import Dto
+
+
+class ResponseValidatorType(Protocol):
+    def __call__(
+        self, request: RequestsOpenAPIRequest, response: RequestsOpenAPIResponse
+    ) -> None: ...
+
+
+class GetDtoClassType(Protocol):
+    def __init__(self, mappings_module_name: str) -> None: ...
+
+    def __call__(self, path: str, method: str) -> Type[Dto]: ...
+
+
+class GetIdPropertyNameType(Protocol):
+    def __init__(self, mappings_module_name: str) -> None: ...
+
+    def __call__(
+        self, path: str
+    ) -> tuple[str, Callable[[str], str] | Callable[[int], int]]: ...
diff --git a/src/OpenApiLibCore/request_data.py b/src/OpenApiLibCore/request_data.py new file mode 100644 index 0000000..5c9e03f --- /dev/null +++ b/src/OpenApiLibCore/request_data.py @@ -0,0 +1,258 @@ +"""Module holding the classes used to manage request data.""" + +from copy import deepcopy +from dataclasses import dataclass, field +from functools import cached_property +from random import sample +from typing import Any + +from OpenApiLibCore.dto_base import ( + Dto, + resolve_schema, +) +from OpenApiLibCore.dto_utils import DefaultDto + + +@dataclass +class RequestValues: + """Helper class to hold parameter values needed to make a request.""" + + url: str + method: str + params: dict[str, Any] = field(default_factory=dict) + headers: dict[str, str] = field(default_factory=dict) + json_data: dict[str, Any] = field(default_factory=dict) + + +@dataclass +class RequestData: + """Helper class to manage parameters used when making requests.""" + + dto: Dto | DefaultDto = field(default_factory=DefaultDto) + dto_schema: dict[str, Any] = field(default_factory=dict) + parameters: list[dict[str, Any]] = field(default_factory=list) + params: dict[str, Any] = field(default_factory=dict) + headers: dict[str, Any] = field(default_factory=dict) + has_body: bool = True + + def __post_init__(self) -> None: + # prevent modification by reference + self.dto_schema = deepcopy(self.dto_schema) + self.parameters = deepcopy(self.parameters) + self.params = deepcopy(self.params) + self.headers = deepcopy(self.headers) + + @property + def has_optional_properties(self) -> bool: + """Whether or not the dto data (json data) contains optional properties.""" + + def is_required_property(property_name: str) -> bool: + return property_name in self.dto_schema.get("required", []) + + properties = (self.dto.as_dict()).keys() + return not all(map(is_required_property, properties)) + + @property + def has_optional_params(self) -> bool: + """Whether or not any of the query parameters are optional.""" 
+ + def is_optional_param(query_param: str) -> bool: + optional_params = [ + p.get("name") + for p in self.parameters + if p.get("in") == "query" and not p.get("required") + ] + return query_param in optional_params + + return any(map(is_optional_param, self.params)) + + @cached_property + def params_that_can_be_invalidated(self) -> set[str]: + """ + The query parameters that can be invalidated by violating data + restrictions, data type or by not providing them in a request. + """ + result = set() + params = [h for h in self.parameters if h.get("in") == "query"] + for param in params: + # required params can be omitted to invalidate a request + if param["required"]: + result.add(param["name"]) + continue + + schema = resolve_schema(param["schema"]) + if schema.get("type", None): + param_types = [schema] + else: + param_types = schema["types"] + for param_type in param_types: + # any basic non-string type except "null" can be invalidated by + # replacing it with a string + if param_type["type"] not in ["string", "array", "object", "null"]: + result.add(param["name"]) + continue + # enums, strings and arrays with boundaries can be invalidated + if set(param_type.keys()).intersection( + { + "enum", + "minLength", + "maxLength", + "minItems", + "maxItems", + } + ): + result.add(param["name"]) + continue + # an array of basic non-string type can be invalidated by replacing the + # items in the array with strings + if param_type["type"] == "array" and param_type["items"][ + "type" + ] not in [ + "string", + "array", + "object", + "null", + ]: + result.add(param["name"]) + return result + + @property + def has_optional_headers(self) -> bool: + """Whether or not any of the headers are optional.""" + + def is_optional_header(header: str) -> bool: + optional_headers = [ + p.get("name") + for p in self.parameters + if p.get("in") == "header" and not p.get("required") + ] + return header in optional_headers + + return any(map(is_optional_header, self.headers)) + + 
@cached_property + def headers_that_can_be_invalidated(self) -> set[str]: + """ + The header parameters that can be invalidated by violating data + restrictions or by not providing them in a request. + """ + result = set() + headers = [h for h in self.parameters if h.get("in") == "header"] + for header in headers: + # required headers can be omitted to invalidate a request + if header["required"]: + result.add(header["name"]) + continue + + schema = resolve_schema(header["schema"]) + if schema.get("type", None): + header_types = [schema] + else: + header_types = schema["types"] + for header_type in header_types: + # any basic non-string type except "null" can be invalidated by + # replacing it with a string + if header_type["type"] not in ["string", "array", "object", "null"]: + result.add(header["name"]) + continue + # enums, strings and arrays with boundaries can be invalidated + if set(header_type.keys()).intersection( + { + "enum", + "minLength", + "maxLength", + "minItems", + "maxItems", + } + ): + result.add(header["name"]) + continue + # an array of basic non-string type can be invalidated by replacing the + # items in the array with strings + if header_type["type"] == "array" and header_type["items"][ + "type" + ] not in [ + "string", + "array", + "object", + "null", + ]: + result.add(header["name"]) + return result + + def get_required_properties_dict(self) -> dict[str, Any]: + """Get the json-compatible dto data containing only the required properties.""" + relations = self.dto.get_relations() + mandatory_properties = [ + relation.property_name + for relation in relations + if getattr(relation, "treat_as_mandatory", False) + ] + required_properties: list[str] = self.dto_schema.get("required", []) + required_properties.extend(mandatory_properties) + + required_properties_dict: dict[str, Any] = {} + for key, value in (self.dto.as_dict()).items(): + if key in required_properties: + required_properties_dict[key] = value + return required_properties_dict + + 
def get_minimal_body_dict(self) -> dict[str, Any]: + required_properties_dict = self.get_required_properties_dict() + + min_properties = self.dto_schema.get("minProperties", 0) + number_of_optional_properties_to_add = min_properties - len( + required_properties_dict + ) + + if number_of_optional_properties_to_add < 1: + return required_properties_dict + + optional_properties_dict = { + k: v + for k, v in self.dto.as_dict().items() + if k not in required_properties_dict + } + optional_properties_to_keep = sample( + sorted(optional_properties_dict), number_of_optional_properties_to_add + ) + optional_properties_dict = { + k: v + for k, v in optional_properties_dict.items() + if k in optional_properties_to_keep + } + + return {**required_properties_dict, **optional_properties_dict} + + def get_required_params(self) -> dict[str, str]: + """Get the params dict containing only the required query parameters.""" + return { + k: v for k, v in self.params.items() if k in self.required_parameter_names + } + + def get_required_headers(self) -> dict[str, str]: + """Get the headers dict containing only the required headers.""" + return { + k: v for k, v in self.headers.items() if k in self.required_parameter_names + } + + @property + def required_parameter_names(self) -> list[str]: + """ + The names of the mandatory parameters, including the parameters configured to be + treated as mandatory using a PropertyValueConstraint. 
+ """ + relations = self.dto.get_parameter_relations() + mandatory_property_names = [ + relation.property_name + for relation in relations + if getattr(relation, "treat_as_mandatory", False) + ] + parameter_names = [p["name"] for p in self.parameters] + mandatory_parameters = [ + p for p in mandatory_property_names if p in parameter_names + ] + + required_parameters = [p["name"] for p in self.parameters if p.get("required")] + required_parameters.extend(mandatory_parameters) + return required_parameters diff --git a/src/OpenApiLibCore/resource_relations.py b/src/OpenApiLibCore/resource_relations.py new file mode 100644 index 0000000..6c25393 --- /dev/null +++ b/src/OpenApiLibCore/resource_relations.py @@ -0,0 +1,54 @@ +"""Module holding the functions related to relations between resources.""" + +from typing import Any + +from requests import Response +from robot.api import logger +from robot.libraries.BuiltIn import BuiltIn + +import OpenApiLibCore.path_functions as pf +from OpenApiLibCore.dto_base import IdReference +from OpenApiLibCore.request_data import RequestData + +run_keyword = BuiltIn().run_keyword + + +def ensure_in_use( + url: str, + base_url: str, + openapi_spec: dict[str, Any], + resource_relation: IdReference, +) -> None: + resource_id = "" + + path = url.replace(base_url, "") + path_parts = path.split("/") + parameterized_path = pf.get_parametrized_path(path=path, openapi_spec=openapi_spec) + parameterized_path_parts = parameterized_path.split("/") + for part, param_part in zip( + reversed(path_parts), reversed(parameterized_path_parts) + ): + if param_part.endswith("}"): + resource_id = part + break + if not resource_id: + raise ValueError(f"The provided url ({url}) does not contain an id.") + request_data: RequestData = run_keyword( + "get_request_data", resource_relation.post_path, "post" + ) + json_data = request_data.dto.as_dict() + json_data[resource_relation.property_name] = resource_id + post_url: str = run_keyword("get_valid_url", 
resource_relation.post_path) + response: Response = run_keyword( + "authorized_request", + post_url, + "post", + request_data.params, + request_data.headers, + json_data, + ) + if not response.ok: + logger.debug( + f"POST on {post_url} with json {json_data} failed: {response.json()}" + ) + response.raise_for_status() diff --git a/src/OpenApiLibCore/validation.py b/src/OpenApiLibCore/validation.py new file mode 100644 index 0000000..9150ef8 --- /dev/null +++ b/src/OpenApiLibCore/validation.py @@ -0,0 +1,497 @@ +"""Module holding the functions related to validation of requests and responses.""" + +import json as _json +from enum import Enum +from http import HTTPStatus +from typing import Any, Mapping + +from openapi_core.contrib.requests import ( + RequestsOpenAPIRequest, + RequestsOpenAPIResponse, +) +from openapi_core.exceptions import OpenAPIError +from openapi_core.templating.paths.exceptions import ServerNotFound +from openapi_core.validation.exceptions import ValidationError +from openapi_core.validation.response.exceptions import ResponseValidationError +from requests import Response +from robot.api import logger +from robot.api.exceptions import Failure +from robot.libraries.BuiltIn import BuiltIn + +from OpenApiLibCore.dto_base import resolve_schema +from OpenApiLibCore.protocols import ResponseValidatorType +from OpenApiLibCore.request_data import RequestData, RequestValues + +run_keyword = BuiltIn().run_keyword + + +class ValidationLevel(str, Enum): + """The available levels for the response_validation parameter.""" + + DISABLED = "DISABLED" + INFO = "INFO" + WARN = "WARN" + STRICT = "STRICT" + + +def perform_validated_request( + path: str, + status_code: int, + request_values: RequestValues, + original_data: Mapping[str, Any], +) -> None: + response = run_keyword( + "authorized_request", + request_values.url, + request_values.method, + request_values.params, + request_values.headers, + request_values.json_data, + ) + if response.status_code != 
status_code: + try: + response_json = response.json() + except Exception as _: # pylint: disable=broad-except + logger.info( + f"Failed to get json content from response. " + f"Response text was: {response.text}" + ) + response_json = {} + if not response.ok: + if description := response_json.get("detail"): + pass + else: + description = response_json.get( + "message", "response contains no message or detail." + ) + logger.error(f"{response.reason}: {description}") + + logger.debug( + f"\nSend: {_json.dumps(request_values.json_data, indent=4, sort_keys=True)}" + f"\nGot: {_json.dumps(response_json, indent=4, sort_keys=True)}" + ) + raise AssertionError( + f"Response status_code {response.status_code} was not {status_code}" + ) + + run_keyword("validate_response", path, response, original_data) + + if request_values.method == "DELETE": + request_data: RequestData = run_keyword("get_request_data", path, "GET") + get_params = request_data.params + get_headers = request_data.headers + get_response = run_keyword( + "authorized_request", request_values.url, "GET", get_params, get_headers + ) + if response.ok: + if get_response.ok: + raise AssertionError( + f"Resource still exists after deletion. Url was {request_values.url}" + ) + # if the path supports GET, 404 is expected, if not 405 is expected + if get_response.status_code not in [404, 405]: + logger.warn( + f"Unexpected response after deleting resource: Status_code " + f"{get_response.status_code} was received after trying to get " + f"{request_values.url} after sucessfully deleting it." + ) + elif not get_response.ok: + raise AssertionError( + f"Resource could not be retrieved after failed deletion. 
" + f"Url was {request_values.url}, status_code was {get_response.status_code}" + ) + + +def assert_href_to_resource_is_valid( + href: str, origin: str, base_url: str, referenced_resource: dict[str, Any] +) -> None: + url = f"{origin}{href}" + path = url.replace(base_url, "") + request_data: RequestData = run_keyword("get_request_data", path, "GET") + params = request_data.params + headers = request_data.headers + get_response = run_keyword("authorized_request", url, "GET", params, headers) + assert get_response.json() == referenced_resource, ( + f"{get_response.json()} not equal to original {referenced_resource}" + ) + + +def validate_response( + path: str, + response: Response, + response_validator: ResponseValidatorType, + server_validation_warning_logged: bool, + disable_server_validation: bool, + invalid_property_default_response: int, + response_validation: str, + openapi_spec: dict[str, Any], + original_data: Mapping[str, Any], +) -> None: + if response.status_code == int(HTTPStatus.NO_CONTENT): + assert not response.content + return None + + try: + _validate_response( + response=response, + response_validator=response_validator, + server_validation_warning_logged=server_validation_warning_logged, + disable_server_validation=disable_server_validation, + invalid_property_default_response=invalid_property_default_response, + response_validation=response_validation, + ) + except OpenAPIError as exception: + raise Failure( + f"Response did not pass schema validation: {exception}" + ) from exception + + request_method = response.request.method + if request_method is None: + logger.warn( + f"Could not validate response for path {path}; no method found " + f"on the request property of the provided response." 
+ ) + return None + + response_spec = _get_response_spec( + path=path, + method=request_method, + status_code=response.status_code, + openapi_spec=openapi_spec, + ) + + content_type_from_response = response.headers.get("Content-Type", "unknown") + mime_type_from_response, _, _ = content_type_from_response.partition(";") + + if not response_spec.get("content"): + logger.warn( + "The response cannot be validated: 'content' not specified in the OAS." + ) + return None + + # multiple content types can be specified in the OAS + content_types = list(response_spec["content"].keys()) + supported_types = [ + ct for ct in content_types if ct.partition(";")[0].endswith("json") + ] + if not supported_types: + raise NotImplementedError( + f"The content_types '{content_types}' are not supported. " + f"Only json types are currently supported." + ) + content_type = supported_types[0] + mime_type = content_type.partition(";")[0] + + if mime_type != mime_type_from_response: + raise ValueError( + f"Content-Type '{content_type_from_response}' of the response " + f"does not match '{mime_type}' as specified in the OpenAPI document." + ) + + json_response = response.json() + response_schema = resolve_schema(response_spec["content"][content_type]["schema"]) + + response_types = response_schema.get("types") + if response_types: + # In case of oneOf / anyOf there can be multiple possible response types + # which makes generic validation too complex + return None + response_type = response_schema.get("type", "undefined") + if response_type not in ["object", "array"]: + _validate_value_type(value=json_response, expected_type=response_type) + return None + + if list_item_schema := response_schema.get("items"): + if not isinstance(json_response, list): + raise AssertionError( + f"Response schema violation: the schema specifies an array as " + f"response type but the response was of type {type(json_response)}." 
+            )
+        type_of_list_items = list_item_schema.get("type")
+        if type_of_list_items == "object":
+            for resource in json_response:
+                run_keyword("validate_resource_properties", resource, list_item_schema)
+        else:
+            for item in json_response:
+                _validate_value_type(value=item, expected_type=type_of_list_items)
+        # no further validation; value validation of individual resources should
+        # be performed on the path for the specific resources
+        return None
+
+    run_keyword("validate_resource_properties", json_response, response_schema)
+    # ensure the href is valid if present in the response
+    if href := json_response.get("href"):
+        run_keyword("assert_href_to_resource_is_valid", href, json_response)
+    # every property that was successfully sent and that is in the response
+    # schema must have the value that was sent
+    if response.ok and response.request.method in ["POST", "PUT", "PATCH"]:
+        run_keyword("validate_send_response", response, original_data)
+    return None
+
+
+def validate_resource_properties(
+    resource: dict[str, Any], schema: dict[str, Any]
+) -> None:
+    schema_properties = schema.get("properties", {})
+    property_names_from_schema = set(schema_properties.keys())
+    property_names_in_resource = set(resource.keys())
+
+    if property_names_from_schema != property_names_in_resource:
+        # The additionalProperties property determines whether properties with
+        # unspecified names are allowed. This property can be boolean or an object
+        # (dict) that specifies the type of any additional properties.
+ additional_properties = schema.get("additionalProperties", True) + if isinstance(additional_properties, bool): + allow_additional_properties = additional_properties + allowed_additional_properties_type = None + else: + allow_additional_properties = True + allowed_additional_properties_type = additional_properties["type"] + + extra_property_names = property_names_in_resource.difference( + property_names_from_schema + ) + if allow_additional_properties: + # If a type is defined for extra properties, validate them + if allowed_additional_properties_type: + extra_properties = { + key: value + for key, value in resource.items() + if key in extra_property_names + } + _validate_type_of_extra_properties( + extra_properties=extra_properties, + expected_type=allowed_additional_properties_type, + ) + # If allowed, validation should not fail on extra properties + extra_property_names = set() + + required_properties = set(schema.get("required", [])) + missing_properties = required_properties.difference(property_names_in_resource) + + if extra_property_names or missing_properties: + extra = ( + f"\n\tExtra properties in response: {extra_property_names}" + if extra_property_names + else "" + ) + missing = ( + f"\n\tRequired properties missing in response: {missing_properties}" + if missing_properties + else "" + ) + raise AssertionError( + f"Response schema violation: the response contains properties that are " + f"not specified in the schema or does not contain properties that are " + f"required according to the schema." 
+ f"\n\tReceived in the response: {property_names_in_resource}" + f"\n\tDefined in the schema: {property_names_from_schema}" + f"{extra}{missing}" + ) + + +def validate_send_response( + response: Response, + original_data: Mapping[str, Any], +) -> None: + def validate_list_response(send_list: list[Any], received_list: list[Any]) -> None: + for item in send_list: + if item not in received_list: + raise AssertionError( + f"Received value '{received_list}' does " + f"not contain '{item}' in the {response.request.method} request." + f"\nSend: {_json.dumps(send_json, indent=4, sort_keys=True)}" + f"\nGot: {_json.dumps(response_data, indent=4, sort_keys=True)}" + ) + + def validate_dict_response( + send_dict: dict[str, Any], received_dict: dict[str, Any] + ) -> None: + for send_property_name, send_property_value in send_dict.items(): + # sometimes, a property in the request is not in the response, e.g. a password + if send_property_name not in received_dict.keys(): + continue + if send_property_value is not None: + # if a None value is send, the target property should be cleared or + # reverted to the default value (which cannot be specified in the + # openapi document) + received_value = received_dict[send_property_name] + # In case of lists / arrays, the send values are often appended to + # existing data + if isinstance(received_value, list): + validate_list_response( + send_list=send_property_value, received_list=received_value + ) + continue + + # when dealing with objects, we'll need to iterate the properties + if isinstance(received_value, dict): + validate_dict_response( + send_dict=send_property_value, received_dict=received_value + ) + continue + + assert received_value == send_property_value, ( + f"Received value for {send_property_name} '{received_value}' does not " + f"match '{send_property_value}' in the {response.request.method} request." 
+ f"\nSend: {_json.dumps(send_json, indent=4, sort_keys=True)}" + f"\nGot: {_json.dumps(response_data, indent=4, sort_keys=True)}" + ) + + if response.request.body is None: + logger.warn( + "Could not validate send response; the body of the request property " + "on the provided response was None." + ) + return None + if isinstance(response.request.body, bytes): + send_json = _json.loads(response.request.body.decode("UTF-8")) + else: + send_json = _json.loads(response.request.body) + + response_data = response.json() + # POST on /resource_type/{id}/array_item/ will return the updated {id} resource + # instead of a newly created resource. In this case, the send_json must be + # in the array of the 'array_item' property on {id} + send_path: str = response.request.path_url + response_path = response_data.get("href", None) + if response_path and send_path not in response_path: + property_to_check = send_path.replace(response_path, "")[1:] + if response_data.get(property_to_check) and isinstance( + response_data[property_to_check], list + ): + item_list: list[dict[str, Any]] = response_data[property_to_check] + # Use the (mandatory) id to get the POSTed resource from the list + [response_data] = [ + item for item in item_list if item["id"] == send_json["id"] + ] + + # incoming arguments are dictionaries, so they can be validated as such + validate_dict_response(send_dict=send_json, received_dict=response_data) + + # In case of PATCH requests, ensure that only send properties have changed + if original_data: + for send_property_name, send_value in original_data.items(): + if send_property_name not in send_json.keys(): + assert send_value == response_data[send_property_name], ( + f"Received value for {send_property_name} '{response_data[send_property_name]}' does not " + f"match '{send_value}' in the pre-patch data" + f"\nPre-patch: {_json.dumps(original_data, indent=4, sort_keys=True)}" + f"\nGot: {_json.dumps(response_data, indent=4, sort_keys=True)}" + ) + return None + 
+ +def validate_response_using_validator( + request: RequestsOpenAPIRequest, + response: RequestsOpenAPIResponse, + response_validator: ResponseValidatorType, +) -> None: + response_validator(request=request, response=response) + + +def _validate_response( + response: Response, + response_validator: ResponseValidatorType, + server_validation_warning_logged: bool, + disable_server_validation: bool, + invalid_property_default_response: int, + response_validation: str, +) -> None: + try: + validate_response_using_validator( + RequestsOpenAPIRequest(response.request), + RequestsOpenAPIResponse(response), + response_validator=response_validator, + ) + except (ResponseValidationError, ServerNotFound) as exception: + error: BaseException | None = exception.__cause__ + validation_errors: list[ValidationError] = getattr(error, "schema_errors", []) + if validation_errors: + error_message = "\n".join( + [ + f"{list(getattr(error, 'schema_path', ''))}: {getattr(error, 'message', '')}" + for error in validation_errors + ] + ) + else: + error_message = str(exception) + + if isinstance(exception, ServerNotFound): + if not server_validation_warning_logged: + logger.warn( + f"ServerNotFound was raised during response validation. " + f"Due to this, no full response validation will be performed." 
+                    f"\nThe original error was: {error_message}"
+                )
+                server_validation_warning_logged = True
+            if disable_server_validation:
+                return
+
+        if response.status_code == invalid_property_default_response:
+            logger.debug(error_message)
+            return
+        if response_validation == ValidationLevel.STRICT:
+            logger.error(error_message)
+            raise exception
+        if response_validation == ValidationLevel.WARN:
+            logger.warn(error_message)
+        elif response_validation == ValidationLevel.INFO:
+            logger.info(error_message)
+
+
+def _validate_value_type(value: Any, expected_type: str) -> None:
+    type_mapping = {
+        "string": str,
+        "number": float,
+        "integer": int,
+        "boolean": bool,
+        "array": list,
+        "object": dict,
+    }
+    python_type = type_mapping.get(expected_type, None)
+    if python_type is None:
+        raise AssertionError(f"Validation of type '{expected_type}' is not supported.")
+    if not isinstance(value, python_type):
+        raise AssertionError(f"{value} is not of type {expected_type}")
+
+
+def _validate_type_of_extra_properties(
+    extra_properties: dict[str, Any], expected_type: str
+) -> None:
+    type_mapping = {
+        "string": str,
+        "number": float,
+        "integer": int,
+        "boolean": bool,
+        "array": list,
+        "object": dict,
+    }
+
+    python_type = type_mapping.get(expected_type, None)
+    if python_type is None:
+        logger.warn(
+            f"Additional properties were not validated: "
+            f"type '{expected_type}' is not supported."
+        )
+        return
+
+    invalid_extra_properties = {
+        key: value
+        for key, value in extra_properties.items()
+        if not isinstance(value, python_type)
+    }
+    if invalid_extra_properties:
+        raise AssertionError(
+            f"Response contains invalid additionalProperties: "
+            f"{invalid_extra_properties} are not of type {expected_type}."
+ ) + + +def _get_response_spec( + path: str, method: str, status_code: int, openapi_spec: dict[str, Any] +) -> dict[str, Any]: + method = method.lower() + status = str(status_code) + spec: dict[str, Any] = {**openapi_spec}["paths"][path][method]["responses"][status] + return spec diff --git a/src/OpenApiLibCore/value_utils.py b/src/OpenApiLibCore/value_utils.py index fdfeeeb..a2d818a 100644 --- a/src/OpenApiLibCore/value_utils.py +++ b/src/OpenApiLibCore/value_utils.py @@ -1,20 +1,24 @@ # mypy: disable-error-code=no-any-return """Utility module with functions to handle OpenAPI value types and restrictions.""" + import base64 import datetime from copy import deepcopy -from logging import getLogger from random import choice, randint, uniform -from typing import Any, Callable +from typing import Any, Callable, Iterable, Mapping, cast, overload import faker import rstr +from robot.api import logger + +from OpenApiLibCore.annotations import JSON -JSON = dict[str, "JSON"] | list["JSON"] | str | int | float | bool | None -logger = getLogger(__name__) +class Ignore: + """Helper class to flag properties to be ignored in data generation.""" -IGNORE = object() + +IGNORE = Ignore() class LocalizedFaker: @@ -107,7 +111,7 @@ def json_type_name_of_python_type(python_type: Any) -> str: raise ValueError(f"No json type mapping for Python type {python_type} available.") -def python_type_by_json_type_name(type_name: str) -> Any: +def python_type_by_json_type_name(type_name: str) -> type: """Return the Python type based on the JSON type name.""" if type_name == "string": return str @@ -126,7 +130,7 @@ def python_type_by_json_type_name(type_name: str) -> Any: raise ValueError(f"No Python type mapping for JSON type '{type_name}' available.") -def get_valid_value(value_schema: dict[str, Any]) -> Any: +def get_valid_value(value_schema: Mapping[str, Any]) -> JSON: """Return a random value that is valid under the provided value_schema.""" value_schema = deepcopy(value_schema) @@ -156,10 
+160,10 @@ def get_valid_value(value_schema: dict[str, Any]) -> Any: def get_invalid_value( - value_schema: dict[str, Any], + value_schema: Mapping[str, Any], current_value: Any, - values_from_constraint: list[Any] | None = None, # FIXME: default empty list? -) -> Any: + values_from_constraint: Iterable[Any] = tuple(), +) -> JSON | Ignore: """Return a random value that violates the provided value_schema.""" value_schema = deepcopy(value_schema) @@ -180,7 +184,7 @@ def get_invalid_value( values_from_constraint and ( invalid_value := get_invalid_value_from_constraint( - values_from_constraint=values_from_constraint, + values_from_constraint=list(values_from_constraint), value_type=value_type, ) ) @@ -212,7 +216,7 @@ def get_invalid_value( return FAKE.uuid() -def get_random_int(value_schema: dict[str, Any]) -> int: +def get_random_int(value_schema: Mapping[str, Any]) -> int: """Generate a random int within the min/max range of the schema, if specified.""" # Use int32 integers if "format" does not specify int64 property_format = value_schema.get("format", "int32") @@ -241,7 +245,7 @@ def get_random_int(value_schema: dict[str, Any]) -> int: return randint(minimum, maximum) -def get_random_float(value_schema: dict[str, Any]) -> float: +def get_random_float(value_schema: Mapping[str, Any]) -> float: """Generate a random float within the min/max range of the schema, if specified.""" # Python floats are already double precision, so no check for "format" minimum = value_schema.get("minimum") @@ -289,7 +293,7 @@ def get_random_float(value_schema: dict[str, Any]) -> float: return uniform(minimum, maximum) -def get_random_string(value_schema: dict[str, Any]) -> bytes | str: +def get_random_string(value_schema: Mapping[str, Any]) -> bytes | str: """Generate a random string within the min/max length in the schema, if specified.""" # if a pattern is provided, format and min/max length can be ignored if pattern := value_schema.get("pattern"): @@ -324,7 +328,7 @@ def 
fake_string(string_format: str) -> str: return value -def get_random_array(value_schema: dict[str, Any]) -> list[Any]: +def get_random_array(value_schema: Mapping[str, Any]) -> list[JSON]: """Generate a list with random elements as specified by the schema.""" minimum = value_schema.get("minItems", 0) maximum = value_schema.get("maxItems", 1) @@ -338,8 +342,8 @@ def get_random_array(value_schema: dict[str, Any]) -> list[Any]: def get_invalid_value_from_constraint( - values_from_constraint: list[Any], value_type: str -) -> Any: + values_from_constraint: list[JSON | Ignore], value_type: str +) -> JSON | Ignore: """ Return a value of the same type as the values in the values_from_constraints that is not in the values_from_constraints, if possible. Otherwise returns None. @@ -347,7 +351,7 @@ def get_invalid_value_from_constraint( # if IGNORE is in the values_from_constraints, the parameter needs to be # ignored for an OK response so leaving the value at it's original value # should result in the specified error response - if IGNORE in values_from_constraint: + if any(map(lambda x: isinstance(x, Ignore), values_from_constraint)): return IGNORE # if the value is forced True or False, return the opposite to invalidate if len(values_from_constraint) == 1 and value_type == "boolean": @@ -362,46 +366,75 @@ def get_invalid_value_from_constraint( values_from_constraint = deepcopy(values_from_constraint) # for objects, keep the keys intact but update the values if value_type == "object": - valid_object = values_from_constraint.pop() - invalid_object = {} + valid_object = cast(dict[str, JSON], values_from_constraint.pop()) + invalid_object: dict[str, JSON] = {} for key, value in valid_object.items(): python_type_of_value = type(value) json_type_of_value = json_type_name_of_python_type(python_type_of_value) - invalid_object[key] = get_invalid_value_from_constraint( - values_from_constraint=[value], - value_type=json_type_of_value, + invalid_value = cast( + JSON, + 
get_invalid_value_from_constraint( + values_from_constraint=[value], + value_type=json_type_of_value, + ), ) + invalid_object[key] = invalid_value return invalid_object # for arrays, update each value in the array to a value of the same type if value_type == "array": - valid_array = values_from_constraint.pop() - invalid_array = [] + valid_array = cast(list[JSON], values_from_constraint.pop()) + invalid_array: list[JSON] = [] for value in valid_array: python_type_of_value = type(value) json_type_of_value = json_type_name_of_python_type(python_type_of_value) - invalid_value = get_invalid_value_from_constraint( - values_from_constraint=[value], - value_type=json_type_of_value, + invalid_value = cast( + JSON, + get_invalid_value_from_constraint( + values_from_constraint=[value], + value_type=json_type_of_value, + ), ) invalid_array.append(invalid_value) return invalid_array + if value_type in ["integer", "number"]: + int_or_number_list = cast(list[int | float], values_from_constraint) + return get_invalid_int_or_number(values_from_constraint=int_or_number_list) + + str_or_bytes_list = cast(list[str] | list[bytes], values_from_constraint) + invalid_value = get_invalid_str_or_bytes(values_from_constraint=str_or_bytes_list) + # None for empty string + return invalid_value if invalid_value else None + + +def get_invalid_int_or_number(values_from_constraint: list[int | float]) -> int | float: + invalid_values = 2 * values_from_constraint + invalid_value = invalid_values.pop() + for value in invalid_values: + invalid_value = abs(invalid_value) + abs(value) + if not invalid_value: + invalid_value += 1 + return invalid_value + + +@overload +def get_invalid_str_or_bytes(values_from_constraint: list[str]) -> str: ... + + +@overload +def get_invalid_str_or_bytes(values_from_constraint: list[bytes]) -> bytes: ... 
+ + +def get_invalid_str_or_bytes(values_from_constraint: list[Any]) -> Any: invalid_values = 2 * values_from_constraint invalid_value = invalid_values.pop() - if value_type in ["integer", "number"]: - for value in invalid_values: - invalid_value = abs(invalid_value) + abs(value) - if not invalid_value: - invalid_value += 1 - return invalid_value for value in invalid_values: invalid_value = invalid_value + value - # None for empty string - return invalid_value if invalid_value else None + return invalid_value -def get_invalid_value_from_enum(values: list[Any], value_type: str) -> Any: +def get_invalid_value_from_enum(values: list[Any], value_type: str) -> JSON: """Return a value not in the enum by combining the enum values.""" if value_type == "string": invalid_value: Any = "" @@ -413,7 +446,7 @@ def get_invalid_value_from_enum(values: list[Any], value_type: str) -> Any: # force creation of a new object since we will be modifying it invalid_value = {**values[0]} else: - logger.warning(f"Cannot invalidate enum value with type {value_type}") + logger.warn(f"Cannot invalidate enum value with type {value_type}") return None for value in values: # repeat each addition to ensure single-item enums are invalidated @@ -435,7 +468,9 @@ def get_invalid_value_from_enum(values: list[Any], value_type: str) -> Any: return invalid_value -def get_value_out_of_bounds(value_schema: dict[str, Any], current_value: Any) -> Any: +def get_value_out_of_bounds( + value_schema: Mapping[str, Any], current_value: JSON +) -> JSON: """ Return a value just outside the value or length range if specified in the provided schema, otherwise None is returned. 
@@ -458,22 +493,24 @@ def get_value_out_of_bounds(value_schema: dict[str, Any], current_value: Any) -> if (exclusive_maximum := value_schema.get("exclusiveMaximum")) is not None: return exclusive_maximum if value_type == "array": + current_list = cast(list[JSON], current_value) if minimum := value_schema.get("minItems", 0) > 0: - return current_value[0 : minimum - 1] + return current_list[0 : minimum - 1] if (maximum := value_schema.get("maxItems")) is not None: - invalid_value = current_value if current_value else ["x"] + invalid_value = current_list if current_list else ["x"] while len(invalid_value) <= maximum: - invalid_value.append(choice(invalid_value)) - return invalid_value + invalid_value.append(choice(invalid_value)) # pyright: ignore[reportArgumentType] + return invalid_value # type: ignore[unused-ignore] if value_type == "string": + current_string = cast(str, current_value) # if there is a minimum length, send 1 character less if minimum := value_schema.get("minLength", 0): - return current_value[0 : minimum - 1] + return current_string[0 : minimum - 1] # if there is a maximum length, send 1 character more if maximum := value_schema.get("maxLength"): - invalid_value = current_value if current_value else "x" + invalid_string_value = current_string if current_string else "x" # add random characters from the current value to prevent adding new characters - while len(invalid_value) <= maximum: - invalid_value += choice(invalid_value) - return invalid_value + while len(invalid_string_value) <= maximum: + invalid_string_value += choice(invalid_string_value) + return invalid_string_value return None diff --git a/src/roboswag/core.py b/src/roboswag/core.py index f47ee52..bec62e1 100644 --- a/src/roboswag/core.py +++ b/src/roboswag/core.py @@ -60,9 +60,9 @@ def send_request( self.logger.log_request(resp) self.logger.log_response(resp) if status is not None: - assert ( - resp.status_code == status - ), f"Expected return status: {status} but received: 
{resp.status_code}" + assert resp.status_code == status, ( + f"Expected return status: {status} but received: {resp.status_code}" + ) return resp def post(self, *args, **kwargs): diff --git a/tasks.py b/tasks.py index 4c30ae1..1d098bd 100644 --- a/tasks.py +++ b/tasks.py @@ -6,8 +6,6 @@ from invoke.context import Context from invoke.tasks import task -# from OpenApiLibCore import openapi_libcore - ROOT = pathlib.Path(__file__).parent.resolve().as_posix() VERSION = version("robotframework-openapitools") @@ -79,6 +77,11 @@ def tests(context: Context) -> None: def type_check(context: Context) -> None: subprocess.run(f"mypy {ROOT}/src", shell=True, check=False) subprocess.run(f"pyright {ROOT}/src", shell=True, check=False) + subprocess.run( + f"robotcode analyze code {ROOT}/tests/driver {ROOT}/tests/libcore", + shell=True, + check=False, + ) @task diff --git a/tests/driver/suites/load_from_url.robot b/tests/driver/suites/load_from_url.robot index e3dcac8..fc2d42b 100644 --- a/tests/driver/suites/load_from_url.robot +++ b/tests/driver/suites/load_from_url.robot @@ -16,6 +16,7 @@ Test Template Validate Test Endpoint Keyword *** Test Cases *** +# robotcode: ignore[ModelError, VariableNotReplaced] Test Endpoint for ${method} on ${path} where ${status_code} is expected diff --git a/tests/driver/suites/load_json.robot b/tests/driver/suites/load_json.robot index ba1f74a..73181f5 100644 --- a/tests/driver/suites/load_json.robot +++ b/tests/driver/suites/load_json.robot @@ -16,10 +16,11 @@ Test Template Do Nothing *** Test Cases *** +# robotcode: ignore[ModelError, VariableNotReplaced] OpenApiJson test for ${method} on ${path} where ${status_code} is expected *** Keywords *** Do Nothing - [Arguments] ${path} ${method} ${status_code} + [Arguments] ${path} ${method} ${status_code} # robocop: off=unused-argument No Operation diff --git a/tests/driver/suites/load_yaml.robot b/tests/driver/suites/load_yaml.robot index 0d7d133..87409a3 100644 --- a/tests/driver/suites/load_yaml.robot 
+++ b/tests/driver/suites/load_yaml.robot @@ -16,10 +16,11 @@ Test Template Do Nothing *** Test Cases *** +# robotcode: ignore[ModelError, VariableNotReplaced] OpenApiYaml test for ${method} on ${path} where ${status_code} is expected *** Keywords *** Do Nothing - [Arguments] ${path} ${method} ${status_code} + [Arguments] ${path} ${method} ${status_code} # robocop: off=unused-argument No Operation diff --git a/tests/driver/suites/test_mismatching_schemas.robot b/tests/driver/suites/test_mismatching_schemas.robot index 799fc5e..5f2106c 100644 --- a/tests/driver/suites/test_mismatching_schemas.robot +++ b/tests/driver/suites/test_mismatching_schemas.robot @@ -26,6 +26,7 @@ Test Template Validate Test Endpoint Keyword *** Test Cases *** +# robotcode: ignore[ModelError, VariableNotReplaced] Test Endpoint for ${method} on ${path} where ${status_code} is expected diff --git a/tests/libcore/suites/test_auth_headers_and_token.robot b/tests/libcore/suites/test_auth_headers_and_token.robot index da78cac..ae1cda0 100644 --- a/tests/libcore/suites/test_auth_headers_and_token.robot +++ b/tests/libcore/suites/test_auth_headers_and_token.robot @@ -20,7 +20,7 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Authorized Request With Security Token And Extra Headers - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get ${response}= Authorized Request ... url=${ORIGIN}/secret_message method=get headers=${request_data.headers} Should Be Equal As Integers ${response.status_code} 200 @@ -36,7 +36,7 @@ Test Authorized Request With Security Token And Extra Headers Test Set Security Token Set Security Token another secret - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get ${response}= Authorized Request ... 
url=${ORIGIN}/secret_message method=get headers=${request_data.headers} Should Be Equal As Integers ${response.status_code} 200 @@ -52,7 +52,7 @@ Test Set Security Token Test Set Extra Headers Set Extra Headers {"spam": "bacon"} - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get ${response}= Authorized Request ... url=${ORIGIN}/secret_message method=get headers=${request_data.headers} Should Be Equal As Integers ${response.status_code} 200 @@ -70,7 +70,7 @@ Test Set Extra Headers Test Set Basic Auth Set Basic Auth username=Joe password=Jane - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get ${response}= Authorized Request ... url=${ORIGIN}/secret_message method=get headers=${request_data.headers} Should Be Equal As Integers ${response.status_code} 200 @@ -88,7 +88,7 @@ Test Set Basic Auth Test Set Auth Set Auth auth=${DIGEST_AUTH} - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get ${response}= Authorized Request ... 
url=${ORIGIN}/secret_message method=get headers=${request_data.headers} Should Be Equal As Integers ${response.status_code} 200 diff --git a/tests/libcore/suites/test_default_id_property_name.robot b/tests/libcore/suites/test_default_id_property_name.robot index 245a396..1f23dda 100644 --- a/tests/libcore/suites/test_default_id_property_name.robot +++ b/tests/libcore/suites/test_default_id_property_name.robot @@ -12,10 +12,10 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** -Test Get Valid Id For Endpoint Returns Id For Id Defined In ID_MAPPING - ${id}= Get Valid Id For Endpoint endpoint=/wagegroups method=post +Test Get Valid Id For Path Returns Id For Id Defined In ID_MAPPING + ${id}= Get Valid Id For Path path=/wagegroups Length Should Be ${id} 36 -Test Get Valid Id For Endpoint Raises For Resource With Non-default Id +Test Get Valid Id For Path Raises For Resource With Non-default Id Run Keyword And Expect Error Failed to get a valid id using* - ... Get Valid Id For Endpoint endpoint=/available_employees method=get + ... 
Get Valid Id For Path path=/available_employees diff --git a/tests/libcore/suites/test_ensure_in_use.robot b/tests/libcore/suites/test_ensure_in_use.robot index a966fdd..aa0c782 100644 --- a/tests/libcore/suites/test_ensure_in_use.robot +++ b/tests/libcore/suites/test_ensure_in_use.robot @@ -14,19 +14,19 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Ensure In Use With Single Id In Url - ${url}= Get Valid Url endpoint=/wagegroups/{wagegroup_id} method=get + ${url}= Get Valid Url path=/wagegroups/{wagegroup_id} Ensure In Use url=${url} resource_relation=${ID_REFERENCE} # Test Ensure In Use With Multiple Ids In Url -# ${url}= Get Valid Url endpoint=/wagegroups/{wagegroup_id} method=get +# ${url}= Get Valid Url path=/wagegroups/{wagegroup_id} # Ensure In Use url=${url} resource_relation=${ID_REFERENCE} Test Ensure In Use Raises When No Id In Url - ${url}= Get Valid Url endpoint=/wagegroups method=post + ${url}= Get Valid Url path=/wagegroups Run Keyword And Expect Error ValueError: The provided url* ... Ensure In Use url=${url} resource_relation=${ID_REFERENCE} Test Ensure In Use Raises When Post Fails - ${url}= Get Valid Url endpoint=/wagegroups/{wagegroup_id} method=get + ${url}= Get Valid Url path=/wagegroups/{wagegroup_id} Run Keyword And Expect Error HTTPError: 405 Client Error* ... 
Ensure In Use url=${url} resource_relation=${INVALID_ID_REFERENCE} diff --git a/tests/libcore/suites/test_faker_locale.robot b/tests/libcore/suites/test_faker_locale.robot index 0392f06..b1c7f5d 100644 --- a/tests/libcore/suites/test_faker_locale.robot +++ b/tests/libcore/suites/test_faker_locale.robot @@ -13,6 +13,6 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Get Request Data For Schema With allOf - ${request_data}= Get Request Data endpoint=/hypermedia method=post + ${request_data}= Get Request Data path=/hypermedia method=post # this regex should match all characters in the simplified Chinese character set Should Match Regexp ${request_data.dto.title} ^[\u4E00-\u9FA5]+$ diff --git a/tests/libcore/suites/test_get_ids_for_endpoint.robot b/tests/libcore/suites/test_get_ids_for_endpoint.robot index 8d5212e..127bb86 100644 --- a/tests/libcore/suites/test_get_ids_for_endpoint.robot +++ b/tests/libcore/suites/test_get_ids_for_endpoint.robot @@ -14,14 +14,14 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Get Ids From Url That Returns Single Resource - ${url}= Get Valid Url endpoint=/wagegroups/{wagegroup_id} method=post + ${url}= Get Valid Url path=/wagegroups/{wagegroup_id} ${ids}= Get Ids From Url url=${url} Length Should Be item=${ids} length=1 Test Get Ids From Url That Returns List Of Resources # Create an Employee resource so the returned list is not empty - Get Valid Url endpoint=/employees/{employee_id} method=get - ${url}= Get Valid Url endpoint=/employees method=get + Get Valid Url path=/employees/{employee_id} + ${url}= Get Valid Url path=/employees ${ids}= Get Ids From Url url=${url} ${number_of_ids}= Get Length item=${ids} Should Be True $number_of_ids > 0 diff --git a/tests/libcore/suites/test_get_invalid_json_data.robot b/tests/libcore/suites/test_get_invalid_json_data.robot index 12a89f0..4520175 100644 --- a/tests/libcore/suites/test_get_invalid_json_data.robot +++ b/tests/libcore/suites/test_get_invalid_json_data.robot @@ 
-14,7 +14,7 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Get Invalid Json Data Raises If Data Cannot Be Invalidated - ${request_data}= Get Request Data endpoint=/ method=get + ${request_data}= Get Request Data path=/ method=get Run Keyword And Expect Error ValueError: Failed to invalidate: no data_relations and empty schema. ... Get Invalid Json Data ... url=none @@ -22,7 +22,7 @@ Test Get Invalid Json Data Raises If Data Cannot Be Invalidated ... status_code=999 ... request_data=${request_data} - ${request_data}= Get Request Data endpoint=/employees method=post + ${request_data}= Get Request Data path=/employees method=post Run Keyword And Expect Error ValueError: No property can be invalidated to cause status_code 999 ... Get Invalid Json Data ... url=none @@ -31,7 +31,7 @@ Test Get Invalid Json Data Raises If Data Cannot Be Invalidated ... request_data=${request_data} Test Get Invalid Json Data Based On Schema - ${request_data}= Get Request Data endpoint=/events/ method=post + ${request_data}= Get Request Data path=/events/ method=post Should Be Empty ${request_data.dto.get_relations_for_error_code(422)} ${invalid_json}= Get Invalid Json Data ... url=none @@ -44,7 +44,7 @@ Test Get Invalid Json Data Based On Schema Should Be Equal As Integers ${response.status_code} 422 Test Get Invalid Json Data For UniquePropertyValueConstraint - ${request_data}= Get Request Data endpoint=/wagegroups method=post + ${request_data}= Get Request Data path=/wagegroups method=post ${invalid_json}= Get Invalid Json Data ... url=${ORIGIN}/wagegroups ... 
method=post @@ -56,8 +56,8 @@ Test Get Invalid Json Data For UniquePropertyValueConstraint Should Be Equal As Integers ${response.status_code} 418 Test Get Invalid Json Data For IdReference - ${url}= Get Valid Url endpoint=/wagegroups/{wagegroup_id} method=delete - ${request_data}= Get Request Data endpoint=/wagegroups/{wagegroup_id} method=delete + ${url}= Get Valid Url path=/wagegroups/{wagegroup_id} + ${request_data}= Get Request Data path=/wagegroups/{wagegroup_id} method=delete ${invalid_json}= Get Invalid Json Data ... url=${url} ... method=delete @@ -69,8 +69,8 @@ Test Get Invalid Json Data For IdReference Should Be Equal As Integers ${response.status_code} 406 Test Get Invalid Json Data For IdDependency - ${url}= Get Valid Url endpoint=/employees method=post - ${request_data}= Get Request Data endpoint=/employees method=post + ${url}= Get Valid Url path=/employees + ${request_data}= Get Request Data path=/employees method=post ${invalid_json}= Get Invalid Json Data ... url=${url} ... method=post @@ -82,7 +82,7 @@ Test Get Invalid Json Data For IdDependency Should Be Equal As Integers ${response.status_code} 451 Test Get Invalid Json Data For Dto With Other Relations - ${request_data}= Get Request Data endpoint=/employees method=post + ${request_data}= Get Request Data path=/employees method=post ${invalid_json}= Get Invalid Json Data ... url=${ORIGIN}/employees ... method=post @@ -94,8 +94,8 @@ Test Get Invalid Json Data For Dto With Other Relations Should Be Equal As Integers ${response.status_code} 403 Test Get Invalid Json Data Can Invalidate Missing Optional Parameters - ${url}= Get Valid Url endpoint=/employees/{emplyee_id} method=patch - ${request_data}= Get Request Data endpoint=/employees/{emplyee_id} method=patch + ${url}= Get Valid Url path=/employees/{emplyee_id} + ${request_data}= Get Request Data path=/employees/{emplyee_id} method=patch Evaluate ${request_data.dto.__dict__.clear()} is None ${invalid_json}= Get Invalid Json Data ... 
url=${url} @@ -105,5 +105,5 @@ Test Get Invalid Json Data Can Invalidate Missing Optional Parameters Should Not Be Equal ${invalid_json} ${request_data.dto.as_dict()} ${response}= Authorized Request ... url=${url} method=patch json_data=${invalid_json} - ${expected_status_codes}= Create List ${403} ${422} ${451} + VAR @{expected_status_codes}= ${403} ${422} ${451} Should Contain ${expected_status_codes} ${response.status_code} diff --git a/tests/libcore/suites/test_get_invalidated_parameters.robot b/tests/libcore/suites/test_get_invalidated_parameters.robot index 9d85f25..77f1f30 100644 --- a/tests/libcore/suites/test_get_invalidated_parameters.robot +++ b/tests/libcore/suites/test_get_invalidated_parameters.robot @@ -14,7 +14,7 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Get Invalidated Parameters Raises For Empty Parameters List - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get Evaluate ${request_data.parameters.clear()} is None Run Keyword And Expect Error ValueError: No params or headers to invalidate. ... Get Invalidated Parameters @@ -22,7 +22,7 @@ Test Get Invalidated Parameters Raises For Empty Parameters List ... request_data=${request_data} Test Get Invalidated Parameters Raises For Mismatched Parameters List - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get Evaluate ${request_data.parameters.clear()} is None Evaluate ${request_data.parameters.append({"name": "dummy"})} is None Run Keyword And Expect Error ValueError: No parameter can be changed to cause status_code 401. @@ -31,21 +31,21 @@ Test Get Invalidated Parameters Raises For Mismatched Parameters List ... 
request_data=${request_data} Test Get Invalidated Parameters Raises For Status Code That Cannot Be Invalidated - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get Run Keyword And Expect Error ValueError: No relations to cause status_code 200 found. ... Get Invalidated Parameters ... status_code=200 ... request_data=${request_data} Test Get Invalidated Parameters Raises For Headers That Cannot Be Invalidated - ${request_data}= Get Request Data endpoint=/ method=get + ${request_data}= Get Request Data path=/ method=get Run Keyword And Expect Error ValueError: None of the query parameters and headers can be invalidated. ... Get Invalidated Parameters ... status_code=422 ... request_data=${request_data} Test Get Invalidated Parameters For Invalid Propery Default Response - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get ${invalidated}= Get Invalidated Parameters ... status_code=422 ... request_data=${request_data} @@ -53,14 +53,14 @@ Test Get Invalidated Parameters For Invalid Propery Default Response Length Should Be ${secret_code} 36 Test Get Invalidated Parameters For PropertyValueConstraint - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get ${invalidated}= Get Invalidated Parameters ... status_code=401 ... request_data=${request_data} ${secret_code}= Set Variable ${invalidated[1].get("secret-code")} Should Be True int($secret_code) != 42 - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get ${invalidated}= Get Invalidated Parameters ... status_code=403 ... 
request_data=${request_data} @@ -68,7 +68,7 @@ Test Get Invalidated Parameters For PropertyValueConstraint Should Not Be Equal ${seal} ${NONE} Test Get Invalidated Parameters Adds Optional Parameter If Not Provided - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get Evaluate ${request_data.headers.clear()} is None ${invalidated}= Get Invalidated Parameters ... status_code=422 @@ -77,7 +77,7 @@ Test Get Invalidated Parameters Adds Optional Parameter If Not Provided Length Should Be ${headers} 1 Test Get Invalidated Parameters Adds Optional Parameter If treat_as_mandatory Is True - ${request_data}= Get Request Data endpoint=/energy_label/{zipcode}/{home_number} method=get + ${request_data}= Get Request Data path=/energy_label/{zipcode}/{home_number} method=get Evaluate ${request_data.params.clear()} is None ${invalidated}= Get Invalidated Parameters ... status_code=422 diff --git a/tests/libcore/suites/test_get_invalidated_url.robot b/tests/libcore/suites/test_get_invalidated_url.robot index 5e59d4f..6860166 100644 --- a/tests/libcore/suites/test_get_invalidated_url.robot +++ b/tests/libcore/suites/test_get_invalidated_url.robot @@ -22,30 +22,29 @@ Test Get Invalidated Url Raises For Endpoint That Cannot Be Invalidated ... 
Get Invalidated Url valid_url=${ORIGIN}/employees Test Get Invalidated Url For Endpoint Ending With Path Id - ${url}= Get Valid Url endpoint=/employees/{employee_id} method=get + ${url}= Get Valid Url path=/employees/{employee_id} ${invalidated}= Get Invalidated Url valid_url=${url} Should Not Be Equal ${url} ${invalidated} Should Start With ${invalidated} http://localhost:8000/employees/ Test Get Invalidated Url For Endpoint Not Ending With Path Id - ${url}= Get Valid Url endpoint=/wagegroups/{wagegroup_id}/employees method=get + ${url}= Get Valid Url path=/wagegroups/{wagegroup_id}/employees ${invalidated}= Get Invalidated Url valid_url=${url} Should Not Be Equal ${url} ${invalidated} Should Start With ${invalidated} http://localhost:8000/wagegroups/ Should End With ${invalidated} /employees Test Get Invalidated Url For Endpoint With Multiple Path Ids - ${url}= Get Valid Url endpoint=/energy_label/{zipcode}/{home_number} method=get + ${url}= Get Valid Url path=/energy_label/{zipcode}/{home_number} ${invalidated}= Get Invalidated Url valid_url=${url} Should Not Be Equal ${url} ${invalidated} Should Start With ${invalidated} http://localhost:8000/energy_label/1111AA/ Test Get Invalidated Url For PathPropertiesConstraint Invalid Value Status Code - ${url}= Get Valid Url endpoint=/energy_label/{zipcode}/{home_number} method=get + ${url}= Get Valid Url path=/energy_label/{zipcode}/{home_number} ${invalidated}= Get Invalidated Url ... valid_url=${url} ... path=/energy_label/{zipcode}/{home_number} - ... method=get ... 
expected_status_code=422 Should Not Be Equal ${url} ${invalidated} Should Start With ${invalidated} http://localhost:8000/energy_label/0123AA diff --git a/tests/libcore/suites/test_get_json_data_for_dto_class.robot b/tests/libcore/suites/test_get_json_data_for_dto_class.robot index 7d2d04b..3f64431 100644 --- a/tests/libcore/suites/test_get_json_data_for_dto_class.robot +++ b/tests/libcore/suites/test_get_json_data_for_dto_class.robot @@ -13,35 +13,35 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Get Json Data For Dto Class With DefaultDto - ${request_data}= Get Request Data endpoint=/wagegroups method=post + ${request_data}= Get Request Data path=/wagegroups method=post Get Json Data For Dto Class ... schema=${request_data.dto_schema} ... dto_class=${DEFAULT_DTO} ... operation_id=dummy Test Get Json Data For Dto Class With IGNORE Constrained - ${request_data}= Get Request Data endpoint=/wagegroups method=post + ${request_data}= Get Request Data path=/wagegroups method=post Get Json Data For Dto Class ... schema=${request_data.dto_schema} ... dto_class=${WAGEGROUP_DTO} ... operation_id=dummy Test Get Json Data For Dto Class With Single DependantId - ${request_data}= Get Request Data endpoint=/employees method=post + ${request_data}= Get Request Data path=/employees method=post Get Json Data For Dto Class ... schema=${request_data.dto_schema} ... dto_class=${EMPLOYEE_DTO} ... operation_id=dummy # Test Get Json Data For Dto Class With Multiple DependantIds -# ${request_data}= Get Request Data endpoint=/employees method=post +# ${request_data}= Get Request Data path=/employees method=post # Get Json Data For Dto Class # ... schema=${request_data.dto_schema} # ... dto_class=${EMPLOYEE_DTO} # ... operation_id=dummy Test Get Json Data For Dto Class With Array And Object - ${request_data}= Get Request Data endpoint=/events/ method=post + ${request_data}= Get Request Data path=/events/ method=post Get Json Data For Dto Class ... 
schema=${request_data.dto_schema} ... dto_class=${DEFAULT_DTO} diff --git a/tests/libcore/suites/test_get_json_data_with_conflict.robot b/tests/libcore/suites/test_get_json_data_with_conflict.robot index b39b83f..52f3b98 100644 --- a/tests/libcore/suites/test_get_json_data_with_conflict.robot +++ b/tests/libcore/suites/test_get_json_data_with_conflict.robot @@ -13,7 +13,7 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Get Json Data With Conflict Raises For No UniquePropertyValueConstraint - ${url}= Get Valid Url endpoint=/wagegroups method=post + ${url}= Get Valid Url path=/wagegroups Run Keyword And Expect Error ValueError: No UniquePropertyValueConstraint* ... Get Json Data With Conflict ... url=${url} @@ -22,8 +22,8 @@ Test Get Json Data With Conflict Raises For No UniquePropertyValueConstraint ... conflict_status_code=418 Test Get Json Data With Conflict For Post Request - ${url}= Get Valid Url endpoint=/wagegroups method=post - ${request_data}= Get Request Data endpoint=/wagegroups method=post + ${url}= Get Valid Url path=/wagegroups + ${request_data}= Get Request Data path=/wagegroups method=post ${invalid_data}= Get Json Data With Conflict ... url=${url} ... method=post @@ -32,8 +32,8 @@ Test Get Json Data With Conflict For Post Request Should Not Be Empty ${invalid_data} Test Get Json Data With Conflict For Put Request - ${url}= Get Valid Url endpoint=/wagegroups/{wagegroup_id} method=put - ${request_data}= Get Request Data endpoint=/wagegroups/{wagegroup_id} method=put + ${url}= Get Valid Url path=/wagegroups/{wagegroup_id} + ${request_data}= Get Request Data path=/wagegroups/{wagegroup_id} method=put ${invalid_json}= Get Json Data With Conflict ... url=${url} ... 
method=put @@ -44,8 +44,8 @@ Test Get Json Data With Conflict For Put Request Should Be Equal As Integers ${response.status_code} 418 # Test Get Json Data With Conflict For Patch Request -# ${url}= Get Valid Url endpoint=/wagegroups/{wagegroup_id} method=put -# ${request_data}= Get Request Data endpoint=/wagegroups/{wagegroup_id} method=put +# ${url}= Get Valid Url path=/wagegroups/{wagegroup_id} +# ${request_data}= Get Request Data path=/wagegroups/{wagegroup_id} method=put # ${invalid_json}= Get Json Data With Conflict # ... url=${url} # ... method=put diff --git a/tests/libcore/suites/test_get_parameterized_endpoint_from_url.robot b/tests/libcore/suites/test_get_parameterized_endpoint_from_url.robot index 976559e..e7e98a4 100644 --- a/tests/libcore/suites/test_get_parameterized_endpoint_from_url.robot +++ b/tests/libcore/suites/test_get_parameterized_endpoint_from_url.robot @@ -13,20 +13,20 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** -Test Get Parameterized Endpoint From Url Raises For Invalid Endpoint +Test Get Parameterized Path From Url Raises For Invalid Endpoint Run KeyWord And Expect Error ValueError: /dummy not found in paths section of the OpenAPI document. - ... Get Parameterized Endpoint From Url url=${ORIGIN}/dummy + ... 
Get Parameterized Path From Url url=${ORIGIN}/dummy -Test Get Parameterized Endpoint From Url With No Id - ${url}= Get Valid Url endpoint=/events/ method=get - ${endpoint}= Get Parameterized Endpoint From Url url=${url} +Test Get Parameterized Path From Url With No Id + ${url}= Get Valid Url path=/events/ + ${endpoint}= Get Parameterized Path From Url url=${url} Should Be Equal ${endpoint} /events/ -Test Get Parameterized Endpoint From Url With Single Id - ${url}= Get Valid Url endpoint=/employees/{employee_id} method=get - ${endpoint}= Get Parameterized Endpoint From Url url=${url} +Test Get Parameterized Path From Url With Single Id + ${url}= Get Valid Url path=/employees/{employee_id} + ${endpoint}= Get Parameterized Path From Url url=${url} Should Be Equal ${endpoint} /employees/{employee_id} -# Test Get Parameterized Endpoint From Url With Multiple Ids -# ${url}= Get Valid Url endpoint=/events/ method=get -# Get Parameterized Endpoint From Url url=${url} +# Test Get Parameterized Path From Url With Multiple Ids +# ${url}= Get Valid Url path=/events/ +# Get Parameterized Path From Url url=${url} diff --git a/tests/libcore/suites/test_get_request_data.robot b/tests/libcore/suites/test_get_request_data.robot index ab8d206..7b4ca28 100644 --- a/tests/libcore/suites/test_get_request_data.robot +++ b/tests/libcore/suites/test_get_request_data.robot @@ -1,4 +1,5 @@ *** Settings *** +Library Collections Library OpenApiLibCore ... source=${ORIGIN}/openapi.json ... 
origin=${ORIGIN} @@ -13,9 +14,9 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Get Request Data For Invalid Method On Endpoint - ${request_data}= Get Request Data endpoint=/events/ method=delete - ${dict}= Create Dictionary - ${list}= Create List + ${request_data}= Get Request Data path=/events/ method=delete + VAR &{dict}= &{EMPTY} + VAR @{list}= @{EMPTY} Should Be Equal ${request_data.dto} ${DEFAULT_DTO()} Should Be Equal ${request_data.dto_schema} ${dict} Should Be Equal ${request_data.parameters} ${list} @@ -24,25 +25,35 @@ Test Get Request Data For Invalid Method On Endpoint Should Not Be True ${request_data.has_body} Test Get Request Data For Endpoint With RequestBody - ${request_data}= Get Request Data endpoint=/employees method=post - ${dict}= Create Dictionary - ${list}= Create List - ${birthdays}= Create List 1970-07-07 1980-08-08 1990-09-09 - ${parttime_days}= Create List Monday Tuesday Wednesday Thursday Friday ${NONE} + ${request_data}= Get Request Data path=/employees method=post + VAR &{dict}= &{EMPTY} + VAR @{list}= @{EMPTY} + VAR @{birthdays}= 1970-07-07 1980-08-08 1990-09-09 + VAR @{weekdays}= Monday Tuesday Wednesday Thursday Friday Length Should Be ${request_data.dto.name} 36 Length Should Be ${request_data.dto.wagegroup_id} 36 Should Contain ${birthdays} ${request_data.dto.date_of_birth} - Should Contain ${parttime_days} ${request_data.dto.parttime_day} + VAR ${generated_parttime_schedule}= ${request_data.dto.parttime_schedule} + IF $generated_parttime_schedule is not None + ${parttime_days}= Get From Dictionary ${generated_parttime_schedule} parttime_days + Should Be True 1 <= len($parttime_days) <= 5 + FOR ${parttime_day} IN @{parttime_days} + ${weekday}= Get From Dictionary ${parttime_day} weekday + Should Contain ${weekdays} ${weekday} + ${available_hours}= Get From Dictionary ${parttime_day} available_hours + Should Be True 0 <= $available_hours < 8 + END + END Should Not Be Empty ${request_data.dto_schema} Should Be Equal 
${request_data.parameters} ${list} Should Be Equal ${request_data.params} ${dict} - &{expected_headers}= Create Dictionary content-type=application/json + VAR &{expected_headers}= content-type=application/json Should Be Equal ${request_data.headers} ${expected_headers} Should Be True ${request_data.has_body} Test Get Request Data For Endpoint Without RequestBody But With DtoClass - ${request_data}= Get Request Data endpoint=/wagegroups/{wagegroup_id} method=delete - ${dict}= Create Dictionary + ${request_data}= Get Request Data path=/wagegroups/{wagegroup_id} method=delete + VAR &{dict}= &{EMPTY} Should Be Equal As Strings ${request_data.dto} delete_wagegroup_wagegroups__wagegroup_id__delete() Should Be Equal ${request_data.dto_schema} ${dict} Should Not Be Empty ${request_data.parameters} @@ -51,13 +62,13 @@ Test Get Request Data For Endpoint Without RequestBody But With DtoClass Should Not Be True ${request_data.has_body} # Test Get Request Data For Endpoint With RequestBody With Only Ignored Properties -# ${request_data}= Get Request Data endpoint=/wagegroups/{wagegroup_id} method=delete -# ${dict}= Create Dictionary -# ${list}= Create List +# ${request_data}= Get Request Data path=/wagegroups/{wagegroup_id} method=delete +# VAR &{dict}= &{EMPTY} +# VAR @{list}= @{EMPTY} # Should Be Equal As Strings ${request_data.dto} delete_wagegroup_wagegroups__wagegroup_id__delete() # Should Be Equal ${request_data.dto_schema} ${dict} # Should Not Be Empty ${request_data.parameters} # Should Be Equal ${request_data.params} ${dict} -# &{expected_headers}= Create Dictionary content-type=application/json +# VAR &{expected_headers}= content-type=application/json # Should Be Equal ${request_data.headers} ${expected_headers} # Should Be True ${request_data.has_body} diff --git a/tests/libcore/suites/test_get_valid_id_for_endpoint.robot b/tests/libcore/suites/test_get_valid_id_for_endpoint.robot index 9a7f321..e3d4c0c 100644 --- 
a/tests/libcore/suites/test_get_valid_id_for_endpoint.robot +++ b/tests/libcore/suites/test_get_valid_id_for_endpoint.robot @@ -13,28 +13,28 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** -Test Get Valid Id For Endpoint Raises For Endpoint Without Id In Path +Test Get Valid Id For Path Raises For Endpoint Without Id In Path Run Keyword And Expect Error Failed to get a valid id from* - ... Get Valid Id For Endpoint endpoint=/events/ method=get + ... Get Valid Id For Path path=/events/ -Test Get Valid Id For Endpoint Raises For Endpoint With No Post Operation And No Resources +Test Get Valid Id For Path Raises For Endpoint With No Post Operation And No Resources Run Keyword And Expect Error Failed to get a valid id using GET on* - ... Get Valid Id For Endpoint endpoint=/secret_message method=get + ... Get Valid Id For Path path=/secret_message -Test Get Valid Id For Endpoint Returns Id For Resource Created By Post Operation - ${id}= Get Valid Id For Endpoint endpoint=/wagegroups/{wagegroup_id} method=get +Test Get Valid Id For Path Returns Id For Resource Created By Post Operation + ${id}= Get Valid Id For Path path=/wagegroups/{wagegroup_id} Length Should Be ${id} 36 -Test Get Valid Id For Endpoint Returns Random Id From Array Endpoint With No Post Operation - ${url}= Get Valid Url endpoint=/employees method=post - ${request_data}= Get Request Data endpoint=/employees method=post +Test Get Valid Id For Path Returns Random Id From Array Endpoint With No Post Operation + ${url}= Get Valid Url path=/employees + ${request_data}= Get Request Data path=/employees method=post Authorized Request ... url=${url} ... method=post ... 
json_data=${request_data.get_required_properties_dict()} - ${id}= Get Valid Id For Endpoint endpoint=/available_employees method=get + ${id}= Get Valid Id For Path path=/available_employees Length Should Be ${id} 32 -# Test Get Valid Id For Endpoint By Href +# Test Get Valid Id For Path By Href -# Test Get Valid Id For Endpoint Raises For Post Operation That Returns Array +# Test Get Valid Id For Path Raises For Post Operation That Returns Array diff --git a/tests/libcore/suites/test_get_valid_url.robot b/tests/libcore/suites/test_get_valid_url.robot index 3951b48..e9beea3 100644 --- a/tests/libcore/suites/test_get_valid_url.robot +++ b/tests/libcore/suites/test_get_valid_url.robot @@ -14,16 +14,16 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Get Valid Url Raises For Invalid Endpoint Run Keyword And Expect Error ValueError: /dummy not found in paths section of the OpenAPI document. - ... Get Valid Url endpoint=/dummy method=get + ... Get Valid Url path=/dummy Test Get Valid Url With Unsupported Method - ${url}= Get Valid Url endpoint=/events/ method=patch + ${url}= Get Valid Url path=/events/ Should Be Equal ${url} ${ORIGIN}/events/ Test Get Valid Url With Id - ${url}= Get Valid Url endpoint=/wagegroups/{wagegroup_id} method=get + ${url}= Get Valid Url path=/wagegroups/{wagegroup_id} Should Contain container=${url} item=${ORIGIN}/wagegroups/ Test Get Valid Url By PathPropertiesContraint - ${url}= Get Valid Url endpoint=/energy_label/{zipcode}/{home_number} method=get + ${url}= Get Valid Url path=/energy_label/{zipcode}/{home_number} Should Be Equal As Strings ${url} ${ORIGIN}/energy_label/1111AA/10 diff --git a/tests/libcore/suites/test_readonly.robot b/tests/libcore/suites/test_readonly.robot index 2d3330f..385e81f 100644 --- a/tests/libcore/suites/test_readonly.robot +++ b/tests/libcore/suites/test_readonly.robot @@ -13,7 +13,7 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test ReadOnly Is Filtered From Request Data - ${request_data}= Get 
Request Data endpoint=/api/location method=post + ${request_data}= Get Request Data path=/api/location method=post ${json_data}= Set Variable ${request_data.dto.as_dict()} Should Not Contain ${json_data} id Should Contain ${json_data} locationId diff --git a/tests/libcore/suites/test_request_data_class.robot b/tests/libcore/suites/test_request_data_class.robot index 37eb865..0fb1d5c 100644 --- a/tests/libcore/suites/test_request_data_class.robot +++ b/tests/libcore/suites/test_request_data_class.robot @@ -13,76 +13,77 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Has Optional Properties - ${request_data}= Get Request Data endpoint=/employees method=get + ${request_data}= Get Request Data path=/employees method=get Should Be Equal ${request_data.has_optional_properties} ${FALSE} - ${request_data}= Get Request Data endpoint=/employees method=post + ${request_data}= Get Request Data path=/employees method=post Should Be Equal ${request_data.has_optional_properties} ${TRUE} Test Has Optional Params - ${request_data}= Get Request Data endpoint=/available_employees method=get + ${request_data}= Get Request Data path=/available_employees method=get Should Be Equal ${request_data.has_optional_params} ${FALSE} - ${request_data}= Get Request Data endpoint=/energy_label/{zipcode}/{home_number} method=get + ${request_data}= Get Request Data path=/energy_label/{zipcode}/{home_number} method=get Should Be Equal ${request_data.has_optional_params} ${TRUE} Test Has Optional Headers - ${request_data}= Get Request Data endpoint=/employees method=get + ${request_data}= Get Request Data path=/employees method=get Should Be Equal ${request_data.has_optional_headers} ${FALSE} - ${request_data}= Get Request Data endpoint=/ method=get + ${request_data}= Get Request Data path=/ method=get Should Be Equal ${request_data.has_optional_headers} ${TRUE} Test Params That Can Be Invalidated - ${request_data}= Get Request Data endpoint=/available_employees method=get + 
${request_data}= Get Request Data path=/available_employees method=get ${params}= Set Variable ${request_data.params_that_can_be_invalidated} Should Contain ${params} weekday - ${request_data}= Get Request Data endpoint=/energy_label/{zipcode}/{home_number} method=get + ${request_data}= Get Request Data path=/energy_label/{zipcode}/{home_number} method=get ${params}= Set Variable ${request_data.params_that_can_be_invalidated} Should Contain ${params} extension - ${request_data}= Get Request Data endpoint=/events/ method=get + ${request_data}= Get Request Data path=/events/ method=get ${params}= Set Variable ${request_data.params_that_can_be_invalidated} Should Be Empty ${params} Test Headers That Can Be Invalidated - ${request_data}= Get Request Data endpoint=/ method=get + ${request_data}= Get Request Data path=/ method=get ${headers}= Set Variable ${request_data.headers_that_can_be_invalidated} Should Be Empty ${headers} - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get ${headers}= Set Variable ${request_data.headers_that_can_be_invalidated} Should Contain ${headers} secret-code Test Get Required Properties Dict - ${request_data}= Get Request Data endpoint=/employees method=post - Should Contain ${request_data.dto.as_dict()} parttime_day + ${request_data}= Get Request Data path=/employees method=post + Should Contain ${request_data.dto.as_dict()} parttime_schedule Should Not Be Empty ${request_data.dto.name} ${required_properties}= Set Variable ${request_data.get_required_properties_dict()} Should Contain ${required_properties} name - # parttime_day is configured with treat_as_mandatory=True - Should Contain ${required_properties} parttime_day + # parttime_schedule is configured with treat_as_mandatory=True + Should Contain ${required_properties} parttime_schedule Test Get Required Params - ${request_data}= Get Request Data endpoint=/available_employees method=get + 
${request_data}= Get Request Data path=/available_employees method=get Should Not Be Empty ${request_data.params.get("weekday")} ${required_params}= Set Variable ${request_data.get_required_params()} Should Contain ${required_params} weekday - ${request_data}= Get Request Data endpoint=/energy_label/{zipcode}/{home_number} method=get + ${request_data}= Get Request Data path=/energy_label/{zipcode}/{home_number} method=get Should Contain ${request_data.params} extension ${required_params}= Set Variable ${request_data.get_required_params()} - Should Be Empty ${required_params} + # extension is configured with treat_as_mandatory=True + Should Contain ${required_params} extension Test Get Required Headers - ${request_data}= Get Request Data endpoint=/secret_message method=get + ${request_data}= Get Request Data path=/secret_message method=get Should Be Equal As Integers ${request_data.headers.get("secret-code")} 42 ${required_headers}= Set Variable ${request_data.get_required_headers()} Should Contain ${required_headers} secret-code Should Not Contain ${required_headers} seal - ${request_data}= Get Request Data endpoint=/ method=get + ${request_data}= Get Request Data path=/ method=get Should Not Be Empty ${request_data.headers.get("name-from-header")} Should Not Be Empty ${request_data.headers.get("title")} ${required_headers}= Set Variable ${request_data.get_required_headers()} diff --git a/tests/libcore/suites/test_schema_variations.robot b/tests/libcore/suites/test_schema_variations.robot index 201e2f0..45db8a1 100644 --- a/tests/libcore/suites/test_schema_variations.robot +++ b/tests/libcore/suites/test_schema_variations.robot @@ -12,14 +12,14 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Get Request Data For Schema With allOf - ${request_data}= Get Request Data endpoint=/hypermedia method=post - ${dict}= Create Dictionary - ${list}= Create List - ${list_of_dict}= Create List ${dict} - ${expected_headers}= Create Dictionary 
content-type=application/hal+json + ${request_data}= Get Request Data path=/hypermedia method=post + VAR &{dict}= &{EMPTY} + VAR @{list}= @{EMPTY} + VAR &{expected_headers}= content-type=application/hal+json Length Should Be ${request_data.dto.isan} 36 Length Should Be ${request_data.dto.published} 10 - Should Be Equal ${request_data.dto.tags} ${list_of_dict} + Length Should Be ${request_data.dto.tags} 1 + Length Should Be ${request_data.dto.tags}[0] 36 Length Should Be ${request_data.dto_schema} 4 Length Should Be ${request_data.dto_schema.get("properties")} 4 Should Be Equal ${request_data.parameters} ${list} diff --git a/tests/libcore/suites/test_validate_response.robot b/tests/libcore/suites/test_validate_response.robot index e6307ea..0a41d2e 100644 --- a/tests/libcore/suites/test_validate_response.robot +++ b/tests/libcore/suites/test_validate_response.robot @@ -13,8 +13,8 @@ ${ORIGIN}= http://localhost:8000 *** Test Cases *** Test Bool Response - ${url}= Get Valid Url endpoint=/employees/{employee_id} method=patch - ${request_data}= Get Request Data endpoint=/employees/{employee_id} method=patch + ${url}= Get Valid Url path=/employees/{employee_id} + ${request_data}= Get Request Data path=/employees/{employee_id} method=patch ${response}= Authorized Request ... url=${url} ... 
method=patch diff --git a/tests/libcore/unittests/test_get_safe_key.py b/tests/libcore/unittests/test_get_safe_key.py index 3878f6a..f22be52 100644 --- a/tests/libcore/unittests/test_get_safe_key.py +++ b/tests/libcore/unittests/test_get_safe_key.py @@ -1,7 +1,7 @@ # pylint: disable="missing-class-docstring", "missing-function-docstring" import unittest -from OpenApiLibCore.openapi_libcore import get_safe_key +from OpenApiLibCore.data_generation.data_generation_core import get_safe_key class TestGetSafeKey(unittest.TestCase): diff --git a/tests/server/testserver.py b/tests/server/testserver.py index d10a093..f6ea475 100644 --- a/tests/server/testserver.py +++ b/tests/server/testserver.py @@ -39,6 +39,15 @@ class WeekDay(str, Enum): Friday = "Friday" +class ParttimeDay(BaseModel): + weekday: WeekDay + available_hours: int = Field(4, ge=0, lt=8) + + +class ParttimeSchedule(BaseModel): + parttime_days: list[ParttimeDay] = Field(..., min_length=1, max_length=5) + + class Wing(str, Enum): N = "North" E = "East" @@ -71,14 +80,14 @@ class EmployeeDetails(BaseModel): employee_number: int wagegroup_id: str date_of_birth: datetime.date - parttime_day: WeekDay | None = None + parttime_schedule: ParttimeSchedule | None = None class Employee(BaseModel): name: str wagegroup_id: str date_of_birth: datetime.date - parttime_day: WeekDay | None = None + parttime_schedule: ParttimeSchedule | None = None class EmployeeUpdate(BaseModel): @@ -86,7 +95,7 @@ class EmployeeUpdate(BaseModel): employee_number: int | None = None wagegroup_id: str | None = None date_of_birth: datetime.date | None = None - parttime_day: WeekDay | None = None + parttime_schedule: ParttimeSchedule | None = None WAGE_GROUPS: dict[str, WageGroup] = {} @@ -280,10 +289,16 @@ def post_employee(employee: Employee) -> EmployeeDetails: raise HTTPException( status_code=403, detail="An employee must be at least 18 years old." 
) + parttime_schedule = employee.parttime_schedule + if parttime_schedule is not None: + parttime_schedule = ParttimeSchedule.model_validate(parttime_schedule) new_employee = EmployeeDetails( identification=uuid4().hex, + name=employee.name, employee_number=next(EMPLOYEE_NUMBERS), - **employee.model_dump(), + wagegroup_id=employee.wagegroup_id, + date_of_birth=employee.date_of_birth, + parttime_schedule=parttime_schedule, ) EMPLOYEES[new_employee.identification] = new_employee return new_employee @@ -349,12 +364,28 @@ def patch_employee(employee_id: str, employee: EmployeeUpdate) -> JSONResponse: ) updated_employee = stored_employee_data.model_copy(update=employee_update_data) + if updated_employee.parttime_schedule is not None: + parttime_schedule = ParttimeSchedule.model_validate( + updated_employee.parttime_schedule + ) + updated_employee.parttime_schedule = parttime_schedule EMPLOYEES[employee_id] = updated_employee return JSONResponse(content=True) @app.get("/available_employees", status_code=200, response_model=list[EmployeeDetails]) def get_available_employees(weekday: WeekDay = Query(...)) -> list[EmployeeDetails]: - return [ - e for e in EMPLOYEES.values() if getattr(e, "parttime_day", None) != weekday - ] + available_employees: list[EmployeeDetails] = [] + for employee in EMPLOYEES.values(): + if not employee.parttime_schedule: + continue + + weekday_availability = [ + d.available_hours + for d in employee.parttime_schedule.parttime_days + if d.weekday == weekday + ] + if weekday_availability and weekday_availability[0] > 0: + available_employees.append(employee) + + return available_employees diff --git a/tests/user_implemented/custom_user_mappings.py b/tests/user_implemented/custom_user_mappings.py index fe23d93..b7c95d6 100644 --- a/tests/user_implemented/custom_user_mappings.py +++ b/tests/user_implemented/custom_user_mappings.py @@ -60,6 +60,30 @@ def get_relations() -> list[ResourceRelation]: return relations +class ParttimeDayDto(Dto): + 
@staticmethod + def get_relations() -> list[ResourceRelation]: + relations: list[ResourceRelation] = [ + PropertyValueConstraint( + property_name="weekday", + values=["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"], + ), + ] + return relations + + +class ParttimeScheduleDto(Dto): + @staticmethod + def get_relations() -> list[ResourceRelation]: + relations: list[ResourceRelation] = [ + PropertyValueConstraint( + property_name="parttime_days", + values=[ParttimeDayDto], + ), + ] + return relations + + class EmployeeDto(Dto): @staticmethod def get_relations() -> list[ResourceRelation]: @@ -77,8 +101,8 @@ def get_relations() -> list[ResourceRelation]: error_code=422, ), PropertyValueConstraint( - property_name="parttime_day", - values=["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"], + property_name="parttime_schedule", + values=[ParttimeScheduleDto], treat_as_mandatory=True, ), ]