From 8a4ec9832e5244f83dd234383977496f82021206 Mon Sep 17 00:00:00 2001
From: Fatih Acar
Date: Wed, 18 Sep 2024 13:21:31 +0200
Subject: [PATCH 01/10] fix(backend): allow usage of "updated_at" attributes

Signed-off-by: Fatih Acar
---
 backend/infrahub/core/node/__init__.py | 6 ++++++
 changelog/3730.fixed.md                | 1 +
 2 files changed, 7 insertions(+)
 create mode 100644 changelog/3730.fixed.md

diff --git a/backend/infrahub/core/node/__init__.py b/backend/infrahub/core/node/__init__.py
index 62cf2d64aa..366795d505 100644
--- a/backend/infrahub/core/node/__init__.py
+++ b/backend/infrahub/core/node/__init__.py
@@ -239,6 +239,9 @@ async def _process_fields(self, fields: dict, db: InfrahubDatabase) -> None:
         # -------------------------------------------
         # Validate Input
         # -------------------------------------------
+        if "updated_at" in fields and "updated_at" not in self._schema.valid_input_names:
+            # FIXME: Allow users to use "updated_at" named attributes until we have proper metadata handling
+            fields.pop("updated_at")
         for field_name in fields.keys():
             if field_name not in self._schema.valid_input_names:
                 errors.append(ValidationError({field_name: f"{field_name} is not a valid input for {self.get_kind()}"}))
@@ -398,6 +401,9 @@ async def load(
             self._existing = True

         if updated_at:
+            kwargs["updated_at"] = (
+                updated_at  # FIXME: Allow users to use "updated_at" named attributes until we have proper metadata handling
+            )
             self._updated_at = Timestamp(updated_at)

         await self._process_fields(db=db, fields=kwargs)

diff --git a/changelog/3730.fixed.md b/changelog/3730.fixed.md
new file mode 100644
index 0000000000..39b82cdb25
--- /dev/null
+++ b/changelog/3730.fixed.md
@@ -0,0 +1 @@
+GraphQL results when querying nodes with `updated_at` named attributes will now return correct values instead of null/None
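
A minimal illustration of the behavior this fix restores, written in the style of the SDK examples removed in the next patch. The node kind `TestWidget` and its schema-defined `updated_at` Text attribute are hypothetical, assumed here purely for demonstration:

```python
from asyncio import run as aiorun

from infrahub_sdk import InfrahubClient


async def main():
    client = InfrahubClient(address="http://localhost:8000")

    # "updated_at" here is an ordinary attribute defined in the node's schema,
    # not the node's internal metadata timestamp.
    obj = await client.create(kind="TestWidget", name="demo", updated_at="2024-09-18")
    await obj.save()

    fetched = await client.get(kind="TestWidget", name__value="demo")
    # Before this fix the attribute came back as null/None;
    # it now returns the stored value.
    print(fetched.updated_at.value)


if __name__ == "__main__":
    aiorun(main())
```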

From 160069f0e3ae2b415cb4084548a4ee5934afd42f Mon Sep 17 00:00:00 2001
From: Damien Garros
Date: Wed, 18 Sep 2024 19:23:42 +0200
Subject: [PATCH 02/10] remove local python sdk

---
 python_sdk/CHANGELOG.md | 28 - python_sdk/LICENSE.txt | 201 - python_sdk/README.md | 57 - python_sdk/changelog/.gitignore | 1 - python_sdk/examples/branch_create.py | 13 - python_sdk/examples/branch_create_sync.py | 11 - python_sdk/examples/branch_list.py | 15 - python_sdk/examples/branch_list_sync.py | 13 - python_sdk/examples/branch_merge.py | 12 - python_sdk/examples/branch_merge_sync.py | 10 - python_sdk/examples/branch_rebase.py | 12 - python_sdk/examples/branch_rebase_sync.py | 10 - python_sdk/examples/example_create_sync.py | 18 - python_sdk/examples/example_delete.py | 13 - python_sdk/examples/example_update.py | 14 - python_sdk/examples/node_create_data.py | 20 - python_sdk/examples/node_create_inline.py | 20 - python_sdk/examples/query_all.py | 15 - python_sdk/examples/query_filters.py | 15 - python_sdk/infrahub_sdk/__init__.py | 79 - python_sdk/infrahub_sdk/_importer.py | 34 - python_sdk/infrahub_sdk/analyzer.py | 119 - python_sdk/infrahub_sdk/async_typer.py | 31 - python_sdk/infrahub_sdk/batch.py | 67 - python_sdk/infrahub_sdk/branch.py | 297 -- python_sdk/infrahub_sdk/checks.py | 181 - python_sdk/infrahub_sdk/client.py | 1743 ------ python_sdk/infrahub_sdk/config.py | 152 - python_sdk/infrahub_sdk/constants.py | 7 - python_sdk/infrahub_sdk/ctl/__init__.py | 0 python_sdk/infrahub_sdk/ctl/branch.py | 142 - python_sdk/infrahub_sdk/ctl/check.py | 262 - python_sdk/infrahub_sdk/ctl/cli.py | 11 - python_sdk/infrahub_sdk/ctl/cli_commands.py | 445 -- python_sdk/infrahub_sdk/ctl/client.py | 72 - python_sdk/infrahub_sdk/ctl/config.py | 91 - python_sdk/infrahub_sdk/ctl/constants.py | 63 - python_sdk/infrahub_sdk/ctl/exceptions.py | 14 - python_sdk/infrahub_sdk/ctl/exporter.py | 50 - python_sdk/infrahub_sdk/ctl/generator.py | 102 - python_sdk/infrahub_sdk/ctl/importer.py | 51 - python_sdk/infrahub_sdk/ctl/parameters.py | 14 - python_sdk/infrahub_sdk/ctl/render.py | 11 - python_sdk/infrahub_sdk/ctl/repository.py | 104 - python_sdk/infrahub_sdk/ctl/schema.py | 220 - python_sdk/infrahub_sdk/ctl/transform.py | 11 - python_sdk/infrahub_sdk/ctl/utils.py | 182 - python_sdk/infrahub_sdk/ctl/validate.py | 112 - python_sdk/infrahub_sdk/data.py | 25 - python_sdk/infrahub_sdk/exceptions.py | 133 - python_sdk/infrahub_sdk/generator.py | 148 - python_sdk/infrahub_sdk/graphql.py | 162 - python_sdk/infrahub_sdk/jinja2.py | 30 - python_sdk/infrahub_sdk/node.py | 2048 ------- python_sdk/infrahub_sdk/object_store.py | 111 - python_sdk/infrahub_sdk/playback.py | 55 - python_sdk/infrahub_sdk/protocols.py | 867 --- python_sdk/infrahub_sdk/protocols_base.py | 146 - .../infrahub_sdk/pytest_plugin/__init__.py | 0 .../infrahub_sdk/pytest_plugin/exceptions.py | 62 - .../pytest_plugin/items/__init__.py | 28 - .../infrahub_sdk/pytest_plugin/items/base.py | 77 - .../infrahub_sdk/pytest_plugin/items/check.py | 90 - .../pytest_plugin/items/graphql_query.py | 61 - .../pytest_plugin/items/jinja2_transform.py | 118 - .../pytest_plugin/items/python_transform.py | 95 - .../infrahub_sdk/pytest_plugin/loader.py | 111 - .../infrahub_sdk/pytest_plugin/models.py | 203 - .../infrahub_sdk/pytest_plugin/plugin.py | 106 - .../infrahub_sdk/pytest_plugin/utils.py | 20 - python_sdk/infrahub_sdk/queries.py | 44 - python_sdk/infrahub_sdk/query_groups.py | 291 - python_sdk/infrahub_sdk/recorder.py | 70 - python_sdk/infrahub_sdk/schema.py | 964 ---- python_sdk/infrahub_sdk/store.py | 123 - python_sdk/infrahub_sdk/task_report.py | 210 - python_sdk/infrahub_sdk/timestamp.py | 92 - python_sdk/infrahub_sdk/topological_sort.py | 68 - python_sdk/infrahub_sdk/transfer/__init__.py | 0 python_sdk/infrahub_sdk/transfer/constants.py | 1 - .../infrahub_sdk/transfer/exceptions.py | 13 - .../transfer/exporter/__init__.py | 0 .../transfer/exporter/interface.py | 10 - .../infrahub_sdk/transfer/exporter/json.py | 166 - .../transfer/importer/__init__.py | 0 .../transfer/importer/interface.py | 7 - .../infrahub_sdk/transfer/importer/json.py | 195 - .../infrahub_sdk/transfer/schema_sorter.py | 29 - python_sdk/infrahub_sdk/transforms.py | 115 - python_sdk/infrahub_sdk/types.py | 63 - python_sdk/infrahub_sdk/utils.py | 337 -- python_sdk/infrahub_sdk/uuidt.py | 65 - python_sdk/infrahub_sdk/yaml.py | 25 - python_sdk/poetry.lock | 1970 ------- python_sdk/pyproject.toml | 366 -- python_sdk/tests/__init__.py | 5 - python_sdk/tests/adapters | 1 - python_sdk/tests/conftest.py | 22 - python_sdk/tests/constants | 1 - python_sdk/tests/fixtures/models/empty.json | 0 .../fixtures/models/non_valid_json_01.json | 17 - .../fixtures/models/non_valid_model_01.json | 18 - .../fixtures/models/non_valid_namespace.json | 26 - .../tests/fixtures/models/valid_model_01.json | 26 - .../models/valid_schemas/contract.yml | 35 - .../fixtures/models/valid_schemas/rack.yml | 39 - python_sdk/tests/fixtures/schema_01.json | 404 -- python_sdk/tests/fixtures/schema_02.json | 1055 ---- python_sdk/tests/fixtures/schema_03.json | 974 ---- python_sdk/tests/fixtures/schema_04.json | 2097 -------- python_sdk/tests/fixtures/schema_ipam.json | 4733 ----------------- python_sdk/tests/helpers | 1 -
python_sdk/tests/integration/__init__.py | 0 python_sdk/tests/integration/conftest.py | 517 -- .../tests/integration/test_export_import.py | 542 -- .../tests/integration/test_infrahub_client.py | 285 - .../integration/test_infrahub_client_sync.py | 287 - python_sdk/tests/integration/test_node.py | 403 -- .../tests/integration/test_object_store.py | 19 - python_sdk/tests/integration/test_schema.py | 57 - python_sdk/tests/unit/__init__.py | 5 - python_sdk/tests/unit/ctl/__init__.py | 0 python_sdk/tests/unit/ctl/conftest.py | 106 - python_sdk/tests/unit/ctl/test_branch_app.py | 33 - python_sdk/tests/unit/ctl/test_cli.py | 23 - python_sdk/tests/unit/ctl/test_schema_app.py | 130 - .../tests/unit/ctl/test_validate_app.py | 40 - .../tests/unit/pytest_plugin/__init__.py | 0 .../tests/unit/pytest_plugin/test_plugin.py | 233 - python_sdk/tests/unit/sdk/__init__.py | 0 python_sdk/tests/unit/sdk/checks/__init__.py | 0 python_sdk/tests/unit/sdk/checks/conftest.py | 21 - .../tests/unit/sdk/checks/test_checks.py | 64 - python_sdk/tests/unit/sdk/conftest.py | 2335 -------- python_sdk/tests/unit/sdk/test_artifact.py | 90 - python_sdk/tests/unit/sdk/test_batch.py | 50 - python_sdk/tests/unit/sdk/test_branch.py | 41 - python_sdk/tests/unit/sdk/test_client.py | 672 --- python_sdk/tests/unit/sdk/test_config.py | 41 - python_sdk/tests/unit/sdk/test_graphql.py | 472 -- .../tests/unit/sdk/test_group_context.py | 79 - python_sdk/tests/unit/sdk/test_node.py | 1725 ------ .../tests/unit/sdk/test_object_store.py | 79 - .../tests/unit/sdk/test_query_analyzer.py | 175 - python_sdk/tests/unit/sdk/test_schema.py | 314 -- .../tests/unit/sdk/test_schema_sorter.py | 16 - python_sdk/tests/unit/sdk/test_store.py | 43 - python_sdk/tests/unit/sdk/test_timestamp.py | 57 - .../tests/unit/sdk/test_topological_sort.py | 95 - python_sdk/tests/unit/sdk/test_utils.py | 206 - python_sdk/tests/unit/sdk/test_uuidt.py | 27 - python_sdk/tests/unit/test_package.py | 24 - 152 files changed, 33845 deletions(-) delete mode 100644 python_sdk/CHANGELOG.md delete mode 100644 python_sdk/LICENSE.txt delete mode 100644 python_sdk/README.md delete mode 100644 python_sdk/changelog/.gitignore delete mode 100644 python_sdk/examples/branch_create.py delete mode 100644 python_sdk/examples/branch_create_sync.py delete mode 100644 python_sdk/examples/branch_list.py delete mode 100644 python_sdk/examples/branch_list_sync.py delete mode 100644 python_sdk/examples/branch_merge.py delete mode 100644 python_sdk/examples/branch_merge_sync.py delete mode 100644 python_sdk/examples/branch_rebase.py delete mode 100644 python_sdk/examples/branch_rebase_sync.py delete mode 100644 python_sdk/examples/example_create_sync.py delete mode 100644 python_sdk/examples/example_delete.py delete mode 100644 python_sdk/examples/example_update.py delete mode 100644 python_sdk/examples/node_create_data.py delete mode 100644 python_sdk/examples/node_create_inline.py delete mode 100644 python_sdk/examples/query_all.py delete mode 100644 python_sdk/examples/query_filters.py delete mode 100644 python_sdk/infrahub_sdk/__init__.py delete mode 100644 python_sdk/infrahub_sdk/_importer.py delete mode 100644 python_sdk/infrahub_sdk/analyzer.py delete mode 100644 python_sdk/infrahub_sdk/async_typer.py delete mode 100644 python_sdk/infrahub_sdk/batch.py delete mode 100644 python_sdk/infrahub_sdk/branch.py delete mode 100644 python_sdk/infrahub_sdk/checks.py delete mode 100644 python_sdk/infrahub_sdk/client.py delete mode 100644 python_sdk/infrahub_sdk/config.py delete mode 100644 
python_sdk/infrahub_sdk/constants.py delete mode 100644 python_sdk/infrahub_sdk/ctl/__init__.py delete mode 100644 python_sdk/infrahub_sdk/ctl/branch.py delete mode 100644 python_sdk/infrahub_sdk/ctl/check.py delete mode 100644 python_sdk/infrahub_sdk/ctl/cli.py delete mode 100644 python_sdk/infrahub_sdk/ctl/cli_commands.py delete mode 100644 python_sdk/infrahub_sdk/ctl/client.py delete mode 100644 python_sdk/infrahub_sdk/ctl/config.py delete mode 100644 python_sdk/infrahub_sdk/ctl/constants.py delete mode 100644 python_sdk/infrahub_sdk/ctl/exceptions.py delete mode 100644 python_sdk/infrahub_sdk/ctl/exporter.py delete mode 100644 python_sdk/infrahub_sdk/ctl/generator.py delete mode 100644 python_sdk/infrahub_sdk/ctl/importer.py delete mode 100644 python_sdk/infrahub_sdk/ctl/parameters.py delete mode 100644 python_sdk/infrahub_sdk/ctl/render.py delete mode 100644 python_sdk/infrahub_sdk/ctl/repository.py delete mode 100644 python_sdk/infrahub_sdk/ctl/schema.py delete mode 100644 python_sdk/infrahub_sdk/ctl/transform.py delete mode 100644 python_sdk/infrahub_sdk/ctl/utils.py delete mode 100644 python_sdk/infrahub_sdk/ctl/validate.py delete mode 100644 python_sdk/infrahub_sdk/data.py delete mode 100644 python_sdk/infrahub_sdk/exceptions.py delete mode 100644 python_sdk/infrahub_sdk/generator.py delete mode 100644 python_sdk/infrahub_sdk/graphql.py delete mode 100644 python_sdk/infrahub_sdk/jinja2.py delete mode 100644 python_sdk/infrahub_sdk/node.py delete mode 100644 python_sdk/infrahub_sdk/object_store.py delete mode 100644 python_sdk/infrahub_sdk/playback.py delete mode 100644 python_sdk/infrahub_sdk/protocols.py delete mode 100644 python_sdk/infrahub_sdk/protocols_base.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/__init__.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/exceptions.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/items/__init__.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/items/base.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/items/check.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/items/graphql_query.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/items/jinja2_transform.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/items/python_transform.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/loader.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/models.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/plugin.py delete mode 100644 python_sdk/infrahub_sdk/pytest_plugin/utils.py delete mode 100644 python_sdk/infrahub_sdk/queries.py delete mode 100644 python_sdk/infrahub_sdk/query_groups.py delete mode 100644 python_sdk/infrahub_sdk/recorder.py delete mode 100644 python_sdk/infrahub_sdk/schema.py delete mode 100644 python_sdk/infrahub_sdk/store.py delete mode 100644 python_sdk/infrahub_sdk/task_report.py delete mode 100644 python_sdk/infrahub_sdk/timestamp.py delete mode 100644 python_sdk/infrahub_sdk/topological_sort.py delete mode 100644 python_sdk/infrahub_sdk/transfer/__init__.py delete mode 100644 python_sdk/infrahub_sdk/transfer/constants.py delete mode 100644 python_sdk/infrahub_sdk/transfer/exceptions.py delete mode 100644 python_sdk/infrahub_sdk/transfer/exporter/__init__.py delete mode 100644 python_sdk/infrahub_sdk/transfer/exporter/interface.py delete mode 100644 python_sdk/infrahub_sdk/transfer/exporter/json.py delete mode 100644 python_sdk/infrahub_sdk/transfer/importer/__init__.py delete mode 100644 
python_sdk/infrahub_sdk/transfer/importer/interface.py delete mode 100644 python_sdk/infrahub_sdk/transfer/importer/json.py delete mode 100644 python_sdk/infrahub_sdk/transfer/schema_sorter.py delete mode 100644 python_sdk/infrahub_sdk/transforms.py delete mode 100644 python_sdk/infrahub_sdk/types.py delete mode 100644 python_sdk/infrahub_sdk/utils.py delete mode 100644 python_sdk/infrahub_sdk/uuidt.py delete mode 100644 python_sdk/infrahub_sdk/yaml.py delete mode 100644 python_sdk/poetry.lock delete mode 100644 python_sdk/pyproject.toml delete mode 100644 python_sdk/tests/__init__.py delete mode 120000 python_sdk/tests/adapters delete mode 100644 python_sdk/tests/conftest.py delete mode 120000 python_sdk/tests/constants delete mode 100644 python_sdk/tests/fixtures/models/empty.json delete mode 100644 python_sdk/tests/fixtures/models/non_valid_json_01.json delete mode 100644 python_sdk/tests/fixtures/models/non_valid_model_01.json delete mode 100644 python_sdk/tests/fixtures/models/non_valid_namespace.json delete mode 100644 python_sdk/tests/fixtures/models/valid_model_01.json delete mode 100644 python_sdk/tests/fixtures/models/valid_schemas/contract.yml delete mode 100644 python_sdk/tests/fixtures/models/valid_schemas/rack.yml delete mode 100644 python_sdk/tests/fixtures/schema_01.json delete mode 100644 python_sdk/tests/fixtures/schema_02.json delete mode 100644 python_sdk/tests/fixtures/schema_03.json delete mode 100644 python_sdk/tests/fixtures/schema_04.json delete mode 100644 python_sdk/tests/fixtures/schema_ipam.json delete mode 120000 python_sdk/tests/helpers delete mode 100644 python_sdk/tests/integration/__init__.py delete mode 100644 python_sdk/tests/integration/conftest.py delete mode 100644 python_sdk/tests/integration/test_export_import.py delete mode 100644 python_sdk/tests/integration/test_infrahub_client.py delete mode 100644 python_sdk/tests/integration/test_infrahub_client_sync.py delete mode 100644 python_sdk/tests/integration/test_node.py delete mode 100644 python_sdk/tests/integration/test_object_store.py delete mode 100644 python_sdk/tests/integration/test_schema.py delete mode 100644 python_sdk/tests/unit/__init__.py delete mode 100644 python_sdk/tests/unit/ctl/__init__.py delete mode 100644 python_sdk/tests/unit/ctl/conftest.py delete mode 100644 python_sdk/tests/unit/ctl/test_branch_app.py delete mode 100644 python_sdk/tests/unit/ctl/test_cli.py delete mode 100644 python_sdk/tests/unit/ctl/test_schema_app.py delete mode 100644 python_sdk/tests/unit/ctl/test_validate_app.py delete mode 100644 python_sdk/tests/unit/pytest_plugin/__init__.py delete mode 100644 python_sdk/tests/unit/pytest_plugin/test_plugin.py delete mode 100644 python_sdk/tests/unit/sdk/__init__.py delete mode 100644 python_sdk/tests/unit/sdk/checks/__init__.py delete mode 100644 python_sdk/tests/unit/sdk/checks/conftest.py delete mode 100644 python_sdk/tests/unit/sdk/checks/test_checks.py delete mode 100644 python_sdk/tests/unit/sdk/conftest.py delete mode 100644 python_sdk/tests/unit/sdk/test_artifact.py delete mode 100644 python_sdk/tests/unit/sdk/test_batch.py delete mode 100644 python_sdk/tests/unit/sdk/test_branch.py delete mode 100644 python_sdk/tests/unit/sdk/test_client.py delete mode 100644 python_sdk/tests/unit/sdk/test_config.py delete mode 100644 python_sdk/tests/unit/sdk/test_graphql.py delete mode 100644 python_sdk/tests/unit/sdk/test_group_context.py delete mode 100644 python_sdk/tests/unit/sdk/test_node.py delete mode 100644 python_sdk/tests/unit/sdk/test_object_store.py delete mode 
100644 python_sdk/tests/unit/sdk/test_query_analyzer.py delete mode 100644 python_sdk/tests/unit/sdk/test_schema.py delete mode 100644 python_sdk/tests/unit/sdk/test_schema_sorter.py delete mode 100644 python_sdk/tests/unit/sdk/test_store.py delete mode 100644 python_sdk/tests/unit/sdk/test_timestamp.py delete mode 100644 python_sdk/tests/unit/sdk/test_topological_sort.py delete mode 100644 python_sdk/tests/unit/sdk/test_utils.py delete mode 100644 python_sdk/tests/unit/sdk/test_uuidt.py delete mode 100644 python_sdk/tests/unit/test_package.py diff --git a/python_sdk/CHANGELOG.md b/python_sdk/CHANGELOG.md deleted file mode 100644 index 9c6acaa432..0000000000 --- a/python_sdk/CHANGELOG.md +++ /dev/null @@ -1,28 +0,0 @@ -# Infrahub SDK Changelog - -This is the changelog for the Infrahub SDK. -All notable changes to this project will be documented in this file. - -Issue tracking is located in [Github](https://github.com/opsmill/infrahub/issues). - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the changes for the upcoming release can be found in . - - - -## [0.13.0](https://github.com/opsmill/infrahub/tree/v0.13.0) - 2024-09-12 - -### Added - -- Add support to search a node by human friendly ID using client's `get` method ([#3908](https://github.com/opsmill/infrahub/issues/3908)) -- Add support for Number resource pool - -### Changed - -- Fix `infrahubctl` not displaying error message under certain conditions - -### Fixed - -- Fix fetching relationship attributes when relationship inherits from a generic ([#3900](https://github.com/opsmill/infrahub/issues/3900)) -- Fix the retrieving on schema and nodes on the right branch ([#4056](https://github.com/opsmill/infrahub/issues/4056)) diff --git a/python_sdk/LICENSE.txt b/python_sdk/LICENSE.txt deleted file mode 100644 index 06154b9e1f..0000000000 --- a/python_sdk/LICENSE.txt +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2024 OpsMill SAS - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/python_sdk/README.md b/python_sdk/README.md deleted file mode 100644 index b78e55dd14..0000000000 --- a/python_sdk/README.md +++ /dev/null @@ -1,57 +0,0 @@ - -![Infrahub Logo](https://assets-global.website-files.com/657aff4a26dd8afbab24944b/657b0e0678f7fd35ce130776_Logo%20INFRAHUB.svg) - - -# Infrahub by OpsMill - -[Infrahub](https://github.com/opsmill/infrahub) by [OpsMill](https://opsmill.com) is taking a new approach to Infrastructure Management by providing a new generation of datastore to organize and control all the data that defines how an infrastructure should run. - -At its heart, Infrahub is built on 3 fundamental pillars: - -- **Powerful Schema**: that's easily extensible -- **Unified Version Control**: for data and files -- **Data Synchronization**: with traceability and ownership - -## Infrahub SDK - -The Infrahub Python SDK greatly simplifies how you can interact with Infrahub programmatically. 
- -More information can be found in the [Infrahub Python SDK Documentation](https://docs.infrahub.app/python-sdk/). - -## Installation - -The Infrahub SDK can be installed using the pip package installer. It is recommended to install the SDK into a virtual environment. - -```bash -python3 -m venv .venv -source .venv/bin/activate -pip install infrahub-sdk -``` - -### Installing optional extras - -Extras can be installed as part of the Python SDK and are not installed by default. - -#### ctl - -The ctl extra provides the `infrahubctl` command, which allows you to interact with an Infrahub instance. - -```bash -pip install 'infrahub-sdk[ctl]' -``` - -#### tests - -The tests extra provides all the components for the testing framework of Transforms, Queries and Checks. - -```bash -pip install 'infrahub-sdk[tests]' -``` - -#### all - -Installs infrahub-sdk together with all the extras. - -```bash -pip install 'infrahub-sdk[all]' -``` diff --git a/python_sdk/changelog/.gitignore b/python_sdk/changelog/.gitignore deleted file mode 100644 index f935021a8f..0000000000 --- a/python_sdk/changelog/.gitignore +++ /dev/null @@ -1 +0,0 @@ -!.gitignore diff --git a/python_sdk/examples/branch_create.py b/python_sdk/examples/branch_create.py deleted file mode 100644 index 0292f2dbb3..0000000000 --- a/python_sdk/examples/branch_create.py +++ /dev/null @@ -1,13 +0,0 @@ -from asyncio import run as aiorun - -from infrahub_sdk import InfrahubClient - - -async def main(): - client = InfrahubClient(address="http://localhost:8000") - await client.branch.create(branch_name="new-branch", description="description", sync_with_git=False) - print("New branch created") - - -if __name__ == "__main__": - aiorun(main()) diff --git a/python_sdk/examples/branch_create_sync.py b/python_sdk/examples/branch_create_sync.py deleted file mode 100644 index 4d1ce110b9..0000000000 --- a/python_sdk/examples/branch_create_sync.py +++ /dev/null @@ -1,11 +0,0 @@ -from infrahub_sdk import InfrahubClientSync - - -def main(): - client = InfrahubClientSync(address="http://localhost:8000") - client.branch.create(branch_name="new-branch2", description="description", sync_with_git=False) - print("New branch created") - - -if __name__ == "__main__": - main() diff --git a/python_sdk/examples/branch_list.py b/python_sdk/examples/branch_list.py deleted file mode 100644 index c9b097184e..0000000000 --- a/python_sdk/examples/branch_list.py +++ /dev/null @@ -1,15 +0,0 @@ -from asyncio import run as aiorun - -from rich import print as rprint - -from infrahub_sdk import InfrahubClient - - -async def main(): - client = InfrahubClient(address="http://localhost:8000") - branches = await client.branch.all() - rprint(branches) - - -if __name__ == "__main__": - aiorun(main()) diff --git a/python_sdk/examples/branch_list_sync.py b/python_sdk/examples/branch_list_sync.py deleted file mode 100644 index c4dce8a3d4..0000000000 --- a/python_sdk/examples/branch_list_sync.py +++ /dev/null @@ -1,13 +0,0 @@ -from rich import print as rprint - -from infrahub_sdk import InfrahubClientSync - - -def main(): - client = InfrahubClientSync(address="http://localhost:8000") - branches = client.branch.all() - rprint(branches) - - -if __name__ == "__main__": - main() diff --git a/python_sdk/examples/branch_merge.py b/python_sdk/examples/branch_merge.py deleted file mode 100644 index ac7432be9f..0000000000 --- a/python_sdk/examples/branch_merge.py +++ /dev/null @@ -1,12 +0,0 @@ -from asyncio import run as aiorun - -from infrahub_sdk import InfrahubClient - - -async def main(): - 
client = InfrahubClient(address="http://localhost:8000") - await client.branch.merge(branch_name="new-branch") - - -if __name__ == "__main__": - aiorun(main()) diff --git a/python_sdk/examples/branch_merge_sync.py b/python_sdk/examples/branch_merge_sync.py deleted file mode 100644 index f96efef4f2..0000000000 --- a/python_sdk/examples/branch_merge_sync.py +++ /dev/null @@ -1,10 +0,0 @@ -from infrahub_sdk import InfrahubClientSync - - -def main(): - client = InfrahubClientSync(address="http://localhost:8000") - client.branch.merge(branch_name="new-branch") - - -if __name__ == "__main__": - main() diff --git a/python_sdk/examples/branch_rebase.py b/python_sdk/examples/branch_rebase.py deleted file mode 100644 index ee4080f45e..0000000000 --- a/python_sdk/examples/branch_rebase.py +++ /dev/null @@ -1,12 +0,0 @@ -from asyncio import run as aiorun - -from infrahub_sdk import InfrahubClient - - -async def main(): - client = InfrahubClient(address="http://localhost:8000") - await client.branch.rebase(branch_name="new-branch") - - -if __name__ == "__main__": - aiorun(main()) diff --git a/python_sdk/examples/branch_rebase_sync.py b/python_sdk/examples/branch_rebase_sync.py deleted file mode 100644 index 8a9a716d37..0000000000 --- a/python_sdk/examples/branch_rebase_sync.py +++ /dev/null @@ -1,10 +0,0 @@ -from infrahub_sdk import InfrahubClientSync - - -def main(): - client = InfrahubClientSync(address="http://localhost:8000") - client.branch.rebase(branch_name="new-branch") - - -if __name__ == "__main__": - main() diff --git a/python_sdk/examples/example_create_sync.py b/python_sdk/examples/example_create_sync.py deleted file mode 100644 index 452f308780..0000000000 --- a/python_sdk/examples/example_create_sync.py +++ /dev/null @@ -1,18 +0,0 @@ -from infrahub_sdk import InfrahubClientSync - - -def main(): - client = InfrahubClientSync(address="http://localhost:8000") - data = { - "name": "janedoe", - "label": "Jane Doe", - "type": "User", - "password": "J4nesSecret!", - } - obj = client.create(kind="CoreAccount", data=data) - obj.save() - print(f"New user created with the Id {obj.id}") - - -if __name__ == "__main__": - main() diff --git a/python_sdk/examples/example_delete.py b/python_sdk/examples/example_delete.py deleted file mode 100644 index 5087d0eb00..0000000000 --- a/python_sdk/examples/example_delete.py +++ /dev/null @@ -1,13 +0,0 @@ -from asyncio import run as aiorun - -from infrahub_sdk import InfrahubClient - - -async def main(): - client = InfrahubClient(address="http://localhost:8000") - obj = await client.get(kind="CoreAccount", name__value="johndoe") - await obj.delete() - - -if __name__ == "__main__": - aiorun(main()) diff --git a/python_sdk/examples/example_update.py b/python_sdk/examples/example_update.py deleted file mode 100644 index b1f168d08b..0000000000 --- a/python_sdk/examples/example_update.py +++ /dev/null @@ -1,14 +0,0 @@ -from asyncio import run as aiorun - -from infrahub_sdk import InfrahubClient - - -async def main(): - client = InfrahubClient(address="http://localhost:8000") - obj = await client.get(kind="CoreAccount", name__value="admin") - obj.label.value = "Administrator" - await obj.save() - - -if __name__ == "__main__": - aiorun(main()) diff --git a/python_sdk/examples/node_create_data.py b/python_sdk/examples/node_create_data.py deleted file mode 100644 index 67668d870a..0000000000 --- a/python_sdk/examples/node_create_data.py +++ /dev/null @@ -1,20 +0,0 @@ -from asyncio import run as aiorun - -from infrahub_sdk import InfrahubClient - - -async def main(): - 
client = InfrahubClient(address="http://localhost:8000") - data = { - "name": "johndoe", - "label": "John Doe", - "type": "User", - "password": "J0esSecret!", - } - obj = await client.create(kind="CoreAccount", data=data) - await obj.save() - print(f"New user created with the Id {obj.id}") - - -if __name__ == "__main__": - aiorun(main()) diff --git a/python_sdk/examples/node_create_inline.py b/python_sdk/examples/node_create_inline.py deleted file mode 100644 index 309f01f234..0000000000 --- a/python_sdk/examples/node_create_inline.py +++ /dev/null @@ -1,20 +0,0 @@ -from asyncio import run as aiorun - -from infrahub_sdk import InfrahubClient - - -async def main(): - client = InfrahubClient(address="http://localhost:8000") - obj = await client.create( - kind="CoreAccount", - name="janedoe", - label="Jane Doe", - account_type="User", - password="J0esSecret!", - ) - await obj.save() - print(f"New user created with the Id {obj.id}") - - -if __name__ == "__main__": - aiorun(main()) diff --git a/python_sdk/examples/query_all.py b/python_sdk/examples/query_all.py deleted file mode 100644 index efe3a02d02..0000000000 --- a/python_sdk/examples/query_all.py +++ /dev/null @@ -1,15 +0,0 @@ -from asyncio import run as aiorun - -from rich import print as rprint - -from infrahub_sdk import InfrahubClient - - -async def main(): - client = InfrahubClient(address="http://localhost:8000") - accounts = await client.all(kind="CoreAccount") - rprint(accounts) - - -if __name__ == "__main__": - aiorun(main()) diff --git a/python_sdk/examples/query_filters.py b/python_sdk/examples/query_filters.py deleted file mode 100644 index f2eb0cf1b1..0000000000 --- a/python_sdk/examples/query_filters.py +++ /dev/null @@ -1,15 +0,0 @@ -from asyncio import run as aiorun - -from rich import print as rprint - -from infrahub_sdk import InfrahubClient - - -async def main(): - client = InfrahubClient(address="http://localhost:8000") - accounts = await client.filters(kind="CoreAccount") - rprint(accounts) - - -if __name__ == "__main__": - aiorun(main()) diff --git a/python_sdk/infrahub_sdk/__init__.py b/python_sdk/infrahub_sdk/__init__.py deleted file mode 100644 index 4d44fdd2cf..0000000000 --- a/python_sdk/infrahub_sdk/__init__.py +++ /dev/null @@ -1,79 +0,0 @@ -from __future__ import annotations - -import importlib.metadata - -from infrahub_sdk.analyzer import GraphQLOperation, GraphQLQueryAnalyzer, GraphQLQueryVariable -from infrahub_sdk.batch import InfrahubBatch -from infrahub_sdk.branch import InfrahubBranchManager, InfrahubBranchManagerSync -from infrahub_sdk.client import InfrahubClient, InfrahubClientSync -from infrahub_sdk.config import Config -from infrahub_sdk.exceptions import ( - AuthenticationError, - Error, - FilterNotFoundError, - GraphQLError, - NodeNotFoundError, - ServerNotReachableError, - ServerNotResponsiveError, - ValidationError, -) -from infrahub_sdk.graphql import Mutation, Query -from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync -from infrahub_sdk.schema import ( - AttributeSchema, - GenericSchema, - InfrahubRepositoryConfig, - InfrahubSchema, - MainSchemaTypes, - NodeSchema, - ProfileSchema, - RelationshipCardinality, - RelationshipKind, - RelationshipSchema, - SchemaRoot, -) -from infrahub_sdk.store import NodeStore, NodeStoreSync -from infrahub_sdk.timestamp import Timestamp -from infrahub_sdk.uuidt import UUIDT, generate_uuid - -__all__ = [ - "AttributeSchema", - "AuthenticationError", - "Config", - "Error", - "InfrahubBatch", - "InfrahubBranchManager", - "InfrahubBranchManagerSync", - 
"InfrahubClient", - "InfrahubClientSync", - "InfrahubNode", - "InfrahubNodeSync", - "InfrahubRepositoryConfig", - "InfrahubSchema", - "FilterNotFoundError", - "generate_uuid", - "GenericSchema", - "GraphQLQueryAnalyzer", - "GraphQLQueryVariable", - "GraphQLError", - "GraphQLOperation", - "MainSchemaTypes", - "NodeNotFoundError", - "NodeSchema", - "Mutation", - "NodeStore", - "NodeStoreSync", - "ProfileSchema", - "Query", - "RelationshipSchema", - "RelationshipCardinality", - "RelationshipKind", - "SchemaRoot", - "ServerNotReachableError", - "ServerNotResponsiveError", - "Timestamp", - "UUIDT", - "ValidationError", -] - -__version__ = importlib.metadata.version("infrahub-sdk") diff --git a/python_sdk/infrahub_sdk/_importer.py b/python_sdk/infrahub_sdk/_importer.py deleted file mode 100644 index e4305b4999..0000000000 --- a/python_sdk/infrahub_sdk/_importer.py +++ /dev/null @@ -1,34 +0,0 @@ -from __future__ import annotations - -import importlib -import sys -from typing import TYPE_CHECKING, Optional - -from infrahub_sdk.exceptions import ModuleImportError - -if TYPE_CHECKING: - from pathlib import Path - from types import ModuleType - - -def import_module( - module_path: Path, import_root: Optional[str] = None, relative_path: Optional[str] = None -) -> ModuleType: - import_root = import_root or str(module_path.parent) - - if import_root not in sys.path: - sys.path.append(import_root) - - module_name = module_path.stem - - if relative_path: - module_name = relative_path.replace("/", ".") + f".{module_name}" - - try: - module = importlib.import_module(module_name) - except ModuleNotFoundError as exc: - raise ModuleImportError(message=f"{str(exc)} ({module_path})") from exc - except SyntaxError as exc: - raise ModuleImportError(message=str(exc)) from exc - - return module diff --git a/python_sdk/infrahub_sdk/analyzer.py b/python_sdk/infrahub_sdk/analyzer.py deleted file mode 100644 index defcfff2ac..0000000000 --- a/python_sdk/infrahub_sdk/analyzer.py +++ /dev/null @@ -1,119 +0,0 @@ -from typing import Any, Optional - -from graphql import ( - DocumentNode, - FieldNode, - GraphQLError, - GraphQLSchema, - OperationDefinitionNode, - OperationType, - parse, - validate, -) -from pydantic import BaseModel - -from infrahub_sdk.utils import calculate_dict_depth, calculate_dict_height, extract_fields - - -class GraphQLQueryVariable(BaseModel): - name: str - type: str - required: bool = False - default_value: Optional[Any] = None - - -class GraphQLOperation(BaseModel): - name: Optional[str] = None - operation_type: OperationType - - -class GraphQLQueryAnalyzer: - def __init__(self, query: str, schema: Optional[GraphQLSchema] = None): - self.query: str = query - self.schema: Optional[GraphQLSchema] = schema - self.document: DocumentNode = parse(self.query) - self._fields: Optional[dict] = None - - @property - def is_valid(self) -> tuple[bool, Optional[list[GraphQLError]]]: - if self.schema is None: - return False, [GraphQLError("Schema is not provided")] - - errors = validate(schema=self.schema, document_ast=self.document) - if errors: - return False, errors - - return True, None - - @property - def nbr_queries(self) -> int: - return len(self.document.definitions) - - @property - def operations(self) -> list[GraphQLOperation]: - operations = [] - for definition in self.document.definitions: - if not isinstance(definition, OperationDefinitionNode): - continue - operation_type = definition.operation - for field_node in definition.selection_set.selections: - if not isinstance(field_node, FieldNode): - 
continue - operations.append(GraphQLOperation(operation_type=operation_type, name=field_node.name.value)) - return operations - - @property - def contains_mutation(self) -> bool: - return any(op.operation_type == OperationType.MUTATION for op in self.operations) - - @property - def variables(self) -> list[GraphQLQueryVariable]: - response = [] - for definition in self.document.definitions: - variable_definitions = getattr(definition, "variable_definitions", None) - if not variable_definitions: - continue - for variable in variable_definitions: - data = {"name": variable.variable.name.value} - variable_type = variable.type - non_null = variable_type.kind == "non_null_type" - - # This should not iterate a lot but it allows to inspect non-nullable iterables - while hasattr(variable_type, "type"): - variable_type = variable_type.type - data["type"] = variable_type.name.value - - if variable.default_value: - if data["type"] == "Int": - data["default_value"] = int(variable.default_value.value) - else: - data["default_value"] = variable.default_value.value - - if not data.get("default_value", None) and non_null: - data["required"] = True - - response.append(GraphQLQueryVariable(**data)) - - return response - - async def calculate_depth(self) -> int: - """Number of nested levels in the query""" - fields = await self.get_fields() - return calculate_dict_depth(data=fields) - - async def calculate_height(self) -> int: - """Total number of fields requested in the query""" - fields = await self.get_fields() - return calculate_dict_height(data=fields) - - async def get_fields(self) -> dict[str, Any]: - if not self._fields: - fields = {} - for definition in self.document.definitions: - if not isinstance(definition, OperationDefinitionNode): - continue - fields_to_update = await extract_fields(definition.selection_set) - if fields_to_update is not None: - fields.update(fields_to_update) - self._fields = fields - return self._fields diff --git a/python_sdk/infrahub_sdk/async_typer.py b/python_sdk/infrahub_sdk/async_typer.py deleted file mode 100644 index 39014a32f1..0000000000 --- a/python_sdk/infrahub_sdk/async_typer.py +++ /dev/null @@ -1,31 +0,0 @@ -from __future__ import annotations - -import asyncio -import inspect -from functools import partial, wraps -from typing import Any, Callable - -from typer import Typer - - -class AsyncTyper(Typer): - @staticmethod - def maybe_run_async(decorator: Callable, func: Callable) -> Any: - if inspect.iscoroutinefunction(func): - - @wraps(func) - def runner(*args: Any, **kwargs: Any) -> Any: - return asyncio.run(func(*args, **kwargs)) - - decorator(runner) - else: - decorator(func) - return func - - def callback(self, *args: Any, **kwargs: Any) -> Any: - decorator = super().callback(*args, **kwargs) - return partial(self.maybe_run_async, decorator) - - def command(self, *args: Any, **kwargs: Any) -> Any: - decorator = super().command(*args, **kwargs) - return partial(self.maybe_run_async, decorator) diff --git a/python_sdk/infrahub_sdk/batch.py b/python_sdk/infrahub_sdk/batch.py deleted file mode 100644 index 81348888ad..0000000000 --- a/python_sdk/infrahub_sdk/batch.py +++ /dev/null @@ -1,67 +0,0 @@ -import asyncio -from dataclasses import dataclass -from typing import Any, AsyncGenerator, Awaitable, Callable, Optional - -from infrahub_sdk.node import InfrahubNode - - -@dataclass -class BatchTask: - task: Callable[[Any], Awaitable[Any]] - args: tuple[Any, ...] 
- kwargs: dict[str, Any] - node: Optional[InfrahubNode] = None - - -async def execute_batch_task_in_pool( - task: BatchTask, semaphore: asyncio.Semaphore, return_exceptions: bool = False -) -> tuple[Optional[InfrahubNode], Any]: - async with semaphore: - try: - result = await task.task(*task.args, **task.kwargs) - - except Exception as exc: # pylint: disable=broad-exception-caught - if return_exceptions: - return (task.node, exc) - raise exc - - return (task.node, result) - - -class InfrahubBatch: - def __init__( - self, - semaphore: Optional[asyncio.Semaphore] = None, - max_concurrent_execution: int = 5, - return_exceptions: bool = False, - ): - self._tasks: list[BatchTask] = [] - self.semaphore = semaphore or asyncio.Semaphore(value=max_concurrent_execution) - self.return_exceptions = return_exceptions - - @property - def num_tasks(self) -> int: - return len(self._tasks) - - def add( - self, *args: Any, task: Callable[[Any], Awaitable[Any]], node: Optional[InfrahubNode] = None, **kwargs: Any - ) -> None: - self._tasks.append(BatchTask(task=task, node=node, args=args, kwargs=kwargs)) - - async def execute(self) -> AsyncGenerator: - tasks = [] - - for batch_task in self._tasks: - tasks.append( - asyncio.create_task( - execute_batch_task_in_pool( - task=batch_task, semaphore=self.semaphore, return_exceptions=self.return_exceptions - ) - ) - ) - - for completed_task in asyncio.as_completed(tasks): - node, result = await completed_task - if isinstance(result, Exception) and not self.return_exceptions: - raise result - yield node, result diff --git a/python_sdk/infrahub_sdk/branch.py b/python_sdk/infrahub_sdk/branch.py deleted file mode 100644 index 131c501139..0000000000 --- a/python_sdk/infrahub_sdk/branch.py +++ /dev/null @@ -1,297 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, Optional, Union - -from pydantic import BaseModel - -from infrahub_sdk.exceptions import BranchNotFoundError -from infrahub_sdk.graphql import Mutation, Query -from infrahub_sdk.utils import decode_json - -if TYPE_CHECKING: - from infrahub_sdk.client import InfrahubClient, InfrahubClientSync - - -class BranchData(BaseModel): - id: str - name: str - description: Optional[str] = None - sync_with_git: bool - is_default: bool - has_schema_changes: bool - origin_branch: Optional[str] = None - branched_from: str - - -BRANCH_DATA = { - "id": None, - "name": None, - "description": None, - "origin_branch": None, - "branched_from": None, - "is_default": None, - "sync_with_git": None, - "has_schema_changes": None, -} - -BRANCH_DATA_FILTER = {"@filters": {"name": "$branch_name"}} - - -MUTATION_QUERY_DATA = {"ok": None, "object": BRANCH_DATA} - -QUERY_ALL_BRANCHES_DATA = {"Branch": BRANCH_DATA} - -QUERY_ONE_BRANCH_DATA = {"Branch": {**BRANCH_DATA, **BRANCH_DATA_FILTER}} - - -class InfraHubBranchManagerBase: - @classmethod - def generate_diff_data_url( - cls, - client: Union[InfrahubClient, InfrahubClientSync], - branch_name: str, - branch_only: bool = True, - time_from: Optional[str] = None, - time_to: Optional[str] = None, - ) -> str: - """Generate the URL for the diff_data function.""" - url = f"{client.address}/api/diff/data?branch={branch_name}" - url += f"&branch_only={str(branch_only).lower()}" - if time_from: - url += f"&time_from={time_from}" - if time_to: - url += f"&time_to={time_to}" - - return url - - -class InfrahubBranchManager(InfraHubBranchManagerBase): - def __init__(self, client: InfrahubClient): - self.client = client - - async def create( - self, - branch_name: str, - 
sync_with_git: bool = True, - description: str = "", - background_execution: bool = False, - ) -> BranchData: - input_data = { - "background_execution": background_execution, - "data": { - "name": branch_name, - "description": description, - "sync_with_git": sync_with_git, - }, - } - - query = Mutation(mutation="BranchCreate", input_data=input_data, query=MUTATION_QUERY_DATA) - response = await self.client.execute_graphql(query=query.render(), tracker="mutation-branch-create") - - return BranchData(**response["BranchCreate"]["object"]) - - async def delete(self, branch_name: str) -> bool: - input_data = { - "data": { - "name": branch_name, - } - } - query = Mutation(mutation="BranchDelete", input_data=input_data, query={"ok": None}) - response = await self.client.execute_graphql(query=query.render(), tracker="mutation-branch-delete") - return response["BranchDelete"]["ok"] - - async def rebase(self, branch_name: str) -> BranchData: - input_data = { - "data": { - "name": branch_name, - } - } - query = Mutation(mutation="BranchRebase", input_data=input_data, query=MUTATION_QUERY_DATA) - response = await self.client.execute_graphql(query=query.render(), tracker="mutation-branch-rebase") - return response["BranchRebase"]["ok"] - - async def validate(self, branch_name: str) -> BranchData: - input_data = { - "data": { - "name": branch_name, - } - } - - query_data = { - "ok": None, - "messages": None, - "object": { - "id": None, - "name": None, - }, - } - - query = Mutation(mutation="BranchValidate", input_data=input_data, query=query_data) - response = await self.client.execute_graphql(query=query.render(), tracker="mutation-branch-validate") - - return response["BranchValidate"]["ok"] - - async def merge(self, branch_name: str) -> bool: - input_data = { - "data": { - "name": branch_name, - } - } - query = Mutation(mutation="BranchMerge", input_data=input_data, query=MUTATION_QUERY_DATA) - response = await self.client.execute_graphql( - query=query.render(), tracker="mutation-branch-merge", timeout=max(120, self.client.default_timeout) - ) - - return response["BranchMerge"]["ok"] - - async def all(self) -> dict[str, BranchData]: - query = Query(name="GetAllBranch", query=QUERY_ALL_BRANCHES_DATA) - data = await self.client.execute_graphql(query=query.render(), tracker="query-branch-all") - - branches = {branch["name"]: BranchData(**branch) for branch in data["Branch"]} - - return branches - - async def get(self, branch_name: str) -> BranchData: - query = Query(name="GetBranch", query=QUERY_ONE_BRANCH_DATA, variables={"branch_name": str}) - data = await self.client.execute_graphql( - query=query.render(), - variables={"branch_name": branch_name}, - tracker="query-branch", - ) - - if not data["Branch"]: - raise BranchNotFoundError(identifier=branch_name) - return BranchData(**data["Branch"][0]) - - async def diff_data( - self, - branch_name: str, - branch_only: bool = True, - time_from: Optional[str] = None, - time_to: Optional[str] = None, - ) -> dict[Any, Any]: - url = self.generate_diff_data_url( - client=self.client, - branch_name=branch_name, - branch_only=branch_only, - time_from=time_from, - time_to=time_to, - ) - response = await self.client._get(url=url, headers=self.client.headers) - return decode_json(response=response) - - -class InfrahubBranchManagerSync(InfraHubBranchManagerBase): - def __init__(self, client: InfrahubClientSync): - self.client = client - - def all(self) -> dict[str, BranchData]: - query = Query(name="GetAllBranch", query=QUERY_ALL_BRANCHES_DATA) - data = 
self.client.execute_graphql(query=query.render(), tracker="query-branch-all") - - branches = {branch["name"]: BranchData(**branch) for branch in data["Branch"]} - - return branches - - def get(self, branch_name: str) -> BranchData: - query = Query(name="GetBranch", query=QUERY_ONE_BRANCH_DATA, variables={"branch_name": str}) - data = self.client.execute_graphql( - query=query.render(), - variables={"branch_name": branch_name}, - tracker="query-branch", - ) - - if not data["Branch"]: - raise BranchNotFoundError(identifier=branch_name) - return BranchData(**data["Branch"][0]) - - def create( - self, - branch_name: str, - sync_with_git: bool = True, - description: str = "", - background_execution: bool = False, - ) -> BranchData: - input_data = { - "background_execution": background_execution, - "data": { - "name": branch_name, - "description": description, - "sync_with_git": sync_with_git, - }, - } - - query = Mutation(mutation="BranchCreate", input_data=input_data, query=MUTATION_QUERY_DATA) - response = self.client.execute_graphql(query=query.render(), tracker="mutation-branch-create") - - return BranchData(**response["BranchCreate"]["object"]) - - def delete(self, branch_name: str) -> bool: - input_data = { - "data": { - "name": branch_name, - } - } - query = Mutation(mutation="BranchDelete", input_data=input_data, query={"ok": None}) - response = self.client.execute_graphql(query=query.render(), tracker="mutation-branch-delete") - return response["BranchDelete"]["ok"] - - def diff_data( - self, - branch_name: str, - branch_only: bool = True, - time_from: Optional[str] = None, - time_to: Optional[str] = None, - ) -> dict[Any, Any]: - url = self.generate_diff_data_url( - client=self.client, - branch_name=branch_name, - branch_only=branch_only, - time_from=time_from, - time_to=time_to, - ) - response = self.client._get(url=url, headers=self.client.headers) - return decode_json(response=response) - - def merge(self, branch_name: str) -> bool: - input_data = { - "data": { - "name": branch_name, - } - } - query = Mutation(mutation="BranchMerge", input_data=input_data, query=MUTATION_QUERY_DATA) - response = self.client.execute_graphql(query=query.render(), tracker="mutation-branch-merge") - - return response["BranchMerge"]["ok"] - - def rebase(self, branch_name: str) -> BranchData: - input_data = { - "data": { - "name": branch_name, - } - } - query = Mutation(mutation="BranchRebase", input_data=input_data, query=MUTATION_QUERY_DATA) - response = self.client.execute_graphql(query=query.render(), tracker="mutation-branch-rebase") - return response["BranchRebase"]["ok"] - - def validate(self, branch_name: str) -> BranchData: - input_data = { - "data": { - "name": branch_name, - } - } - - query_data = { - "ok": None, - "messages": None, - "object": { - "id": None, - "name": None, - }, - } - - query = Mutation(mutation="BranchValidate", input_data=input_data, query=query_data) - response = self.client.execute_graphql(query=query.render(), tracker="mutation-branch-validate") - - return response["BranchValidate"]["ok"] diff --git a/python_sdk/infrahub_sdk/checks.py b/python_sdk/infrahub_sdk/checks.py deleted file mode 100644 index 4d869c1c95..0000000000 --- a/python_sdk/infrahub_sdk/checks.py +++ /dev/null @@ -1,181 +0,0 @@ -from __future__ import annotations - -import asyncio -import importlib -import os -from abc import abstractmethod -from typing import TYPE_CHECKING, Any, Optional - -import ujson -from git.repo import Repo -from pydantic import BaseModel, Field - -from infrahub_sdk import 
InfrahubClient -from infrahub_sdk.exceptions import InfrahubCheckNotFoundError - -if TYPE_CHECKING: - from pathlib import Path - - from infrahub_sdk.schema import InfrahubCheckDefinitionConfig - -INFRAHUB_CHECK_VARIABLE_TO_IMPORT = "INFRAHUB_CHECKS" - - -class InfrahubCheckInitializer(BaseModel): - """Information about the originator of the check.""" - - proposed_change_id: str = Field( - default="", description="If available the ID of the proposed change that requested the check" - ) - - -class InfrahubCheck: - name: Optional[str] = None - query: str = "" - timeout: int = 10 - - def __init__( - self, - branch: Optional[str] = None, - root_directory: str = "", - output: Optional[str] = None, - initializer: Optional[InfrahubCheckInitializer] = None, - params: Optional[dict] = None, - ): - self.git: Optional[Repo] = None - self.initializer = initializer or InfrahubCheckInitializer() - - self.logs: list[dict[str, Any]] = [] - self.passed = False - - self.output = output - - self.branch = branch - self.params = params or {} - - self.root_directory = root_directory or os.getcwd() - - self.client: InfrahubClient - - if not self.name: - self.name = self.__class__.__name__ - - if not self.query: - raise ValueError("A query must be provided") - - def __str__(self) -> str: - return self.__class__.__name__ - - @classmethod - async def init(cls, client: Optional[InfrahubClient] = None, *args: Any, **kwargs: Any) -> InfrahubCheck: - """Async init method, If an existing InfrahubClient client hasn't been provided, one will be created automatically.""" - - instance = cls(*args, **kwargs) - instance.client = client or InfrahubClient() - - return instance - - @property - def errors(self) -> list[dict[str, Any]]: - return [log for log in self.logs if log["level"] == "ERROR"] - - def _write_log_entry( - self, message: str, level: str, object_id: Optional[str] = None, object_type: Optional[str] = None - ) -> None: - log_message = {"level": level, "message": message, "branch": self.branch_name} - if object_id: - log_message["object_id"] = object_id - if object_type: - log_message["object_type"] = object_type - self.logs.append(log_message) - - if self.output == "stdout": - print(ujson.dumps(log_message)) - - def log_error(self, message: str, object_id: Optional[str] = None, object_type: Optional[str] = None) -> None: - self._write_log_entry(message=message, level="ERROR", object_id=object_id, object_type=object_type) - - def log_info(self, message: str, object_id: Optional[str] = None, object_type: Optional[str] = None) -> None: - self._write_log_entry(message=message, level="INFO", object_id=object_id, object_type=object_type) - - @property - def log_entries(self) -> str: - output = "" - for log in self.logs: - output += "-----------------------\n" - output += f"Message: {log['message']}\n" - output += f"Level: {log['level']}\n" - if "object_id" in log: - output += f"Object ID: {log['object_id']}\n" - if "object_type" in log: - output += f"Object ID: {log['object_type']}\n" - return output - - @property - def branch_name(self) -> str: - """Return the name of the current git branch.""" - - if self.branch: - return self.branch - - if not self.git: - self.git = Repo(self.root_directory) - - self.branch = str(self.git.active_branch) - - return self.branch - - @abstractmethod - def validate(self, data: dict) -> None: - """Code to validate the status of this check.""" - - async def collect_data(self) -> dict: - """Query the result of the GraphQL Query defined in self.query and return the result""" - - return await 
self.client.query_gql_query(name=self.query, branch_name=self.branch_name, variables=self.params) - - async def run(self, data: Optional[dict] = None) -> bool: - """Execute the check after collecting the data from the GraphQL query. - The result of the check is determined based on the presence or not of ERROR log messages.""" - - if not data: - data = await self.collect_data() - unpacked = data.get("data") or data - - if asyncio.iscoroutinefunction(self.validate): - await self.validate(data=unpacked) - else: - self.validate(data=unpacked) - - nbr_errors = len([log for log in self.logs if log["level"] == "ERROR"]) - - self.passed = bool(nbr_errors == 0) - - if self.passed: - self.log_info("Check succesfully completed") - - return self.passed - - -def get_check_class_instance( - check_config: InfrahubCheckDefinitionConfig, search_path: Optional[Path] = None -) -> InfrahubCheck: - if check_config.file_path.is_absolute() or search_path is None: - search_location = check_config.file_path - else: - search_location = search_path / check_config.file_path - - try: - spec = importlib.util.spec_from_file_location(check_config.class_name, search_location) - module = importlib.util.module_from_spec(spec) # type: ignore[arg-type] - spec.loader.exec_module(module) # type: ignore[union-attr] - - # Get the specified class from the module - check_class = getattr(module, check_config.class_name) - - # Create an instance of the class - check_instance = check_class() - except (FileNotFoundError, AttributeError) as exc: - raise InfrahubCheckNotFoundError(name=check_config.name) from exc - - return check_instance diff --git a/python_sdk/infrahub_sdk/client.py b/python_sdk/infrahub_sdk/client.py deleted file mode 100644 index 8833f04bbd..0000000000 --- a/python_sdk/infrahub_sdk/client.py +++ /dev/null @@ -1,1743 +0,0 @@ -from __future__ import annotations - -import asyncio -import copy -import logging -import warnings -from functools import wraps -from time import sleep -from typing import TYPE_CHECKING, Any, Callable, Coroutine, MutableMapping, Optional, TypedDict, Union - -import httpx -import ujson -from typing_extensions import NotRequired, Self -from typing_extensions import TypedDict as ExtensionTypedDict - -from infrahub_sdk.batch import InfrahubBatch -from infrahub_sdk.branch import ( - BranchData, - InfrahubBranchManager, - InfrahubBranchManagerSync, -) -from infrahub_sdk.config import Config -from infrahub_sdk.constants import InfrahubClientMode -from infrahub_sdk.data import RepositoryBranchInfo, RepositoryData -from infrahub_sdk.exceptions import ( - AuthenticationError, - Error, - GraphQLError, - NodeNotFoundError, - ServerNotReachableError, - ServerNotResponsiveError, -) -from infrahub_sdk.graphql import Mutation, Query -from infrahub_sdk.node import ( - InfrahubNode, - InfrahubNodeSync, -) -from infrahub_sdk.object_store import ObjectStore, ObjectStoreSync -from infrahub_sdk.queries import get_commit_update_mutation -from infrahub_sdk.query_groups import InfrahubGroupContext, InfrahubGroupContextSync -from infrahub_sdk.schema import InfrahubSchema, InfrahubSchemaSync, NodeSchema -from infrahub_sdk.store import NodeStore, NodeStoreSync -from infrahub_sdk.timestamp import Timestamp -from infrahub_sdk.types import AsyncRequester, HTTPMethod, SyncRequester -from infrahub_sdk.utils import decode_json, is_valid_uuid - -if TYPE_CHECKING: - from types import TracebackType - -# pylint: disable=redefined-builtin disable=too-many-lines - - -class NodeDiff(ExtensionTypedDict): - branch: str - kind: str - id: 
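get_check_class_instance relies on standard importlib machinery to execute an arbitrary file as a module and pull a single class out of it. The same pattern in isolation:

    import importlib.util
    from pathlib import Path


    def load_class_from_file(file_path: Path, class_name: str) -> type:
        spec = importlib.util.spec_from_file_location(class_name, file_path)
        if spec is None or spec.loader is None:
            raise FileNotFoundError(file_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)  # runs the file top to bottom as a module
        return getattr(module, class_name)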
str - action: str - display_label: str - elements: list[NodeDiffElement] - - -class NodeDiffElement(ExtensionTypedDict): - name: str - element_type: str - action: str - summary: NodeDiffSummary - peers: NotRequired[list[NodeDiffPeer]] - - -class NodeDiffSummary(ExtensionTypedDict): - added: int - updated: int - removed: int - - -class NodeDiffPeer(ExtensionTypedDict): - action: str - summary: NodeDiffSummary - - -class ProcessRelationsNode(TypedDict): - nodes: list[InfrahubNode] - related_nodes: list[InfrahubNode] - - -class ProcessRelationsNodeSync(TypedDict): - nodes: list[InfrahubNodeSync] - related_nodes: list[InfrahubNodeSync] - - -def handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]): # type: ignore[no-untyped-def] - @wraps(func) - async def wrapper(client: InfrahubClient, *args: Any, **kwargs: Any) -> httpx.Response: - response = await func(client, *args, **kwargs) - if response.status_code == 401: - errors = response.json().get("errors", []) - if "Expired Signature" in [error.get("message") for error in errors]: - await client.login(refresh=True) - return await func(client, *args, **kwargs) - return response - - return wrapper - - -def handle_relogin_sync(func: Callable[..., httpx.Response]): # type: ignore[no-untyped-def] - @wraps(func) - def wrapper(client: InfrahubClientSync, *args: Any, **kwargs: Any) -> httpx.Response: - response = func(client, *args, **kwargs) - if response.status_code == 401: - errors = response.json().get("errors", []) - if "Expired Signature" in [error.get("message") for error in errors]: - client.login(refresh=True) - return func(client, *args, **kwargs) - return response - - return wrapper - - -class BaseClient: - """Base class for InfrahubClient and InfrahubClientSync""" - - def __init__( - self, - address: str = "", - config: Optional[Union[Config, dict[str, Any]]] = None, - ): - self.client = None - self.headers = {"content-type": "application/json"} - self.access_token: str = "" - self.refresh_token: str = "" - if isinstance(config, Config): - self.config = config - else: - config = config or {} - self.config = Config(**config) - - self.default_branch = self.config.default_infrahub_branch - self.default_timeout = self.config.timeout - self.config.address = address or self.config.address - self.insert_tracker = self.config.insert_tracker - self.log = self.config.logger or logging.getLogger("infrahub_sdk") - self.address = self.config.address - self.mode = self.config.mode - self.pagination_size = self.config.pagination_size - self.retry_delay = self.config.retry_delay - self.retry_on_failure = self.config.retry_on_failure - - if self.config.api_token: - self.headers["X-INFRAHUB-KEY"] = self.config.api_token - - self.max_concurrent_execution = self.config.max_concurrent_execution - - self.update_group_context = self.config.update_group_context - self.identifier = self.config.identifier - self.group_context: Union[InfrahubGroupContext, InfrahubGroupContextSync] - self._initialize() - - def _initialize(self) -> None: - """Sets the properties for each version of the client""" - - def _record(self, response: httpx.Response) -> None: - self.config.custom_recorder.record(response) - - def _echo(self, url: str, query: str, variables: Optional[dict] = None) -> None: - if self.config.echo_graphql_queries: - print(f"URL: {url}") - print(f"QUERY:\n{query}") - if variables: - print(f"VARIABLES:\n{ujson.dumps(variables, indent=4)}\n") - - def start_tracking( - self, - identifier: Optional[str] = None, - params: Optional[dict[str, Any]] = 
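BaseClient accepts either a Config instance or a plain dict, which it coerces into a Config, and installs the X-INFRAHUB-KEY header whenever an API token is configured. A small sketch; the token value is a placeholder:

    from infrahub_sdk import InfrahubClient

    client = InfrahubClient(config={"api_token": "insert-token-here", "timeout": 30})
    print(client.headers["X-INFRAHUB-KEY"])  # set because api_token was provided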
None, - delete_unused_nodes: bool = False, - group_type: Optional[str] = None, - ) -> Self: - self.mode = InfrahubClientMode.TRACKING - identifier = identifier or self.identifier or "python-sdk" - self.set_context_properties( - identifier=identifier, params=params, delete_unused_nodes=delete_unused_nodes, group_type=group_type - ) - return self - - def set_context_properties( - self, - identifier: str, - params: Optional[dict[str, str]] = None, - delete_unused_nodes: bool = True, - reset: bool = True, - group_type: Optional[str] = None, - ) -> None: - if reset: - if isinstance(self, InfrahubClient): - self.group_context = InfrahubGroupContext(self) - elif isinstance(self, InfrahubClientSync): - self.group_context = InfrahubGroupContextSync(self) - self.group_context.set_properties( - identifier=identifier, params=params, delete_unused_nodes=delete_unused_nodes, group_type=group_type - ) - - def _graphql_url( - self, - branch_name: Optional[str] = None, - at: Optional[Union[str, Timestamp]] = None, - ) -> str: - url = f"{self.config.address}/graphql" - if branch_name: - url += f"/{branch_name}" - - url_params = {} - if at: - at = Timestamp(at) - url_params["at"] = at.to_string() - url += "?" + "&".join([f"{key}={value}" for key, value in url_params.items()]) - - return url - - def _build_ip_address_allocation_query( - self, - resource_pool_id: str, - identifier: Optional[str] = None, - prefix_length: Optional[int] = None, - address_type: Optional[str] = None, - data: Optional[dict[str, Any]] = None, - ) -> Mutation: - input_data: dict[str, Any] = {"id": resource_pool_id} - - if identifier: - input_data["identifier"] = identifier - if prefix_length: - input_data["prefix_length"] = prefix_length - if address_type: - input_data["prefix_type"] = address_type - if data: - input_data["data"] = data - - return Mutation( - name="AllocateIPAddress", - mutation="IPAddressPoolGetResource", - query={"ok": None, "node": {"id": None, "kind": None, "identifier": None, "display_label": None}}, - input_data={"data": input_data}, - ) - - def _build_ip_prefix_allocation_query( - self, - resource_pool_id: str, - identifier: Optional[str] = None, - prefix_length: Optional[int] = None, - member_type: Optional[str] = None, - prefix_type: Optional[str] = None, - data: Optional[dict[str, Any]] = None, - ) -> Mutation: - input_data: dict[str, Any] = {"id": resource_pool_id} - - if identifier: - input_data["identifier"] = identifier - if prefix_length: - input_data["prefix_length"] = prefix_length - if member_type: - if member_type not in ("prefix", "address"): - raise ValueError("member_type possible values are 'prefix' or 'address'") - input_data["member_type"] = member_type - if prefix_type: - input_data["prefix_type"] = prefix_type - if data: - input_data["data"] = data - - return Mutation( - name="AllocateIPPrefix", - mutation="IPPrefixPoolGetResource", - query={"ok": None, "node": {"id": None, "kind": None, "identifier": None, "display_label": None}}, - input_data={"data": input_data}, - ) - - -class InfrahubClient(BaseClient): - """GraphQL Client to interact with Infrahub.""" - - group_context: InfrahubGroupContext - - def _initialize(self) -> None: - self.schema = InfrahubSchema(self) - self.branch = InfrahubBranchManager(self) - self.object_store = ObjectStore(self) - self.store = NodeStore() - self.concurrent_execution_limit = asyncio.Semaphore(self.max_concurrent_execution) - self._request_method: AsyncRequester = self.config.requester or self._default_request_method - self.group_context = 
InfrahubGroupContext(self) - - @classmethod - async def init( - cls, - address: str = "", - config: Optional[Union[Config, dict[str, Any]]] = None, - ) -> InfrahubClient: - warnings.warn( - "InfrahubClient.init has been deprecated and will be removed in Infrahub SDK 0.14.0 or the next major version", - DeprecationWarning, - stacklevel=1, - ) - return cls(address=address, config=config) - - async def create( - self, - kind: str, - data: Optional[dict] = None, - branch: Optional[str] = None, - **kwargs: Any, - ) -> InfrahubNode: - branch = branch or self.default_branch - schema = await self.schema.get(kind=kind, branch=branch) - - if not data and not kwargs: - raise ValueError("Either data or a list of keywords but be provided") - - return InfrahubNode(client=self, schema=schema, branch=branch, data=data or kwargs) - - async def delete(self, kind: str, id: str, branch: Optional[str] = None) -> None: - branch = branch or self.default_branch - schema = await self.schema.get(kind=kind, branch=branch) - - node = InfrahubNode(client=self, schema=schema, branch=branch, data={"id": id}) - await node.delete() - - async def get( - self, - kind: str, - at: Optional[Timestamp] = None, - branch: Optional[str] = None, - id: Optional[str] = None, - hfid: Optional[list[str]] = None, - include: Optional[list[str]] = None, - exclude: Optional[list[str]] = None, - populate_store: bool = False, - fragment: bool = False, - prefetch_relationships: bool = False, - **kwargs: Any, - ) -> InfrahubNode: - branch = branch or self.default_branch - schema = await self.schema.get(kind=kind, branch=branch) - - filters: MutableMapping[str, Any] = {} - - if id: - if not is_valid_uuid(id) and isinstance(schema, NodeSchema) and schema.default_filter: - filters[schema.default_filter] = id - else: - filters["ids"] = [id] - elif hfid: - if isinstance(schema, NodeSchema) and schema.human_friendly_id: - filters["hfid"] = hfid - else: - raise ValueError("Cannot filter by HFID if the node doesn't have an HFID defined") - elif kwargs: - filters = kwargs - else: - raise ValueError("At least one filter must be provided to get()") - - results = await self.filters( - kind=kind, - at=at, - branch=branch, - populate_store=populate_store, - include=include, - exclude=exclude, - fragment=fragment, - prefetch_relationships=prefetch_relationships, - **filters, - ) - - if len(results) == 0: - raise NodeNotFoundError(branch_name=branch, node_type=kind, identifier=filters) - if len(results) > 1: - raise IndexError("More than 1 node returned") - - return results[0] - - async def _process_nodes_and_relationships( - self, response: dict[str, Any], schema_kind: str, branch: str, prefetch_relationships: bool - ) -> ProcessRelationsNode: - """Processes InfrahubNode and their Relationships from the GraphQL query response. - - Args: - response (dict[str, Any]): The response from the GraphQL query. - schema_kind (str): The kind of schema being queried. - branch (str): The branch name. - prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data. - - Returns: - ProcessRelationsNodeSync: A TypedDict containing two lists: - - 'nodes': A list of InfrahubNode objects representing the nodes processed. 
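get() accepts exactly one lookup strategy: an id (a non-UUID value falls back to the schema's default_filter), an hfid when the schema defines human_friendly_id, or arbitrary filters; zero matches raise NodeNotFoundError and more than one raises IndexError. A sketch using a hypothetical InfraDevice kind:

    async def lookup(client: InfrahubClient) -> None:
        # by human-friendly ID, assuming the schema defines one
        device = await client.get(kind="InfraDevice", hfid=["atl1-edge1"])
        # a non-UUID id is resolved through the schema's default_filter
        same = await client.get(kind="InfraDevice", id="atl1-edge1")
        print(device.id == same.id)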
- - 'related_nodes': A list of InfrahubNode objects representing the related nodes - """ - - nodes: list[InfrahubNode] = [] - related_nodes: list[InfrahubNode] = [] - - for item in response.get(schema_kind, {}).get("edges", []): - node = await InfrahubNode.from_graphql(client=self, branch=branch, data=item) - nodes.append(node) - - if prefetch_relationships: - await node._process_relationships(node_data=item, branch=branch, related_nodes=related_nodes) - - return ProcessRelationsNode(nodes=nodes, related_nodes=related_nodes) - - async def all( - self, - kind: str, - at: Optional[Timestamp] = None, - branch: Optional[str] = None, - populate_store: bool = False, - offset: Optional[int] = None, - limit: Optional[int] = None, - include: Optional[list[str]] = None, - exclude: Optional[list[str]] = None, - fragment: bool = False, - prefetch_relationships: bool = False, - ) -> list[InfrahubNode]: - """Retrieve all nodes of a given kind - - Args: - kind (str): kind of the nodes to query - at (Timestamp, optional): Time of the query. Defaults to Now. - branch (str, optional): Name of the branch to query from. Defaults to default_branch. - populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes. - offset (int, optional): The offset for pagination. - limit (int, optional): The limit for pagination. - include (list[str], optional): List of attributes or relationships to include in the query. - exclude (list[str], optional): List of attributes or relationships to exclude from the query. - fragment (bool, optional): Flag to use GraphQL fragments for generic schemas. - prefetch_relationships (bool, optional): Flag to indicate whether to prefetch related node data. - - Returns: - list[InfrahubNode]: List of Nodes - """ - return await self.filters( - kind=kind, - at=at, - branch=branch, - populate_store=populate_store, - offset=offset, - limit=limit, - include=include, - exclude=exclude, - fragment=fragment, - prefetch_relationships=prefetch_relationships, - ) - - async def filters( - self, - kind: str, - at: Optional[Timestamp] = None, - branch: Optional[str] = None, - populate_store: bool = False, - offset: Optional[int] = None, - limit: Optional[int] = None, - include: Optional[list[str]] = None, - exclude: Optional[list[str]] = None, - fragment: bool = False, - prefetch_relationships: bool = False, - partial_match: bool = False, - **kwargs: Any, - ) -> list[InfrahubNode]: - """Retrieve nodes of a given kind based on provided filters. - - Args: - kind (str): kind of the nodes to query - at (Timestamp, optional): Time of the query. Defaults to Now. - branch (str, optional): Name of the branch to query from. Defaults to default_branch. - populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes. - offset (int, optional): The offset for pagination. - limit (int, optional): The limit for pagination. - include (list[str], optional): List of attributes or relationships to include in the query. - exclude (list[str], optional): List of attributes or relationships to exclude from the query. - fragment (bool, optional): Flag to use GraphQL fragments for generic schemas. - prefetch_relationships (bool, optional): Flag to indicate whether to prefetch related node data. - partial_match (bool, optional): Allow partial match of filter criteria for the query. - **kwargs (Any): Additional filter criteria for the query. - - Returns: - list[InfrahubNodeSync]: List of Nodes that match the given filters. 
- """ - schema = await self.schema.get(kind=kind, branch=branch) - - branch = branch or self.default_branch - if at: - at = Timestamp(at) - - node = InfrahubNode(client=self, schema=schema, branch=branch) - filters = kwargs - - if filters: - node.validate_filters(filters=filters) - - nodes: list[InfrahubNode] = [] - related_nodes: list[InfrahubNode] = [] - - has_remaining_items = True - page_number = 1 - - while has_remaining_items: - page_offset = (page_number - 1) * self.pagination_size - - query_data = await InfrahubNode(client=self, schema=schema, branch=branch).generate_query_data( - offset=offset or page_offset, - limit=limit or self.pagination_size, - filters=filters, - include=include, - exclude=exclude, - fragment=fragment, - prefetch_relationships=prefetch_relationships, - partial_match=partial_match, - ) - query = Query(query=query_data) - response = await self.execute_graphql( - query=query.render(), - branch_name=branch, - at=at, - tracker=f"query-{str(schema.kind).lower()}-page{page_number}", - ) - - process_result: ProcessRelationsNode = await self._process_nodes_and_relationships( - response=response, schema_kind=schema.kind, branch=branch, prefetch_relationships=prefetch_relationships - ) - nodes.extend(process_result["nodes"]) - related_nodes.extend(process_result["related_nodes"]) - - remaining_items = response[schema.kind].get("count", 0) - (page_offset + self.pagination_size) - if remaining_items < 0 or offset is not None or limit is not None: - has_remaining_items = False - - page_number += 1 - - if populate_store: - for node in nodes: - if node.id: - self.store.set(key=node.id, node=node) - related_nodes = list(set(related_nodes)) - for node in related_nodes: - if node.id: - self.store.set(key=node.id, node=node) - - return nodes - - def clone(self) -> InfrahubClient: - """Return a cloned version of the client using the same configuration""" - return InfrahubClient(config=self.config) - - async def execute_graphql( - self, - query: str, - variables: Optional[dict] = None, - branch_name: Optional[str] = None, - at: Optional[Union[str, Timestamp]] = None, - timeout: Optional[int] = None, - raise_for_error: bool = True, - tracker: Optional[str] = None, - ) -> dict: - """Execute a GraphQL query (or mutation). - If retry_on_failure is True, the query will retry until the server becomes reacheable. - - Args: - query (_type_): GraphQL Query to execute, can be a query or a mutation - variables (dict, optional): Variables to pass along with the GraphQL query. Defaults to None. - branch_name (str, optional): Name of the branch on which the query will be executed. Defaults to None. - at (str, optional): Time when the query should be executed. Defaults to None. - timeout (int, optional): Timeout in second for the query. Defaults to None. - raise_for_error (bool, optional): Flag to indicate that we need to raise an exception if the response has some errors. Defaults to True. 
- Raises: - GraphQLError: When an error occurs during the execution of the GraphQL query or mutation. - - Returns: - dict: The result of the GraphQL query or mutation. - """ - - url = self._graphql_url(branch_name=branch_name, at=at) - - payload: dict[str, Union[str, dict]] = {"query": query} - if variables: - payload["variables"] = variables - - headers = copy.copy(self.headers or {}) - if self.insert_tracker and tracker: - headers["X-Infrahub-Tracker"] = tracker - - self._echo(url=url, query=query, variables=variables) - - retry = True - resp = None - while retry: - retry = self.retry_on_failure - try: - resp = await self._post(url=url, payload=payload, headers=headers, timeout=timeout) - - if raise_for_error: - resp.raise_for_status() - - retry = False - except ServerNotReachableError: - if retry: - self.log.warning( - f"Unable to connect to {self.address}, will retry in {self.retry_delay} seconds .." - ) - await asyncio.sleep(delay=self.retry_delay) - else: - self.log.error(f"Unable to connect to {self.address} .. ") - raise - except httpx.HTTPStatusError as exc: - if exc.response.status_code in [401, 403]: - response = decode_json(response=exc.response) - errors = response.get("errors", []) - messages = [error.get("message") for error in errors] - raise AuthenticationError(" | ".join(messages)) from exc - - if not resp: - raise Error("Unexpected situation, resp hasn't been initialized.") - - response = decode_json(response=resp) - - if "errors" in response: - raise GraphQLError(errors=response["errors"], query=query, variables=variables) - - return response["data"] - - # TODO add a special method to execute mutation that will check if the method returned OK - - @handle_relogin - async def _post( - self, url: str, payload: dict, headers: Optional[dict] = None, timeout: Optional[int] = None - ) -> httpx.Response: - """Execute a HTTP POST with HTTPX. - - Raises: - ServerNotReachableError if we are not able to connect to the server - ServerNotResponsiveError if the server didn't respond before the timeout expired - """ - await self.login() - - headers = headers or {} - base_headers = copy.copy(self.headers or {}) - headers.update(base_headers) - - return await self._request( - url=url, method=HTTPMethod.POST, headers=headers, timeout=timeout or self.default_timeout, payload=payload - ) - - @handle_relogin - async def _get(self, url: str, headers: Optional[dict] = None, timeout: Optional[int] = None) -> httpx.Response: - """Execute a HTTP GET with HTTPX. 
- - Raises: - ServerNotReachableError if we are not able to connect to the server - ServerNotResponsiveError if the server didnd't respond before the timeout expired - """ - await self.login() - - headers = headers or {} - base_headers = copy.copy(self.headers or {}) - headers.update(base_headers) - - return await self._request( - url=url, method=HTTPMethod.GET, headers=headers, timeout=timeout or self.default_timeout - ) - - async def _request( - self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: Optional[dict] = None - ) -> httpx.Response: - response = await self._request_method(url=url, method=method, headers=headers, timeout=timeout, payload=payload) - self._record(response) - return response - - async def _default_request_method( - self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: Optional[dict] = None - ) -> httpx.Response: - params: dict[str, Any] = {} - if payload: - params["json"] = payload - - proxy_config: dict[str, Union[str, dict[str, httpx.HTTPTransport]]] = {} - if self.config.proxy: - proxy_config["proxy"] = self.config.proxy - elif self.config.proxy_mounts.is_set: - proxy_config["mounts"] = { - key: httpx.HTTPTransport(proxy=value) - for key, value in self.config.proxy_mounts.model_dump(by_alias=True).items() - } - - async with httpx.AsyncClient( - **proxy_config, # type: ignore[arg-type] - verify=self.config.tls_ca_file if self.config.tls_ca_file else not self.config.tls_insecure, - ) as client: - try: - response = await client.request( - method=method.value, - url=url, - headers=headers, - timeout=timeout, - **params, - ) - except httpx.NetworkError as exc: - raise ServerNotReachableError(address=self.address) from exc - except httpx.ReadTimeout as exc: - raise ServerNotResponsiveError(url=url, timeout=timeout) from exc - - return response - - async def refresh_login(self) -> None: - if not self.refresh_token: - return - - url = f"{self.address}/api/auth/refresh" - response = await self._request( - url=url, - method=HTTPMethod.POST, - headers={"content-type": "application/json", "Authorization": f"Bearer {self.refresh_token}"}, - timeout=self.default_timeout, - ) - - response.raise_for_status() - data = decode_json(response=response) - self.access_token = data["access_token"] - self.headers["Authorization"] = f"Bearer {self.access_token}" - - async def login(self, refresh: bool = False) -> None: - if not self.config.password_authentication: - return - - if self.access_token and not refresh: - return - - if self.refresh_token and refresh: - try: - await self.refresh_login() - return - except httpx.HTTPStatusError as exc: - # If we got a 401 while trying to refresh a token we must restart the authentication process - # Other status codes indicate other errors - if exc.response.status_code != 401: - response = exc.response.json() - errors = response.get("errors") - messages = [error.get("message") for error in errors] - raise AuthenticationError(" | ".join(messages)) from exc - - url = f"{self.address}/api/auth/login" - response = await self._request( - url=url, - method=HTTPMethod.POST, - payload={"username": self.config.username, "password": self.config.password}, - headers={"content-type": "application/json"}, - timeout=self.default_timeout, - ) - - response.raise_for_status() - data = decode_json(response=response) - self.access_token = data["access_token"] - self.refresh_token = data["refresh_token"] - self.headers["Authorization"] = f"Bearer {self.access_token}" - - async def query_gql_query( - self, - 
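The default request method builds its proxy configuration for httpx either from a single proxy URL or from per-scheme mounts, and derives TLS verification from tls_ca_file / tls_insecure. Roughly the equivalent hand-written httpx setup; the proxy URL, CA path, and endpoint are illustrative, and the proxy= keyword assumes a recent httpx release:

    import httpx

    transport = httpx.HTTPTransport(proxy="http://proxy.internal:3128")
    with httpx.Client(
        mounts={"http://": transport, "https://": transport},
        verify="/etc/ssl/certs/internal-ca.pem",  # tls_ca_file; otherwise `not tls_insecure`
    ) as http_client:
        response = http_client.get("http://localhost:8000/api")  # illustrative endpoint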
name: str, - variables: Optional[dict] = None, - update_group: bool = False, - subscribers: Optional[list[str]] = None, - params: Optional[dict] = None, - branch_name: Optional[str] = None, - at: Optional[str] = None, - timeout: Optional[int] = None, - tracker: Optional[str] = None, - raise_for_error: bool = True, - ) -> dict: - url = f"{self.address}/api/query/{name}" - url_params = copy.deepcopy(params or {}) - headers = copy.copy(self.headers or {}) - - if self.insert_tracker and tracker: - headers["X-Infrahub-Tracker"] = tracker - - if branch_name: - url_params["branch"] = branch_name - if at: - url_params["at"] = at - - if subscribers: - url_params["subscribers"] = subscribers - - url_params["update_group"] = str(update_group).lower() - - if url_params: - url_params_str = [] - for key, value in url_params.items(): - if isinstance(value, (list)): - for item in value: - url_params_str.append(f"{key}={item}") - else: - url_params_str.append(f"{key}={value}") - - url += "?" + "&".join(url_params_str) - - payload = {} - if variables: - payload["variables"] = variables - - resp = await self._post( - url=url, - headers=headers, - payload=payload, - timeout=timeout or self.default_timeout, - ) - - if raise_for_error: - resp.raise_for_status() - - return decode_json(response=resp) - - async def get_diff_summary( - self, - branch: str, - timeout: Optional[int] = None, - tracker: Optional[str] = None, - raise_for_error: bool = True, - ) -> list[NodeDiff]: - query = """ - query { - DiffSummary { - branch - id - kind - action - display_label - elements { - element_type - name - action - summary { - added - updated - removed - } - ... on DiffSummaryElementRelationshipMany { - peers { - action - summary { - added - updated - removed - } - } - } - } - } - } - """ - response = await self.execute_graphql( - query=query, branch_name=branch, timeout=timeout, tracker=tracker, raise_for_error=raise_for_error - ) - return response["DiffSummary"] - - async def allocate_next_ip_address( - self, - resource_pool: InfrahubNode, - identifier: Optional[str] = None, - prefix_length: Optional[int] = None, - address_type: Optional[str] = None, - data: Optional[dict[str, Any]] = None, - branch: Optional[str] = None, - timeout: Optional[int] = None, - tracker: Optional[str] = None, - raise_for_error: bool = True, - ) -> Optional[InfrahubNode]: - """Allocate a new IP address by using the provided resource pool. - - Args: - resource_pool (InfrahubNode): Node corresponding to the pool to allocate resources from. - identifier (str, optional): Value to perform idempotent allocation, the same resource will be returned for a given identifier. - prefix_length (int, optional): Length of the prefix to set on the address to allocate. - address_type (str, optional): Kind of the address to allocate. - data (dict, optional): A key/value map to use to set attributes values on the allocated address. - branch (str, optional): Name of the branch to allocate from. Defaults to default_branch. - timeout (int, optional): Flag to indicate whether to populate the store with the retrieved nodes. - tracker (str, optional): The offset for pagination. - raise_for_error (bool, optional): The limit for pagination. - Returns: - InfrahubNode: Node corresponding to the allocated resource. 
- """ - if resource_pool.get_kind() != "CoreIPAddressPool": - raise ValueError("resource_pool is not an IP address pool") - - branch = branch or self.default_branch - mutation_name = "IPAddressPoolGetResource" - - query = self._build_ip_address_allocation_query( - resource_pool_id=resource_pool.id, - identifier=identifier, - prefix_length=prefix_length, - address_type=address_type, - data=data, - ) - response = await self.execute_graphql( - query=query.render(), - branch_name=branch, - timeout=timeout, - tracker=tracker, - raise_for_error=raise_for_error, - ) - - if response[mutation_name]["ok"]: - resource_details = response[mutation_name]["node"] - return await self.get(kind=resource_details["kind"], id=resource_details["id"], branch=branch) - return None - - async def allocate_next_ip_prefix( - self, - resource_pool: InfrahubNode, - identifier: Optional[str] = None, - prefix_length: Optional[int] = None, - member_type: Optional[str] = None, - prefix_type: Optional[str] = None, - data: Optional[dict[str, Any]] = None, - branch: Optional[str] = None, - timeout: Optional[int] = None, - tracker: Optional[str] = None, - raise_for_error: bool = True, - ) -> Optional[InfrahubNode]: - """Allocate a new IP prefix by using the provided resource pool. - - Args: - resource_pool (InfrahubNode): Node corresponding to the pool to allocate resources from. - identifier (str, optional): Value to perform idempotent allocation, the same resource will be returned for a given identifier. - prefix_length (int, optional): Length of the prefix to allocate. - member_type (str, optional): Member type of the prefix to allocate. - prefix_type (str, optional): Kind of the prefix to allocate. - data (dict, optional): A key/value map to use to set attributes values on the allocated prefix. - branch (str, optional): Name of the branch to allocate from. Defaults to default_branch. - timeout (int, optional): Flag to indicate whether to populate the store with the retrieved nodes. - tracker (str, optional): The offset for pagination. - raise_for_error (bool, optional): The limit for pagination. - Returns: - InfrahubNode: Node corresponding to the allocated resource. 
- """ - if resource_pool.get_kind() != "CoreIPPrefixPool": - raise ValueError("resource_pool is not an IP prefix pool") - - branch = branch or self.default_branch - mutation_name = "IPPrefixPoolGetResource" - - query = self._build_ip_prefix_allocation_query( - resource_pool_id=resource_pool.id, - identifier=identifier, - prefix_length=prefix_length, - member_type=member_type, - prefix_type=prefix_type, - data=data, - ) - response = await self.execute_graphql( - query=query.render(), branch_name=branch, timeout=timeout, tracker=tracker, raise_for_error=raise_for_error - ) - - if response[mutation_name]["ok"]: - resource_details = response[mutation_name]["node"] - return await self.get(kind=resource_details["kind"], id=resource_details["id"], branch=branch) - return None - - async def create_batch(self, return_exceptions: bool = False) -> InfrahubBatch: - return InfrahubBatch(semaphore=self.concurrent_execution_limit, return_exceptions=return_exceptions) - - async def get_list_repositories( - self, branches: Optional[dict[str, BranchData]] = None, kind: str = "CoreGenericRepository" - ) -> dict[str, RepositoryData]: - branches = branches or await self.branch.all() - - batch = await self.create_batch() - for branch_name, branch in branches.items(): - batch.add( - task=self.all, - node=branch, # type: ignore[arg-type] - kind=kind, - branch=branch_name, - fragment=True, - include=["id", "name", "location", "commit", "ref", "internal_status"], - ) - - responses: dict[str, Any] = {} - async for branch, response in batch.execute(): - responses[branch.name] = response - - repositories: dict[str, RepositoryData] = {} - - for branch_name, response in responses.items(): - for repository in response: - repo_name = repository.name.value - if repo_name not in repositories: - repositories[repo_name] = RepositoryData( - repository=repository, - branches={}, - ) - - repositories[repo_name].branches[branch_name] = repository.commit.value - repositories[repo_name].branch_info[branch_name] = RepositoryBranchInfo( - internal_status=repository.internal_status.value - ) - - return repositories - - async def repository_update_commit( - self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False - ) -> bool: - variables = {"repository_id": str(repository_id), "commit": str(commit)} - await self.execute_graphql( - query=get_commit_update_mutation(is_read_only=is_read_only), - variables=variables, - branch_name=branch_name, - tracker="mutation-repository-update-commit", - ) - - return True - - async def __aenter__(self) -> Self: - return self - - async def __aexit__( - self, - exc_type: Optional[type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - if exc_type is None and self.mode == InfrahubClientMode.TRACKING: - await self.group_context.update_group() - - self.mode = InfrahubClientMode.DEFAULT - - -class InfrahubClientSync(BaseClient): - group_context: InfrahubGroupContextSync - - def _initialize(self) -> None: - self.schema = InfrahubSchemaSync(self) - self.branch = InfrahubBranchManagerSync(self) - self.object_store = ObjectStoreSync(self) - self.store = NodeStoreSync() - self._request_method: SyncRequester = self.config.sync_requester or self._default_request_method - self.group_context = InfrahubGroupContextSync(self) - - @classmethod - def init( - cls, - address: str = "", - config: Optional[Union[Config, dict[str, Any]]] = None, - ) -> InfrahubClientSync: - warnings.warn( - "InfrahubClientSync.init has been deprecated and will 
be removed in Infrahub SDK 0.14.0 or the next major version", - DeprecationWarning, - stacklevel=1, - ) - return cls(address=address, config=config) - - def create( - self, - kind: str, - data: Optional[dict] = None, - branch: Optional[str] = None, - **kwargs: Any, - ) -> InfrahubNodeSync: - branch = branch or self.default_branch - schema = self.schema.get(kind=kind, branch=branch) - - if not data and not kwargs: - raise ValueError("Either data or a list of keywords must be provided") - - return InfrahubNodeSync(client=self, schema=schema, branch=branch, data=data or kwargs) - - def delete(self, kind: str, id: str, branch: Optional[str] = None) -> None: - branch = branch or self.default_branch - schema = self.schema.get(kind=kind, branch=branch) - - node = InfrahubNodeSync(client=self, schema=schema, branch=branch, data={"id": id}) - node.delete() - - def create_batch(self, return_exceptions: bool = False) -> InfrahubBatch: - raise NotImplementedError("This method hasn't been implemented in the sync client yet.") - - def clone(self) -> InfrahubClientSync: - """Return a cloned version of the client using the same configuration""" - return InfrahubClientSync(config=self.config) - - def execute_graphql( - self, - query: str, - variables: Optional[dict] = None, - branch_name: Optional[str] = None, - at: Optional[Union[str, Timestamp]] = None, - timeout: Optional[int] = None, - raise_for_error: bool = True, - tracker: Optional[str] = None, - ) -> dict: - """Execute a GraphQL query (or mutation). - If retry_on_failure is True, the query will retry until the server becomes reachable. - - Args: - query (str): GraphQL Query to execute, can be a query or a mutation - variables (dict, optional): Variables to pass along with the GraphQL query. Defaults to None. - branch_name (str, optional): Name of the branch on which the query will be executed. Defaults to None. - at (str, optional): Time when the query should be executed. Defaults to None. - timeout (int, optional): Timeout in seconds for the query. Defaults to None. - raise_for_error (bool, optional): Flag to indicate that we need to raise an exception if the response has some errors. Defaults to True. - Raises: - GraphQLError: When an error occurs during the execution of the GraphQL query or mutation. - - Returns: - dict: The result of the GraphQL query or mutation. - """ - - url = self._graphql_url(branch_name=branch_name, at=at) - - payload: dict[str, Union[str, dict]] = {"query": query} - if variables: - payload["variables"] = variables - - headers = copy.copy(self.headers or {}) - if self.insert_tracker and tracker: - headers["X-Infrahub-Tracker"] = tracker - - self._echo(url=url, query=query, variables=variables) - - retry = True - resp = None - while retry: - retry = self.retry_on_failure - try: - resp = self._post(url=url, payload=payload, headers=headers, timeout=timeout) - - if raise_for_error: - resp.raise_for_status() - - retry = False - except ServerNotReachableError: - if retry: - self.log.warning( - f"Unable to connect to {self.address}, will retry in {self.retry_delay} seconds .." - ) - sleep(self.retry_delay) - else: - self.log.error(f"Unable to connect to {self.address} .. 
") - raise - except httpx.HTTPStatusError as exc: - if exc.response.status_code in [401, 403]: - response = decode_json(response=exc.response) - errors = response.get("errors", []) - messages = [error.get("message") for error in errors] - raise AuthenticationError(" | ".join(messages)) from exc - - if not resp: - raise Error("Unexpected situation, resp hasn't been initialized.") - - response = decode_json(response=resp) - - if "errors" in response: - raise GraphQLError(errors=response["errors"], query=query, variables=variables) - - return response["data"] - - # TODO add a special method to execute mutation that will check if the method returned OK - - def all( - self, - kind: str, - at: Optional[Timestamp] = None, - branch: Optional[str] = None, - populate_store: bool = False, - offset: Optional[int] = None, - limit: Optional[int] = None, - include: Optional[list[str]] = None, - exclude: Optional[list[str]] = None, - fragment: bool = False, - prefetch_relationships: bool = False, - ) -> list[InfrahubNodeSync]: - """Retrieve all nodes of a given kind - - Args: - kind (str): kind of the nodes to query - at (Timestamp, optional): Time of the query. Defaults to Now. - branch (str, optional): Name of the branch to query from. Defaults to default_branch. - populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes. - offset (int, optional): The offset for pagination. - limit (int, optional): The limit for pagination. - include (list[str], optional): List of attributes or relationships to include in the query. - exclude (list[str], optional): List of attributes or relationships to exclude from the query. - fragment (bool, optional): Flag to use GraphQL fragments for generic schemas. - prefetch_relationships (bool, optional): Flag to indicate whether to prefetch related node data. - - Returns: - list[InfrahubNodeSync]: List of Nodes - """ - return self.filters( - kind=kind, - at=at, - branch=branch, - populate_store=populate_store, - offset=offset, - limit=limit, - include=include, - exclude=exclude, - fragment=fragment, - prefetch_relationships=prefetch_relationships, - ) - - def _process_nodes_and_relationships( - self, response: dict[str, Any], schema_kind: str, branch: str, prefetch_relationships: bool - ) -> ProcessRelationsNodeSync: - """Processes InfrahubNodeSync and their Relationships from the GraphQL query response. - - Args: - response (dict[str, Any]): The response from the GraphQL query. - schema_kind (str): The kind of schema being queried. - branch (str): The branch name. - prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data. - - Returns: - ProcessRelationsNodeSync: A TypedDict containing two lists: - - 'nodes': A list of InfrahubNodeSync objects representing the nodes processed. 
- - 'related_nodes': A list of InfrahubNodeSync objects representing the related nodes - """ - - nodes: list[InfrahubNodeSync] = [] - related_nodes: list[InfrahubNodeSync] = [] - - for item in response.get(schema_kind, {}).get("edges", []): - node = InfrahubNodeSync.from_graphql(client=self, branch=branch, data=item) - nodes.append(node) - - if prefetch_relationships: - node._process_relationships(node_data=item, branch=branch, related_nodes=related_nodes) - - return ProcessRelationsNodeSync(nodes=nodes, related_nodes=related_nodes) - - def filters( - self, - kind: str, - at: Optional[Timestamp] = None, - branch: Optional[str] = None, - populate_store: bool = False, - offset: Optional[int] = None, - limit: Optional[int] = None, - include: Optional[list[str]] = None, - exclude: Optional[list[str]] = None, - fragment: bool = False, - prefetch_relationships: bool = False, - partial_match: bool = False, - **kwargs: Any, - ) -> list[InfrahubNodeSync]: - """Retrieve nodes of a given kind based on provided filters. - - Args: - kind (str): kind of the nodes to query - at (Timestamp, optional): Time of the query. Defaults to Now. - branch (str, optional): Name of the branch to query from. Defaults to default_branch. - populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes. - offset (int, optional): The offset for pagination. - limit (int, optional): The limit for pagination. - include (list[str], optional): List of attributes or relationships to include in the query. - exclude (list[str], optional): List of attributes or relationships to exclude from the query. - fragment (bool, optional): Flag to use GraphQL fragments for generic schemas. - prefetch_relationships (bool, optional): Flag to indicate whether to prefetch related node data. - partial_match (bool, optional): Allow partial match of filter criteria for the query. - **kwargs (Any): Additional filter criteria for the query. - - Returns: - list[InfrahubNodeSync]: List of Nodes that match the given filters. 
- """ - schema = self.schema.get(kind=kind, branch=branch) - - branch = branch or self.default_branch - if at: - at = Timestamp(at) - - node = InfrahubNodeSync(client=self, schema=schema, branch=branch) - filters = kwargs - - if filters: - node.validate_filters(filters=filters) - - nodes: list[InfrahubNodeSync] = [] - related_nodes: list[InfrahubNodeSync] = [] - - has_remaining_items = True - page_number = 1 - - while has_remaining_items: - page_offset = (page_number - 1) * self.pagination_size - - query_data = InfrahubNodeSync(client=self, schema=schema, branch=branch).generate_query_data( - offset=offset or page_offset, - limit=limit or self.pagination_size, - filters=filters, - include=include, - exclude=exclude, - fragment=fragment, - prefetch_relationships=prefetch_relationships, - partial_match=partial_match, - ) - query = Query(query=query_data) - response = self.execute_graphql( - query=query.render(), - branch_name=branch, - at=at, - tracker=f"query-{str(schema.kind).lower()}-page{page_number}", - ) - - process_result: ProcessRelationsNodeSync = self._process_nodes_and_relationships( - response=response, schema_kind=schema.kind, branch=branch, prefetch_relationships=prefetch_relationships - ) - nodes.extend(process_result["nodes"]) - related_nodes.extend(process_result["related_nodes"]) - - remaining_items = response[schema.kind].get("count", 0) - (page_offset + self.pagination_size) - if remaining_items < 0 or offset is not None or limit is not None: - has_remaining_items = False - - page_number += 1 - - if populate_store: - for node in nodes: - if node.id: - self.store.set(key=node.id, node=node) - related_nodes = list(set(related_nodes)) - for node in related_nodes: - if node.id: - self.store.set(key=node.id, node=node) - - return nodes - - def get( - self, - kind: str, - at: Optional[Timestamp] = None, - branch: Optional[str] = None, - id: Optional[str] = None, - hfid: Optional[list[str]] = None, - include: Optional[list[str]] = None, - exclude: Optional[list[str]] = None, - populate_store: bool = False, - fragment: bool = False, - prefetch_relationships: bool = False, - **kwargs: Any, - ) -> InfrahubNodeSync: - branch = branch or self.default_branch - schema = self.schema.get(kind=kind, branch=branch) - - filters: MutableMapping[str, Any] = {} - - if id: - if not is_valid_uuid(id) and isinstance(schema, NodeSchema) and schema.default_filter: - filters[schema.default_filter] = id - else: - filters["ids"] = [id] - elif hfid: - if isinstance(schema, NodeSchema) and schema.human_friendly_id: - filters["hfid"] = hfid - else: - raise ValueError("Cannot filter by HFID if the node doesn't have an HFID defined") - elif kwargs: - filters = kwargs - else: - raise ValueError("At least one filter must be provided to get()") - - results = self.filters( - kind=kind, - at=at, - branch=branch, - populate_store=populate_store, - include=include, - exclude=exclude, - fragment=fragment, - prefetch_relationships=prefetch_relationships, - **filters, - ) - - if len(results) == 0: - raise NodeNotFoundError(branch_name=branch, node_type=kind, identifier=filters) - if len(results) > 1: - raise IndexError("More than 1 node returned") - - return results[0] - - def get_list_repositories( - self, branches: Optional[dict[str, BranchData]] = None, kind: str = "CoreGenericRepository" - ) -> dict[str, RepositoryData]: - raise NotImplementedError( - "This method is deprecated in the async client and won't be implemented in the sync client." 
- ) - - def query_gql_query( - self, - name: str, - variables: Optional[dict] = None, - update_group: bool = False, - subscribers: Optional[list[str]] = None, - params: Optional[dict] = None, - branch_name: Optional[str] = None, - at: Optional[str] = None, - timeout: Optional[int] = None, - tracker: Optional[str] = None, - raise_for_error: bool = True, - ) -> dict: - url = f"{self.address}/api/query/{name}" - url_params = copy.deepcopy(params or {}) - headers = copy.copy(self.headers or {}) - - if self.insert_tracker and tracker: - headers["X-Infrahub-Tracker"] = tracker - - if branch_name: - url_params["branch"] = branch_name - if at: - url_params["at"] = at - if subscribers: - url_params["subscribers"] = subscribers - - url_params["update_group"] = str(update_group).lower() - - if url_params: - url_params_str = [] - for key, value in url_params.items(): - if isinstance(value, (list)): - for item in value: - url_params_str.append(f"{key}={item}") - else: - url_params_str.append(f"{key}={value}") - - url += "?" + "&".join(url_params_str) - - payload = {} - if variables: - payload["variables"] = variables - - resp = self._post( - url=url, - headers=headers, - payload=payload, - timeout=timeout or self.default_timeout, - ) - - if raise_for_error: - resp.raise_for_status() - - return decode_json(response=resp) - - def get_diff_summary( - self, - branch: str, - timeout: Optional[int] = None, - tracker: Optional[str] = None, - raise_for_error: bool = True, - ) -> list[NodeDiff]: - query = """ - query { - DiffSummary { - branch - id - kind - action - display_label - elements { - element_type - name - action - summary { - added - updated - removed - } - ... on DiffSummaryElementRelationshipMany { - peers { - action - summary { - added - updated - removed - } - } - } - } - } - } - """ - response = self.execute_graphql( - query=query, branch_name=branch, timeout=timeout, tracker=tracker, raise_for_error=raise_for_error - ) - return response["DiffSummary"] - - def allocate_next_ip_address( - self, - resource_pool: InfrahubNodeSync, - identifier: Optional[str] = None, - prefix_length: Optional[int] = None, - address_type: Optional[str] = None, - data: Optional[dict[str, Any]] = None, - branch: Optional[str] = None, - timeout: Optional[int] = None, - tracker: Optional[str] = None, - raise_for_error: bool = True, - ) -> Optional[InfrahubNodeSync]: - """Allocate a new IP address by using the provided resource pool. - - Args: - resource_pool (InfrahubNodeSync): Node corresponding to the pool to allocate resources from. - identifier (str, optional): Value to perform idempotent allocation, the same resource will be returned for a given identifier. - prefix_length (int, optional): Length of the prefix to set on the address to allocate. - address_type (str, optional): Kind of the address to allocate. - data (dict, optional): A key/value map to use to set attributes values on the allocated address. - branch (str, optional): Name of the branch to allocate from. Defaults to default_branch. - timeout (int, optional): Flag to indicate whether to populate the store with the retrieved nodes. - tracker (str, optional): The offset for pagination. - raise_for_error (bool, optional): The limit for pagination. - Returns: - InfrahubNodeSync: Node corresponding to the allocated resource. 
- """ - if resource_pool.get_kind() != "CoreIPAddressPool": - raise ValueError("resource_pool is not an IP address pool") - - branch = branch or self.default_branch - mutation_name = "IPAddressPoolGetResource" - - query = self._build_ip_address_allocation_query( - resource_pool_id=resource_pool.id, - identifier=identifier, - prefix_length=prefix_length, - address_type=address_type, - data=data, - ) - response = self.execute_graphql( - query=query.render(), branch_name=branch, timeout=timeout, tracker=tracker, raise_for_error=raise_for_error - ) - - if response[mutation_name]["ok"]: - resource_details = response[mutation_name]["node"] - return self.get(kind=resource_details["kind"], id=resource_details["id"], branch=branch) - return None - - def allocate_next_ip_prefix( - self, - resource_pool: InfrahubNodeSync, - identifier: Optional[str] = None, - prefix_length: Optional[int] = None, - member_type: Optional[str] = None, - prefix_type: Optional[str] = None, - data: Optional[dict[str, Any]] = None, - branch: Optional[str] = None, - timeout: Optional[int] = None, - tracker: Optional[str] = None, - raise_for_error: bool = True, - ) -> Optional[InfrahubNodeSync]: - """Allocate a new IP prefix by using the provided resource pool. - - Args: - resource_pool (InfrahubNodeSync): Node corresponding to the pool to allocate resources from. - identifier (str, optional): Value to perform idempotent allocation, the same resource will be returned for a given identifier. - size (int, optional): Length of the prefix to allocate. - member_type (str, optional): Member type of the prefix to allocate. - prefix_type (str, optional): Kind of the prefix to allocate. - data (dict, optional): A key/value map to use to set attributes values on the allocated prefix. - branch (str, optional): Name of the branch to allocate from. Defaults to default_branch. - timeout (int, optional): Flag to indicate whether to populate the store with the retrieved nodes. - tracker (str, optional): The offset for pagination. - raise_for_error (bool, optional): The limit for pagination. - Returns: - InfrahubNodeSync: Node corresponding to the allocated resource. - """ - if resource_pool.get_kind() != "CoreIPPrefixPool": - raise ValueError("resource_pool is not an IP prefix pool") - - branch = branch or self.default_branch - mutation_name = "IPPrefixPoolGetResource" - - query = self._build_ip_prefix_allocation_query( - resource_pool_id=resource_pool.id, - identifier=identifier, - prefix_length=prefix_length, - member_type=member_type, - prefix_type=prefix_type, - data=data, - ) - response = self.execute_graphql( - query=query.render(), branch_name=branch, timeout=timeout, tracker=tracker, raise_for_error=raise_for_error - ) - - if response[mutation_name]["ok"]: - resource_details = response[mutation_name]["node"] - return self.get(kind=resource_details["kind"], id=resource_details["id"], branch=branch) - return None - - def repository_update_commit( - self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False - ) -> bool: - raise NotImplementedError( - "This method is deprecated in the async client and won't be implemented in the sync client." - ) - - @handle_relogin_sync - def _get(self, url: str, headers: Optional[dict] = None, timeout: Optional[int] = None) -> httpx.Response: - """Execute a HTTP GET with HTTPX. 
-
- def repository_update_commit(
- self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False
- ) -> bool:
- raise NotImplementedError(
- "This method is deprecated in the async client and won't be implemented in the sync client."
- )
-
- @handle_relogin_sync
- def _get(self, url: str, headers: Optional[dict] = None, timeout: Optional[int] = None) -> httpx.Response:
- """Execute an HTTP GET with HTTPX.
-
- Raises:
- ServerNotReachableError if we are not able to connect to the server
- ServerNotResponsiveError if the server didn't respond before the timeout expired
- """
- self.login()
-
- headers = headers or {}
- base_headers = copy.copy(self.headers or {})
- headers.update(base_headers)
-
- return self._request(url=url, method=HTTPMethod.GET, headers=headers, timeout=timeout or self.default_timeout)
-
- @handle_relogin_sync
- def _post(
- self, url: str, payload: dict, headers: Optional[dict] = None, timeout: Optional[int] = None
- ) -> httpx.Response:
- """Execute an HTTP POST with HTTPX.
-
- Raises:
- ServerNotReachableError if we are not able to connect to the server
- ServerNotResponsiveError if the server didn't respond before the timeout expired
- """
- self.login()
-
- headers = headers or {}
- base_headers = copy.copy(self.headers or {})
- headers.update(base_headers)
-
- return self._request(
- url=url, method=HTTPMethod.POST, payload=payload, headers=headers, timeout=timeout or self.default_timeout
- )
-
- def _request(
- self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: Optional[dict] = None
- ) -> httpx.Response:
- response = self._request_method(url=url, method=method, headers=headers, timeout=timeout, payload=payload)
- self._record(response)
- return response
-
- def _default_request_method(
- self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: Optional[dict] = None
- ) -> httpx.Response:
- params: dict[str, Any] = {}
- if payload:
- params["json"] = payload
-
- proxy_config: dict[str, Union[str, dict[str, httpx.HTTPTransport]]] = {}
- if self.config.proxy:
- proxy_config["proxy"] = self.config.proxy
- elif self.config.proxy_mounts.is_set:
- proxy_config["mounts"] = {
- key: httpx.HTTPTransport(proxy=value)
- for key, value in self.config.proxy_mounts.model_dump(by_alias=True).items()
- }
-
- with httpx.Client(
- **proxy_config, # type: ignore[arg-type]
- verify=self.config.tls_ca_file if self.config.tls_ca_file else not self.config.tls_insecure,
- ) as client:
- try:
- response = client.request(
- method=method.value,
- url=url,
- headers=headers,
- timeout=timeout,
- **params,
- )
- except httpx.NetworkError as exc:
- raise ServerNotReachableError(address=self.address) from exc
- except httpx.ReadTimeout as exc:
- raise ServerNotResponsiveError(url=url, timeout=timeout) from exc
-
- return response
-
- def refresh_login(self) -> None:
- if not self.refresh_token:
- return
-
- url = f"{self.address}/api/auth/refresh"
- response = self._request(
- url=url,
- method=HTTPMethod.POST,
- headers={"content-type": "application/json", "Authorization": f"Bearer {self.refresh_token}"},
- timeout=self.default_timeout,
- )
-
- response.raise_for_status()
- data = decode_json(response=response)
- self.access_token = data["access_token"]
- self.headers["Authorization"] = f"Bearer {self.access_token}"
-
- def login(self, refresh: bool = False) -> None:
- if not self.config.password_authentication:
- return
-
- if self.access_token and not refresh:
- return
-
- if self.refresh_token and refresh:
- try:
- self.refresh_login()
- return
- except httpx.HTTPStatusError as exc:
- # If we got a 401 while trying to refresh a token we must restart the authentication process
- # Other status codes indicate other errors
- if exc.response.status_code != 401:
- response = exc.response.json()
- errors = response.get("errors")
- messages = [error.get("message") for error in errors]
- raise AuthenticationError(" | ".join(messages))
from exc - - url = f"{self.address}/api/auth/login" - response = self._request( - url=url, - method=HTTPMethod.POST, - payload={"username": self.config.username, "password": self.config.password}, - headers={"content-type": "application/json"}, - timeout=self.default_timeout, - ) - - response.raise_for_status() - data = decode_json(response=response) - self.access_token = data["access_token"] - self.refresh_token = data["refresh_token"] - self.headers["Authorization"] = f"Bearer {self.access_token}" - - def __enter__(self) -> Self: - return self - - def __exit__( - self, - exc_type: Optional[type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - if exc_type is None and self.mode == InfrahubClientMode.TRACKING: - self.group_context.update_group() - - self.mode = InfrahubClientMode.DEFAULT diff --git a/python_sdk/infrahub_sdk/config.py b/python_sdk/infrahub_sdk/config.py deleted file mode 100644 index d0ce48def8..0000000000 --- a/python_sdk/infrahub_sdk/config.py +++ /dev/null @@ -1,152 +0,0 @@ -from typing import Any, Optional - -from pydantic import Field, field_validator, model_validator -from pydantic_settings import BaseSettings, SettingsConfigDict -from typing_extensions import Self - -from infrahub_sdk.constants import InfrahubClientMode -from infrahub_sdk.playback import JSONPlayback -from infrahub_sdk.recorder import JSONRecorder, NoRecorder, Recorder, RecorderType -from infrahub_sdk.types import AsyncRequester, InfrahubLoggers, RequesterTransport, SyncRequester -from infrahub_sdk.utils import get_branch, is_valid_url - - -class ProxyMountsConfig(BaseSettings): - model_config = SettingsConfigDict(populate_by_name=True) - http: Optional[str] = Field( - default=None, - description="Proxy for HTTP requests", - alias="http://", - validation_alias="INFRAHUB_PROXY_MOUNTS_HTTP", - ) - https: Optional[str] = Field( - default=None, - description="Proxy for HTTPS requests", - alias="https://", - validation_alias="INFRAHUB_PROXY_MOUNTS_HTTPS", - ) - - @property - def is_set(self) -> bool: - return self.http is not None or self.https is not None - - -class ConfigBase(BaseSettings): - model_config = SettingsConfigDict(env_prefix="INFRAHUB_", validate_assignment=True) - address: str = Field(default="http://localhost:8000", description="The URL to use when connecting to Infrahub.") - api_token: Optional[str] = Field(default=None, description="API token for authentication against Infrahub.") - echo_graphql_queries: bool = Field( - default=False, description="If set the GraphQL query and variables will be echoed to the screen" - ) - username: Optional[str] = Field(default=None, description="Username for accessing Infrahub", min_length=1) - password: Optional[str] = Field(default=None, description="Password for accessing Infrahub", min_length=1) - default_branch: str = Field( - default="main", description="Default branch to target if not specified for each request." 
- )
- default_branch_from_git: bool = Field(
- default=False,
- description="Indicates if the default Infrahub branch to target should come from the active branch in the local Git repository.",
- )
- identifier: Optional[str] = Field(default=None, description="Tracker identifier")
- insert_tracker: bool = Field(default=False, description="Insert a tracker on queries to the server")
- max_concurrent_execution: int = Field(default=5, description="Max concurrent execution in batch mode")
- mode: InfrahubClientMode = Field(default=InfrahubClientMode.DEFAULT, description="Default mode for the client")
- pagination_size: int = Field(default=50, description="Page size for queries to the server")
- retry_delay: int = Field(default=5, description="Number of seconds to wait until attempting a retry.")
- retry_on_failure: bool = Field(default=False, description="Retry operation in case of failure")
- timeout: int = Field(default=10, description="Default connection timeout in seconds")
- transport: RequesterTransport = Field(
- default=RequesterTransport.HTTPX, description="Set an alternate transport using a predefined option"
- )
- proxy: Optional[str] = Field(default=None, description="Proxy address")
- proxy_mounts: ProxyMountsConfig = Field(default=ProxyMountsConfig(), description="Proxy mounts configuration")
- update_group_context: bool = Field(default=False, description="Update GraphQL query groups")
- tls_insecure: bool = Field(
- default=False,
- description="""
- Indicates if TLS certificates are verified.
- Enabling this option will disable CA verification, expiry date verification, and hostname verification.
- Can be useful to test with self-signed certificates.""",
- )
- tls_ca_file: Optional[str] = Field(default=None, description="File path to CA cert or bundle in PEM format")
-
- @model_validator(mode="before")
- @classmethod
- def validate_credentials_input(cls, values: dict[str, Any]) -> dict[str, Any]:
- has_username = "username" in values
- has_password = "password" in values
- if has_username != has_password:
- raise ValueError("Both 'username' and 'password' need to be set")
- return values
-
- @model_validator(mode="before")
- @classmethod
- def set_transport(cls, values: dict[str, Any]) -> dict[str, Any]:
- if values.get("transport") == RequesterTransport.JSON:
- playback = JSONPlayback()
- if "requester" not in values:
- values["requester"] = playback.async_request
- if "sync_requester" not in values:
- values["sync_requester"] = playback.sync_request
-
- return values
-
- @model_validator(mode="before")
- @classmethod
- def validate_mix_authentication_schemes(cls, values: dict[str, Any]) -> dict[str, Any]:
- if values.get("password") and values.get("api_token"):
- raise ValueError("Unable to combine password with token based authentication")
- return values
-
- @field_validator("address")
- @classmethod
- def validate_address(cls, value: str) -> str:
- if is_valid_url(value):
- return value.rstrip("/")
-
- raise ValueError("The configured address is not a valid URL")
-
- @model_validator(mode="after")
- def validate_proxy_config(self) -> Self:
- if self.proxy and self.proxy_mounts.is_set: # pylint: disable=no-member
- raise ValueError("'proxy' and 'proxy_mounts' are mutually exclusive")
- return self
-
- @property
- def default_infrahub_branch(self) -> str:
- branch: Optional[str] = None
- if not self.default_branch_from_git:
- branch = self.default_branch
-
- return get_branch(branch=branch)
-
- @property
- def password_authentication(self) -> bool:
- return bool(self.username)
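A quick illustration of how the credential validators above behave (a sketch; the token value is a placeholder):

    from infrahub_sdk.config import Config

    Config(username="admin", password="infrahub")  # valid: password authentication
    Config(api_token="my-token")                   # valid: token authentication
    # Config(username="admin")                     # rejected: 'username' and 'password' must be set together
    # Config(username="admin", password="infrahub", api_token="my-token")  # rejected: mixed schemes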
-
-
-class Config(ConfigBase):
- recorder: RecorderType = Field(default=RecorderType.NONE, description="Select builtin recorder for later replay.")
- custom_recorder: Recorder = Field(
- default_factory=NoRecorder.default, description="Provides a way to record responses from the Infrahub API"
- )
- requester: Optional[AsyncRequester] = None
- sync_requester: Optional[SyncRequester] = None
- log: Optional[Any] = None
-
- @property
- def logger(self) -> InfrahubLoggers:
- # We expect the log object to adhere to the interface defined by InfrahubLoggers.
- # When using structlog, the logger doesn't expose the expected methods when pydantic
- # inspects the object, so pydantic rejects it. This is a workaround to allow structlog
- # to be used as a logger
- return self.log # type: ignore
-
- @model_validator(mode="before")
- @classmethod
- def set_custom_recorder(cls, values: dict[str, Any]) -> dict[str, Any]:
- if values.get("recorder") == RecorderType.NONE and "custom_recorder" not in values:
- values["custom_recorder"] = NoRecorder()
- elif values.get("recorder") == RecorderType.JSON and "custom_recorder" not in values:
- values["custom_recorder"] = JSONRecorder()
- return values
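The recorder and transport settings above pair up into a record/replay workflow; a sketch:

    from infrahub_sdk.config import Config
    from infrahub_sdk.recorder import RecorderType
    from infrahub_sdk.types import RequesterTransport

    # Record: responses from the Infrahub API are captured by JSONRecorder.
    record_config = Config(recorder=RecorderType.JSON)
    # Replay: requests are answered from recorded data by JSONPlayback (wired in by set_transport).
    replay_config = Config(transport=RequesterTransport.JSON)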
- """ - - -@app.command("list") -@catch_exception(console=console) -async def list_branch(_: str = CONFIG_PARAM) -> None: - """List all existing branches.""" - - logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL) - - client = await initialize_client() - branches = await client.branch.all() - - table = Table(title="List of all branches") - - table.add_column("Name", justify="right", style="cyan", no_wrap=True) - table.add_column("Description") - table.add_column("Origin Branch") - table.add_column("Branched From") - table.add_column("Sync with Git") - table.add_column("Has Schema Changes") - table.add_column("Is Default") - - # identify the default branch and always print it first - default_branch = [branch for branch in branches.values() if branch.is_default][0] - table.add_row( - default_branch.name, - default_branch.description or " - ", - default_branch.origin_branch, - f"{default_branch.branched_from} ({calculate_time_diff(default_branch.branched_from)})", - "[green]True" if default_branch.sync_with_git else "[#FF7F50]False", - "[green]True" if default_branch.has_schema_changes else "[#FF7F50]False", - "[green]True" if default_branch.is_default else "[#FF7F50]False", - ) - - for branch in branches.values(): - if branch.is_default: - continue - - table.add_row( - branch.name, - branch.description or " - ", - branch.origin_branch, - f"{branch.branched_from} ({calculate_time_diff(branch.branched_from)})", - "[green]True" if branch.sync_with_git else "[#FF7F50]False", - "[green]True" if default_branch.has_schema_changes else "[#FF7F50]False", - "[green]True" if branch.is_default else "[#FF7F50]False", - ) - - console.print(table) - - -@app.command() -@catch_exception(console=console) -async def create( - branch_name: str = typer.Argument(..., help="Name of the branch to create"), - description: str = typer.Option(default="", help="Description of the branch"), - sync_with_git: bool = typer.Option( - False, help="Extend the branch to Git and have Infrahub create the branch in connected repositories." 
-
-
-@app.command()
-@catch_exception(console=console)
-async def create(
- branch_name: str = typer.Argument(..., help="Name of the branch to create"),
- description: str = typer.Option(default="", help="Description of the branch"),
- sync_with_git: bool = typer.Option(
- False, help="Extend the branch to Git and have Infrahub create the branch in connected repositories."
- ),
- isolated: bool = typer.Option(True, hidden=True, help="Set the branch to isolated mode (deprecated)"), # pylint: disable=unused-argument
- _: str = CONFIG_PARAM,
-) -> None:
- """Create a new branch."""
-
- logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL)
-
- client = await initialize_client()
- branch = await client.branch.create(branch_name=branch_name, description=description, sync_with_git=sync_with_git)
- console.print(f"Branch {branch_name!r} created successfully ({branch.id}).")
-
-
-@app.command()
-@catch_exception(console=console)
-async def delete(branch_name: str, _: str = CONFIG_PARAM) -> None:
- """Delete a branch."""
-
- logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL)
-
- client = await initialize_client()
- await client.branch.delete(branch_name=branch_name)
- console.print(f"Branch '{branch_name}' deleted successfully.")
-
-
-@app.command()
-@catch_exception(console=console)
-async def rebase(branch_name: str, _: str = CONFIG_PARAM) -> None:
- """Rebase a branch with main."""
-
- logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL)
-
- client = await initialize_client()
- await client.branch.rebase(branch_name=branch_name)
- console.print(f"Branch '{branch_name}' rebased successfully.")
-
-
-@app.command()
-@catch_exception(console=console)
-async def merge(branch_name: str, _: str = CONFIG_PARAM) -> None:
- """Merge a branch with main."""
-
- logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL)
-
- client = await initialize_client()
- await client.branch.merge(branch_name=branch_name)
- console.print(f"Branch '{branch_name}' merged successfully.")
-
-
-@app.command()
-@catch_exception(console=console)
-async def validate(branch_name: str, _: str = CONFIG_PARAM) -> None:
- """Validate that a branch has no conflicts and passes all the tests (NOT IMPLEMENTED YET)."""
-
- client = await initialize_client()
- await client.branch.validate(branch_name=branch_name)
- console.print(f"Branch '{branch_name}' is valid.")
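For context on what the check runner below discovers and loads: a user-defined check is a small class in the repository, roughly shaped like this (a sketch following infrahub_sdk.checks.InfrahubCheck, which this commit also removes; the query and field names are illustrative):

    from infrahub_sdk.checks import InfrahubCheck

    class DeviceStatusCheck(InfrahubCheck):
        query = "device_status"  # name of a GraphQL query stored in the repository

        def validate(self, data: dict) -> None:
            # Flag any device returned by the query that is decommissioned.
            for edge in data["InfraDevice"]["edges"]:
                node = edge["node"]
                if node["status"]["value"] == "decommissioned":
                    self.log_error(message="device is decommissioned", object_id=node["id"])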
- """ - - -@app.command() -@catch_exception(console=console) -def run( - *, - path: str, - debug: bool, - format_json: bool, - list_available: bool, - variables: dict[str, str], - name: Optional[str] = None, - branch: Optional[str] = None, -) -> None: - """Locate and execute all checks under the defined path.""" - - log_level = "DEBUG" if debug else "INFO" - FORMAT = "%(message)s" - logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()]) - - repository_config = get_repository_config(Path(config.INFRAHUB_REPO_CONFIG_FILE)) - - if list_available: - list_checks(repository_config=repository_config) - return - - check_definitions = repository_config.check_definitions - if name: - check_definitions = [check for check in repository_config.check_definitions if check.name == name] # pylint: disable=not-an-iterable - if not check_definitions: - console.print(f"[red]Unable to find requested transform: {name}") - list_checks(repository_config=repository_config) - return - - check_modules = get_modules(check_definitions=check_definitions) - aiorun( - run_checks( - check_modules=check_modules, - format_json=format_json, - path=path, - variables=variables, - branch=branch, - repository_config=repository_config, - ) - ) - - -async def run_check( - check_module: CheckModule, - client: InfrahubClient, - format_json: bool, - path: str, - repository_config: InfrahubRepositoryConfig, - branch: Optional[str] = None, - params: Optional[dict] = None, -) -> bool: - module_name = check_module.name - output = "stdout" if format_json else None - log = logging.getLogger("infrahub") - passed = True - check_class = check_module.get_check() - check = await check_class.init(client=client, params=params, output=output, root_directory=path, branch=branch) - param_log = f" - {params}" if params else "" - try: - data = execute_graphql_query( - query=check.query, - variables_dict=check.params, - repository_config=repository_config, - branch=branch, - debug=False, - ) - passed = await check.run(data) - if passed: - if not format_json: - log.info(f"{module_name}::{check}: [green]PASSED[/]{param_log}", extra={"markup": True}) - else: - passed = False - if not format_json: - log.error(f"{module_name}::{check}: [red]FAILED[/]{param_log}", extra={"markup": True}) - - for log_message in check.logs: - log.error(f" {log_message['message']}") - - except QueryNotFoundError as exc: - log.warning(f"{module_name}::{check}: unable to find query ({str(exc)})") - passed = False - except Exception as exc: # pylint: disable=broad-exception-caught - log.warning(f"{module_name}::{check}: An error occurred during execution ({exc})") - passed = False - - return passed - - -async def run_targeted_check( - check_module: CheckModule, - client: InfrahubClient, - format_json: bool, - path: str, - repository_config: InfrahubRepositoryConfig, - variables: dict[str, str], - branch: Optional[str] = None, -) -> bool: - filters = {} - param_value = list(check_module.definition.parameters.values()) - if param_value: - filters[param_value[0]] = check_module.definition.targets - - param_key = list(check_module.definition.parameters.keys()) - identifier = None - if param_key: - identifier = param_key[0] - - check_summary: list[bool] = [] - if variables: - result = await run_check( - check_module=check_module, - client=client, - format_json=format_json, - path=path, - branch=branch, - params=variables, - repository_config=repository_config, - ) - check_summary.append(result) - else: - targets = await 
-
-
-async def run_targeted_check(
- check_module: CheckModule,
- client: InfrahubClient,
- format_json: bool,
- path: str,
- repository_config: InfrahubRepositoryConfig,
- variables: dict[str, str],
- branch: Optional[str] = None,
-) -> bool:
- filters = {}
- param_value = list(check_module.definition.parameters.values())
- if param_value:
- filters[param_value[0]] = check_module.definition.targets
-
- param_key = list(check_module.definition.parameters.keys())
- identifier = None
- if param_key:
- identifier = param_key[0]
-
- check_summary: list[bool] = []
- if variables:
- result = await run_check(
- check_module=check_module,
- client=client,
- format_json=format_json,
- path=path,
- branch=branch,
- params=variables,
- repository_config=repository_config,
- )
- check_summary.append(result)
- else:
- targets = await client.get(kind="CoreGroup", include=["members"], **filters)
- await targets.members.fetch()
- for member in targets.members.peers:
- check_parameter = {}
- if identifier:
- attribute = getattr(member.peer, identifier)
- check_parameter = {identifier: attribute.value}
- result = await run_check(
- check_module=check_module,
- client=client,
- format_json=format_json,
- path=path,
- branch=branch,
- params=check_parameter,
- repository_config=repository_config,
- )
- check_summary.append(result)
-
- return all(check_summary)
-
-
-async def run_checks(
- check_modules: list[CheckModule],
- format_json: bool,
- path: str,
- variables: dict[str, str],
- repository_config: InfrahubRepositoryConfig,
- branch: Optional[str] = None,
-) -> None:
- log = logging.getLogger("infrahub")
-
- check_summary: list[bool] = []
- client = await initialize_client()
- for check_module in check_modules:
- if check_module.definition.targets:
- result = await run_targeted_check(
- check_module=check_module,
- client=client,
- repository_config=repository_config,
- format_json=format_json,
- path=path,
- variables=variables,
- branch=branch,
- )
- check_summary.append(result)
- else:
- result = await run_check(
- check_module=check_module,
- client=client,
- format_json=format_json,
- path=path,
- branch=branch,
- repository_config=repository_config,
- )
- check_summary.append(result)
-
- if not check_modules:
- if not format_json:
- log.warning("No check found")
- else:
- print('{"level": "WARNING", "message": "No check found"}')
-
- if not all(check_summary):
- sys.exit(1)
-
-
-def get_modules(check_definitions: list[InfrahubCheckDefinitionConfig]) -> list[CheckModule]:
- log = logging.getLogger("infrahub")
- modules = []
- for check_definition in check_definitions:
- directory_name = str(check_definition.file_path.parent)
- module_name = check_definition.file_path.stem
-
- if directory_name not in sys.path:
- sys.path.append(directory_name)
-
- try:
- module = importlib.import_module(module_name)
- except ModuleNotFoundError:
- log.error(f"Unable to load {check_definition.file_path}")
- continue
-
- if check_definition.class_name not in dir(module):
- log.error(f"{check_definition.class_name} class not found in {check_definition.file_path}")
- continue
- modules.append(CheckModule(name=module_name, module=module, definition=check_definition))
-
- return modules
-
-
-def list_checks(repository_config: InfrahubRepositoryConfig) -> None:
- console.print(f"Python checks defined in repository: {len(repository_config.check_definitions)}")
-
- for check in repository_config.check_definitions:
- target = check.targets or "-global-"
- console.print(f"{check.name} ({check.file_path}::{check.class_name}) Target: {target}")
diff --git a/python_sdk/infrahub_sdk/ctl/cli.py b/python_sdk/infrahub_sdk/ctl/cli.py
deleted file mode 100644
index 58998788aa..0000000000
--- a/python_sdk/infrahub_sdk/ctl/cli.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import sys
-
-try:
- from .cli_commands import app
-except ImportError as exc:
- sys.exit(
- f"Module {exc.name} is not available, install the 'ctl' extra of the infrahub-sdk package, `pip install 'infrahub-sdk[ctl]'` or enable the "
- "Poetry shell and run `poetry install --extras ctl`."
- ) - -__all__ = ["app"] diff --git a/python_sdk/infrahub_sdk/ctl/cli_commands.py b/python_sdk/infrahub_sdk/ctl/cli_commands.py deleted file mode 100644 index 8779b0f9a3..0000000000 --- a/python_sdk/infrahub_sdk/ctl/cli_commands.py +++ /dev/null @@ -1,445 +0,0 @@ -import asyncio -import functools -import importlib -import logging -import sys -from pathlib import Path -from typing import Any, Callable, Optional, Union - -import jinja2 -import typer -import ujson -from rich.console import Console -from rich.logging import RichHandler -from rich.traceback import Traceback - -from infrahub_sdk import __version__ as sdk_version -from infrahub_sdk import protocols as sdk_protocols -from infrahub_sdk.async_typer import AsyncTyper -from infrahub_sdk.ctl import config -from infrahub_sdk.ctl.branch import app as branch_app -from infrahub_sdk.ctl.check import run as run_check -from infrahub_sdk.ctl.client import initialize_client, initialize_client_sync -from infrahub_sdk.ctl.constants import PROTOCOLS_TEMPLATE -from infrahub_sdk.ctl.exceptions import QueryNotFoundError -from infrahub_sdk.ctl.generator import run as run_generator -from infrahub_sdk.ctl.render import list_jinja2_transforms -from infrahub_sdk.ctl.repository import app as repository_app -from infrahub_sdk.ctl.repository import get_repository_config -from infrahub_sdk.ctl.schema import app as schema -from infrahub_sdk.ctl.transform import list_transforms -from infrahub_sdk.ctl.utils import catch_exception, execute_graphql_query, parse_cli_vars -from infrahub_sdk.ctl.validate import app as validate_app -from infrahub_sdk.exceptions import GraphQLError, InfrahubTransformNotFoundError -from infrahub_sdk.jinja2 import identify_faulty_jinja_code -from infrahub_sdk.schema import AttributeSchema, GenericSchema, InfrahubRepositoryConfig, NodeSchema, RelationshipSchema -from infrahub_sdk.transforms import get_transform_class_instance -from infrahub_sdk.utils import get_branch, write_to_file - -from .exporter import dump -from .importer import load -from .parameters import CONFIG_PARAM - -app = AsyncTyper(pretty_exceptions_show_locals=False) - -app.add_typer(branch_app, name="branch") -app.add_typer(schema, name="schema") -app.add_typer(validate_app, name="validate") -app.add_typer(repository_app, name="repository") -app.command(name="dump")(dump) -app.command(name="load")(load) - -console = Console() - - -@app.command(name="check") -@catch_exception(console=console) -def check( - check_name: str = typer.Argument(default="", help="Name of the Python check"), - branch: Optional[str] = None, - path: str = typer.Option(".", help="Root directory"), - debug: bool = False, - format_json: bool = False, - _: str = CONFIG_PARAM, - list_available: bool = typer.Option(False, "--list", help="Show available Python checks"), - variables: Optional[list[str]] = typer.Argument( - None, help="Variables to pass along with the query. Format key=value key=value." 
- ),
-) -> None:
- """Execute user-defined checks."""
-
- variables_dict = parse_cli_vars(variables)
- run_check(
- path=path,
- debug=debug,
- branch=branch,
- format_json=format_json,
- list_available=list_available,
- name=check_name,
- variables=variables_dict,
- )
-
-
-@app.command(name="generator")
-@catch_exception(console=console)
-async def generator(
- generator_name: str = typer.Argument(default="", help="Name of the Generator"),
- branch: Optional[str] = None,
- path: str = typer.Option(".", help="Root directory"),
- debug: bool = False,
- _: str = CONFIG_PARAM,
- list_available: bool = typer.Option(False, "--list", help="Show available Generators"),
- variables: Optional[list[str]] = typer.Argument(
- None, help="Variables to pass along with the query. Format key=value key=value."
- ),
-) -> None:
- """Run a generator script."""
- await run_generator(
- generator_name=generator_name,
- branch=branch,
- path=path,
- debug=debug,
- list_available=list_available,
- variables=variables,
- )
-
-
-@app.command(name="run")
-@catch_exception(console=console)
-async def run(
- script: Path,
- method: str = "run",
- debug: bool = False,
- _: str = CONFIG_PARAM,
- branch: str = typer.Option("main", help="Branch on which to run the script."),
- concurrent: int = typer.Option(
- 4,
- help="Maximum number of requests to execute at the same time.",
- envvar="INFRAHUBCTL_CONCURRENT_EXECUTION",
- ),
- timeout: int = typer.Option(60, help="Timeout in sec", envvar="INFRAHUBCTL_TIMEOUT"),
- variables: Optional[list[str]] = typer.Argument(
- None, help="Variables to pass along with the query. Format key=value key=value."
- ),
-) -> None:
- """Execute a script."""
-
- logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL)
- logging.getLogger("httpx").setLevel(logging.ERROR)
- logging.getLogger("httpcore").setLevel(logging.ERROR)
-
- log_level = "DEBUG" if debug else "INFO"
- FORMAT = "%(message)s"
- logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
- log = logging.getLogger("infrahubctl")
-
- variables_dict = parse_cli_vars(variables)
-
- directory_name = str(script.parent)
- module_name = script.stem
-
- if directory_name not in sys.path:
- sys.path.append(directory_name)
-
- try:
- module = importlib.import_module(module_name)
- except ModuleNotFoundError as exc:
- raise typer.Abort(f"Unable to load the Python script at {script}") from exc
-
- if not hasattr(module, method):
- raise typer.Abort(f"Unable to load the method {method} in the Python script at {script}")
-
- client = await initialize_client(
- branch=branch, timeout=timeout, max_concurrent_execution=concurrent, identifier=module_name
- )
- func = getattr(module, method)
- await func(client=client, log=log, branch=branch, **variables_dict)
-
-
-def render_jinja2_template(template_path: Path, variables: dict[str, str], data: dict[str, Any]) -> str:
- if not template_path.is_file():
- console.print(f"[red]Unable to locate the template at {template_path}")
- raise typer.Exit(1)
-
- templateLoader = jinja2.FileSystemLoader(searchpath=".")
- templateEnv = jinja2.Environment(loader=templateLoader, trim_blocks=True, lstrip_blocks=True)
- template = templateEnv.get_template(str(template_path))
-
- try:
- rendered_tpl = template.render(**variables, data=data) # type: ignore[arg-type]
- except jinja2.TemplateSyntaxError as exc:
- console.print("[red]Syntax Error detected on the template")
- console.print(f"[yellow] {exc}")
- raise typer.Exit(1) from exc
-
- except jinja2.UndefinedError as exc:
-
console.print("[red]An error occurred while rendering the jinja template") - traceback = Traceback(show_locals=False) - errors = identify_faulty_jinja_code(traceback=traceback) - for frame, syntax in errors: - console.print(f"[yellow]{frame.filename} on line {frame.lineno}\n") - console.print(syntax) - console.print("") - console.print(traceback.trace.stacks[0].exc_value) - raise typer.Exit(1) from exc - - return rendered_tpl - - -def _run_transform( - query: str, - variables: dict[str, Any], - transformer: Callable, - branch: str, - debug: bool, - repository_config: InfrahubRepositoryConfig, -): - branch = get_branch(branch) - - try: - response = execute_graphql_query( - query=query, variables_dict=variables, branch=branch, debug=debug, repository_config=repository_config - ) - except QueryNotFoundError as exc: - console.print(f"[red]Unable to find query : {exc}") - raise typer.Exit(1) from exc - except GraphQLError as exc: - console.print(f"[red]{len(exc.errors)} error(s) occurred while executing the query") - for error in exc.errors: - if isinstance(error, dict) and "message" in error and "locations" in error: - console.print(f"[yellow] - Message: {error['message']}") # type: ignore[typeddict-item] - console.print(f"[yellow] Location: {error['locations']}") # type: ignore[typeddict-item] - elif isinstance(error, str) and "Branch:" in error: - console.print(f"[yellow] - {error}") - console.print("[yellow] you can specify a different branch with --branch") - raise typer.Abort() - - if asyncio.iscoroutinefunction(transformer.func): - output = asyncio.run(transformer(response)) - else: - output = transformer(response) - return output - - -@app.command(name="render") -@catch_exception(console=console) -def render( - transform_name: str = typer.Argument(default="", help="Name of the Python transformation", show_default=False), - variables: Optional[list[str]] = typer.Argument( - None, help="Variables to pass along with the query. Format key=value key=value." 
- ),
- branch: str = typer.Option(None, help="Branch on which to render the transform."),
- debug: bool = False,
- _: str = CONFIG_PARAM,
- list_available: bool = typer.Option(False, "--list", help="Show available transforms"),
- out: str = typer.Option(None, help="Path to a file to save the result."),
-) -> None:
- """Render a local Jinja2 Transform for debugging purposes."""
-
- variables_dict = parse_cli_vars(variables)
- repository_config = get_repository_config(Path(config.INFRAHUB_REPO_CONFIG_FILE))
-
- if list_available:
- list_jinja2_transforms(config=repository_config)
- return
-
- try:
- transform_config = repository_config.get_jinja2_transform(name=transform_name)
- except KeyError as exc:
- console.print(f'[red]Unable to find "{transform_name}" in {config.INFRAHUB_REPO_CONFIG_FILE}')
- list_jinja2_transforms(config=repository_config)
- raise typer.Exit(1) from exc
-
- transformer = functools.partial(render_jinja2_template, transform_config.template_path, variables_dict)
- result = _run_transform(
- query=transform_config.query,
- variables=variables_dict,
- transformer=transformer,
- branch=branch,
- debug=debug,
- repository_config=repository_config,
- )
-
- if out:
- write_to_file(Path(out), result)
- else:
- console.print(result)
-
-
-@app.command(name="transform")
-@catch_exception(console=console)
-def transform(
- transform_name: str = typer.Argument(default="", help="Name of the Python transformation", show_default=False),
- variables: Optional[list[str]] = typer.Argument(
- None, help="Variables to pass along with the query. Format key=value key=value."
- ),
- branch: str = typer.Option(None, help="Branch on which to run the transformation"),
- debug: bool = False,
- _: str = CONFIG_PARAM,
- list_available: bool = typer.Option(False, "--list", help="Show available transforms"),
- out: str = typer.Option(None, help="Path to a file to save the result."),
-) -> None:
- """Render a local transform (TransformPython) for debugging purposes."""
-
- variables_dict = parse_cli_vars(variables)
- repository_config = get_repository_config(Path(config.INFRAHUB_REPO_CONFIG_FILE))
-
- if list_available:
- list_transforms(config=repository_config)
- return
-
- matched = [transform for transform in repository_config.python_transforms if transform.name == transform_name] # pylint: disable=not-an-iterable
-
- if not matched:
- console.print(f"[red]Unable to find requested transform: {transform_name}")
- list_transforms(config=repository_config)
- return
-
- transform_config = matched[0]
-
- try:
- transform_instance = get_transform_class_instance(transform_config=transform_config)
- except InfrahubTransformNotFoundError as exc:
- console.print(f"Unable to load {transform_name} from python_transforms")
- raise typer.Exit(1) from exc
-
- transformer = functools.partial(transform_instance.transform)
- result = _run_transform(
- query=transform_instance.query,
- variables=variables_dict,
- transformer=transformer,
- branch=branch,
- debug=debug,
- repository_config=repository_config,
- )
-
- json_string = ujson.dumps(result, indent=2, sort_keys=True)
- if out:
- write_to_file(Path(out), json_string)
- else:
- console.print(json_string)
-
-
-@app.command(name="protocols")
-@catch_exception(console=console)
-def protocols( # noqa: PLR0915
- branch: str = typer.Option(None, help="Branch of schema to export Python protocols for."),
- _: str = CONFIG_PARAM,
- out: str = typer.Option("schema_protocols.py", help="Path to a file to save the result."),
-) -> None:
- """Export Python protocols
corresponding to a schema.""" - - def _jinja2_filter_inheritance(value: dict[str, Any]) -> str: - inherit_from: list[str] = value.get("inherit_from", []) - - if not inherit_from: - return "CoreNode" - return ", ".join(inherit_from) - - def _jinja2_filter_render_attribute(value: AttributeSchema) -> str: - attribute_kind_map = { - "boolean": "bool", - "datetime": "datetime", - "dropdown": "str", - "hashedpassword": "str", - "iphost": "str", - "ipnetwork": "str", - "json": "dict", - "list": "list", - "number": "int", - "password": "str", - "text": "str", - "textarea": "str", - "url": "str", - } - - name = value.name - kind = value.kind - - attribute_kind = attribute_kind_map[kind.lower()] - if value.optional: - attribute_kind = f"Optional[{attribute_kind}]" - - return f"{name}: {attribute_kind}" - - def _jinja2_filter_render_relationship(value: RelationshipSchema, sync: bool = False) -> str: - name = value.name - cardinality = value.cardinality - - type_ = "RelatedNode" - if cardinality == "many": - type_ = "RelationshipManager" - - if sync: - type_ += "Sync" - - return f"{name}: {type_}" - - def _sort_and_filter_models( - models: dict[str, Union[GenericSchema, NodeSchema]], filters: Optional[list[str]] = None - ) -> list[Union[GenericSchema, NodeSchema]]: - if filters is None: - filters = ["CoreNode"] - - filtered: list[Union[GenericSchema, NodeSchema]] = [] - for name, model in models.items(): - if name in filters: - continue - filtered.append(model) - - return sorted(filtered, key=lambda k: k.name) - - client = initialize_client_sync() - current_schema = client.schema.all(branch=branch) - - generics: dict[str, GenericSchema] = {} - nodes: dict[str, NodeSchema] = {} - - for name, schema_type in current_schema.items(): - if isinstance(schema_type, GenericSchema): - generics[name] = schema_type - if isinstance(schema_type, NodeSchema): - nodes[name] = schema_type - - base_protocols = [ - e - for e in dir(sdk_protocols) - if not e.startswith("__") - and not e.endswith("__") - and e not in ("TYPE_CHECKING", "CoreNode", "Optional", "Protocol", "Union", "annotations", "runtime_checkable") - ] - sorted_generics = _sort_and_filter_models(generics, filters=["CoreNode"] + base_protocols) - sorted_nodes = _sort_and_filter_models(nodes, filters=["CoreNode"] + base_protocols) - - jinja2_env = jinja2.Environment(loader=jinja2.BaseLoader, trim_blocks=True, lstrip_blocks=True) - jinja2_env.filters["inheritance"] = _jinja2_filter_inheritance - jinja2_env.filters["render_attribute"] = _jinja2_filter_render_attribute - jinja2_env.filters["render_relationship"] = _jinja2_filter_render_relationship - - template = jinja2_env.from_string(PROTOCOLS_TEMPLATE) - rendered = template.render(generics=sorted_generics, nodes=sorted_nodes, base_protocols=base_protocols, sync=False) - rendered_sync = template.render( - generics=sorted_generics, nodes=sorted_nodes, base_protocols=base_protocols, sync=True - ) - output_file = Path(out) - output_file_sync = Path(output_file.stem + "_sync" + output_file.suffix) - - if out: - write_to_file(output_file, rendered) - write_to_file(output_file_sync, rendered_sync) - console.print(f"Python protocols exported in {output_file} and {output_file_sync}") - else: - console.print(rendered) - console.print(rendered_sync) - - -@app.command(name="version") -@catch_exception(console=console) -def version(_: str = CONFIG_PARAM): - """Display the version of Infrahub and the version of the Python SDK in use.""" - - client = initialize_client_sync() - response = client.execute_graphql(query="query 
{ InfrahubInfo { version }}") - - infrahub_version = response["InfrahubInfo"]["version"] - console.print(f"Infrahub: v{infrahub_version}\nPython SDK: v{sdk_version}") diff --git a/python_sdk/infrahub_sdk/ctl/client.py b/python_sdk/infrahub_sdk/ctl/client.py deleted file mode 100644 index 49e32f65d2..0000000000 --- a/python_sdk/infrahub_sdk/ctl/client.py +++ /dev/null @@ -1,72 +0,0 @@ -from typing import Any, Optional - -from infrahub_sdk import InfrahubClient, InfrahubClientSync -from infrahub_sdk.config import Config -from infrahub_sdk.ctl import config - - -async def initialize_client( - branch: Optional[str] = None, - identifier: Optional[str] = None, - timeout: Optional[int] = None, - max_concurrent_execution: Optional[int] = None, - retry_on_failure: Optional[bool] = None, -) -> InfrahubClient: - return InfrahubClient( - config=_define_config( - branch=branch, - identifier=identifier, - timeout=timeout, - max_concurrent_execution=max_concurrent_execution, - retry_on_failure=retry_on_failure, - ) - ) - - -def initialize_client_sync( - branch: Optional[str] = None, - identifier: Optional[str] = None, - timeout: Optional[int] = None, - max_concurrent_execution: Optional[int] = None, - retry_on_failure: Optional[bool] = None, -) -> InfrahubClientSync: - return InfrahubClientSync( - config=_define_config( - branch=branch, - identifier=identifier, - timeout=timeout, - max_concurrent_execution=max_concurrent_execution, - retry_on_failure=retry_on_failure, - ) - ) - - -def _define_config( - branch: Optional[str] = None, - identifier: Optional[str] = None, - timeout: Optional[int] = None, - max_concurrent_execution: Optional[int] = None, - retry_on_failure: Optional[bool] = None, -) -> Config: - client_config: dict[str, Any] = { - "address": config.SETTINGS.active.server_address, - "insert_tracker": True, - "identifier": identifier, - } - - if config.SETTINGS.active.api_token: - client_config["api_token"] = config.SETTINGS.active.api_token - - if timeout: - client_config["timeout"] = timeout - - if max_concurrent_execution is not None: - client_config["max_concurrent_execution"] = max_concurrent_execution - - if retry_on_failure is not None: - client_config["retry_on_failure"] = retry_on_failure - - if branch: - client_config["default_branch"] = branch - - return Config(**client_config) diff --git a/python_sdk/infrahub_sdk/ctl/config.py b/python_sdk/infrahub_sdk/ctl/config.py deleted file mode 100644 index cfeb2fb70a..0000000000 --- a/python_sdk/infrahub_sdk/ctl/config.py +++ /dev/null @@ -1,91 +0,0 @@ -"""Config Class.""" - -from pathlib import Path -from typing import Optional, Union - -import toml -import typer -from pydantic import Field, ValidationError, field_validator -from pydantic_settings import BaseSettings, SettingsConfigDict - -DEFAULT_CONFIG_FILE = "infrahubctl.toml" -ENVVAR_CONFIG_FILE = "INFRAHUBCTL_CONFIG" -INFRAHUB_REPO_CONFIG_FILE = ".infrahub.yml" - - -class Settings(BaseSettings): - """Main Settings Class for the project.""" - - model_config = SettingsConfigDict(env_prefix="INFRAHUB_", populate_by_name=True, extra="allow") - server_address: str = Field(default="http://localhost:8000", validation_alias="infrahub_address") - api_token: Optional[str] = Field(default=None) - default_branch: str = Field(default="main") - - @field_validator("server_address") - @classmethod - def cleanup_server_address(cls, v: str) -> str: - return v.rstrip("/") - - -class ConfiguredSettings: - def __init__(self) -> None: - self._settings: Optional[Settings] = None - - @property - def 
active(self) -> Settings:
- if self._settings:
- return self._settings
-
- print("Configuration not properly loaded")
- raise typer.Abort()
-
- def load(self, config_file: Union[str, Path] = "infrahubctl.toml", config_data: Optional[dict] = None) -> None:
- """Load configuration.
-
- Configuration is loaded from a config file in toml format that contains the settings,
- or from a dictionary of those settings passed in as "config_data"
- """
-
- if self._settings:
- return
-
- if config_data:
- self._settings = Settings(**config_data)
- return
-
- if not isinstance(config_file, Path):
- config_file = Path(config_file)
-
- if config_file.is_file():
- config_string = config_file.read_text(encoding="utf-8")
- config_tmp = toml.loads(config_string)
-
- self._settings = Settings(**config_tmp)
- return
-
- self._settings = Settings()
-
- def load_and_exit(
- self, config_file: Union[str, Path] = "infrahubctl.toml", config_data: Optional[dict] = None
- ) -> None:
- """Calls load, but wraps it in a try except block.
-
- This is done to handle a ValidationError which is raised when settings are specified but invalid.
- In such cases, a message is printed to the screen indicating the settings which don't pass validation.
-
- Args:
- config_file (str | Path, optional): Path to the configuration file in toml format. Defaults to "infrahubctl.toml".
- config_data (dict, optional): Dictionary of settings to use instead of reading a config file. Defaults to None.
- """
-
- try:
- self.load(config_file=config_file, config_data=config_data)
- except ValidationError as exc:
- print(f"Configuration not valid, found {len(exc.errors())} error(s)")
- for error in exc.errors():
- loc_str = [str(item) for item in error["loc"]]
- print(f" {'/'.join(loc_str)} | {error['msg']} ({error['type']})")
- raise typer.Abort()
-
-
-SETTINGS = ConfiguredSettings()
diff --git a/python_sdk/infrahub_sdk/ctl/constants.py b/python_sdk/infrahub_sdk/ctl/constants.py
deleted file mode 100644
index 24887197b9..0000000000
--- a/python_sdk/infrahub_sdk/ctl/constants.py
+++ /dev/null
@@ -1,63 +0,0 @@
-PROTOCOLS_TEMPLATE = """#
-# Generated by "infrahubctl protocols"
-#
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING, Optional
-
-from infrahub_sdk.protocols import CoreNode, {{ base_protocols | join(', ') }}
-
-if TYPE_CHECKING:
- {% if sync %}
- from infrahub_sdk.node import RelatedNodeSync, RelationshipManagerSync
- {% else %}
- from infrahub_sdk.node import RelatedNode, RelationshipManager
- {% endif %}
-
-
-{% for generic in generics %}
-class {{ generic.namespace + generic.name }}(CoreNode):
- {% if not generic.attributes|default([]) and not generic.relationships|default([]) %}
- pass
- {% endif %}
- {% for attribute in generic.attributes|default([]) %}
- {{ attribute | render_attribute }}
- {% endfor %}
- {% for relationship in generic.relationships|default([]) %}
- {{ relationship | render_relationship(sync) }}
- {% endfor %}
- {% if generic.hierarchical | default(false) %}
- {% if sync %}
- parent: RelatedNodeSync
- children: RelationshipManagerSync
- {% else %}
- parent: RelatedNode
- children: RelationshipManager
- {% endif %}
- {% endif %}
-{% endfor %}
-
-
-{% for node in nodes %}
-class {{ node.namespace + node.name }}({{ node.inherit_from | join(", ") or "CoreNode" }}):
- {% if not node.attributes|default([]) and not node.relationships|default([]) %}
- pass
- {% endif %}
- {% for attribute in node.attributes|default([]) %}
- {{ attribute | render_attribute }}
- {% endfor %}
- {% for relationship in node.relationships|default([]) %}
- {{ relationship | render_relationship(sync) }}
-
{% endfor %} - {% if node.hierarchical | default(false) %} - {% if sync %} - parent: RelatedNodeSync - children: RelationshipManagerSync - {% else %} - parent: RelatedNode - children: RelationshipManager - {% endif %} - {% endif %} -{% endfor %} -""" diff --git a/python_sdk/infrahub_sdk/ctl/exceptions.py b/python_sdk/infrahub_sdk/ctl/exceptions.py deleted file mode 100644 index 28436d5b2d..0000000000 --- a/python_sdk/infrahub_sdk/ctl/exceptions.py +++ /dev/null @@ -1,14 +0,0 @@ -class Error(Exception): - """Infrahub CTL Base exception.""" - - -class QueryNotFoundError(Error): - def __init__(self, name: str, message: str = ""): - self.message = message or f"The requested query '{name}' was not found." - super().__init__(self.message) - - -class FileNotValidError(Error): - def __init__(self, name: str, message: str = ""): - self.message = message or f"Cannot parse '{name}' content." - super().__init__(self.message) diff --git a/python_sdk/infrahub_sdk/ctl/exporter.py b/python_sdk/infrahub_sdk/ctl/exporter.py deleted file mode 100644 index a292f23c8e..0000000000 --- a/python_sdk/infrahub_sdk/ctl/exporter.py +++ /dev/null @@ -1,50 +0,0 @@ -from asyncio import run as aiorun -from datetime import datetime, timezone -from pathlib import Path -from typing import List - -import typer -from rich.console import Console - -from infrahub_sdk.ctl.client import initialize_client -from infrahub_sdk.transfer.exceptions import TransferError -from infrahub_sdk.transfer.exporter.json import LineDelimitedJSONExporter - -from .parameters import CONFIG_PARAM - - -def directory_name_with_timestamp(): - right_now = datetime.now(timezone.utc).astimezone() - timestamp = right_now.strftime("%Y%m%d-%H%M%S") - return f"infrahubexport-{timestamp}" - - -def dump( - namespace: List[str] = typer.Option([], help="Namespace(s) to export"), - directory: Path = typer.Option(directory_name_with_timestamp, help="Directory path to store export"), - quiet: bool = typer.Option(False, help="No console output"), - _: str = CONFIG_PARAM, - branch: str = typer.Option("main", help="Branch from which to export"), - concurrent: int = typer.Option( - 4, - help="Maximum number of requests to execute at the same time.", - envvar="INFRAHUBCTL_CONCURRENT_EXECUTION", - ), - timeout: int = typer.Option(60, help="Timeout in sec", envvar="INFRAHUBCTL_TIMEOUT"), - exclude: List[str] = typer.Option( - ["CoreAccount"], - help="Prevent node kind(s) from being exported, CoreAccount is excluded by default", - ), -) -> None: - """Export nodes and their relationships out of the database.""" - console = Console() - - client = aiorun( - initialize_client(branch=branch, timeout=timeout, max_concurrent_execution=concurrent, retry_on_failure=True) - ) - exporter = LineDelimitedJSONExporter(client, console=Console() if not quiet else None) - try: - aiorun(exporter.export(export_directory=directory, namespaces=namespace, branch=branch, exclude=exclude)) - except TransferError as exc: - console.print(f"[red]{exc}") - raise typer.Exit(1) diff --git a/python_sdk/infrahub_sdk/ctl/generator.py b/python_sdk/infrahub_sdk/ctl/generator.py deleted file mode 100644 index a4d0de239c..0000000000 --- a/python_sdk/infrahub_sdk/ctl/generator.py +++ /dev/null @@ -1,102 +0,0 @@ -from pathlib import Path -from typing import Optional - -from rich.console import Console - -from infrahub_sdk import InfrahubNode -from infrahub_sdk.ctl import config -from infrahub_sdk.ctl.client import initialize_client -from infrahub_sdk.ctl.repository import get_repository_config -from 
infrahub_sdk.ctl.utils import execute_graphql_query, parse_cli_vars -from infrahub_sdk.schema import InfrahubRepositoryConfig - - -async def run( - generator_name: str, - path: str, - debug: bool, - list_available: bool, - branch: Optional[str] = None, - variables: Optional[list[str]] = None, -): # pylint: disable=unused-argument - repository_config = get_repository_config(Path(config.INFRAHUB_REPO_CONFIG_FILE)) - - if list_available: - list_generators(repository_config=repository_config) - return - - matched = [generator for generator in repository_config.generator_definitions if generator.name == generator_name] # pylint: disable=not-an-iterable - - console = Console() - - if not matched: - console.print(f"[red]Unable to find requested generator: {generator_name}") - list_generators(repository_config=repository_config) - return - - generator_config = matched[0] - generator_class = generator_config.load_class() - variables_dict = parse_cli_vars(variables) - - param_key = list(generator_config.parameters.keys()) - identifier = None - if param_key: - identifier = param_key[0] - - client = await initialize_client() - if variables_dict: - data = execute_graphql_query( - query=generator_config.query, - variables_dict=variables_dict, - branch=branch, - debug=False, - repository_config=repository_config, - ) - generator = generator_class( - query=generator_config.query, - client=client, - branch=branch, - params=variables_dict, - convert_query_response=generator_config.convert_query_response, - infrahub_node=InfrahubNode, - ) - await generator._init_client.schema.all(branch=generator.branch_name) - await generator.process_nodes(data=data) - await generator.run(identifier=generator_config.name, data=data) - - else: - targets = await client.get( - kind="CoreGroup", branch=branch, include=["members"], name__value=generator_config.targets - ) - await targets.members.fetch() - for member in targets.members.peers: - check_parameter = {} - if identifier: - attribute = getattr(member.peer, identifier) - check_parameter = {identifier: attribute.value} - params = {"name": member.peer.name.value} - generator = generator_class( - query=generator_config.query, - client=client, - branch=branch, - params=params, - convert_query_response=generator_config.convert_query_response, - infrahub_node=InfrahubNode, - ) - data = execute_graphql_query( - query=generator_config.query, - variables_dict=check_parameter, - branch=branch, - debug=False, - repository_config=repository_config, - ) - await generator._init_client.schema.all(branch=generator.branch_name) - await generator.run(identifier=generator_config.name, data=data) - - -def list_generators(repository_config: InfrahubRepositoryConfig) -> None: - console = Console() - console.print(f"Generators defined in repository: {len(repository_config.generator_definitions)}") - - for generator in repository_config.generator_definitions: - console.print(f"{generator.name} ({generator.file_path}::{generator.class_name}) Target: {generator.targets}") diff --git a/python_sdk/infrahub_sdk/ctl/importer.py b/python_sdk/infrahub_sdk/ctl/importer.py deleted file mode 100644 index b840f831ce..0000000000 --- a/python_sdk/infrahub_sdk/ctl/importer.py +++ /dev/null @@ -1,51 +0,0 @@ -from asyncio import run as aiorun -from pathlib import Path - -import typer -from rich.console import Console - -from infrahub_sdk.ctl.client import initialize_client -from infrahub_sdk.transfer.exceptions import TransferError -from infrahub_sdk.transfer.importer.json import LineDelimitedJSONImporter -from 
infrahub_sdk.transfer.schema_sorter import InfrahubSchemaTopologicalSorter
-
-from .parameters import CONFIG_PARAM
-
-
-def local_directory():
- # We use a function here to avoid failure when generating the documentation due to directory name
- return Path().resolve()
-
-
-def load(
- directory: Path = typer.Option(local_directory, help="Directory path of exported data"),
- continue_on_error: bool = typer.Option(
- False, help="Allow exceptions during loading and display them when complete"
- ),
- quiet: bool = typer.Option(False, help="No console output"),
- _: str = CONFIG_PARAM,
- branch: str = typer.Option("main", help="Branch into which to import"),
- concurrent: int = typer.Option(
- 4,
- help="Maximum number of requests to execute at the same time.",
- envvar="INFRAHUBCTL_CONCURRENT_EXECUTION",
- ),
- timeout: int = typer.Option(60, help="Timeout in sec", envvar="INFRAHUBCTL_TIMEOUT"),
-) -> None:
- """Import nodes and their relationships into the database."""
- console = Console()
-
- client = aiorun(
- initialize_client(branch=branch, timeout=timeout, max_concurrent_execution=concurrent, retry_on_failure=True)
- )
- importer = LineDelimitedJSONImporter(
- client,
- InfrahubSchemaTopologicalSorter(),
- continue_on_error=continue_on_error,
- console=Console() if not quiet else None,
- )
- try:
- aiorun(importer.import_data(import_directory=directory, branch=branch))
- except TransferError as exc:
- console.print(f"[red]{exc}")
- raise typer.Exit(1)
diff --git a/python_sdk/infrahub_sdk/ctl/parameters.py b/python_sdk/infrahub_sdk/ctl/parameters.py
deleted file mode 100644
index 740605e83c..0000000000
--- a/python_sdk/infrahub_sdk/ctl/parameters.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import typer
-
-from infrahub_sdk.ctl import config
-
-
-def load_configuration(value: str) -> str:
- """Load the configuration file using default environment variables or from the specified configuration file"""
- config.SETTINGS.load_and_exit(config_file=value)
- return value
-
-
-CONFIG_PARAM = typer.Option(
- config.DEFAULT_CONFIG_FILE, "--config-file", envvar=config.ENVVAR_CONFIG_FILE, callback=load_configuration
-)
diff --git a/python_sdk/infrahub_sdk/ctl/render.py b/python_sdk/infrahub_sdk/ctl/render.py
deleted file mode 100644
index 6e769c86ec..0000000000
--- a/python_sdk/infrahub_sdk/ctl/render.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from rich.console import Console
-
-from ..schema import InfrahubRepositoryConfig
-
-
-def list_jinja2_transforms(config: InfrahubRepositoryConfig) -> None:
- console = Console()
- console.print(f"Jinja2 transforms defined in repository: {len(config.jinja2_transforms)}")
-
- for transform in config.jinja2_transforms:
- console.print(f"{transform.name} ({transform.template_path})")
diff --git a/python_sdk/infrahub_sdk/ctl/repository.py b/python_sdk/infrahub_sdk/ctl/repository.py
deleted file mode 100644
index f3b26a6144..0000000000
--- a/python_sdk/infrahub_sdk/ctl/repository.py
+++ /dev/null
@@ -1,104 +0,0 @@
-from pathlib import Path
-
-import typer
-import yaml
-from pydantic import ValidationError
-from rich.console import Console
-
-from infrahub_sdk.async_typer import AsyncTyper
-from infrahub_sdk.ctl.client import initialize_client
-from infrahub_sdk.ctl.exceptions import FileNotValidError
-from infrahub_sdk.ctl.utils import init_logging
-from infrahub_sdk.graphql import Mutation
-from infrahub_sdk.schema import InfrahubRepositoryConfig
-
-from .parameters import CONFIG_PARAM
-
-app = AsyncTyper()
-console = Console()
-
-
-def get_repository_config(repo_config_file:
Path) -> InfrahubRepositoryConfig: - try: - config_file_data = load_repository_config_file(repo_config_file) - except FileNotFoundError as exc: - console.print(f"[red]File not found {exc}") - raise typer.Exit(1) from exc - except FileNotValidError as exc: - console.print(f"[red]{exc.message}") - raise typer.Exit(1) from exc - - try: - data = InfrahubRepositoryConfig(**config_file_data) - except ValidationError as exc: - console.print(f"[red]Repository config file not valid, found {len(exc.errors())} error(s)") - for error in exc.errors(): - loc_str = [str(item) for item in error["loc"]] - console.print(f" {'/'.join(loc_str)} | {error['msg']} ({error['type']})") - raise typer.Exit(1) from exc - - return data - - -def load_repository_config_file(repo_config_file: Path) -> dict: - if not repo_config_file.is_file(): - raise FileNotFoundError(repo_config_file) - - try: - yaml_data = repo_config_file.read_text() - data = yaml.safe_load(yaml_data) - except yaml.YAMLError as exc: - raise FileNotValidError(name=str(repo_config_file)) from exc - - return data - - -@app.callback() -def callback() -> None: - """ - Manage the repositories in a remote Infrahub instance. - - List, create, delete .. - """ - - -@app.command() -async def add( - name: str, - location: str, - description: str = "", - username: str = "", - password: str = "", - commit: str = "", - read_only: bool = False, - debug: bool = False, - branch: str = typer.Option("main", help="Branch on which to add the repository."), - _: str = CONFIG_PARAM, -) -> None: - """Add a new repository.""" - - init_logging(debug=debug) - - input_data = { - "data": { - "name": {"value": name}, - "location": {"value": location}, - "description": {"value": description}, - "commit": {"value": commit}, - }, - } - - client = await initialize_client() - - if username: - credential = await client.create(kind="CorePasswordCredential", name=name, username=username, password=password) - await credential.save() - input_data["data"]["credential"] = {"id": credential.id} - - query = Mutation( - mutation="CoreReadOnlyRepositoryCreate" if read_only else "CoreRepositoryCreate", - input_data=input_data, - query={"ok": None}, - ) - - await client.execute_graphql(query=query.render(), branch_name=branch, tracker="mutation-repository-create") diff --git a/python_sdk/infrahub_sdk/ctl/schema.py b/python_sdk/infrahub_sdk/ctl/schema.py deleted file mode 100644 index f6f2cda3c5..0000000000 --- a/python_sdk/infrahub_sdk/ctl/schema.py +++ /dev/null @@ -1,220 +0,0 @@ -import asyncio -import time -from pathlib import Path -from typing import Any, Optional - -import typer -import yaml -from pydantic import ValidationError -from rich.console import Console - -from infrahub_sdk import InfrahubClient -from infrahub_sdk.async_typer import AsyncTyper -from infrahub_sdk.ctl.client import initialize_client -from infrahub_sdk.ctl.exceptions import FileNotValidError -from infrahub_sdk.ctl.utils import catch_exception, init_logging -from infrahub_sdk.queries import SCHEMA_HASH_SYNC_STATUS -from infrahub_sdk.utils import find_files -from infrahub_sdk.yaml import SchemaFile - -from .parameters import CONFIG_PARAM - -app = AsyncTyper() -console = Console() - - -@app.callback() -def callback() -> None: - """ - Manage the schema in a remote Infrahub instance. 
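-
-    Example (an illustrative sketch of the SDK calls behind `schema load`;
-    the schema file path is hypothetical):
-
-        from pathlib import Path
-
-        import yaml
-
-        from infrahub_sdk.ctl.client import initialize_client
-
-        async def load_one_schema() -> None:
-            content = yaml.safe_load(Path("models/base.yml").read_text())
-            client = await initialize_client()
-            client.schema.validate(data=content)
-            response = await client.schema.load(schemas=[content], branch="main")
-            print(response.schema_updated)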
- """ - - -def load_schemas_from_disk(schemas: list[Path]) -> list[SchemaFile]: - schemas_data: list[SchemaFile] = [] - for schema in schemas: - if schema.is_file(): - schema_file = SchemaFile(location=schema) - schema_file.load_content() - schemas_data.append(schema_file) - elif schema.is_dir(): - files = find_files(extension=["yaml", "yml", "json"], directory=schema) - for item in files: - schema_file = SchemaFile(location=item) - schema_file.load_content() - schemas_data.append(schema_file) - else: - raise FileNotValidError(name=schema, message=f"Schema path: {schema} does not exist!") - - return schemas_data - - -def load_schemas_from_disk_and_exit(schemas: list[Path]): - has_error = False - try: - schemas_data = load_schemas_from_disk(schemas=schemas) - except FileNotValidError as exc: - console.print(f"[red]{exc.message}") - raise typer.Exit(1) from exc - - for schema_file in schemas_data: - if schema_file.valid and schema_file.content: - continue - console.print(f"[red]{schema_file.error_message} ({schema_file.location})") - has_error = True - - if has_error: - raise typer.Exit(1) - - return schemas_data - - -def validate_schema_content_and_exit(client: InfrahubClient, schemas: list[SchemaFile]) -> None: - has_error: bool = False - for schema_file in schemas: - try: - client.schema.validate(data=schema_file.content) - except ValidationError as exc: - console.print(f"[red]Schema not valid, found '{len(exc.errors())}' error(s) in {schema_file.location}") - has_error = True - for error in exc.errors(): - loc_str = [str(item) for item in error["loc"]] - console.print(f" '{'/'.join(loc_str)}' | {error['msg']} ({error['type']})") - - if has_error: - raise typer.Exit(1) - - -def display_schema_load_errors(response: dict[str, Any], schemas_data: list[dict]) -> None: - console.print("[red]Unable to load the schema:") - if "detail" not in response: - handle_non_detail_errors(response=response) - return - - for error in response["detail"]: - loc_path = error.get("loc", []) - if not valid_error_path(loc_path=loc_path): - continue - - # if the len of the path is equal to 6, the error is at the root of the object - # if the len of the path is higher than 6, the error is in an attribute or a relationships - schema_index = int(loc_path[2]) - node_index = int(loc_path[4]) - node = get_node(schemas_data=schemas_data, schema_index=schema_index, node_index=node_index) - - if not node: - console.print("Node data not found.") - continue - - if len(loc_path) == 6: - loc_type = loc_path[-1] - input_str = error.get("input", None) - error_message = f"{loc_type} ({input_str}) | {error['msg']} ({error['type']})" - console.print(f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}") - - elif len(loc_path) > 6: - loc_type = loc_path[5] - input_label = node[loc_type][loc_path[6]].get("name", None) - input_str = error.get("input", None) - error_message = f"{loc_type[:-1].title()}: {input_label} ({input_str}) | {error['msg']} ({error['type']})" - console.print(f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}") - - -def handle_non_detail_errors(response: dict[str, Any]) -> None: - if "error" in response: - console.print(f" {response.get('error')}") - elif "errors" in response: - for error in response.get("errors"): - console.print(f" {error.get('message')}") - else: - console.print(f" '{response}'") - - -def valid_error_path(loc_path: list[Any]) -> bool: - return len(loc_path) >= 6 and loc_path[0] == "body" and loc_path[1] == "schemas" - - -def 
get_node(schemas_data: list[dict], schema_index: int, node_index: int) -> Optional[dict]:
-    if schema_index < len(schemas_data) and node_index < len(schemas_data[schema_index].content["nodes"]):
-        return schemas_data[schema_index].content["nodes"][node_index]
-    return None
-
-
-@app.command()
-@catch_exception(console=console)
-async def load(
-    schemas: list[Path],
-    debug: bool = False,
-    branch: str = typer.Option("main", help="Branch on which to load the schema."),
-    wait: int = typer.Option(0, help="Time in seconds to wait until the schema has converged across all workers"),
-    _: str = CONFIG_PARAM,
-) -> None:
-    """Load one or multiple schema files into Infrahub."""
-
-    init_logging(debug=debug)
-
-    schemas_data = load_schemas_from_disk_and_exit(schemas=schemas)
-    schema_definition = "schema" if len(schemas_data) == 1 else "schemas"
-    client = await initialize_client()
-    validate_schema_content_and_exit(client=client, schemas=schemas_data)
-
-    start_time = time.time()
-    response = await client.schema.load(schemas=[item.content for item in schemas_data], branch=branch)
-    loading_time = time.time() - start_time
-
-    if response.errors:
-        display_schema_load_errors(response=response.errors, schemas_data=schemas_data)
-        raise typer.Exit(1)
-
-    if response.schema_updated:
-        for schema_file in schemas_data:
-            console.print(f"[green] schema '{schema_file.location}' loaded successfully")
-    else:
-        console.print("[green] The schema in Infrahub is already up to date, no changes were required")
-
-    console.print(f"[green] {len(schemas_data)} {schema_definition} processed in {loading_time:.3f} seconds.")
-
-    if response.schema_updated and wait:
-        waited = 0
-        continue_waiting = True
-        while continue_waiting:
-            status = await client.execute_graphql(query=SCHEMA_HASH_SYNC_STATUS, branch_name=branch)
-            if status["InfrahubStatus"]["summary"]["schema_hash_synced"]:
-                console.print("[green] Schema updated on all workers.")
-                continue_waiting = False
-            else:
-                if waited >= wait:
-                    console.print("[red] Schema is still not in sync after the specified waiting time")
-                    raise typer.Exit(1)
-                console.print("[yellow] Waiting for schema to sync across all workers")
-                waited += 1
-                await asyncio.sleep(delay=1)
-
-
-@app.command()
-@catch_exception(console=console)
-async def check(
-    schemas: list[Path],
-    debug: bool = False,
-    branch: str = typer.Option("main", help="Branch on which to check the schema."),
-    _: str = CONFIG_PARAM,
-) -> None:
-    """Check if schema files are valid and what would be the impact of loading them with Infrahub."""
-
-    init_logging(debug=debug)
-
-    schemas_data = load_schemas_from_disk_and_exit(schemas=schemas)
-    client = await initialize_client()
-    validate_schema_content_and_exit(client=client, schemas=schemas_data)
-
-    success, response = await client.schema.check(schemas=[item.content for item in schemas_data], branch=branch)
-
-    if not success:
-        display_schema_load_errors(response=response, schemas_data=schemas_data)
-    else:
-        for schema_file in schemas_data:
-            console.print(f"[green] schema '{schema_file.location}' is Valid!")
-        if response == {"diff": {"added": {}, "changed": {}, "removed": {}}}:
-            print("No diff")
-        else:
-            print(yaml.safe_dump(data=response, indent=4))
diff --git a/python_sdk/infrahub_sdk/ctl/transform.py b/python_sdk/infrahub_sdk/ctl/transform.py
deleted file mode 100644
index e0a85ec276..0000000000
--- a/python_sdk/infrahub_sdk/ctl/transform.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from rich.console import Console
-
-from ..schema import 
InfrahubRepositoryConfig
-
-
-def list_transforms(config: InfrahubRepositoryConfig) -> None:
-    console = Console()
-    console.print(f"Python transforms defined in repository: {len(config.python_transforms)}")
-
-    for transform in config.python_transforms:
-        console.print(f"{transform.name} ({transform.file_path}::{transform.class_name})")
diff --git a/python_sdk/infrahub_sdk/ctl/utils.py b/python_sdk/infrahub_sdk/ctl/utils.py
deleted file mode 100644
index 032a4d2bde..0000000000
--- a/python_sdk/infrahub_sdk/ctl/utils.py
+++ /dev/null
@@ -1,182 +0,0 @@
-import asyncio
-import logging
-import traceback
-from functools import wraps
-from pathlib import Path
-from typing import Any, Callable, Optional, Union
-
-import pendulum
-import typer
-from click.exceptions import Exit
-from httpx import HTTPError
-from pendulum.datetime import DateTime
-from rich.console import Console
-from rich.logging import RichHandler
-from rich.markup import escape
-
-from infrahub_sdk.ctl.exceptions import QueryNotFoundError
-from infrahub_sdk.exceptions import (
-    AuthenticationError,
-    Error,
-    FilterNotFoundError,
-    GraphQLError,
-    NodeNotFoundError,
-    SchemaNotFoundError,
-    ServerNotReachableError,
-    ServerNotResponsiveError,
-)
-from infrahub_sdk.schema import InfrahubRepositoryConfig
-
-from .client import initialize_client_sync
-
-
-def init_logging(debug: bool = False) -> None:
-    logging.getLogger("infrahub_sdk").setLevel(logging.CRITICAL)
-    logging.getLogger("httpx").setLevel(logging.ERROR)
-    logging.getLogger("httpcore").setLevel(logging.ERROR)
-
-    log_level = "DEBUG" if debug else "INFO"
-    FORMAT = "%(message)s"
-    logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
-    logging.getLogger("infrahubctl")
-
-
-def handle_exception(exc: Exception, console: Console, exit_code: int):
-    """Handle exceptions in a different fashion based on their type."""
-    if isinstance(exc, Exit):
-        raise typer.Exit(code=exc.exit_code)
-    if isinstance(exc, AuthenticationError):
-        console.print(f"[red]Authentication failure: {str(exc)}")
-        raise typer.Exit(code=exit_code)
-    if isinstance(exc, (ServerNotReachableError, ServerNotResponsiveError)):
-        console.print(f"[red]{str(exc)}")
-        raise typer.Exit(code=exit_code)
-    if isinstance(exc, HTTPError):
-        console.print(f"[red]HTTP communication failure: {str(exc)} on {exc.request.method} to {exc.request.url}")
-        raise typer.Exit(code=exit_code)
-    if isinstance(exc, GraphQLError):
-        print_graphql_errors(console=console, errors=exc.errors)
-        raise typer.Exit(code=exit_code)
-    if isinstance(exc, (SchemaNotFoundError, NodeNotFoundError, FilterNotFoundError)):
-        console.print(f"[red]Error: {str(exc)}")
-        raise typer.Exit(code=exit_code)
-
-    console.print(f"[red]Error: {str(exc)}")
-    console.print(traceback.format_exc())
-    raise typer.Exit(code=exit_code)
-
-
-def catch_exception(console: Optional[Console] = None, exit_code: int = 1):
-    """Decorator to handle exceptions for commands."""
-    if not console:
-        console = Console()
-
-    def decorator(func: Callable):
-        if asyncio.iscoroutinefunction(func):
-
-            @wraps(func)
-            async def async_wrapper(*args: Any, **kwargs: Any):
-                try:
-                    return await func(*args, **kwargs)
-                except (Error, Exception) as exc:  # pylint: disable=broad-exception-caught
-                    return handle_exception(exc=exc, console=console, exit_code=exit_code)
-
-            return async_wrapper
-
-        @wraps(func)
-        def wrapper(*args: Any, **kwargs: Any):
-            try:
-                return func(*args, **kwargs)
-            except (Error, Exception) as exc:  # pylint: 
disable=broad-exception-caught - return handle_exception(exc=exc, console=console, exit_code=exit_code) - - return wrapper - - return decorator - - -def execute_graphql_query( - query: str, - variables_dict: dict[str, Any], - repository_config: InfrahubRepositoryConfig, - branch: Optional[str] = None, - debug: bool = False, -) -> dict: - console = Console() - query_object = repository_config.get_query(name=query) - query_str = query_object.load_query() - - client = initialize_client_sync() - response = client.execute_graphql( - query=query_str, - branch_name=branch, - variables=variables_dict, - raise_for_error=False, - ) - - if debug: - message = ("-" * 40, f"Response for GraphQL Query {query}", response, "-" * 40) - console.print("\n".join(message)) - - return response - - -def print_graphql_errors(console: Console, errors: list) -> None: - if not isinstance(errors, list): - console.print(f"[red]{escape(str(errors))}") - - for error in errors: - if isinstance(error, dict) and "message" in error and "path" in error: - console.print(f"[red]{escape(str(error['path']))} {escape(str(error['message']))}") - else: - console.print(f"[red]{escape(str(error))}") - - -def parse_cli_vars(variables: Optional[list[str]]) -> dict[str, str]: - if not variables: - return {} - - return {var.split("=")[0]: var.split("=")[1] for var in variables if "=" in var} - - -def calculate_time_diff(value: str) -> Optional[str]: - """Calculate the time in human format between a timedate in string format and now.""" - try: - time_value = pendulum.parse(value) - except pendulum.parsing.exceptions.ParserError: - return None - - if not isinstance(time_value, DateTime): - return None - - pendulum.set_locale("en") - return time_value.diff_for_humans(other=pendulum.now(), absolute=True) - - -def find_graphql_query(name: str, directory: Union[str, Path] = ".") -> str: - if isinstance(directory, str): - directory = Path(directory) - - for query_file in directory.glob("**/*.gql"): - if query_file.stem != name: - continue - return query_file.read_text(encoding="utf-8") - - raise QueryNotFoundError(name=name) - - -def render_action_rich(value: str) -> str: - if value == "created": - return f"[green]{value.upper()}[/green]" - if value == "updated": - return f"[magenta]{value.upper()}[/magenta]" - if value == "deleted": - return f"[red]{value.upper()}[/red]" - - return value.upper() - - -def get_fixtures_dir() -> Path: - """Get the directory which stores fixtures that are common to multiple unit/integration tests.""" - here = Path(__file__).resolve().parent - return here.parent.parent / "tests" / "fixtures" diff --git a/python_sdk/infrahub_sdk/ctl/validate.py b/python_sdk/infrahub_sdk/ctl/validate.py deleted file mode 100644 index f402b487f6..0000000000 --- a/python_sdk/infrahub_sdk/ctl/validate.py +++ /dev/null @@ -1,112 +0,0 @@ -import sys -from pathlib import Path -from typing import Optional - -import typer -import ujson -import yaml -from pydantic import ValidationError -from rich.console import Console -from ujson import JSONDecodeError - -from infrahub_sdk.async_typer import AsyncTyper -from infrahub_sdk.ctl.client import initialize_client, initialize_client_sync -from infrahub_sdk.ctl.exceptions import QueryNotFoundError -from infrahub_sdk.ctl.utils import catch_exception, find_graphql_query, parse_cli_vars -from infrahub_sdk.exceptions import GraphQLError -from infrahub_sdk.utils import get_branch, write_to_file - -from .parameters import CONFIG_PARAM - -app = AsyncTyper() -console = Console() - - -@app.callback() -def 
callback() -> None: - """ - Helper to validate the format of various files. - """ - - -@app.command(name="schema") -@catch_exception(console=console) -async def validate_schema(schema: Path, _: str = CONFIG_PARAM) -> None: - """Validate the format of a schema file either in JSON or YAML""" - - try: - schema_data = yaml.safe_load(schema.read_text()) or {} - except JSONDecodeError as exc: - console.print("[red]Invalid JSON file") - raise typer.Exit(1) from exc - - client = await initialize_client() - - try: - client.schema.validate(schema_data) - except ValidationError as exc: - console.print(f"[red]Schema not valid, found {len(exc.errors())} error(s)") - for error in exc.errors(): - loc_str = [str(item) for item in error["loc"]] - console.print(f" '{'/'.join(loc_str)}' | {error['msg']} ({error['type']})") - raise typer.Exit(1) - - console.print("[green]Schema is valid !!") - - -@app.command(name="graphql-query") -@catch_exception(console=console) -def validate_graphql( - query: str, - variables: Optional[list[str]] = typer.Argument( - None, help="Variables to pass along with the query. Format key=value key=value." - ), - debug: bool = typer.Option(False, help="Display more troubleshooting information."), - branch: str = typer.Option(None, help="Branch on which to validate the GraphQL Query."), - _: str = CONFIG_PARAM, - out: str = typer.Option(None, help="Path to a file to save the result."), -) -> None: - """Validate the format of a GraphQL Query stored locally by executing it on a remote GraphQL endpoint""" - - branch = get_branch(branch) - - try: - query_str = find_graphql_query(query) - except QueryNotFoundError: - console.print(f"[red]Unable to find the GraphQL Query : {query}") - sys.exit(1) - - console.print(f"[purple]Query '{query}' will be validated on branch '{branch}'.") - - variables_dict = parse_cli_vars(variables) - - client = initialize_client_sync() - try: - response = client.execute_graphql( - query=query_str, - branch_name=branch, - variables=variables_dict, - raise_for_error=False, - ) - except GraphQLError as exc: - console.print(f"[red]{len(exc.errors)} error(s) occurred while executing the query") - for error in exc.errors: - if isinstance(error, dict) and "message" in error and "locations" in error: - console.print(f"[yellow] - Message: {error['message']}") - console.print(f"[yellow] Location: {error['locations']}") - elif isinstance(error, str) and "Branch:" in error: - console.print(f"[yellow] - {error}") - console.print("[yellow] you can specify a different branch with --branch") - sys.exit(1) - - console.print("[green] Query executed successfully.") - - if debug: - console.print("-" * 40) - console.print(f"Response for GraphQL Query {query}") - console.print(response) - console.print("-" * 40) - - if out: - json_string = ujson.dumps(response, indent=2, sort_keys=True) - write_to_file(Path(out), json_string) diff --git a/python_sdk/infrahub_sdk/data.py b/python_sdk/infrahub_sdk/data.py deleted file mode 100644 index 6b831ed19c..0000000000 --- a/python_sdk/infrahub_sdk/data.py +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Optional - -from pydantic import BaseModel, ConfigDict, Field - -from infrahub_sdk.node import InfrahubNode - - -class RepositoryBranchInfo(BaseModel): - internal_status: str - - -class RepositoryData(BaseModel): - model_config = ConfigDict(arbitrary_types_allowed=True) - repository: InfrahubNode = Field(..., description="InfrahubNode representing a Repository") - branches: dict[str, str] = Field( - ..., description="Dictionary with the name 
of the branch as the key and the active commit id as the value" - ) - - branch_info: dict[str, RepositoryBranchInfo] = Field(default_factory=dict) - - def get_staging_branch(self) -> Optional[str]: - for branch, info in self.branch_info.items(): # pylint: disable=no-member - if info.internal_status == "staging": - return branch - return None diff --git a/python_sdk/infrahub_sdk/exceptions.py b/python_sdk/infrahub_sdk/exceptions.py deleted file mode 100644 index b9628bebb2..0000000000 --- a/python_sdk/infrahub_sdk/exceptions.py +++ /dev/null @@ -1,133 +0,0 @@ -from __future__ import annotations - -from typing import Any, Mapping, Optional - - -class Error(Exception): - def __init__(self, message: Optional[str] = None): - self.message = message - super().__init__(self.message) - - -class JsonDecodeError(Error): - def __init__(self, message: Optional[str] = None, content: Optional[str] = None, url: Optional[str] = None): - self.message = message - self.content = content - self.url = url - if not self.message and self.url: - self.message = f"Unable to decode response as JSON data from {self.url}" - super().__init__(self.message) - - -class ServerNotReachableError(Error): - def __init__(self, address: str, message: Optional[str] = None): - self.address = address - self.message = message or f"Unable to connect to '{address}'." - super().__init__(self.message) - - -class ServerNotResponsiveError(Error): - def __init__(self, url: str, timeout: Optional[int] = None, message: Optional[str] = None): - self.url = url - self.timeout = timeout - self.message = message or f"Unable to read from '{url}'." - if timeout: - self.message += f" (timeout: {timeout} sec)" - super().__init__(self.message) - - -class GraphQLError(Error): - def __init__(self, errors: list[dict[str, Any]], query: Optional[str] = None, variables: Optional[dict] = None): - self.query = query - self.variables = variables - self.errors = errors - self.message = f"An error occurred while executing the GraphQL Query {self.query}, {self.errors}" - super().__init__(self.message) - - -class BranchNotFoundError(Error): - def __init__(self, identifier: str, message: Optional[str] = None): - self.identifier = identifier - self.message = message or f"Unable to find the branch '{identifier}' in the Database." - super().__init__(self.message) - - -class SchemaNotFoundError(Error): - def __init__(self, identifier: str, message: Optional[str] = None): - self.identifier = identifier - self.message = message or f"Unable to find the schema '{identifier}'." 
- super().__init__(self.message) - - -class ModuleImportError(Error): - def __init__(self, message: Optional[str] = None): - self.message = message or "Unable to import the module" - super().__init__(self.message) - - -class NodeNotFoundError(Error): - def __init__( - self, - node_type: str, - identifier: Mapping[str, list[str]], - message: str = "Unable to find the node in the database.", - branch_name: Optional[str] = None, - ): - self.node_type = node_type - self.identifier = identifier - self.branch_name = branch_name - - self.message = message - super().__init__(self.message) - - def __str__(self) -> str: - return f""" - {self.message} - {self.branch_name} | {self.node_type} | {self.identifier} - """ - - -class FilterNotFoundError(Error): - def __init__(self, identifier: str, kind: str, message: Optional[str] = None, filters: Optional[list[str]] = None): - self.identifier = identifier - self.kind = kind - self.filters = filters or [] - self.message = message or f"{identifier!r} is not a valid filter for {self.kind!r} ({', '.join(self.filters)})." - super().__init__(self.message) - - -class InfrahubCheckNotFoundError(Error): - def __init__(self, name: str, message: Optional[str] = None): - self.message = message or f"The requested InfrahubCheck '{name}' was not found." - super().__init__(self.message) - - -class InfrahubTransformNotFoundError(Error): - def __init__(self, name: str, message: Optional[str] = None): - self.message = message or f"The requested InfrahubTransform '{name}' was not found." - super().__init__(self.message) - - -class ValidationError(Error): - def __init__(self, identifier: str, message: str): - self.identifier = identifier - self.message = message - super().__init__(self.message) - - -class AuthenticationError(Error): - def __init__(self, message: Optional[str] = None): - self.message = message or "Authentication Error, unable to execute the query." 
-        super().__init__(self.message)
-
-
-class FeatureNotSupportedError(Error):
-    """Raised when trying to use a method on a node that doesn't support it."""
-
-
-class UninitializedError(Error):
-    """Raised when an object requires an initialization step before use"""
-
-
-class InvalidResponseError(Error):
-    """Raised when the server returns an invalid or unexpected response"""
diff --git a/python_sdk/infrahub_sdk/generator.py b/python_sdk/infrahub_sdk/generator.py
deleted file mode 100644
index 8e502a74f7..0000000000
--- a/python_sdk/infrahub_sdk/generator.py
+++ /dev/null
@@ -1,148 +0,0 @@
-from __future__ import annotations
-
-import os
-from abc import abstractmethod
-from typing import TYPE_CHECKING, Optional
-
-from git.repo import Repo
-
-from infrahub_sdk.exceptions import UninitializedError
-
-if TYPE_CHECKING:
-    from infrahub_sdk.client import InfrahubClient
-    from infrahub_sdk.node import InfrahubNode
-    from infrahub_sdk.store import NodeStore
-
-
-class InfrahubGenerator:
-    """Infrahub Generator class"""
-
-    def __init__(
-        self,
-        query: str,
-        client: InfrahubClient,
-        infrahub_node: type[InfrahubNode],
-        branch: Optional[str] = None,
-        root_directory: str = "",
-        generator_instance: str = "",
-        params: Optional[dict] = None,
-        convert_query_response: bool = False,
-    ) -> None:
-        self.query = query
-        self.branch = branch
-        self.git: Optional[Repo] = None
-        self.params = params or {}
-        self.root_directory = root_directory or os.getcwd()
-        self.generator_instance = generator_instance
-        self._init_client = client.clone()
-        self._init_client.config.default_branch = self._init_client.default_branch = self.branch_name
-        self._client: Optional[InfrahubClient] = None
-        self._nodes: list[InfrahubNode] = []
-        self._related_nodes: list[InfrahubNode] = []
-        self.infrahub_node = infrahub_node
-        self.convert_query_response = convert_query_response
-
-    @property
-    def store(self) -> NodeStore:
-        """The store will be populated with nodes based on the query during the collection of data if activated"""
-        return self._init_client.store
-
-    @property
-    def nodes(self) -> list[InfrahubNode]:
-        """Returns nodes collected and parsed during the data collection process if this feature is enabled"""
-        return self._nodes
-
-    @property
-    def related_nodes(self) -> list[InfrahubNode]:
-        """Returns related nodes collected and parsed during the data collection process if this feature is enabled"""
-        return self._related_nodes
-
-    @property
-    def subscribers(self) -> Optional[list[str]]:
-        if self.generator_instance:
-            return [self.generator_instance]
-        return None
-
-    @property
-    def client(self) -> InfrahubClient:
-        if self._client:
-            return self._client
-        raise UninitializedError("The client has not been initialized")
-
-    @client.setter
-    def client(self, value: InfrahubClient) -> None:
-        self._client = value
-
-    @property
-    def branch_name(self) -> str:
-        """Return the name of the current git branch."""
-
-        if self.branch:
-            return self.branch
-
-        if not self.git:
-            self.git = Repo(self.root_directory)
-
-        self.branch = str(self.git.active_branch)
-
-        return self.branch
-
-    async def collect_data(self) -> dict:
-        """Query the result of the GraphQL Query defined in self.query and return the result"""
-
-        data = await self._init_client.query_gql_query(
-            name=self.query,
-            branch_name=self.branch_name,
-            variables=self.params,
-            update_group=True,
-            subscribers=self.subscribers,
-        )
-        unpacked = data.get("data") or data
-        await self.process_nodes(data=unpacked)
-        return data
-
-    async def run(self, identifier: 
str, data: Optional[dict] = None) -> None:
-        """Execute the generator after collecting the data from the GraphQL query."""
-
-        if not data:
-            data = await self.collect_data()
-        unpacked = data.get("data") or data
-
-        async with self._init_client.start_tracking(
-            identifier=identifier, params=self.params, delete_unused_nodes=True, group_type="CoreGeneratorGroup"
-        ) as self.client:
-            await self.generate(data=unpacked)
-
-    async def process_nodes(self, data: dict) -> None:
-        if not self.convert_query_response:
-            return
-
-        await self._init_client.schema.all(branch=self.branch_name)
-
-        for kind in data:
-            if kind in self._init_client.schema.cache[self.branch_name]:
-                for result in data[kind].get("edges", []):
-                    node = await self.infrahub_node.from_graphql(
-                        client=self._init_client, branch=self.branch_name, data=result
-                    )
-                    self._nodes.append(node)
-                    await node._process_relationships(
-                        node_data=result, branch=self.branch_name, related_nodes=self._related_nodes
-                    )
-
-        for node in self._nodes + self._related_nodes:
-            if node.id:
-                self._init_client.store.set(key=node.id, node=node)
-
-    @abstractmethod
-    async def generate(self, data: dict) -> None:
-        """Code to run the generator
-
-        Any child class of the InfrahubGenerator is expected to provide this method. The method is expected
-        to use the provided InfrahubClient contained in self.client to create or update any nodes in an idempotent
-        way as the method could be executed multiple times. Typically this would be done by using:
-
-        await new_or_updated_object.save(allow_upsert=True)
-
-        The tracking system will be responsible for deleting nodes that are no longer required.
-        """
diff --git a/python_sdk/infrahub_sdk/graphql.py b/python_sdk/infrahub_sdk/graphql.py
deleted file mode 100644
index 2d1d9d19a1..0000000000
--- a/python_sdk/infrahub_sdk/graphql.py
+++ /dev/null
@@ -1,162 +0,0 @@
-from __future__ import annotations
-
-from typing import Any, Optional, Union
-
-VARIABLE_TYPE_MAPPING = ((str, "String!"), (int, "Int!"), (float, "Float!"), (bool, "Boolean!"))
-
-
-def convert_to_graphql_as_string(value: Union[str, bool, list]) -> str:
-    if isinstance(value, str) and value.startswith("$"):
-        return value
-    if isinstance(value, str):
-        return f'"{value}"'
-    if isinstance(value, bool):
-        return repr(value).lower()
-    if isinstance(value, list):
-        values_as_string = [convert_to_graphql_as_string(item) for item in value]
-        return "[" + ", ".join(values_as_string) + "]"
-
-    return str(value)
-
-
-def render_variables_to_string(data: dict[str, type[Union[str, int, float, bool]]]) -> str:
-    """Render a dict into a variable string that will be used in a GraphQL Query.
-
-    The $ sign will be automatically added to the name of each variable.
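-
-    For illustration, the expected mapping given VARIABLE_TYPE_MAPPING above
-    (the variable names are hypothetical):
-
-        >>> render_variables_to_string({"name": str, "count": int, "enabled": bool})
-        '$name: String!, $count: Int!, $enabled: Boolean!'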
- """ - vars_dict = {} - for key, value in data.items(): - for class_type, var_string in VARIABLE_TYPE_MAPPING: - if value == class_type: - vars_dict[f"${key}"] = var_string - - return ", ".join([f"{key}: {value}" for key, value in vars_dict.items()]) - - -def render_query_block(data: dict, offset: int = 4, indentation: int = 4) -> list[str]: - FILTERS_KEY = "@filters" - ALIAS_KEY = "@alias" - KEYWORDS_TO_SKIP = [FILTERS_KEY, ALIAS_KEY] - - offset_str = " " * offset - lines = [] - for key, value in data.items(): - if key in KEYWORDS_TO_SKIP: - continue - if value is None: - lines.append(f"{offset_str}{key}") - elif isinstance(value, dict) and len(value) == 1 and ALIAS_KEY in value and value[ALIAS_KEY]: - lines.append(f"{offset_str}{value[ALIAS_KEY]}: {key}") - elif isinstance(value, dict): - if ALIAS_KEY in value and value[ALIAS_KEY]: - key_str = f"{value[ALIAS_KEY]}: {key}" - else: - key_str = key - - if FILTERS_KEY in value and value[FILTERS_KEY]: - filters_str = ", ".join( - [f"{key2}: {convert_to_graphql_as_string(value2)}" for key2, value2 in value[FILTERS_KEY].items()] - ) - lines.append(f"{offset_str}{key_str}({filters_str}) " + "{") - else: - lines.append(f"{offset_str}{key_str} " + "{") - - lines.extend(render_query_block(data=value, offset=offset + indentation, indentation=indentation)) - lines.append(offset_str + "}") - - return lines - - -def render_input_block(data: dict, offset: int = 4, indentation: int = 4) -> list[str]: - offset_str = " " * offset - lines = [] - for key, value in data.items(): - if isinstance(value, dict): - lines.append(f"{offset_str}{key}: " + "{") - lines.extend(render_input_block(data=value, offset=offset + indentation, indentation=indentation)) - lines.append(offset_str + "}") - elif isinstance(value, list): - lines.append(f"{offset_str}{key}: " + "[") - for item in value: - if isinstance(item, dict): - lines.append(f"{offset_str}{' ' * indentation}" + "{") - lines.extend( - render_input_block( - data=item, - offset=offset + indentation + indentation, - indentation=indentation, - ) - ) - lines.append(f"{offset_str}{' ' * indentation}" + "},") - else: - lines.append(f"{offset_str}{' ' * indentation}{convert_to_graphql_as_string(item)},") - lines.append(offset_str + "]") - else: - lines.append(f"{offset_str}{key}: {convert_to_graphql_as_string(value)}") - return lines - - -class BaseGraphQLQuery: - query_type: str = "not-defined" - indentation: int = 4 - - def __init__(self, query: dict, variables: Optional[dict] = None, name: Optional[str] = None): - self.query = query - self.variables = variables - self.name = name or "" - - def render_first_line(self) -> str: - first_line = self.query_type - - if self.name: - first_line += " " + self.name - - if self.variables: - first_line += f" ({render_variables_to_string(self.variables)})" - - first_line += " {" - - return first_line - - -class Query(BaseGraphQLQuery): - query_type = "query" - - def render(self) -> str: - lines = [self.render_first_line()] - lines.extend(render_query_block(data=self.query, indentation=self.indentation, offset=self.indentation)) - lines.append("}") - - return "\n" + "\n".join(lines) + "\n" - - -class Mutation(BaseGraphQLQuery): - query_type = "mutation" - - def __init__(self, *args: Any, mutation: str, input_data: dict, **kwargs: Any): - self.input_data = input_data - self.mutation = mutation - super().__init__(*args, **kwargs) - - def render(self) -> str: - lines = [self.render_first_line()] - lines.append(" " * self.indentation + f"{self.mutation}(") - lines.extend( - 
render_input_block(
-                data=self.input_data,
-                indentation=self.indentation,
-                offset=self.indentation * 2,
-            )
-        )
-        lines.append(" " * self.indentation + "){")
-        lines.extend(
-            render_query_block(
-                data=self.query,
-                indentation=self.indentation,
-                offset=self.indentation * 2,
-            )
-        )
-        lines.append(" " * self.indentation + "}")
-        lines.append("}")
-
-        return "\n" + "\n".join(lines) + "\n"
diff --git a/python_sdk/infrahub_sdk/jinja2.py b/python_sdk/infrahub_sdk/jinja2.py
deleted file mode 100644
index 29afbf0604..0000000000
--- a/python_sdk/infrahub_sdk/jinja2.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import linecache
-
-from rich.syntax import Syntax
-from rich.traceback import Frame, Traceback
-
-
-def identify_faulty_jinja_code(traceback: Traceback, nbr_context_lines: int = 3) -> list[tuple[Frame, Syntax]]:
-    """This function identifies the faulty Jinja2 code and beautifies it to provide meaningful information to the user.
-
-    We use Rich's Traceback to parse the complete stack trace and extract Frames for each exception found in the trace.
-    """
-    response = []
-
-    # Extract only the Jinja-related exceptions
-    for frame in [frame for frame in traceback.trace.stacks[0].frames if frame.filename.endswith(".j2")]:
-        code = "".join(linecache.getlines(frame.filename))
-        lexer_name = Traceback._guess_lexer(frame.filename, code)
-        syntax = Syntax(
-            code,
-            lexer_name,
-            line_numbers=True,
-            line_range=(frame.lineno - nbr_context_lines, frame.lineno + nbr_context_lines),
-            highlight_lines={frame.lineno},
-            code_width=88,
-            theme=traceback.theme,
-            dedent=False,
-        )
-        response.append((frame, syntax))
-
-    return response
diff --git a/python_sdk/infrahub_sdk/node.py b/python_sdk/infrahub_sdk/node.py
deleted file mode 100644
index b75ee77e25..0000000000
--- a/python_sdk/infrahub_sdk/node.py
+++ /dev/null
@@ -1,2048 +0,0 @@
-from __future__ import annotations
-
-import ipaddress
-import re
-from copy import copy
-from typing import TYPE_CHECKING, Any, Callable, Iterable, Optional, Union, get_args
-
-from infrahub_sdk.constants import InfrahubClientMode
-from infrahub_sdk.exceptions import (
-    Error,
-    FeatureNotSupportedError,
-    FilterNotFoundError,
-    NodeNotFoundError,
-    UninitializedError,
-)
-from infrahub_sdk.graphql import Mutation, Query
-from infrahub_sdk.schema import GenericSchema, RelationshipCardinality, RelationshipKind
-from infrahub_sdk.utils import compare_lists, get_flat_value
-from infrahub_sdk.uuidt import UUIDT
-
-if TYPE_CHECKING:
-    from typing_extensions import Self
-
-    from infrahub_sdk.client import InfrahubClient, InfrahubClientSync
-    from infrahub_sdk.schema import AttributeSchema, MainSchemaTypes, RelationshipSchema
-
-# pylint: disable=too-many-lines
-
-PROPERTIES_FLAG = ["is_visible", "is_protected"]
-PROPERTIES_OBJECT = ["source", "owner"]
-SAFE_VALUE = re.compile(r"(^[\. 
/:a-zA-Z0-9_-]+$)|(^$)") - -IP_TYPES = Union[ipaddress.IPv4Interface, ipaddress.IPv6Interface, ipaddress.IPv4Network, ipaddress.IPv6Network] - -ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE = ( - "calling artifact_fetch is only supported for nodes that are Artifact Definition target" -) -ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE = ( - "calling artifact_generate is only supported for nodes that are Artifact Definition targets" -) -ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE = ( - "calling generate is only supported for CoreArtifactDefinition nodes" -) - - -class Attribute: - """Represents an attribute of a Node, including its schema, value, and properties.""" - - def __init__(self, name: str, schema: AttributeSchema, data: Union[Any, dict]): - """ - Args: - name (str): The name of the attribute. - schema (AttributeSchema): The schema defining the attribute. - data (Union[Any, dict]): The data for the attribute, either in raw form or as a dictionary. - """ - self.name = name - self._schema = schema - - if not isinstance(data, dict) or "value" not in data.keys(): - data = {"value": data} - - self._properties_flag = PROPERTIES_FLAG - self._properties_object = PROPERTIES_OBJECT - self._properties = self._properties_flag + self._properties_object - - self._read_only = ["updated_at", "is_inherited"] - - self.id: Optional[str] = data.get("id", None) - - self.value: Optional[Any] = data.get("value", None) - self.is_default: Optional[bool] = data.get("is_default", None) - self.is_from_profile: Optional[bool] = data.get("is_from_profile", None) - - if self.value: - value_mapper: dict[str, Callable] = { - "IPHost": ipaddress.ip_interface, - "IPNetwork": ipaddress.ip_network, - } - mapper = value_mapper.get(schema.kind, lambda value: value) - self.value = mapper(data.get("value")) - - self.is_inherited: Optional[bool] = data.get("is_inherited", None) - self.updated_at: Optional[str] = data.get("updated_at", None) - - self.is_visible: Optional[bool] = data.get("is_visible", None) - self.is_protected: Optional[bool] = data.get("is_protected", None) - - self.source: Optional[NodeProperty] = None - self.owner: Optional[NodeProperty] = None - - for prop_name in self._properties_object: - if data.get(prop_name): - setattr(self, prop_name, NodeProperty(data=data.get(prop_name))) # type: ignore[arg-type] - - def _generate_input_data(self) -> Optional[dict]: - data: dict[str, Any] = {} - variables: dict[str, Any] = {} - - if self.value is None: - return data - - if isinstance(self.value, str): - if SAFE_VALUE.match(self.value): - data["value"] = self.value - else: - var_name = f"value_{UUIDT.new().hex}" - variables[var_name] = self.value - data["value"] = f"${var_name}" - elif isinstance(self.value, get_args(IP_TYPES)): - data["value"] = self.value.with_prefixlen - elif isinstance(self.value, InfrahubNodeBase) and self.value.is_resource_pool(): - data["from_pool"] = {"id": self.value.id} - else: - data["value"] = self.value - - for prop_name in self._properties_flag: - if getattr(self, prop_name) is not None: - data[prop_name] = getattr(self, prop_name) - - for prop_name in self._properties_object: - if getattr(self, prop_name) is not None: - data[prop_name] = getattr(self, prop_name)._generate_input_data() - - return {"data": data, "variables": variables} - - def _generate_query_data(self) -> Optional[dict]: - data: dict[str, Any] = {"value": None, "is_default": None, "is_from_profile": None} - - for prop_name in self._properties_flag: - data[prop_name] = None - for prop_name in 
self._properties_object: - data[prop_name] = {"id": None, "display_label": None, "__typename": None} - - return data - - def _generate_mutation_query(self) -> dict[str, Any]: - if isinstance(self.value, InfrahubNodeBase) and self.value.is_resource_pool(): - # If it points to a pool, ask for the value of the pool allocated resource - return {self.name: {"value": None}} - return {} - - -class RelatedNodeBase: - """Base class for representing a related node in a relationship.""" - - def __init__(self, branch: str, schema: RelationshipSchema, data: Union[Any, dict], name: Optional[str] = None): - """ - Args: - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. - """ - self.schema = schema - self.name = name - - self._branch = branch - - self._properties_flag = PROPERTIES_FLAG - self._properties_object = PROPERTIES_OBJECT - self._properties = self._properties_flag + self._properties_object - - self._peer = None - self._id: Optional[str] = None - self._hfid: Optional[list[str]] = None - self._display_label: Optional[str] = None - self._typename: Optional[str] = None - - if isinstance(data, (InfrahubNode, InfrahubNodeSync)): - self._peer = data - for prop in self._properties: - setattr(self, prop, None) - elif isinstance(data, list): - data = {"hfid": data} - elif not isinstance(data, dict): - data = {"id": data} - - if isinstance(data, dict): - # To support both with and without pagination, we split data into node_data and properties_data - # We should probably clean that once we'll remove the code without pagination. - node_data = data.get("node", data) - properties_data = data.get("properties", data) - - if node_data: - self._id = node_data.get("id", None) - self._hfid = node_data.get("hfid", None) - self._display_label = node_data.get("display_label", None) - self._typename = node_data.get("__typename", None) - - self.updated_at: Optional[str] = data.get("updated_at", data.get("_relation__updated_at", None)) - - # FIXME, we won't need that once we are only supporting paginated results - if self._typename and self._typename.startswith("Related"): - self._typename = self._typename[7:] - - for prop in self._properties: - prop_data = properties_data.get(prop, properties_data.get(f"_relation__{prop}", None)) - if prop_data and isinstance(prop_data, dict) and "id" in prop_data: - setattr(self, prop, prop_data["id"]) - elif prop_data and isinstance(prop_data, (str, bool)): - setattr(self, prop, prop_data) - else: - setattr(self, prop, None) - - @property - def id(self) -> Optional[str]: - if self._peer: - return self._peer.id - return self._id - - @property - def hfid(self) -> Optional[list[Any]]: - if self._peer: - return self._peer.hfid - return self._hfid - - @property - def hfid_str(self) -> Optional[str]: - if self._peer and self.hfid: - return self._peer.get_human_friendly_id_as_string(include_kind=True) - return None - - @property - def is_resource_pool(self) -> bool: - if self._peer: - return self._peer.is_resource_pool() - return False - - @property - def initialized(self) -> bool: - return bool(self.id) or bool(self.hfid) - - @property - def display_label(self) -> Optional[str]: - if self._peer: - return self._peer.display_label - return self._display_label - - @property - def typename(self) -> Optional[str]: - if self._peer: - return self._peer.typename - return self._typename - - def 
_generate_input_data(self) -> dict[str, Any]: - data: dict[str, Any] = {} - - if self.is_resource_pool: - return {"from_pool": {"id": self.id}} - - if self.id is not None: - data["id"] = self.id - elif self.hfid is not None: - data["hfid"] = self.hfid - - for prop_name in self._properties: - if getattr(self, prop_name) is not None: - data[f"_relation__{prop_name}"] = getattr(self, prop_name) - - return data - - def _generate_mutation_query(self) -> dict[str, Any]: - if self.name and self.is_resource_pool: - # If a related node points to a pool, ask for the ID of the pool allocated resource - return {self.name: {"node": {"id": None, "display_label": None, "__typename": None}}} - return {} - - @classmethod - def _generate_query_data(cls, peer_data: Optional[dict[str, Any]] = None) -> dict: - """Generates the basic structure of a GraphQL query for a single relationship. - - Args: - peer_data (dict[str, Union[Any, Dict]], optional): Additional data to be included in the query for the node. - This is used to add extra fields when prefetching related node data. - - Returns: - Dict: A dictionary representing the basic structure of a GraphQL query, including the node's ID, display label, - and typename. The method also includes additional properties and any peer_data provided. - """ - data: dict[str, Any] = {"node": {"id": None, "hfid": None, "display_label": None, "__typename": None}} - - properties: dict[str, Any] = {} - for prop_name in PROPERTIES_FLAG: - properties[prop_name] = None - for prop_name in PROPERTIES_OBJECT: - properties[prop_name] = {"id": None, "display_label": None, "__typename": None} - - if properties: - data["properties"] = properties - if peer_data: - data["node"].update(peer_data) - - return data - - -class RelatedNode(RelatedNodeBase): - """Represents a RelatedNodeBase in an asynchronous context.""" - - def __init__( - self, - client: InfrahubClient, - branch: str, - schema: RelationshipSchema, - data: Union[Any, dict], - name: Optional[str] = None, - ): - """ - Args: - client (InfrahubClient): The client used to interact with the backend asynchronously. - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. 
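-
-        Example (an illustrative sketch; the `device` node and its
-        `primary_tag` relationship are hypothetical):
-
-            # fetch() resolves the peer from the backend, after which .peer
-            # (or .get()) returns the full InfrahubNode instance.
-            await device.primary_tag.fetch()
-            tag = device.primary_tag.peer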
- """ - self._client = client - super().__init__(branch=branch, schema=schema, data=data, name=name) - - async def fetch(self) -> None: - if not self.id or not self.typename: - raise Error("Unable to fetch the peer, id and/or typename are not defined") - - self._peer = await self._client.get(kind=self.typename, id=self.id, populate_store=True, branch=self._branch) - - @property - def peer(self) -> InfrahubNode: - return self.get() - - def get(self) -> InfrahubNode: - if self._peer: - return self._peer # type: ignore[return-value] - - if self.id and self.typename: - return self._client.store.get(key=self.id, kind=self.typename) # type: ignore[return-value] - - if self.hfid_str: - return self._client.store.get_by_hfid(key=self.hfid_str) # type: ignore[return-value] - - raise ValueError("Node must have at least one identifier (ID or HFID) to query it.") - - -class RelatedNodeSync(RelatedNodeBase): - """Represents a related node in a synchronous context.""" - - def __init__( - self, - client: InfrahubClientSync, - branch: str, - schema: RelationshipSchema, - data: Union[Any, dict], - name: Optional[str] = None, - ): - """ - Args: - client (InfrahubClientSync): The client used to interact with the backend synchronously. - branch (str): The branch where the related node resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Data representing the related node. - name (Optional[str]): The name of the related node. - """ - self._client = client - super().__init__(branch=branch, schema=schema, data=data, name=name) - - def fetch(self) -> None: - if not self.id or not self.typename: - raise Error("Unable to fetch the peer, id and/or typename are not defined") - - self._peer = self._client.get(kind=self.typename, id=self.id, populate_store=True, branch=self._branch) - - @property - def peer(self) -> InfrahubNodeSync: - return self.get() - - def get(self) -> InfrahubNodeSync: - if self._peer: - return self._peer # type: ignore[return-value] - - if self.id and self.typename: - return self._client.store.get(key=self.id, kind=self.typename) # type: ignore[return-value] - - if self.hfid_str: - return self._client.store.get_by_hfid(key=self.hfid_str) # type: ignore[return-value] - - raise ValueError("Node must have at least one identifier (ID or HFID) to query it.") - - -class RelationshipManagerBase: - """Base class for RelationshipManager and RelationshipManagerSync""" - - def __init__(self, name: str, branch: str, schema: RelationshipSchema): - """ - Args: - name (str): The name of the relationship. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. 
- """ - self.initialized: bool = False - self._has_update: bool = False - self.name = name - self.schema = schema - self.branch = branch - - self._properties_flag = PROPERTIES_FLAG - self._properties_object = PROPERTIES_OBJECT - self._properties = self._properties_flag + self._properties_object - - self.peers: list[Union[RelatedNode, RelatedNodeSync]] = [] - - @property - def peer_ids(self) -> list[str]: - return [peer.id for peer in self.peers if peer.id] - - @property - def peer_hfids(self) -> list[list[Any]]: - return [peer.hfid for peer in self.peers if peer.hfid] - - @property - def peer_hfids_str(self) -> list[str]: - return [peer.hfid_str for peer in self.peers if peer.hfid_str] - - @property - def has_update(self) -> bool: - return self._has_update - - def _generate_input_data(self) -> list[dict]: - return [peer._generate_input_data() for peer in self.peers] - - def _generate_mutation_query(self) -> dict[str, Any]: - # Does nothing for now - return {} - - @classmethod - def _generate_query_data(cls, peer_data: Optional[dict[str, Any]] = None) -> dict: - """Generates the basic structure of a GraphQL query for relationships with multiple nodes. - - Args: - peer_data (dict[str, Union[Any, Dict]], optional): Additional data to be included in the query for each node. - This is used to add extra fields when prefetching related node data in many-to-many relationships. - - Returns: - Dict: A dictionary representing the basic structure of a GraphQL query for multiple related nodes. - It includes count, edges, and node information (ID, display label, and typename), along with additional properties - and any peer_data provided. - """ - data: dict[str, Any] = { - "count": None, - "edges": {"node": {"id": None, "display_label": None, "__typename": None}}, - } - - properties: dict[str, Any] = {} - for prop_name in PROPERTIES_FLAG: - properties[prop_name] = None - for prop_name in PROPERTIES_OBJECT: - properties[prop_name] = {"id": None, "display_label": None, "__typename": None} - - if properties: - data["edges"]["properties"] = properties - if peer_data: - data["edges"]["node"].update(peer_data) - - return data - - -class RelationshipManager(RelationshipManagerBase): - """Manages relationships of a node in an asynchronous context.""" - - def __init__( - self, - name: str, - client: InfrahubClient, - node: InfrahubNode, - branch: str, - schema: RelationshipSchema, - data: Union[Any, dict], - ): - """ - Args: - name (str): The name of the relationship. - client (InfrahubClient): The client used to interact with the backend. - node (InfrahubNode): The node to which the relationship belongs. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Initial data for the relationships. 
- """ - self.client = client - self.node = node - - super().__init__(name=name, schema=schema, branch=branch) - - self.initialized = data is not None - self._has_update = False - - if data is None: - return - - if isinstance(data, list): - for item in data: - self.peers.append( - RelatedNode(name=name, client=self.client, branch=self.branch, schema=schema, data=item) - ) - elif isinstance(data, dict) and "edges" in data: - for item in data["edges"]: - self.peers.append( - RelatedNode(name=name, client=self.client, branch=self.branch, schema=schema, data=item) - ) - else: - raise ValueError(f"Unexpected format for {name} found a {type(data)}, {data}") - - def __getitem__(self, item: int) -> RelatedNode: - return self.peers[item] # type: ignore[return-value] - - async def fetch(self) -> None: - if not self.initialized: - exclude = self.node._schema.relationship_names + self.node._schema.attribute_names - exclude.remove(self.schema.name) - node = await self.client.get( - kind=self.node._schema.kind, - id=self.node.id, - branch=self.branch, - include=[self.schema.name], - exclude=exclude, - ) - rm = getattr(node, self.schema.name) - self.peers = rm.peers - self.initialized = True - - for peer in self.peers: - await peer.fetch() # type: ignore[misc] - - def add(self, data: Union[str, RelatedNode, dict]) -> None: - """Add a new peer to this relationship.""" - if not self.initialized: - raise UninitializedError("Must call fetch() on RelationshipManager before editing members") - new_node = RelatedNode(schema=self.schema, client=self.client, branch=self.branch, data=data) - - if new_node.id and new_node.id not in self.peer_ids: - self.peers.append(new_node) - self._has_update = True - - def extend(self, data: Iterable[Union[str, RelatedNode, dict]]) -> None: - """Add new peers to this relationship.""" - for d in data: - self.add(d) - - def remove(self, data: Union[str, RelatedNode, dict]) -> None: - if not self.initialized: - raise UninitializedError("Must call fetch() on RelationshipManager before editing members") - node_to_remove = RelatedNode(schema=self.schema, client=self.client, branch=self.branch, data=data) - - if node_to_remove.id and node_to_remove.id in self.peer_ids: - idx = self.peer_ids.index(node_to_remove.id) - if self.peers[idx].id != node_to_remove.id: - raise IndexError(f"Unexpected situation, the node with the index {idx} should be {node_to_remove.id}") - - self.peers.pop(idx) - self._has_update = True - - -class RelationshipManagerSync(RelationshipManagerBase): - """Manages relationships of a node in a synchronous context.""" - - def __init__( - self, - name: str, - client: InfrahubClientSync, - node: InfrahubNodeSync, - branch: str, - schema: RelationshipSchema, - data: Union[Any, dict], - ): - """ - Args: - name (str): The name of the relationship. - client (InfrahubClientSync): The client used to interact with the backend synchronously. - node (InfrahubNodeSync): The node to which the relationship belongs. - branch (str): The branch where the relationship resides. - schema (RelationshipSchema): The schema of the relationship. - data (Union[Any, dict]): Initial data for the relationships. 
- """ - self.client = client - self.node = node - - super().__init__(name=name, schema=schema, branch=branch) - - self.initialized = data is not None - self._has_update = False - - if data is None: - return - - if isinstance(data, list): - for item in data: - self.peers.append( - RelatedNodeSync(name=name, client=self.client, branch=self.branch, schema=schema, data=item) - ) - elif isinstance(data, dict) and "edges" in data: - for item in data["edges"]: - self.peers.append( - RelatedNodeSync(name=name, client=self.client, branch=self.branch, schema=schema, data=item) - ) - else: - raise ValueError(f"Unexpected format for {name} found a {type(data)}, {data}") - - def __getitem__(self, item: int) -> RelatedNodeSync: - return self.peers[item] # type: ignore[return-value] - - def fetch(self) -> None: - if not self.initialized: - exclude = self.node._schema.relationship_names + self.node._schema.attribute_names - exclude.remove(self.schema.name) - node = self.client.get( - kind=self.node._schema.kind, - id=self.node.id, - branch=self.branch, - include=[self.schema.name], - exclude=exclude, - ) - rm = getattr(node, self.schema.name) - self.peers = rm.peers - self.initialized = True - - for peer in self.peers: - peer.fetch() - - def add(self, data: Union[str, RelatedNodeSync, dict]) -> None: - """Add a new peer to this relationship.""" - if not self.initialized: - raise UninitializedError("Must call fetch() on RelationshipManager before editing members") - new_node = RelatedNodeSync(schema=self.schema, client=self.client, branch=self.branch, data=data) - - if new_node.id and new_node.id not in self.peer_ids: - self.peers.append(new_node) - self._has_update = True - - def extend(self, data: Iterable[Union[str, RelatedNodeSync, dict]]) -> None: - """Add new peers to this relationship.""" - for d in data: - self.add(d) - - def remove(self, data: Union[str, RelatedNodeSync, dict]) -> None: - if not self.initialized: - raise UninitializedError("Must call fetch() on RelationshipManager before editing members") - node_to_remove = RelatedNodeSync(schema=self.schema, client=self.client, branch=self.branch, data=data) - - if node_to_remove.id and node_to_remove.id in self.peer_ids: - idx = self.peer_ids.index(node_to_remove.id) - if self.peers[idx].id != node_to_remove.id: - raise IndexError(f"Unexpected situation, the node with the index {idx} should be {node_to_remove.id}") - - self.peers.pop(idx) - self._has_update = True - - -class InfrahubNodeBase: - """Base class for InfrahubNode and InfrahubNodeSync""" - - def __init__(self, schema: MainSchemaTypes, branch: str, data: Optional[dict] = None) -> None: - """ - Args: - schema (MainSchemaTypes): The schema of the node. - branch (str): The branch where the node resides. - data (Optional[dict]): Optional data to initialize the node. 
- """ - self._schema = schema - self._data = data - self._branch = branch - self._existing: bool = True - - self.id = data.get("id", None) if isinstance(data, dict) else None - self.display_label: Optional[str] = data.get("display_label", None) if isinstance(data, dict) else None - self.typename: Optional[str] = data.get("__typename", schema.kind) if isinstance(data, dict) else schema.kind - - self._attributes = [item.name for item in self._schema.attributes] - self._relationships = [item.name for item in self._schema.relationships] - - self._artifact_support = hasattr(schema, "inherit_from") and "CoreArtifactTarget" in schema.inherit_from - self._artifact_definition_support = schema.kind == "CoreArtifactDefinition" - - if not self.id: - self._existing = False - - self._init_attributes(data) - self._init_relationships(data) - - def get_path_value(self, path: str) -> Any: - path_parts = path.split("__") - return_value = None - - # Manage relationship value lookup - if path_parts[0] in self._schema.relationship_names: - related_node = getattr(self, path_parts[0], None) - if not related_node: - return None - - try: - peer = related_node.get() - except (NodeNotFoundError, ValueError): - # This can happen while batch creating nodes, the lookup won't work as the store is not populated - # If so we cannot complete the HFID computation as we cannot access the related node attribute value - return None - - if attribute_piece := path_parts[1] if len(path_parts) > 1 else None: - related_node_attribute = getattr(peer, attribute_piece, None) - else: - return peer.hfid or peer.id - - if property_piece := path_parts[2] if len(path_parts) > 2 else None: - return_value = getattr(related_node_attribute, property_piece, None) - else: - return_value = related_node_attribute - - # Manage attribute value lookup - if path_parts[0] in self._schema.attribute_names: - attribute = getattr(self, path_parts[0], None) - if property_piece := path_parts[1] if len(path_parts) > 1 else None: - return_value = getattr(attribute, property_piece, None) - else: - return_value = attribute - - return return_value - - def get_human_friendly_id(self) -> Optional[list[str]]: - if not hasattr(self._schema, "human_friendly_id"): - return None - - if not self._schema.human_friendly_id: - return None - - # If an HFID component is missing we assume that it is invalid and not usable for this node - hfid_components = [self.get_path_value(path=item) for item in self._schema.human_friendly_id] - if None in hfid_components: - return None - return [str(hfid) for hfid in hfid_components] - - def get_human_friendly_id_as_string(self, include_kind: bool = False) -> Optional[str]: - hfid = self.get_human_friendly_id() - if not hfid: - return None - if include_kind: - hfid = [self.get_kind()] + hfid - return "__".join(hfid) - - @property - def hfid(self) -> Optional[list[str]]: - return self.get_human_friendly_id() - - @property - def hfid_str(self) -> Optional[str]: - return self.get_human_friendly_id_as_string(include_kind=True) - - def _init_attributes(self, data: Optional[dict] = None) -> None: - for attr_name in self._attributes: - attr_schema = [attr for attr in self._schema.attributes if attr.name == attr_name][0] - attr_data = data.get(attr_name, None) if isinstance(data, dict) else None - setattr( - self, - attr_name, - Attribute(name=attr_name, schema=attr_schema, data=attr_data), - ) - - def _init_relationships(self, data: Optional[dict] = None) -> None: - pass - - def __repr__(self) -> str: - if self.display_label: - return 
self.display_label
- if not self._existing:
- return f"{self._schema.kind} ({self.id})[NEW]"
-
- return f"{self._schema.kind} ({self.id}) "
-
- def get_kind(self) -> str:
- return self._schema.kind
-
- def is_ip_prefix(self) -> bool:
- builtin_ipprefix_kind = "BuiltinIPPrefix"
- return self.get_kind() == builtin_ipprefix_kind or builtin_ipprefix_kind in self._schema.inherit_from # type: ignore[union-attr]
-
- def is_ip_address(self) -> bool:
- builtin_ipaddress_kind = "BuiltinIPAddress"
- return self.get_kind() == builtin_ipaddress_kind or builtin_ipaddress_kind in self._schema.inherit_from # type: ignore[union-attr]
-
- def is_resource_pool(self) -> bool:
- return hasattr(self._schema, "inherit_from") and "CoreResourcePool" in self._schema.inherit_from # type: ignore[union-attr]
-
- def get_raw_graphql_data(self) -> Optional[dict]:
- return self._data
-
- def _generate_input_data(self, exclude_unmodified: bool = False, exclude_hfid: bool = False) -> dict[str, dict]: # noqa: C901
- """Generate a dictionary that represents the input data required by a mutation.
-
- Returns:
- dict[str, Dict]: Representation of the input data in dict format
- """
- # pylint: disable=too-many-branches
- data = {}
- variables = {}
-
- for item_name in self._attributes:
- attr: Attribute = getattr(self, item_name)
- if attr._schema.read_only:
- continue
- attr_data = attr._generate_input_data()
-
- # NOTE: this code was inherited when we split attributes and relationships
- # into 2 loops; most likely it's possible to simplify it
- if attr_data and isinstance(attr_data, dict):
- if variable_values := attr_data.get("data"):
- data[item_name] = variable_values
- else:
- data[item_name] = attr_data
- if variable_names := attr_data.get("variables"):
- variables.update(variable_names)
-
- elif attr_data and isinstance(attr_data, list):
- data[item_name] = attr_data
-
- for item_name in self._relationships:
- rel_schema = self._schema.get_relationship(name=item_name)
- if not rel_schema or rel_schema.read_only:
- continue
-
- rel: Union[RelatedNodeBase, RelationshipManagerBase] = getattr(self, item_name)
-
- # BLOCKED by https://github.com/opsmill/infrahub/issues/330
- # if (
- # item is None
- # and item_name in self._relationships
- # and self._schema.get_relationship(item_name).cardinality == "one"
- # ):
- # data[item_name] = None
- # continue
- # el
- if rel is None or not rel.initialized:
- continue
-
- rel_data = rel._generate_input_data()
-
- if rel_data and isinstance(rel_data, dict):
- if variable_values := rel_data.get("data"):
- data[item_name] = variable_values
- else:
- data[item_name] = rel_data
- if variable_names := rel_data.get("variables"):
- variables.update(variable_names)
- elif isinstance(rel_data, list):
- data[item_name] = rel_data
- elif rel_schema.cardinality == RelationshipCardinality.MANY:
- data[item_name] = []
-
- if exclude_unmodified:
- data, variables = self._strip_unmodified(data=data, variables=variables)
-
- mutation_variables = {key: type(value) for key, value in variables.items()}
-
- if self.id is not None:
- data["id"] = self.id
- elif self.hfid is not None and not exclude_hfid:
- data["hfid"] = self.hfid
-
- return {"data": {"data": data}, "variables": variables, "mutation_variables": mutation_variables}
-
- @staticmethod
- def _strip_unmodified_dict(data: dict, original_data: dict, variables: dict, item: str) -> None:
- data_item = data.get(item)
- if item in original_data and isinstance(original_data[item], dict) and isinstance(data_item, dict):
- for item_key in 
original_data[item].keys(): - for property_name in PROPERTIES_OBJECT: - if item_key == property_name and isinstance(original_data[item][property_name], dict): - if original_data[item][property_name].get("id"): - original_data[item][property_name] = original_data[item][property_name]["id"] - if item_key in data[item].keys(): - if item_key == "id" and len(data[item].keys()) > 1: - # Related nodes typically require an ID. So the ID is only - # removed if it's the last key in the current context - continue - - variable_key = None - if isinstance(data[item].get(item_key), str): - variable_key = data[item][item_key][1:] - - if original_data[item].get(item_key) == data[item].get(item_key): - data[item].pop(item_key) - elif ( - variable_key - and variable_key in variables - and original_data[item].get(item_key) == variables.get(variable_key) - ): - data[item].pop(item_key) - variables.pop(variable_key) - - # TODO: I do not feel _great_ about this - if not data_item and data_item != []: - data.pop(item) - - def _strip_unmodified(self, data: dict, variables: dict) -> tuple[dict, dict]: - original_data = self._data or {} - for relationship in self._relationships: - relationship_property = getattr(self, relationship) - if not relationship_property or relationship not in data: - continue - if not relationship_property.initialized: - data.pop(relationship) - elif isinstance(relationship_property, RelationshipManagerBase) and not relationship_property.has_update: - data.pop(relationship) - - for item in original_data.keys(): - if item in data.keys(): - if data[item] == original_data[item]: - data.pop(item) - continue - if isinstance(original_data[item], dict): - self._strip_unmodified_dict(data=data, original_data=original_data, variables=variables, item=item) - if item in self._relationships and original_data[item].get("node"): - relationship_data_cardinality_one = copy(original_data) - relationship_data_cardinality_one[item] = original_data[item]["node"] - self._strip_unmodified_dict( - data=data, original_data=relationship_data_cardinality_one, variables=variables, item=item - ) - # Run again to remove the "id" key if it's the last one remaining - self._strip_unmodified_dict( - data=data, original_data=relationship_data_cardinality_one, variables=variables, item=item - ) - - return data, variables - - @staticmethod - def _strip_alias(data: dict) -> dict[str, dict]: - clean = {} - - under_node = False - data_to_clean = data - if "node" in data: - under_node = True - data_to_clean = data["node"] - - for key, value in data_to_clean.items(): - if "__alias__" in key: - clean_key = key.split("__")[-1] - clean[clean_key] = value - else: - clean[key] = value - - if under_node: - complete = {k: v for k, v in data.items() if k != "node"} - complete["node"] = clean - return complete - return clean - - def _validate_artifact_support(self, message: str) -> None: - if not self._artifact_support: - raise FeatureNotSupportedError(message) - - def _validate_artifact_definition_support(self, message: str) -> None: - if not self._artifact_definition_support: - raise FeatureNotSupportedError(message) - - def generate_query_data_init( - self, - filters: Optional[dict[str, Any]] = None, - offset: Optional[int] = None, - limit: Optional[int] = None, - include: Optional[list[str]] = None, - exclude: Optional[list[str]] = None, - partial_match: bool = False, - ) -> dict[str, Union[Any, dict]]: - data: dict[str, Any] = { - "count": None, - "edges": {"node": {"id": None, "hfid": None, "display_label": None, "__typename": 
None}},
- }
-
- data["@filters"] = filters or {}
-
- if offset:
- data["@filters"]["offset"] = offset
-
- if limit:
- data["@filters"]["limit"] = limit
-
- if include and exclude:
- in_both, _, _ = compare_lists(include, exclude)
- if in_both:
- raise ValueError(f"{in_both} are part of both include and exclude")
-
- if partial_match:
- data["@filters"]["partial_match"] = True
-
- return data
-
- def validate_filters(self, filters: Optional[dict[str, Any]] = None) -> bool:
- if not filters:
- return True
-
- for filter_name in filters.keys():
- found = False
- for filter_schema in self._schema.filters:
- if filter_name == filter_schema.name:
- found = True
- break
- if not found:
- valid_filters = [entry.name for entry in self._schema.filters]
- raise FilterNotFoundError(
- identifier=filter_name,
- kind=self._schema.kind,
- filters=valid_filters,
- )
-
- return True
-
- def extract(self, params: dict[str, str]) -> dict[str, Any]:
- """Extract data points defined in flat notation."""
- result: dict[str, Any] = {}
- for key, value in params.items():
- result[key] = get_flat_value(self, key=value)
-
- return result
-
- def __hash__(self) -> int:
- return hash(self.id)
-
- def __eq__(self, other: object) -> bool:
- if not isinstance(other, (InfrahubNode, InfrahubNodeSync)):
- return NotImplemented
- return self.id == other.id
-
- def _relationship_mutation(self, action: str, relation_to_update: str, related_nodes: list[str]) -> str:
- related_node_str = ["{ id: " + f'"{node}"' + " }" for node in related_nodes]
- return f"""
- mutation {{
- Relationship{action}(
- data: {{
- id: "{self.id}",
- name: "{relation_to_update}",
- nodes: [{", ".join(related_node_str)}]
- }}
- ) {{
- ok
- }}
- }}
- """
-
-
-class InfrahubNode(InfrahubNodeBase):
- """Represents an Infrahub node in an asynchronous context."""
-
- def __init__(
- self,
- client: InfrahubClient,
- schema: MainSchemaTypes,
- branch: Optional[str] = None,
- data: Optional[dict] = None,
- ) -> None:
- """
- Args:
- client (InfrahubClient): The client used to interact with the backend.
- schema (MainSchemaTypes): The schema of the node.
- branch (Optional[str]): The branch where the node resides.
- data (Optional[dict]): Optional data to initialize the node.
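-
- Example:
- An illustrative sketch of creating a node and saving it; ``BuiltinTag`` ships with
- the default schema, and ``client`` is an assumed, initialized ``InfrahubClient``
- used inside a coroutine:
-
- >>> schema = await client.schema.get(kind="BuiltinTag")
- >>> tag = InfrahubNode(client=client, schema=schema, data={"name": {"value": "blue"}})
- >>> await tag.save(allow_upsert=True)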
- """ - self._client = client - self.__class__ = type(f"{schema.kind}InfrahubNode", (self.__class__,), {}) - - if isinstance(data, dict) and isinstance(data.get("node"), dict): - data = data.get("node") - - super().__init__(schema=schema, branch=branch or client.default_branch, data=data) - - @classmethod - async def from_graphql( - cls, client: InfrahubClient, branch: str, data: dict, schema: Optional[MainSchemaTypes] = None - ) -> Self: - if not schema: - node_kind = data.get("__typename", None) or data.get("node", {}).get("__typename", None) - if not node_kind: - raise ValueError("Unable to determine the type of the node, __typename not present in data") - schema = await client.schema.get(kind=node_kind, branch=branch) - - return cls(client=client, schema=schema, branch=branch, data=cls._strip_alias(data)) - - def _init_relationships(self, data: Optional[dict] = None) -> None: - for rel_name in self._relationships: - rel_schema = [rel for rel in self._schema.relationships if rel.name == rel_name][0] - rel_data = data.get(rel_name, None) if isinstance(data, dict) else None - - if rel_schema.cardinality == "one": - setattr(self, f"_{rel_name}", None) - setattr( - self.__class__, - rel_name, - generate_relationship_property(name=rel_name, node=self, node_class=RelatedNode), # type: ignore[arg-type] - ) - setattr(self, rel_name, rel_data) - else: - setattr( - self, - rel_name, - RelationshipManager( - name=rel_name, - client=self._client, - node=self, - branch=self._branch, - schema=rel_schema, - data=rel_data, - ), - ) - - async def generate(self, nodes: Optional[list[str]] = None) -> None: - self._validate_artifact_definition_support(ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE) - - nodes = nodes or [] - payload = {"nodes": nodes} - resp = await self._client._post(f"{self._client.address}/api/artifact/generate/{self.id}", payload=payload) - resp.raise_for_status() - - async def artifact_generate(self, name: str) -> None: - self._validate_artifact_support(ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE) - - artifact = await self._client.get(kind="CoreArtifact", definition__name__value=name, object__ids=[self.id]) - await artifact.definition.fetch() # type: ignore[attr-defined] - await artifact.definition.peer.generate([artifact.id]) # type: ignore[attr-defined] - - async def artifact_fetch(self, name: str) -> Union[str, dict[str, Any]]: - self._validate_artifact_support(ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE) - - artifact = await self._client.get(kind="CoreArtifact", definition__name__value=name, object__ids=[self.id]) - content = await self._client.object_store.get(identifier=artifact.storage_id.value) # type: ignore[attr-defined] - return content - - async def delete(self) -> None: - input_data = {"data": {"id": self.id}} - mutation_query = {"ok": None} - query = Mutation( - mutation=f"{self._schema.kind}Delete", - input_data=input_data, - query=mutation_query, - ) - await self._client.execute_graphql( - query=query.render(), - branch_name=self._branch, - tracker=f"mutation-{str(self._schema.kind).lower()}-delete", - ) - - async def save(self, allow_upsert: bool = False, update_group_context: Optional[bool] = None) -> None: - if self._existing is False or allow_upsert is True: - await self.create(allow_upsert=allow_upsert) - else: - await self.update() - - if update_group_context is None and self._client.mode == InfrahubClientMode.TRACKING: - update_group_context = True - - if not isinstance(self._schema, GenericSchema): - if "CoreGroup" in 
self._schema.inherit_from:
- await self._client.group_context.add_related_groups(
- ids=[self.id], update_group_context=update_group_context
- )
- else:
- await self._client.group_context.add_related_nodes(
- ids=[self.id], update_group_context=update_group_context
- )
- else:
- await self._client.group_context.add_related_nodes(ids=[self.id], update_group_context=update_group_context)
-
- self._client.store.set(key=self.id, node=self)
-
- async def generate_query_data(
- self,
- filters: Optional[dict[str, Any]] = None,
- offset: Optional[int] = None,
- limit: Optional[int] = None,
- include: Optional[list[str]] = None,
- exclude: Optional[list[str]] = None,
- fragment: bool = False,
- prefetch_relationships: bool = False,
- partial_match: bool = False,
- ) -> dict[str, Union[Any, dict]]:
- data = self.generate_query_data_init(
- filters=filters, offset=offset, limit=limit, include=include, exclude=exclude, partial_match=partial_match
- )
- data["edges"]["node"].update(
- await self.generate_query_data_node(
- include=include,
- exclude=exclude,
- prefetch_relationships=prefetch_relationships,
- inherited=True,
- )
- )
-
- if isinstance(self._schema, GenericSchema) and fragment:
- for child in self._schema.used_by:
- child_schema = await self._client.schema.get(kind=child)
- child_node = InfrahubNode(client=self._client, schema=child_schema)
-
- # Add the attributes and relationships already part of the parent to the exclude list for the children
- exclude_parent = self._attributes + self._relationships
- _, _, only_in_list2 = compare_lists(list1=include or [], list2=exclude_parent)
-
- exclude_child = only_in_list2
- if exclude:
- exclude_child += exclude
-
- child_data = await child_node.generate_query_data_node(
- include=include,
- exclude=exclude_child,
- prefetch_relationships=prefetch_relationships,
- inherited=False,
- insert_alias=True,
- )
-
- if child_data:
- data["edges"]["node"][f"...on {child}"] = child_data
-
- return {self._schema.kind: data}
-
- async def generate_query_data_node(
- self,
- include: Optional[list[str]] = None,
- exclude: Optional[list[str]] = None,
- inherited: bool = True,
- insert_alias: bool = False,
- prefetch_relationships: bool = False,
- ) -> dict[str, Union[Any, dict]]:
- """Generate the node part of a GraphQL Query with attributes and nodes.
-
- Args:
- include (Optional[list[str]], optional): List of attributes or relationships to include. Defaults to None.
- exclude (Optional[list[str]], optional): List of attributes or relationships to exclude. Defaults to None.
- inherited (bool, optional): Indicates whether the attributes and relationships inherited from generics should be included as well.
- Defaults to True.
- insert_alias (bool, optional): If True, inserts aliases in the query for each attribute or relationship.
- prefetch_relationships (bool, optional): If True, pre-fetches relationship data as part of the query.
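-
- Example:
- An illustrative sketch; ``node`` is assumed to be an ``InfrahubNode`` whose schema
- defines a ``tags`` relationship:
-
- >>> selection = await node.generate_query_data_node(include=["tags"])
- >>> # ``selection`` maps attribute and relationship names to nested GraphQL selections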
-
- Returns:
- dict[str, Union[Any, Dict]]: GraphQL query in dictionary format
- """
- # pylint: disable=too-many-branches
-
- data: dict[str, Any] = {}
-
- for attr_name in self._attributes:
- if exclude and attr_name in exclude:
- continue
-
- attr: Attribute = getattr(self, attr_name)
-
- if not inherited and attr._schema.inherited:
- continue
-
- attr_data = attr._generate_query_data()
- if attr_data:
- data[attr_name] = attr_data
- if insert_alias:
- data[attr_name]["@alias"] = f"__alias__{self._schema.kind}__{attr_name}"
- elif insert_alias:
- data[attr_name] = {"@alias": f"__alias__{self._schema.kind}__{attr_name}"}
-
- for rel_name in self._relationships:
- if exclude and rel_name in exclude:
- continue
-
- rel_schema = self._schema.get_relationship(name=rel_name)
-
- if not rel_schema or (not inherited and rel_schema.inherited):
- continue
-
- if (
- rel_schema.cardinality == RelationshipCardinality.MANY # type: ignore[union-attr]
- and rel_schema.kind not in [RelationshipKind.ATTRIBUTE, RelationshipKind.PARENT] # type: ignore[union-attr]
- and not (include and rel_name in include)
- ):
- continue
-
- peer_data: dict[str, Any] = {}
- if rel_schema and prefetch_relationships:
- peer_schema = await self._client.schema.get(kind=rel_schema.peer, branch=self._branch)
- peer_node = InfrahubNode(client=self._client, schema=peer_schema, branch=self._branch)
- peer_data = await peer_node.generate_query_data_node(include=include, exclude=exclude)
-
- if rel_schema and rel_schema.cardinality == "one":
- rel_data = RelatedNode._generate_query_data(peer_data=peer_data)
- elif rel_schema and rel_schema.cardinality == "many":
- rel_data = RelationshipManager._generate_query_data(peer_data=peer_data)
-
- data[rel_name] = rel_data
-
- if insert_alias:
- data[rel_name]["@alias"] = f"__alias__{self._schema.kind}__{rel_name}"
-
- return data
-
- async def add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None:
- query = self._relationship_mutation(
- action="Add", relation_to_update=relation_to_update, related_nodes=related_nodes
- )
- tracker = f"mutation-{str(self._schema.kind).lower()}-relationshipadd-{relation_to_update}"
- await self._client.execute_graphql(query=query, branch_name=self._branch, tracker=tracker)
-
- async def remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None:
- query = self._relationship_mutation(
- action="Remove", relation_to_update=relation_to_update, related_nodes=related_nodes
- )
- tracker = f"mutation-{str(self._schema.kind).lower()}-relationshipremove-{relation_to_update}"
- await self._client.execute_graphql(query=query, branch_name=self._branch, tracker=tracker)
-
- def _generate_mutation_query(self) -> dict[str, Any]:
- query_result: dict[str, Any] = {"ok": None, "object": {"id": None}}
-
- for attr_name in self._attributes:
- attr: Attribute = getattr(self, attr_name)
- query_result["object"].update(attr._generate_mutation_query())
-
- for rel_name in self._relationships:
- rel = getattr(self, rel_name)
- if not isinstance(rel, RelatedNode):
- continue
-
- query_result["object"].update(rel._generate_mutation_query())
-
- return query_result
-
- async def _process_mutation_result(self, mutation_name: str, response: dict[str, Any]) -> None:
- object_response: dict[str, Any] = response[mutation_name]["object"]
- self.id = object_response["id"]
- self._existing = True
-
- for attr_name in self._attributes:
- attr = getattr(self, attr_name)
- if (
- attr_name not in object_response
- or not 
isinstance(attr.value, InfrahubNodeBase)
- or not attr.value.is_resource_pool()
- ):
- continue
-
- # Process allocated resource from a pool and update attribute
- attr.value = object_response[attr_name]
-
- for rel_name in self._relationships:
- rel = getattr(self, rel_name)
- if rel_name not in object_response or not isinstance(rel, RelatedNode) or not rel.is_resource_pool:
- continue
-
- # Process allocated resource from a pool and update related node
- allocated_resource = object_response[rel_name]
- related_node = RelatedNode(
- client=self._client, branch=self._branch, schema=rel.schema, data=allocated_resource
- )
- await related_node.fetch()
- setattr(self, rel_name, related_node)
-
- async def create(self, allow_upsert: bool = False) -> None:
- mutation_query = self._generate_mutation_query()
-
- if allow_upsert:
- input_data = self._generate_input_data(exclude_hfid=False)
- mutation_name = f"{self._schema.kind}Upsert"
- tracker = f"mutation-{str(self._schema.kind).lower()}-upsert"
- else:
- input_data = self._generate_input_data(exclude_hfid=True)
- mutation_name = f"{self._schema.kind}Create"
- tracker = f"mutation-{str(self._schema.kind).lower()}-create"
- query = Mutation(
- mutation=mutation_name,
- input_data=input_data["data"],
- query=mutation_query,
- variables=input_data["mutation_variables"],
- )
- response = await self._client.execute_graphql(
- query=query.render(), branch_name=self._branch, tracker=tracker, variables=input_data["variables"]
- )
- await self._process_mutation_result(mutation_name=mutation_name, response=response)
-
- async def update(self, do_full_update: bool = False) -> None:
- input_data = self._generate_input_data(exclude_unmodified=not do_full_update)
- mutation_query = self._generate_mutation_query()
- mutation_name = f"{self._schema.kind}Update"
-
- query = Mutation(
- mutation=mutation_name,
- input_data=input_data["data"],
- query=mutation_query,
- variables=input_data["mutation_variables"],
- )
- response = await self._client.execute_graphql(
- query=query.render(),
- branch_name=self._branch,
- tracker=f"mutation-{str(self._schema.kind).lower()}-update",
- variables=input_data["variables"],
- )
- await self._process_mutation_result(mutation_name=mutation_name, response=response)
-
- async def _process_relationships(
- self, node_data: dict[str, Any], branch: str, related_nodes: list[InfrahubNode]
- ) -> None:
- """Process the relationships of an InfrahubNode and append related nodes to a list.
-
- Args:
- node_data (dict[str, Any]): The item from the GraphQL response corresponding to the node.
- branch (str): The branch name.
- related_nodes (list[InfrahubNode]): The list to which related nodes will be appended.
- """
- for rel_name in self._relationships:
- rel = getattr(self, rel_name)
- if rel and isinstance(rel, RelatedNode):
- relation = node_data["node"].get(rel_name)
- if relation.get("node", None):
- related_node = await InfrahubNode.from_graphql(client=self._client, branch=branch, data=relation)
- related_nodes.append(related_node)
- elif rel and isinstance(rel, RelationshipManager):
- peers = node_data["node"].get(rel_name)
- if peers:
- for peer in peers["edges"]:
- related_node = await InfrahubNode.from_graphql(client=self._client, branch=branch, data=peer)
- related_nodes.append(related_node)
-
- async def get_pool_allocated_resources(self, resource: InfrahubNode) -> list[InfrahubNode]:
- """Fetch all nodes that were allocated for the pool and a given resource.
-
- Args:
- resource (InfrahubNode): The resource from which the nodes were allocated.
-
- Returns:
- list[InfrahubNode]: The allocated nodes.
- """
- if not self.is_resource_pool():
- raise ValueError("Allocated resources can only be fetched from resource pool nodes.")
-
- graphql_query_name = "InfrahubResourcePoolAllocated"
- node_ids_per_kind: dict[str, list[str]] = {}
-
- has_remaining_items = True
- page_number = 1
- while has_remaining_items:
- page_offset = (page_number - 1) * self._client.pagination_size
-
- query = Query(
- query={
- graphql_query_name: {
- "@filters": {
- "pool_id": "$pool_id",
- "resource_id": "$resource_id",
- "offset": page_offset,
- "limit": self._client.pagination_size,
- },
- "count": None,
- "edges": {"node": {"id": None, "kind": None, "branch": None, "identifier": None}},
- }
- },
- name="GetAllocatedResourceForPool",
- variables={"pool_id": str, "resource_id": str},
- )
- response = await self._client.execute_graphql(
- query=query.render(),
- variables={"pool_id": self.id, "resource_id": resource.id},
- branch_name=self._branch,
- tracker=f"get-allocated-resources-page{page_number}",
- )
-
- for edge in response[graphql_query_name]["edges"]:
- node = edge["node"]
- node_ids_per_kind.setdefault(node["kind"], []).append(node["id"])
-
- remaining_items = response[graphql_query_name].get("count", 0) - (
- page_offset + self._client.pagination_size
- )
- if remaining_items <= 0:
- has_remaining_items = False
-
- page_number += 1
-
- nodes: list[InfrahubNode] = []
- for kind, node_ids in node_ids_per_kind.items():
- nodes.extend(await self._client.filters(kind=kind, branch=self._branch, ids=node_ids))
-
- return nodes
-
- async def get_pool_resources_utilization(self) -> list[dict[str, Any]]:
- """Fetch the utilization of each resource for the pool.
-
- Returns:
- list[dict[str, Any]]: A list containing the allocation numbers for each resource of the pool.
- """
- if not self.is_resource_pool():
- raise ValueError("Pool utilization can only be fetched for resource pool nodes.")
-
- graphql_query_name = "InfrahubResourcePoolUtilization"
-
- query = Query(
- query={
- graphql_query_name: {
- "@filters": {"pool_id": "$pool_id"},
- "count": None,
- "edges": {
- "node": {
- "id": None,
- "kind": None,
- "utilization": None,
- "utilization_branches": None,
- "utilization_default_branch": None,
- }
- },
- }
- },
- name="GetUtilizationForPool",
- variables={"pool_id": str},
- )
- response = await self._client.execute_graphql(
- query=query.render(),
- variables={"pool_id": self.id},
- branch_name=self._branch,
- tracker="get-pool-utilization",
- )
-
- if response[graphql_query_name].get("count", 0):
- return [edge["node"] for edge in response[graphql_query_name]["edges"]]
- return []
-
-
-class InfrahubNodeSync(InfrahubNodeBase):
- """Represents an Infrahub node in a synchronous context."""
-
- def __init__(
- self,
- client: InfrahubClientSync,
- schema: MainSchemaTypes,
- branch: Optional[str] = None,
- data: Optional[dict] = None,
- ) -> None:
- """
- Args:
- client (InfrahubClientSync): The client used to interact with the backend synchronously.
- schema (MainSchemaTypes): The schema of the node.
- branch (Optional[str]): The branch where the node resides.
- data (Optional[dict]): Optional data to initialize the node.
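-
- Example:
- The synchronous mirror of ``InfrahubNode``; ``BuiltinTag`` ships with the default
- schema, and ``client`` is an assumed, initialized ``InfrahubClientSync``:
-
- >>> schema = client.schema.get(kind="BuiltinTag")
- >>> tag = InfrahubNodeSync(client=client, schema=schema, data={"name": {"value": "red"}})
- >>> tag.save(allow_upsert=True)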
- """ - self.__class__ = type(f"{schema.kind}InfrahubNodeSync", (self.__class__,), {}) - self._client = client - - if isinstance(data, dict) and isinstance(data.get("node"), dict): - data = data.get("node") - - super().__init__(schema=schema, branch=branch or client.default_branch, data=data) - - @classmethod - def from_graphql( - cls, client: InfrahubClientSync, branch: str, data: dict, schema: Optional[MainSchemaTypes] = None - ) -> Self: - if not schema: - node_kind = data.get("__typename", None) or data.get("node", {}).get("__typename", None) - if not node_kind: - raise ValueError("Unable to determine the type of the node, __typename not present in data") - schema = client.schema.get(kind=node_kind, branch=branch) - - return cls(client=client, schema=schema, branch=branch, data=cls._strip_alias(data)) - - def _init_relationships(self, data: Optional[dict] = None) -> None: - for rel_name in self._relationships: - rel_schema = [rel for rel in self._schema.relationships if rel.name == rel_name][0] - rel_data = data.get(rel_name, None) if isinstance(data, dict) else None - - if rel_schema.cardinality == "one": - setattr(self, f"_{rel_name}", None) - setattr( - self.__class__, - rel_name, - generate_relationship_property(name=rel_name, node=self, node_class=RelatedNodeSync), # type: ignore[arg-type] - ) - setattr(self, rel_name, rel_data) - else: - setattr( - self, - rel_name, - RelationshipManagerSync( - name=rel_name, - client=self._client, - node=self, - branch=self._branch, - schema=rel_schema, - data=rel_data, - ), - ) - - def generate(self, nodes: Optional[list[str]] = None) -> None: - self._validate_artifact_definition_support(ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE) - nodes = nodes or [] - payload = {"nodes": nodes} - resp = self._client._post(f"{self._client.address}/api/artifact/generate/{self.id}", payload=payload) - resp.raise_for_status() - - def artifact_generate(self, name: str) -> None: - self._validate_artifact_support(ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE) - artifact = self._client.get(kind="CoreArtifact", definition__name__value=name, object__ids=[self.id]) - artifact.definition.fetch() # type: ignore[attr-defined] - artifact.definition.peer.generate([artifact.id]) # type: ignore[attr-defined] - - def artifact_fetch(self, name: str) -> Union[str, dict[str, Any]]: - self._validate_artifact_support(ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE) - artifact = self._client.get(kind="CoreArtifact", definition__name__value=name, object__ids=[self.id]) - content = self._client.object_store.get(identifier=artifact.storage_id.value) # type: ignore[attr-defined] - return content - - def delete(self) -> None: - input_data = {"data": {"id": self.id}} - mutation_query = {"ok": None} - query = Mutation( - mutation=f"{self._schema.kind}Delete", - input_data=input_data, - query=mutation_query, - ) - self._client.execute_graphql( - query=query.render(), - branch_name=self._branch, - tracker=f"mutation-{str(self._schema.kind).lower()}-delete", - ) - - def save(self, allow_upsert: bool = False, update_group_context: Optional[bool] = None) -> None: - if self._existing is False or allow_upsert is True: - self.create(allow_upsert=allow_upsert) - else: - self.update() - - if update_group_context is None and self._client.mode == InfrahubClientMode.TRACKING: - update_group_context = True - - if not isinstance(self._schema, GenericSchema): - if "CoreGroup" in self._schema.inherit_from: - self._client.group_context.add_related_groups(ids=[self.id], 
update_group_context=update_group_context)
- else:
- self._client.group_context.add_related_nodes(ids=[self.id], update_group_context=update_group_context)
- else:
- self._client.group_context.add_related_nodes(ids=[self.id], update_group_context=update_group_context)
-
- self._client.store.set(key=self.id, node=self)
-
- def generate_query_data(
- self,
- filters: Optional[dict[str, Any]] = None,
- offset: Optional[int] = None,
- limit: Optional[int] = None,
- include: Optional[list[str]] = None,
- exclude: Optional[list[str]] = None,
- fragment: bool = False,
- prefetch_relationships: bool = False,
- partial_match: bool = False,
- ) -> dict[str, Union[Any, dict]]:
- data = self.generate_query_data_init(
- filters=filters, offset=offset, limit=limit, include=include, exclude=exclude, partial_match=partial_match
- )
- data["edges"]["node"].update(
- self.generate_query_data_node(
- include=include,
- exclude=exclude,
- prefetch_relationships=prefetch_relationships,
- inherited=True,
- )
- )
-
- if isinstance(self._schema, GenericSchema) and fragment:
- for child in self._schema.used_by:
- child_schema = self._client.schema.get(kind=child)
- child_node = InfrahubNodeSync(client=self._client, schema=child_schema)
-
- exclude_parent = self._attributes + self._relationships
- _, _, only_in_list2 = compare_lists(list1=include or [], list2=exclude_parent)
-
- exclude_child = only_in_list2
- if exclude:
- exclude_child += exclude
-
- child_data = child_node.generate_query_data_node(
- include=include,
- exclude=exclude_child,
- prefetch_relationships=prefetch_relationships,
- inherited=False,
- insert_alias=True,
- )
-
- if child_data:
- data["edges"]["node"][f"...on {child}"] = child_data
-
- return {self._schema.kind: data}
-
- def generate_query_data_node(
- self,
- include: Optional[list[str]] = None,
- exclude: Optional[list[str]] = None,
- inherited: bool = True,
- insert_alias: bool = False,
- prefetch_relationships: bool = False,
- ) -> dict[str, Union[Any, dict]]:
- """Generate the node part of a GraphQL Query with attributes and nodes.
-
- Args:
- include (Optional[list[str]], optional): List of attributes or relationships to include. Defaults to None.
- exclude (Optional[list[str]], optional): List of attributes or relationships to exclude. Defaults to None.
- inherited (bool, optional): Indicates whether the attributes and relationships inherited from generics should be included as well.
- Defaults to True.
- insert_alias (bool, optional): If True, inserts aliases in the query for each attribute or relationship.
- prefetch_relationships (bool, optional): If True, pre-fetches relationship data as part of the query.
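-
- Example:
- Same shape as the asynchronous variant, without ``await``; ``node`` is an assumed
- ``InfrahubNodeSync`` whose schema defines a ``tags`` relationship:
-
- >>> selection = node.generate_query_data_node(include=["tags"])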
-
- Returns:
- dict[str, Union[Any, Dict]]: GraphQL query in dictionary format
- """
- # pylint: disable=too-many-branches
-
- data: dict[str, Any] = {}
-
- for attr_name in self._attributes:
- if exclude and attr_name in exclude:
- continue
-
- attr: Attribute = getattr(self, attr_name)
-
- if not inherited and attr._schema.inherited:
- continue
-
- attr_data = attr._generate_query_data()
- if attr_data:
- data[attr_name] = attr_data
- if insert_alias:
- data[attr_name]["@alias"] = f"__alias__{self._schema.kind}__{attr_name}"
- elif insert_alias:
- data[attr_name] = {"@alias": f"__alias__{self._schema.kind}__{attr_name}"}
-
- for rel_name in self._relationships:
- if exclude and rel_name in exclude:
- continue
-
- rel_schema = self._schema.get_relationship(name=rel_name)
-
- if not rel_schema or (not inherited and rel_schema.inherited):
- continue
-
- if (
- rel_schema.cardinality == RelationshipCardinality.MANY # type: ignore[union-attr]
- and rel_schema.kind not in [RelationshipKind.ATTRIBUTE, RelationshipKind.PARENT] # type: ignore[union-attr]
- and not (include and rel_name in include)
- ):
- continue
-
- peer_data: dict[str, Any] = {}
- if rel_schema and prefetch_relationships:
- peer_schema = self._client.schema.get(kind=rel_schema.peer, branch=self._branch)
- peer_node = InfrahubNodeSync(client=self._client, schema=peer_schema, branch=self._branch)
- peer_data = peer_node.generate_query_data_node(include=include, exclude=exclude)
-
- if rel_schema and rel_schema.cardinality == "one":
- rel_data = RelatedNodeSync._generate_query_data(peer_data=peer_data)
- elif rel_schema and rel_schema.cardinality == "many":
- rel_data = RelationshipManagerSync._generate_query_data(peer_data=peer_data)
-
- data[rel_name] = rel_data
-
- if insert_alias:
- data[rel_name]["@alias"] = f"__alias__{self._schema.kind}__{rel_name}"
-
- return data
-
- def add_relationships(
- self,
- relation_to_update: str,
- related_nodes: list[str],
- ) -> None:
- query = self._relationship_mutation(
- action="Add", relation_to_update=relation_to_update, related_nodes=related_nodes
- )
- tracker = f"mutation-{str(self._schema.kind).lower()}-relationshipadd-{relation_to_update}"
- self._client.execute_graphql(query=query, branch_name=self._branch, tracker=tracker)
-
- def remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None:
- query = self._relationship_mutation(
- action="Remove", relation_to_update=relation_to_update, related_nodes=related_nodes
- )
- tracker = f"mutation-{str(self._schema.kind).lower()}-relationshipremove-{relation_to_update}"
- self._client.execute_graphql(query=query, branch_name=self._branch, tracker=tracker)
-
- def _generate_mutation_query(self) -> dict[str, Any]:
- query_result: dict[str, Any] = {"ok": None, "object": {"id": None}}
-
- for attr_name in self._attributes:
- attr: Attribute = getattr(self, attr_name)
- query_result["object"].update(attr._generate_mutation_query())
-
- for rel_name in self._relationships:
- rel = getattr(self, rel_name)
- if not isinstance(rel, RelatedNodeSync):
- continue
-
- query_result["object"].update(rel._generate_mutation_query())
-
- return query_result
-
- def _process_mutation_result(self, mutation_name: str, response: dict[str, Any]) -> None:
- object_response: dict[str, Any] = response[mutation_name]["object"]
- self.id = object_response["id"]
- self._existing = True
-
- for attr_name in self._attributes:
- attr = getattr(self, attr_name)
- if (
- attr_name not in object_response
- or not 
isinstance(attr.value, InfrahubNodeBase)
- or not attr.value.is_resource_pool()
- ):
- continue
-
- # Process allocated resource from a pool and update attribute
- attr.value = object_response[attr_name]["value"]
-
- for rel_name in self._relationships:
- rel = getattr(self, rel_name)
- if rel_name not in object_response or not isinstance(rel, RelatedNodeSync) or not rel.is_resource_pool:
- continue
-
- # Process allocated resource from a pool and update related node
- allocated_resource = object_response[rel_name]
- related_node = RelatedNodeSync(
- client=self._client, branch=self._branch, schema=rel.schema, data=allocated_resource
- )
- related_node.fetch()
- setattr(self, rel_name, related_node)
-
- def create(self, allow_upsert: bool = False) -> None:
- mutation_query = self._generate_mutation_query()
-
- if allow_upsert:
- input_data = self._generate_input_data(exclude_hfid=False)
- mutation_name = f"{self._schema.kind}Upsert"
- tracker = f"mutation-{str(self._schema.kind).lower()}-upsert"
- else:
- input_data = self._generate_input_data(exclude_hfid=True)
- mutation_name = f"{self._schema.kind}Create"
- tracker = f"mutation-{str(self._schema.kind).lower()}-create"
- query = Mutation(
- mutation=mutation_name,
- input_data=input_data["data"],
- query=mutation_query,
- variables=input_data["mutation_variables"],
- )
-
- response = self._client.execute_graphql(
- query=query.render(), branch_name=self._branch, tracker=tracker, variables=input_data["variables"]
- )
- self._process_mutation_result(mutation_name=mutation_name, response=response)
-
- def update(self, do_full_update: bool = False) -> None:
- input_data = self._generate_input_data(exclude_unmodified=not do_full_update)
- mutation_query = self._generate_mutation_query()
- mutation_name = f"{self._schema.kind}Update"
-
- query = Mutation(
- mutation=mutation_name,
- input_data=input_data["data"],
- query=mutation_query,
- variables=input_data["mutation_variables"],
- )
-
- response = self._client.execute_graphql(
- query=query.render(),
- branch_name=self._branch,
- tracker=f"mutation-{str(self._schema.kind).lower()}-update",
- variables=input_data["variables"],
- )
- self._process_mutation_result(mutation_name=mutation_name, response=response)
-
- def _process_relationships(
- self, node_data: dict[str, Any], branch: str, related_nodes: list[InfrahubNodeSync]
- ) -> None:
- """Process the relationships of an InfrahubNodeSync and append related nodes to a list.
-
- Args:
- node_data (dict[str, Any]): The item from the GraphQL response corresponding to the node.
- branch (str): The branch name.
- related_nodes (list[InfrahubNodeSync]): The list to which related nodes will be appended.
- """
- for rel_name in self._relationships:
- rel = getattr(self, rel_name)
- if rel and isinstance(rel, RelatedNodeSync):
- relation = node_data["node"].get(rel_name)
- if relation.get("node", None):
- related_node = InfrahubNodeSync.from_graphql(client=self._client, branch=branch, data=relation)
- related_nodes.append(related_node)
- elif rel and isinstance(rel, RelationshipManagerSync):
- peers = node_data["node"].get(rel_name)
- if peers:
- for peer in peers["edges"]:
- related_node = InfrahubNodeSync.from_graphql(client=self._client, branch=branch, data=peer)
- related_nodes.append(related_node)
-
- def get_pool_allocated_resources(self, resource: InfrahubNodeSync) -> list[InfrahubNodeSync]:
- """Fetch all nodes that were allocated for the pool and a given resource.
-
- Args:
- resource (InfrahubNodeSync): The resource from which the nodes were allocated.
-
- Returns:
- list[InfrahubNodeSync]: The allocated nodes.
- """
- if not self.is_resource_pool():
- raise ValueError("Allocated resources can only be fetched from resource pool nodes.")
-
- graphql_query_name = "InfrahubResourcePoolAllocated"
- node_ids_per_kind: dict[str, list[str]] = {}
-
- has_remaining_items = True
- page_number = 1
- while has_remaining_items:
- page_offset = (page_number - 1) * self._client.pagination_size
-
- query = Query(
- query={
- graphql_query_name: {
- "@filters": {
- "pool_id": "$pool_id",
- "resource_id": "$resource_id",
- "offset": page_offset,
- "limit": self._client.pagination_size,
- },
- "count": None,
- "edges": {"node": {"id": None, "kind": None, "branch": None, "identifier": None}},
- }
- },
- name="GetAllocatedResourceForPool",
- variables={"pool_id": str, "resource_id": str},
- )
- response = self._client.execute_graphql(
- query=query.render(),
- variables={"pool_id": self.id, "resource_id": resource.id},
- branch_name=self._branch,
- tracker=f"get-allocated-resources-page{page_number}",
- )
-
- for edge in response[graphql_query_name]["edges"]:
- node = edge["node"]
- node_ids_per_kind.setdefault(node["kind"], []).append(node["id"])
-
- remaining_items = response[graphql_query_name].get("count", 0) - (
- page_offset + self._client.pagination_size
- )
- if remaining_items <= 0:
- has_remaining_items = False
-
- page_number += 1
-
- nodes: list[InfrahubNodeSync] = []
- for kind, node_ids in node_ids_per_kind.items():
- nodes.extend(self._client.filters(kind=kind, branch=self._branch, ids=node_ids))
-
- return nodes
-
- def get_pool_resources_utilization(self) -> list[dict[str, Any]]:
- """Fetch the utilization of each resource for the pool.
-
- Returns:
- list[dict[str, Any]]: A list containing the allocation numbers for each resource of the pool.
- """
- if not self.is_resource_pool():
- raise ValueError("Pool utilization can only be fetched for resource pool nodes.")
-
- graphql_query_name = "InfrahubResourcePoolUtilization"
-
- query = Query(
- query={
- graphql_query_name: {
- "@filters": {"pool_id": "$pool_id"},
- "count": None,
- "edges": {
- "node": {
- "id": None,
- "kind": None,
- "utilization": None,
- "utilization_branches": None,
- "utilization_default_branch": None,
- }
- },
- }
- },
- name="GetUtilizationForPool",
- variables={"pool_id": str},
- )
- response = self._client.execute_graphql(
- query=query.render(),
- variables={"pool_id": self.id},
- branch_name=self._branch,
- tracker="get-pool-utilization",
- )
-
- if response[graphql_query_name].get("count", 0):
- return [edge["node"] for edge in response[graphql_query_name]["edges"]]
- return []
-
-
-class NodeProperty:
- """Represents a property of a node, typically used for metadata like display labels."""
-
- def __init__(self, data: Union[dict, str]):
- """
- Args:
- data (Union[dict, str]): Data representing the node property.
- """
- self.id = None
- self.display_label = None
- self.typename = None
-
- if isinstance(data, str):
- self.id = data
- elif isinstance(data, dict):
- self.id = data.get("id", None)
- self.display_label = data.get("display_label", None)
- self.typename = data.get("__typename", None)
-
- def _generate_input_data(self) -> Union[str, None]:
- return self.id
-
-
-def generate_relationship_property(node: Union[InfrahubNode, InfrahubNodeSync], name: str, node_class): # type: ignore
- """Generates a property that stores values under a private, non-public name.
- - Args: - node (Union[InfrahubNode, InfrahubNodeSync]): The node instance. - name (str): The name of the relationship property. - node_class: The class of the node. - - Returns: - A property object for managing the relationship. - - """ - internal_name = "_" + name.lower() - external_name = name - - @property # type: ignore - def prop(self): # type: ignore - return getattr(self, internal_name) - - @prop.setter - def prop(self, value): # type: ignore - if isinstance(value, RelatedNodeBase) or value is None: - setattr(self, internal_name, value) - else: - schema = [rel for rel in self._schema.relationships if rel.name == external_name][0] - setattr( - self, - internal_name, - node_class(name=external_name, branch=node._branch, client=node._client, schema=schema, data=value), - ) - - return prop diff --git a/python_sdk/infrahub_sdk/object_store.py b/python_sdk/infrahub_sdk/object_store.py deleted file mode 100644 index 33a5c0906b..0000000000 --- a/python_sdk/infrahub_sdk/object_store.py +++ /dev/null @@ -1,111 +0,0 @@ -from __future__ import annotations - -import copy -from typing import TYPE_CHECKING, Optional - -import httpx - -from infrahub_sdk.exceptions import AuthenticationError, ServerNotReachableError - -if TYPE_CHECKING: - from infrahub_sdk.client import InfrahubClient, InfrahubClientSync - - -class ObjectStoreBase: - pass - - -class ObjectStore(ObjectStoreBase): - def __init__(self, client: InfrahubClient): - self.client = client - - async def get(self, identifier: str, tracker: Optional[str] = None) -> str: - url = f"{self.client.address}/api/storage/object/{identifier}" - headers = copy.copy(self.client.headers or {}) - if self.client.insert_tracker and tracker: - headers["X-Infrahub-Tracker"] = tracker - - try: - resp = await self.client._get(url=url, headers=headers) - resp.raise_for_status() - - except ServerNotReachableError: - self.client.log.error(f"Unable to connect to {self.client.address} .. ") - raise - except httpx.HTTPStatusError as exc: - if exc.response.status_code in [401, 403]: - response = exc.response.json() - errors = response.get("errors") - messages = [error.get("message") for error in errors] - raise AuthenticationError(" | ".join(messages)) from exc - - return resp.text - - async def upload(self, content: str, tracker: Optional[str] = None) -> dict[str, str]: - url = f"{self.client.address}/api/storage/upload/content" - headers = copy.copy(self.client.headers or {}) - if self.client.insert_tracker and tracker: - headers["X-Infrahub-Tracker"] = tracker - - try: - resp = await self.client._post(url=url, payload={"content": content}, headers=headers) - resp.raise_for_status() - except ServerNotReachableError: - self.client.log.error(f"Unable to connect to {self.client.address} .. 
") - raise - except httpx.HTTPStatusError as exc: - if exc.response.status_code in [401, 403]: - response = exc.response.json() - errors = response.get("errors") - messages = [error.get("message") for error in errors] - raise AuthenticationError(" | ".join(messages)) from exc - - return resp.json() - - -class ObjectStoreSync(ObjectStoreBase): - def __init__(self, client: InfrahubClientSync): - self.client = client - - def get(self, identifier: str, tracker: Optional[str] = None) -> str: - url = f"{self.client.address}/api/storage/object/{identifier}" - headers = copy.copy(self.client.headers or {}) - if self.client.insert_tracker and tracker: - headers["X-Infrahub-Tracker"] = tracker - - try: - resp = self.client._get(url=url, headers=headers) - resp.raise_for_status() - - except ServerNotReachableError: - self.client.log.error(f"Unable to connect to {self.client.address} .. ") - raise - except httpx.HTTPStatusError as exc: - if exc.response.status_code in [401, 403]: - response = exc.response.json() - errors = response.get("errors") - messages = [error.get("message") for error in errors] - raise AuthenticationError(" | ".join(messages)) from exc - - return resp.text - - def upload(self, content: str, tracker: Optional[str] = None) -> dict[str, str]: - url = f"{self.client.address}/api/storage/upload/content" - headers = copy.copy(self.client.headers or {}) - if self.client.insert_tracker and tracker: - headers["X-Infrahub-Tracker"] = tracker - - try: - resp = self.client._post(url=url, payload={"content": content}, headers=headers) - resp.raise_for_status() - except ServerNotReachableError: - self.client.log.error(f"Unable to connect to {self.client.address} .. ") - raise - except httpx.HTTPStatusError as exc: - if exc.response.status_code in [401, 403]: - response = exc.response.json() - errors = response.get("errors") - messages = [error.get("message") for error in errors] - raise AuthenticationError(" | ".join(messages)) from exc - - return resp.json() diff --git a/python_sdk/infrahub_sdk/playback.py b/python_sdk/infrahub_sdk/playback.py deleted file mode 100644 index 72e659545b..0000000000 --- a/python_sdk/infrahub_sdk/playback.py +++ /dev/null @@ -1,55 +0,0 @@ -from pathlib import Path -from typing import Any, Optional - -import httpx -import ujson -from pydantic import Field -from pydantic_settings import BaseSettings, SettingsConfigDict - -from infrahub_sdk.types import HTTPMethod -from infrahub_sdk.utils import generate_request_filename - - -class JSONPlayback(BaseSettings): - model_config = SettingsConfigDict(env_prefix="INFRAHUB_PLAYBACK_") - directory: str = Field(default=".", description="Directory to read recorded files from") - - async def async_request( - self, - url: str, - method: HTTPMethod, - headers: dict[str, Any], - timeout: int, - payload: Optional[dict] = None, - ) -> httpx.Response: - return self._read_request(url=url, method=method, headers=headers, payload=payload, timeout=timeout) - - def sync_request( - self, - url: str, - method: HTTPMethod, - headers: dict[str, Any], - timeout: int, - payload: Optional[dict] = None, - ) -> httpx.Response: - return self._read_request(url=url, method=method, headers=headers, payload=payload, timeout=timeout) - - def _read_request( - self, - url: str, - method: HTTPMethod, - headers: dict[str, Any], - timeout: int, # pylint: disable=unused-argument - payload: Optional[dict] = None, - ) -> httpx.Response: - content: Optional[bytes] = None - if payload: - content = str(ujson.dumps(payload)).encode("utf-8") - request = 
httpx.Request(method=method.value, url=url, headers=headers, content=content) - - filename = generate_request_filename(request) - with Path(f"{self.directory}/{filename}.json").open(encoding="utf-8") as fobj: - data = ujson.load(fobj) - - response = httpx.Response(status_code=data["status_code"], content=data["response_content"], request=request) - return response diff --git a/python_sdk/infrahub_sdk/protocols.py b/python_sdk/infrahub_sdk/protocols.py deleted file mode 100644 index 9e66873f2b..0000000000 --- a/python_sdk/infrahub_sdk/protocols.py +++ /dev/null @@ -1,867 +0,0 @@ -# Generated by "invoke backend.generate", do not edit directly - -from __future__ import annotations - -from typing import TYPE_CHECKING - -from .protocols_base import CoreNode, CoreNodeSync - -if TYPE_CHECKING: - from infrahub_sdk.node import RelatedNode, RelatedNodeSync, RelationshipManager, RelationshipManagerSync - - from .protocols_base import ( - URL, - Boolean, - BooleanOptional, - DateTime, - DateTimeOptional, - Dropdown, - Enum, - HashedPassword, - Integer, - IntegerOptional, - IPHost, - IPNetwork, - JSONAttribute, - JSONAttributeOptional, - ListAttributeOptional, - String, - StringOptional, - ) - -# pylint: disable=too-many-ancestors - -# --------------------------------------------- -# ASYNC -# --------------------------------------------- - - -class BuiltinIPAddress(CoreNode): - address: IPHost - description: StringOptional - ip_namespace: RelatedNode - ip_prefix: RelatedNode - - -class BuiltinIPNamespace(CoreNode): - name: String - description: StringOptional - ip_prefixes: RelationshipManager - ip_addresses: RelationshipManager - - -class BuiltinIPPrefix(CoreNode): - prefix: IPNetwork - description: StringOptional - member_type: Dropdown - is_pool: Boolean - is_top_level: BooleanOptional - utilization: IntegerOptional - netmask: StringOptional - hostmask: StringOptional - network_address: StringOptional - broadcast_address: StringOptional - ip_namespace: RelatedNode - ip_addresses: RelationshipManager - resource_pool: RelationshipManager - parent: RelatedNode - children: RelationshipManager - - -class CoreArtifactTarget(CoreNode): - artifacts: RelationshipManager - - -class CoreCheck(CoreNode): - name: StringOptional - label: StringOptional - origin: String - kind: String - message: StringOptional - conclusion: Enum - severity: Enum - created_at: DateTimeOptional - validator: RelatedNode - - -class CoreComment(CoreNode): - text: String - created_at: DateTimeOptional - created_by: RelatedNode - - -class CoreCredential(CoreNode): - name: String - label: StringOptional - description: StringOptional - - -class CoreGenericAccount(CoreNode): - name: String - password: HashedPassword - label: StringOptional - description: StringOptional - account_type: Enum - role: Enum - status: Dropdown - tokens: RelationshipManager - - -class CoreGenericRepository(CoreNode): - name: String - description: StringOptional - location: String - internal_status: Dropdown - operational_status: Dropdown - sync_status: Dropdown - credential: RelatedNode - tags: RelationshipManager - transformations: RelationshipManager - queries: RelationshipManager - checks: RelationshipManager - generators: RelationshipManager - - -class CoreGroup(CoreNode): - name: String - label: StringOptional - description: StringOptional - group_type: Enum - members: RelationshipManager - subscribers: RelationshipManager - parent: RelatedNode - children: RelationshipManager - - -class CoreProfile(CoreNode): - profile_name: String - profile_priority: 
IntegerOptional
-
-
-class CoreResourcePool(CoreNode):
-    name: String
-    description: StringOptional
-
-
-class CoreTaskTarget(CoreNode):
-    pass
-
-
-class CoreThread(CoreNode):
-    label: StringOptional
-    resolved: Boolean
-    created_at: DateTimeOptional
-    change: RelatedNode
-    comments: RelationshipManager
-    created_by: RelatedNode
-
-
-class CoreTransformation(CoreNode):
-    name: String
-    label: StringOptional
-    description: StringOptional
-    timeout: Integer
-    query: RelatedNode
-    repository: RelatedNode
-    tags: RelationshipManager
-
-
-class CoreValidator(CoreNode):
-    label: StringOptional
-    state: Enum
-    conclusion: Enum
-    completed_at: DateTimeOptional
-    started_at: DateTimeOptional
-    proposed_change: RelatedNode
-    checks: RelationshipManager
-
-
-class CoreWebhook(CoreNode):
-    name: String
-    description: StringOptional
-    url: URL
-    validate_certificates: BooleanOptional
-
-
-class LineageOwner(CoreNode):
-    pass
-
-
-class LineageSource(CoreNode):
-    pass
-
-
-class BuiltinTag(CoreNode):
-    name: String
-    description: StringOptional
-
-
-class CoreAccount(LineageOwner, LineageSource, CoreGenericAccount):
-    pass
-
-
-class CoreArtifact(CoreTaskTarget):
-    name: String
-    status: Enum
-    content_type: Enum
-    checksum: StringOptional
-    storage_id: StringOptional
-    parameters: JSONAttributeOptional
-    object: RelatedNode
-    definition: RelatedNode
-
-
-class CoreArtifactCheck(CoreCheck):
-    changed: BooleanOptional
-    checksum: StringOptional
-    artifact_id: StringOptional
-    storage_id: StringOptional
-    line_number: IntegerOptional
-
-
-class CoreArtifactDefinition(CoreTaskTarget):
-    name: String
-    artifact_name: String
-    description: StringOptional
-    parameters: JSONAttribute
-    content_type: Enum
-    targets: RelatedNode
-    transformation: RelatedNode
-
-
-class CoreArtifactThread(CoreThread):
-    artifact_id: StringOptional
-    storage_id: StringOptional
-    line_number: IntegerOptional
-
-
-class CoreArtifactValidator(CoreValidator):
-    definition: RelatedNode
-
-
-class CoreChangeComment(CoreComment):
-    change: RelatedNode
-
-
-class CoreChangeThread(CoreThread):
-    pass
-
-
-class CoreCheckDefinition(CoreTaskTarget):
-    name: String
-    description: StringOptional
-    file_path: String
-    class_name: String
-    timeout: Integer
-    parameters: JSONAttributeOptional
-    repository: RelatedNode
-    query: RelatedNode
-    targets: RelatedNode
-    tags: RelationshipManager
-
-
-class CoreCustomWebhook(CoreWebhook, CoreTaskTarget):
-    transformation: RelatedNode
-
-
-class CoreDataCheck(CoreCheck):
-    conflicts: JSONAttribute
-    keep_branch: Enum
-    enriched_conflict_id: StringOptional
-
-
-class CoreDataValidator(CoreValidator):
-    pass
-
-
-class CoreFileCheck(CoreCheck):
-    files: ListAttributeOptional
-    commit: StringOptional
-
-
-class CoreFileThread(CoreThread):
-    file: StringOptional
-    commit: StringOptional
-    line_number: IntegerOptional
-    repository: RelatedNode
-
-
-class CoreGeneratorCheck(CoreCheck):
-    instance: String
-
-
-class CoreGeneratorDefinition(CoreTaskTarget):
-    name: String
-    description: StringOptional
-    parameters: JSONAttribute
-    file_path: String
-    class_name: String
-    convert_query_response: BooleanOptional
-    query: RelatedNode
-    repository: RelatedNode
-    targets: RelatedNode
-
-
-class CoreGeneratorGroup(CoreGroup):
-    pass
-
-
-class CoreGeneratorInstance(CoreTaskTarget):
-    name: String
-    status: Enum
-    object: RelatedNode
-    definition: RelatedNode
-
-
-class CoreGeneratorValidator(CoreValidator):
-    definition: RelatedNode
-
-
-class CoreGraphQLQuery(CoreNode):
-    name: String
-    description: StringOptional
-    query: String
-    variables: JSONAttributeOptional
-    operations: ListAttributeOptional
-    models: ListAttributeOptional
-    depth: IntegerOptional
-    height: IntegerOptional
-    repository: RelatedNode
-    tags: RelationshipManager
-
-
-class CoreGraphQLQueryGroup(CoreGroup):
-    parameters: JSONAttributeOptional
-    query: RelatedNode
-
-
-class CoreIPAddressPool(CoreResourcePool, LineageSource):
-    default_address_type: String
-    default_prefix_length: IntegerOptional
-    resources: RelationshipManager
-    ip_namespace: RelatedNode
-
-
-class CoreIPPrefixPool(CoreResourcePool, LineageSource):
-    default_prefix_length: IntegerOptional
-    default_member_type: Enum
-    default_prefix_type: StringOptional
-    resources: RelationshipManager
-    ip_namespace: RelatedNode
-
-
-class CoreNumberPool(CoreResourcePool, LineageSource):
-    node: String
-    node_attribute: String
-    start_range: Integer
-    end_range: Integer
-
-
-class CoreObjectThread(CoreThread):
-    object_path: String
-
-
-class CorePasswordCredential(CoreCredential):
-    username: StringOptional
-    password: StringOptional
-
-
-class CoreProposedChange(CoreTaskTarget):
-    name: String
-    description: StringOptional
-    source_branch: String
-    destination_branch: String
-    state: Enum
-    approved_by: RelationshipManager
-    reviewers: RelationshipManager
-    created_by: RelatedNode
-    comments: RelationshipManager
-    threads: RelationshipManager
-    validations: RelationshipManager
-
-
-class CoreReadOnlyRepository(LineageOwner, LineageSource, CoreGenericRepository, CoreTaskTarget):
-    ref: String
-    commit: StringOptional
-
-
-class CoreRepository(LineageOwner, LineageSource, CoreGenericRepository, CoreTaskTarget):
-    default_branch: String
-    commit: StringOptional
-
-
-class CoreRepositoryValidator(CoreValidator):
-    repository: RelatedNode
-
-
-class CoreSchemaCheck(CoreCheck):
-    conflicts: JSONAttribute
-    enriched_conflict_id: StringOptional
-
-
-class CoreSchemaValidator(CoreValidator):
-    pass
-
-
-class CoreStandardCheck(CoreCheck):
-    pass
-
-
-class CoreStandardGroup(CoreGroup):
-    pass
-
-
-class CoreStandardWebhook(CoreWebhook, CoreTaskTarget):
-    shared_key: String
-
-
-class CoreThreadComment(CoreComment):
-    thread: RelatedNode
-
-
-class CoreTransformJinja2(CoreTransformation):
-    template_path: String
-
-
-class CoreTransformPython(CoreTransformation):
-    file_path: String
-    class_name: String
-
-
-class CoreUserValidator(CoreValidator):
-    check_definition: RelatedNode
-    repository: RelatedNode
-
-
-class InternalAccountToken(CoreNode):
-    name: StringOptional
-    token: String
-    expiration: DateTimeOptional
-    account: RelatedNode
-
-
-class InternalRefreshToken(CoreNode):
-    expiration: DateTime
-    account: RelatedNode
-
-
-class IpamNamespace(BuiltinIPNamespace):
-    default: BooleanOptional
-
-
-# ---------------------------------------------
-# SYNC
-# ---------------------------------------------
-
-
-class BuiltinIPAddressSync(CoreNodeSync):
-    address: IPHost
-    description: StringOptional
-    ip_namespace: RelatedNodeSync
-    ip_prefix: RelatedNodeSync
-
-
-class BuiltinIPNamespaceSync(CoreNodeSync):
-    name: String
-    description: StringOptional
-    ip_prefixes: RelationshipManagerSync
-    ip_addresses: RelationshipManagerSync
-
-
-class BuiltinIPPrefixSync(CoreNodeSync):
-    prefix: IPNetwork
-    description: StringOptional
-    member_type: Dropdown
-    is_pool: Boolean
-    is_top_level: BooleanOptional
-    utilization: IntegerOptional
-    netmask: StringOptional
-    hostmask: StringOptional
-    network_address: StringOptional
-    broadcast_address: StringOptional
-    ip_namespace: RelatedNodeSync
-    ip_addresses: RelationshipManagerSync
-    resource_pool: RelationshipManagerSync
-    parent: RelatedNodeSync
-    children: RelationshipManagerSync
-
-
-class CoreArtifactTargetSync(CoreNodeSync):
-    artifacts: RelationshipManagerSync
-
-
-class CoreCheckSync(CoreNodeSync):
-    name: StringOptional
-    label: StringOptional
-    origin: String
-    kind: String
-    message: StringOptional
-    conclusion: Enum
-    severity: Enum
-    created_at: DateTimeOptional
-    validator: RelatedNodeSync
-
-
-class CoreCommentSync(CoreNodeSync):
-    text: String
-    created_at: DateTimeOptional
-    created_by: RelatedNodeSync
-
-
-class CoreCredentialSync(CoreNodeSync):
-    name: String
-    label: StringOptional
-    description: StringOptional
-
-
-class CoreGenericAccountSync(CoreNodeSync):
-    name: String
-    password: HashedPassword
-    label: StringOptional
-    description: StringOptional
-    account_type: Enum
-    role: Enum
-    status: Dropdown
-    tokens: RelationshipManagerSync
-
-
-class CoreGenericRepositorySync(CoreNodeSync):
-    name: String
-    description: StringOptional
-    location: String
-    internal_status: Dropdown
-    operational_status: Dropdown
-    sync_status: Dropdown
-    credential: RelatedNodeSync
-    tags: RelationshipManagerSync
-    transformations: RelationshipManagerSync
-    queries: RelationshipManagerSync
-    checks: RelationshipManagerSync
-    generators: RelationshipManagerSync
-
-
-class CoreGroupSync(CoreNodeSync):
-    name: String
-    label: StringOptional
-    description: StringOptional
-    group_type: Enum
-    members: RelationshipManagerSync
-    subscribers: RelationshipManagerSync
-    parent: RelatedNodeSync
-    children: RelationshipManagerSync
-
-
-class CoreProfileSync(CoreNodeSync):
-    profile_name: String
-    profile_priority: IntegerOptional
-
-
-class CoreResourcePoolSync(CoreNodeSync):
-    name: String
-    description: StringOptional
-
-
-class CoreTaskTargetSync(CoreNodeSync):
-    pass
-
-
-class CoreThreadSync(CoreNodeSync):
-    label: StringOptional
-    resolved: Boolean
-    created_at: DateTimeOptional
-    change: RelatedNodeSync
-    comments: RelationshipManagerSync
-    created_by: RelatedNodeSync
-
-
-class CoreTransformationSync(CoreNodeSync):
-    name: String
-    label: StringOptional
-    description: StringOptional
-    timeout: Integer
-    query: RelatedNodeSync
-    repository: RelatedNodeSync
-    tags: RelationshipManagerSync
-
-
-class CoreValidatorSync(CoreNodeSync):
-    label: StringOptional
-    state: Enum
-    conclusion: Enum
-    completed_at: DateTimeOptional
-    started_at: DateTimeOptional
-    proposed_change: RelatedNodeSync
-    checks: RelationshipManagerSync
-
-
-class CoreWebhookSync(CoreNodeSync):
-    name: String
-    description: StringOptional
-    url: URL
-    validate_certificates: BooleanOptional
-
-
-class LineageOwnerSync(CoreNodeSync):
-    pass
-
-
-class LineageSourceSync(CoreNodeSync):
-    pass
-
-
-class BuiltinTagSync(CoreNodeSync):
-    name: String
-    description: StringOptional
-
-
-class CoreAccountSync(LineageOwnerSync, LineageSourceSync, CoreGenericAccountSync):
-    pass
-
-
-class CoreArtifactSync(CoreTaskTargetSync):
-    name: String
-    status: Enum
-    content_type: Enum
-    checksum: StringOptional
-    storage_id: StringOptional
-    parameters: JSONAttributeOptional
-    object: RelatedNodeSync
-    definition: RelatedNodeSync
-
-
-class CoreArtifactCheckSync(CoreCheckSync):
-    changed: BooleanOptional
-    checksum: StringOptional
-    artifact_id: StringOptional
-    storage_id: StringOptional
-    line_number: IntegerOptional
-
-
-class CoreArtifactDefinitionSync(CoreTaskTargetSync):
-    name: String
-    artifact_name: String
-    description: StringOptional
-    parameters: JSONAttribute
-    content_type: Enum
-    targets: RelatedNodeSync
-    transformation: RelatedNodeSync
-
-
-class CoreArtifactThreadSync(CoreThreadSync):
-    artifact_id: StringOptional
-    storage_id: StringOptional
-    line_number: IntegerOptional
-
-
-class CoreArtifactValidatorSync(CoreValidatorSync):
-    definition: RelatedNodeSync
-
-
-class CoreChangeCommentSync(CoreCommentSync):
-    change: RelatedNodeSync
-
-
-class CoreChangeThreadSync(CoreThreadSync):
-    pass
-
-
-class CoreCheckDefinitionSync(CoreTaskTargetSync):
-    name: String
-    description: StringOptional
-    file_path: String
-    class_name: String
-    timeout: Integer
-    parameters: JSONAttributeOptional
-    repository: RelatedNodeSync
-    query: RelatedNodeSync
-    targets: RelatedNodeSync
-    tags: RelationshipManagerSync
-
-
-class CoreCustomWebhookSync(CoreWebhookSync, CoreTaskTargetSync):
-    transformation: RelatedNodeSync
-
-
-class CoreDataCheckSync(CoreCheckSync):
-    conflicts: JSONAttribute
-    keep_branch: Enum
-    enriched_conflict_id: StringOptional
-
-
-class CoreDataValidatorSync(CoreValidatorSync):
-    pass
-
-
-class CoreFileCheckSync(CoreCheckSync):
-    files: ListAttributeOptional
-    commit: StringOptional
-
-
-class CoreFileThreadSync(CoreThreadSync):
-    file: StringOptional
-    commit: StringOptional
-    line_number: IntegerOptional
-    repository: RelatedNodeSync
-
-
-class CoreGeneratorCheckSync(CoreCheckSync):
-    instance: String
-
-
-class CoreGeneratorDefinitionSync(CoreTaskTargetSync):
-    name: String
-    description: StringOptional
-    parameters: JSONAttribute
-    file_path: String
-    class_name: String
-    convert_query_response: BooleanOptional
-    query: RelatedNodeSync
-    repository: RelatedNodeSync
-    targets: RelatedNodeSync
-
-
-class CoreGeneratorGroupSync(CoreGroupSync):
-    pass
-
-
-class CoreGeneratorInstanceSync(CoreTaskTargetSync):
-    name: String
-    status: Enum
-    object: RelatedNodeSync
-    definition: RelatedNodeSync
-
-
-class CoreGeneratorValidatorSync(CoreValidatorSync):
-    definition: RelatedNodeSync
-
-
-class CoreGraphQLQuerySync(CoreNodeSync):
-    name: String
-    description: StringOptional
-    query: String
-    variables: JSONAttributeOptional
-    operations: ListAttributeOptional
-    models: ListAttributeOptional
-    depth: IntegerOptional
-    height: IntegerOptional
-    repository: RelatedNodeSync
-    tags: RelationshipManagerSync
-
-
-class CoreGraphQLQueryGroupSync(CoreGroupSync):
-    parameters: JSONAttributeOptional
-    query: RelatedNodeSync
-
-
-class CoreIPAddressPoolSync(CoreResourcePoolSync, LineageSourceSync):
-    default_address_type: String
-    default_prefix_length: IntegerOptional
-    resources: RelationshipManagerSync
-    ip_namespace: RelatedNodeSync
-
-
-class CoreIPPrefixPoolSync(CoreResourcePoolSync, LineageSourceSync):
-    default_prefix_length: IntegerOptional
-    default_member_type: Enum
-    default_prefix_type: StringOptional
-    resources: RelationshipManagerSync
-    ip_namespace: RelatedNodeSync
-
-
-class CoreNumberPoolSync(CoreResourcePoolSync, LineageSourceSync):
-    node: String
-    node_attribute: String
-    start_range: Integer
-    end_range: Integer
-
-
-class CoreObjectThreadSync(CoreThreadSync):
-    object_path: String
-
-
-class CorePasswordCredentialSync(CoreCredentialSync):
-    username: StringOptional
-    password: StringOptional
-
-
-class CoreProposedChangeSync(CoreTaskTargetSync):
-    name: String
-    description: StringOptional
-    source_branch: String
-    destination_branch: String
-    state: Enum
-    approved_by: RelationshipManagerSync
-    reviewers: RelationshipManagerSync
-    created_by: RelatedNodeSync
-    comments: RelationshipManagerSync
-    threads: RelationshipManagerSync
-    validations: RelationshipManagerSync
-
-
-class CoreReadOnlyRepositorySync(LineageOwnerSync, LineageSourceSync, CoreGenericRepositorySync, CoreTaskTargetSync):
-    ref: String
-    commit: StringOptional
-
-
-class CoreRepositorySync(LineageOwnerSync, LineageSourceSync, CoreGenericRepositorySync, CoreTaskTargetSync):
-    default_branch: String
-    commit: StringOptional
-
-
-class CoreRepositoryValidatorSync(CoreValidatorSync):
-    repository: RelatedNodeSync
-
-
-class CoreSchemaCheckSync(CoreCheckSync):
-    conflicts: JSONAttribute
-    enriched_conflict_id: StringOptional
-
-
-class CoreSchemaValidatorSync(CoreValidatorSync):
-    pass
-
-
-class CoreStandardCheckSync(CoreCheckSync):
-    pass
-
-
-class CoreStandardGroupSync(CoreGroupSync):
-    pass
-
-
-class CoreStandardWebhookSync(CoreWebhookSync, CoreTaskTargetSync):
-    shared_key: String
-
-
-class CoreThreadCommentSync(CoreCommentSync):
-    thread: RelatedNodeSync
-
-
-class CoreTransformJinja2Sync(CoreTransformationSync):
-    template_path: String
-
-
-class CoreTransformPythonSync(CoreTransformationSync):
-    file_path: String
-    class_name: String
-
-
-class CoreUserValidatorSync(CoreValidatorSync):
-    check_definition: RelatedNodeSync
-    repository: RelatedNodeSync
-
-
-class InternalAccountTokenSync(CoreNodeSync):
-    name: StringOptional
-    token: String
-    expiration: DateTimeOptional
-    account: RelatedNodeSync
-
-
-class InternalRefreshTokenSync(CoreNodeSync):
-    expiration: DateTime
-    account: RelatedNodeSync
-
-
-class IpamNamespaceSync(BuiltinIPNamespaceSync):
-    default: BooleanOptional
diff --git a/python_sdk/infrahub_sdk/protocols_base.py b/python_sdk/infrahub_sdk/protocols_base.py
deleted file mode 100644
index 44f0792724..0000000000
--- a/python_sdk/infrahub_sdk/protocols_base.py
+++ /dev/null
@@ -1,146 +0,0 @@
-from __future__ import annotations
-
-from typing import Any, Optional, Protocol, runtime_checkable
-
-
-class RelatedNode(Protocol): ...
-
-
-class RelatedNodeSync(Protocol): ...
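For context (illustrative, not part of the patch): the protocol stubs in these two files exist so SDK users can type client calls against generated kinds. A minimal sketch of the intended usage, assuming the upstream infrahub-sdk client API; the address, the repository name, and the ability to pass a protocol class as `kind` are assumptions, not verified against this tree:

    import asyncio

    from infrahub_sdk import InfrahubClient
    from infrahub_sdk.protocols import CoreRepository


    async def show_default_branch() -> None:
        # Hypothetical local instance; passing the protocol class as kind=
        # (rather than the string "CoreRepository") is what makes the returned
        # node statically typed, so repo.default_branch.value type-checks.
        client = InfrahubClient(address="http://localhost:8000")
        repo = await client.get(kind=CoreRepository, name__value="demo-repo")
        print(repo.default_branch.value)


    asyncio.run(show_default_branch())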
-
-
-@runtime_checkable
-class Attribute(Protocol):
-    name: str
-    id: Optional[str]
-
-    is_default: Optional[bool]
-    is_from_profile: Optional[bool]
-    is_inherited: Optional[bool]
-    updated_at: Optional[str]
-    is_visible: Optional[bool]
-    is_protected: Optional[bool]
-
-
-class String(Attribute):
-    value: str
-
-
-class StringOptional(Attribute):
-    value: Optional[str]
-
-
-class Integer(Attribute):
-    value: int
-
-
-class IntegerOptional(Attribute):
-    value: Optional[int]
-
-
-class Boolean(Attribute):
-    value: bool
-
-
-class BooleanOptional(Attribute):
-    value: Optional[bool]
-
-
-class DateTime(Attribute):
-    value: str
-
-
-class DateTimeOptional(Attribute):
-    value: Optional[str]
-
-
-class Enum(Attribute):
-    value: str
-
-
-class EnumOptional(Attribute):
-    value: Optional[str]
-
-
-class URL(Attribute):
-    value: str
-
-
-class URLOptional(Attribute):
-    value: Optional[str]
-
-
-class Dropdown(Attribute):
-    value: str
-
-
-class DropdownOptional(Attribute):
-    value: Optional[str]
-
-
-class IPNetwork(Attribute):
-    value: str
-
-
-class IPNetworkOptional(Attribute):
-    value: Optional[str]
-
-
-class IPHost(Attribute):
-    value: str
-
-
-class IPHostOptional(Attribute):
-    value: Optional[str]
-
-
-class HashedPassword(Attribute):
-    value: str
-
-
-class HashedPasswordOptional(Attribute):
-    value: Any
-
-
-class JSONAttribute(Attribute):
-    value: Any
-
-
-class JSONAttributeOptional(Attribute):
-    value: Optional[Any]
-
-
-class ListAttribute(Attribute):
-    value: list[Any]
-
-
-class ListAttributeOptional(Attribute):
-    value: Optional[list[Any]]
-
-
-class CoreNodeBase(Protocol):
-    id: str
-    display_label: Optional[str]
-    hfid: Optional[list[str]]
-    hfid_str: Optional[str]
-
-
-class CoreNode(CoreNodeBase, Protocol):
-    def get_kind(self) -> str: ...
-
-    async def save(self) -> None: ...
-
-    async def update(self, do_full_update: bool) -> None: ...
-
-
-class CoreNodeSync(CoreNodeBase, Protocol):
-    id: str
-    display_label: Optional[str]
-    hfid: Optional[list[str]]
-    hfid_str: Optional[str]
-
-    def get_kind(self) -> str: ...
-
-    def save(self) -> None: ...
-
-    def update(self, do_full_update: bool) -> None: ...
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/__init__.py b/python_sdk/infrahub_sdk/pytest_plugin/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/exceptions.py b/python_sdk/infrahub_sdk/pytest_plugin/exceptions.py
deleted file mode 100644
index b46dfac50a..0000000000
--- a/python_sdk/infrahub_sdk/pytest_plugin/exceptions.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from rich.syntax import Syntax
-from rich.traceback import Frame, Traceback
-
-
-class Error(Exception):
-    """pytest-infrahub Base exception."""
-
-
-class InvalidResourceConfigError(Error):
-    def __init__(self, resource_name: str):
-        super().__init__(f"Improperly configured resource with name '{resource_name}'.")
-
-
-class DirectoryNotFoundError(Error):
-    def __init__(self, name: str, message: str = ""):
-        self.message = message or f"Unable to find directory {name!r}."
-        super().__init__(self.message)
-
-
-class FileNotValidError(Error):
-    def __init__(self, name: str, message: str = ""):
-        self.message = message or f"Unable to access file {name!r}."
-        super().__init__(self.message)
-
-
-class OutputMatchError(Error):
-    def __init__(self, name: str, message: str = "", differences: str = ""):
-        self.message = message or f"Rendered output does not match expected output for {name!r}."
-        self.differences = differences
-        super().__init__(self.message)
-
-
-class Jinja2TransformError(Error):
-    def __init__(self, name: str, message: str = ""):
-        self.message = message or f"Unexpected error happened while processing {name!r}."
-        super().__init__(self.message)
-
-
-class Jinja2TransformUndefinedError(Error):
-    def __init__(self, name: str, rtb: Traceback, errors: list[tuple[Frame, Syntax]], message: str = ""):
-        self.rtb = rtb
-        self.errors = errors
-        self.message = message or f"Unable to render Jinja2 transform {name!r}."
-        super().__init__(self.message)
-
-
-class CheckDefinitionError(Error):
-    def __init__(self, name: str, message: str = ""):
-        self.message = message or f"Check {name!r} is not properly defined."
-        super().__init__(self.message)
-
-
-class CheckResultError(Error):
-    def __init__(self, name: str, message: str = ""):
-        self.message = message or f"Unexpected result for check {name!r}."
-        super().__init__(self.message)
-
-
-class PythonTransformDefinitionError(Error):
-    def __init__(self, name: str, message: str = ""):
-        self.message = message or f"Python transform {name!r} is not properly defined."
-        super().__init__(self.message)
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/items/__init__.py b/python_sdk/infrahub_sdk/pytest_plugin/items/__init__.py
deleted file mode 100644
index e75b252873..0000000000
--- a/python_sdk/infrahub_sdk/pytest_plugin/items/__init__.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from .base import InfrahubItem
-from .check import InfrahubCheckIntegrationItem, InfrahubCheckSmokeItem, InfrahubCheckUnitProcessItem
-from .graphql_query import InfrahubGraphQLQueryIntegrationItem, InfrahubGraphQLQuerySmokeItem
-from .jinja2_transform import (
-    InfrahubJinja2TransformIntegrationItem,
-    InfrahubJinja2TransformSmokeItem,
-    InfrahubJinja2TransformUnitRenderItem,
-)
-from .python_transform import (
-    InfrahubPythonTransformIntegrationItem,
-    InfrahubPythonTransformSmokeItem,
-    InfrahubPythonTransformUnitProcessItem,
-)
-
-__all__ = [
-    "InfrahubItem",
-    "InfrahubCheckIntegrationItem",
-    "InfrahubCheckSmokeItem",
-    "InfrahubCheckUnitProcessItem",
-    "InfrahubGraphQLQueryIntegrationItem",
-    "InfrahubGraphQLQuerySmokeItem",
-    "InfrahubJinja2TransformIntegrationItem",
-    "InfrahubJinja2TransformSmokeItem",
-    "InfrahubJinja2TransformUnitRenderItem",
-    "InfrahubPythonTransformIntegrationItem",
-    "InfrahubPythonTransformSmokeItem",
-    "InfrahubPythonTransformUnitProcessItem",
-]
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/items/base.py b/python_sdk/infrahub_sdk/pytest_plugin/items/base.py
deleted file mode 100644
index 1b8817980b..0000000000
--- a/python_sdk/infrahub_sdk/pytest_plugin/items/base.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from __future__ import annotations
-
-import difflib
-from typing import TYPE_CHECKING, Any, Optional, Union
-
-import pytest
-import ujson
-from git.exc import InvalidGitRepositoryError
-
-from ..exceptions import InvalidResourceConfigError
-from ..models import InfrahubInputOutputTest
-
-if TYPE_CHECKING:
-    from pathlib import Path
-
-    from infrahub_sdk.schema import InfrahubRepositoryConfigElement
-
-    from ..models import InfrahubTest
-
-
-class InfrahubItem(pytest.Item):
-    def __init__(
-        self,
-        *args: Any,
-        resource_name: str,
-        resource_config: InfrahubRepositoryConfigElement,
-        test: InfrahubTest,
-        **kwargs: dict[str, Any],
-    ):
-        super().__init__(*args, **kwargs)  # type: ignore[arg-type]
-
-        self.resource_name: str = resource_name
-        self.resource_config: InfrahubRepositoryConfigElement = resource_config
-        self.test: InfrahubTest = test
-
-        # Smoke tests do not need this, hence this clause
-        if isinstance(self.test.spec, InfrahubInputOutputTest):
-            self.test.spec.update_paths(base_dir=self.path.parent)
-
-    def validate_resource_config(self) -> None:
-        """Make sure that a test resource config is properly defined."""
-        if self.resource_config is None:
-            raise InvalidResourceConfigError(self.resource_name)
-
-    def get_result_differences(self, computed: Any) -> Optional[str]:
-        """Compute the differences between the computed result and the expected one.
-
-        If the results are not JSON parsable, this method must be redefined to handle them.
-        """
-        # We cannot compute a diff if:
-        # 1. Test is not an input/output one
-        # 2. Expected output is not provided
-        # 3. Output can't be computed
-        if not isinstance(self.test.spec, InfrahubInputOutputTest) or not self.test.spec.output or computed is None:
-            return None
-
-        expected = self.test.spec.get_output_data()
-        differences = difflib.unified_diff(
-            ujson.dumps(expected, indent=4, sort_keys=True).splitlines(),
-            ujson.dumps(computed, indent=4, sort_keys=True).splitlines(),
-            fromfile="expected",
-            tofile="rendered",
-            lineterm="",
-        )
-        return "\n".join(differences)
-
-    def runtest(self) -> None:
-        """Run the test logic."""
-
-    def repr_failure(self, excinfo: pytest.ExceptionInfo, style: Optional[str] = None) -> str:
-        if isinstance(excinfo.value, InvalidGitRepositoryError):
-            return f"Invalid Git repository at {excinfo.value}"
-
-        return str(excinfo.value)
-
-    def reportinfo(self) -> tuple[Union[Path, str], Optional[int], str]:
-        return self.path, 0, f"resource: {self.name}"
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/items/check.py b/python_sdk/infrahub_sdk/pytest_plugin/items/check.py
deleted file mode 100644
index 7cf230bdc1..0000000000
--- a/python_sdk/infrahub_sdk/pytest_plugin/items/check.py
+++ /dev/null
@@ -1,90 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-from typing import TYPE_CHECKING, Any, Optional
-
-import ujson
-from httpx import HTTPStatusError
-
-from infrahub_sdk.checks import get_check_class_instance
-
-from ..exceptions import CheckDefinitionError, CheckResultError
-from ..models import InfrahubTestExpectedResult
-from .base import InfrahubItem
-
-if TYPE_CHECKING:
-    from pytest import ExceptionInfo
-
-    from infrahub_sdk.checks import InfrahubCheck
-    from infrahub_sdk.pytest_plugin.models import InfrahubTest
-    from infrahub_sdk.schema import InfrahubRepositoryConfigElement
-
-
-class InfrahubCheckItem(InfrahubItem):
-    def __init__(
-        self,
-        *args: Any,
-        resource_name: str,
-        resource_config: InfrahubRepositoryConfigElement,
-        test: InfrahubTest,
-        **kwargs: dict[str, Any],
-    ):
-        super().__init__(*args, resource_name=resource_name, resource_config=resource_config, test=test, **kwargs)
-
-        self.check_instance: InfrahubCheck
-
-    def instantiate_check(self) -> None:
-        self.check_instance = get_check_class_instance(
-            check_config=self.resource_config,  # type: ignore[arg-type]
-            search_path=self.session.infrahub_config_path.parent,  # type: ignore[attr-defined]
-        )
-
-    def run_check(self, variables: dict[str, Any]) -> Any:
-        self.instantiate_check()
-        return asyncio.run(self.check_instance.run(data=variables))
-
-    def repr_failure(self, excinfo: ExceptionInfo, style: Optional[str] = None) -> str:
-        if isinstance(excinfo.value, HTTPStatusError):
-            try:
-                response_content = ujson.dumps(excinfo.value.response.json(), indent=4)
-            except ujson.JSONDecodeError:
-                response_content = excinfo.value.response.text
-            return "\n".join(
-                [
-                    f"Failed {excinfo.value.request.method} on {excinfo.value.request.url}",
-                    f"Status code: {excinfo.value.response.status_code}",
-                    f"Response: {response_content}",
-                ]
-            )
-
-        return super().repr_failure(excinfo, style=style)
-
-
-class InfrahubCheckSmokeItem(InfrahubCheckItem):
-    def runtest(self) -> None:
-        self.instantiate_check()
-
-        for attr in ("query", "validate"):
-            if not hasattr(self.check_instance, attr):
-                raise CheckDefinitionError(f"Missing attribute or function {attr}")
-
-
-class InfrahubCheckUnitProcessItem(InfrahubCheckItem):
-    def runtest(self) -> None:
-        input_data = self.test.spec.get_input_data()  # type: ignore[union-attr]
-        passed = self.run_check(input_data)
-
-        if not passed and self.test.expect == InfrahubTestExpectedResult.PASS:
-            raise CheckResultError(name=self.name)
-
-
-class InfrahubCheckIntegrationItem(InfrahubCheckItem):
-    def runtest(self) -> None:
-        input_data = self.session.infrahub_client.query_gql_query(  # type: ignore[attr-defined]
-            self.check_instance.query,
-            variables=self.test.spec.get_variables_data(),  # type: ignore[union-attr]
-        )
-        passed = self.run_check(input_data)
-
-        if not passed and self.test.expect == InfrahubTestExpectedResult.PASS:
-            raise CheckResultError(name=self.name)
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/items/graphql_query.py b/python_sdk/infrahub_sdk/pytest_plugin/items/graphql_query.py
deleted file mode 100644
index abb048fba6..0000000000
--- a/python_sdk/infrahub_sdk/pytest_plugin/items/graphql_query.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from __future__ import annotations
-
-from typing import TYPE_CHECKING, Any, Optional
-
-import ujson
-from httpx import HTTPStatusError
-
-from infrahub_sdk.analyzer import GraphQLQueryAnalyzer
-
-from ..exceptions import OutputMatchError
-from ..models import InfrahubTestExpectedResult
-from .base import InfrahubItem
-
-if TYPE_CHECKING:
-    from pytest import ExceptionInfo
-
-
-class InfrahubGraphQLQueryItem(InfrahubItem):
-    def validate_resource_config(self) -> None:
-        # Resource name does not need to match against infrahub repo config
-        return
-
-    def execute_query(self) -> Any:
-        return self.session.infrahub_client.query_gql_query(  # type: ignore[attr-defined]
-            self.test.spec.query,  # type: ignore[union-attr]
-            variables=self.test.spec.get_variables_data(),  # type: ignore[union-attr]
-        )
-
-    def repr_failure(self, excinfo: ExceptionInfo, style: Optional[str] = None) -> str:
-        if isinstance(excinfo.value, HTTPStatusError):
-            try:
-                response_content = ujson.dumps(excinfo.value.response.json(), indent=4)
-            except ujson.JSONDecodeError:
-                response_content = excinfo.value.response.text
-            return "\n".join(
-                [
-                    f"Failed {excinfo.value.request.method} on {excinfo.value.request.url}",
-                    f"Status code: {excinfo.value.response.status_code}",
-                    f"Response: {response_content}",
-                ]
-            )
-
-        if isinstance(excinfo.value, OutputMatchError):
-            return "\n".join([excinfo.value.message, excinfo.value.differences])
-
-        return super().repr_failure(excinfo, style=style)
-
-
-class InfrahubGraphQLQuerySmokeItem(InfrahubGraphQLQueryItem):
-    def runtest(self) -> None:
-        query = (self.session.infrahub_config_path.parent / self.test.spec.path).read_text()  # type: ignore[attr-defined,union-attr]
-        GraphQLQueryAnalyzer(query)
-
-
-class InfrahubGraphQLQueryIntegrationItem(InfrahubGraphQLQueryItem):
-    def runtest(self) -> None:
-        computed = self.execute_query()
-        differences = self.get_result_differences(computed)
-
-        if self.test.spec.output and differences and self.test.expect == InfrahubTestExpectedResult.PASS:  # type: ignore[union-attr]
-            raise OutputMatchError(name=self.name, differences=differences)
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/items/jinja2_transform.py b/python_sdk/infrahub_sdk/pytest_plugin/items/jinja2_transform.py
deleted file mode 100644
index 59befec45a..0000000000
--- a/python_sdk/infrahub_sdk/pytest_plugin/items/jinja2_transform.py
+++ /dev/null
@@ -1,118 +0,0 @@
-from __future__ import annotations
-
-import difflib
-from typing import TYPE_CHECKING, Any, Optional
-
-import jinja2
-import ujson
-from httpx import HTTPStatusError
-from rich.console import Console
-from rich.traceback import Traceback
-
-from ...jinja2 import identify_faulty_jinja_code
-from ..exceptions import Jinja2TransformError, Jinja2TransformUndefinedError, OutputMatchError
-from ..models import InfrahubInputOutputTest, InfrahubTestExpectedResult
-from .base import InfrahubItem
-
-if TYPE_CHECKING:
-    from pathlib import Path
-
-    from pytest import ExceptionInfo
-
-
-class InfrahubJinja2Item(InfrahubItem):
-    def get_jinja2_environment(self) -> jinja2.Environment:
-        loader = jinja2.FileSystemLoader(self.session.infrahub_config_path.parent)  # type: ignore[attr-defined]
-        return jinja2.Environment(loader=loader, trim_blocks=True, lstrip_blocks=True)
-
-    def get_jinja2_template(self) -> jinja2.Template:
-        return self.get_jinja2_environment().get_template(str(self.resource_config.template_path))  # type: ignore[attr-defined]
-
-    def render_jinja2_template(self, variables: dict[str, Any]) -> Optional[str]:
-        try:
-            return self.get_jinja2_template().render(**variables)
-        except jinja2.UndefinedError as exc:
-            traceback = Traceback(show_locals=False)
-            errors = identify_faulty_jinja_code(traceback=traceback)
-            console = Console()
-            with console.capture() as capture:
-                console.print(f"An error occurred while rendering Jinja2 transform:{self.name!r}\n", soft_wrap=True)
-                console.print(f"{exc.message}\n", soft_wrap=True)
-                for frame, syntax in errors:
-                    console.print(f"{frame.filename} on line {frame.lineno}\n", soft_wrap=True)
-                    console.print(syntax, soft_wrap=True)
-            str_output = capture.get()
-            if self.test.expect == InfrahubTestExpectedResult.PASS:
-                raise Jinja2TransformUndefinedError(
-                    name=self.name, message=str_output, rtb=traceback, errors=errors
-                ) from exc
-            return None
-
-    def get_result_differences(self, computed: Any) -> Optional[str]:
-        if not isinstance(self.test.spec, InfrahubInputOutputTest) or not self.test.spec.output or computed is None:
-            return None
-
-        differences = difflib.unified_diff(
-            self.test.spec.get_output_data().splitlines(),
-            computed.splitlines(),
-            fromfile="expected",
-            tofile="rendered",
-            lineterm="",
-        )
-        return "\n".join(differences)
-
-    def repr_failure(self, excinfo: ExceptionInfo, style: Optional[str] = None) -> str:
-        if isinstance(excinfo.value, HTTPStatusError):
-            try:
-                response_content = ujson.dumps(excinfo.value.response.json(), indent=4, sort_keys=True)
-            except ujson.JSONDecodeError:
-                response_content = excinfo.value.response.text
-            return "\n".join(
-                [
-                    f"Failed {excinfo.value.request.method} on {excinfo.value.request.url}",
-                    f"Status code: {excinfo.value.response.status_code}",
-                    f"Response: {response_content}",
-                ]
-            )
-
-        if isinstance(excinfo.value, jinja2.TemplateSyntaxError):
-            return "\n".join(["Syntax error detected in the template", excinfo.value.message or ""])
-
-        if isinstance(excinfo.value, OutputMatchError):
-            return "\n".join([excinfo.value.message, excinfo.value.differences])
-
-        return super().repr_failure(excinfo, style=style)
-
-
-class InfrahubJinja2TransformSmokeItem(InfrahubJinja2Item):
-    def runtest(self) -> None:
-        file_path: Path = self.session.infrahub_config_path.parent / self.resource_config.template_path  # type: ignore[attr-defined]
-        self.get_jinja2_environment().parse(file_path.read_text(), filename=file_path.name)
-
-
-class InfrahubJinja2TransformUnitRenderItem(InfrahubJinja2Item):
-    def runtest(self) -> None:
-        computed = self.render_jinja2_template(self.test.spec.get_input_data())  # type: ignore[union-attr]
-        differences = self.get_result_differences(computed)
-
-        if computed is not None and differences and self.test.expect == InfrahubTestExpectedResult.PASS:
-            raise OutputMatchError(name=self.name, differences=differences)
-
-    def repr_failure(self, excinfo: ExceptionInfo, style: Optional[str] = None) -> str:
-        if isinstance(excinfo.value, (Jinja2TransformUndefinedError, Jinja2TransformError)):
-            return excinfo.value.message
-
-        return super().repr_failure(excinfo, style=style)
-
-
-class InfrahubJinja2TransformIntegrationItem(InfrahubJinja2Item):
-    def runtest(self) -> None:
-        graphql_result = self.session.infrahub_client.query_gql_query(  # type: ignore[attr-defined]
-            self.resource_config.query,  # type: ignore[attr-defined]
-            variables=self.test.spec.get_variables_data(),  # type: ignore[union-attr]
-        )
-        computed = self.render_jinja2_template(graphql_result)
-        differences = self.get_result_differences(computed)
-
-        if computed is not None and differences and self.test.expect == InfrahubTestExpectedResult.PASS:
-            raise OutputMatchError(name=self.name, differences=differences)
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/items/python_transform.py b/python_sdk/infrahub_sdk/pytest_plugin/items/python_transform.py
deleted file mode 100644
index 9a203665a2..0000000000
--- a/python_sdk/infrahub_sdk/pytest_plugin/items/python_transform.py
+++ /dev/null
@@ -1,95 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-from typing import TYPE_CHECKING, Any, Optional
-
-import ujson
-from httpx import HTTPStatusError
-
-from infrahub_sdk.transforms import get_transform_class_instance
-
-from ..exceptions import OutputMatchError, PythonTransformDefinitionError
-from ..models import InfrahubTestExpectedResult
-from .base import InfrahubItem
-
-if TYPE_CHECKING:
-    from pytest import ExceptionInfo
-
-    from infrahub_sdk.pytest_plugin.models import InfrahubTest
-    from infrahub_sdk.schema import InfrahubRepositoryConfigElement
-    from infrahub_sdk.transforms import InfrahubTransform
-
-
-class InfrahubPythonTransformItem(InfrahubItem):
-    def __init__(
-        self,
-        *args: Any,
-        resource_name: str,
-        resource_config: InfrahubRepositoryConfigElement,
-        test: InfrahubTest,
-        **kwargs: dict[str, Any],
-    ):
-        super().__init__(*args, resource_name=resource_name, resource_config=resource_config, test=test, **kwargs)
-
-        self.transform_instance: InfrahubTransform
-
-    def instantiate_transform(self) -> None:
-        self.transform_instance = get_transform_class_instance(
-            transform_config=self.resource_config,  # type: ignore[arg-type]
-            search_path=self.session.infrahub_config_path.parent,  # type: ignore[attr-defined]
-        )
-
-    def run_transform(self, variables: dict[str, Any]) -> Any:
-        self.instantiate_transform()
-        return asyncio.run(self.transform_instance.run(data=variables))
-
-    def repr_failure(self, excinfo: ExceptionInfo, style: Optional[str] = None) -> str:
-        if isinstance(excinfo.value, HTTPStatusError):
-            try:
-                response_content = ujson.dumps(excinfo.value.response.json(), indent=4)
-            except ujson.JSONDecodeError:
-                response_content = excinfo.value.response.text
-            return "\n".join(
-                [
-                    f"Failed {excinfo.value.request.method} on {excinfo.value.request.url}",
-                    f"Status code: {excinfo.value.response.status_code}",
-                    f"Response: {response_content}",
-                ]
-            )
-
-        if isinstance(excinfo.value, OutputMatchError):
-            return "\n".join([excinfo.value.message, excinfo.value.differences])
-
-        return super().repr_failure(excinfo, style=style)
-
-
-class InfrahubPythonTransformSmokeItem(InfrahubPythonTransformItem):
-    def runtest(self) -> None:
-        self.instantiate_transform()
-
-        for attr in ("query", "transform"):
-            if not hasattr(self.transform_instance, attr):
-                raise PythonTransformDefinitionError(f"Missing attribute or function {attr}")
-
-
-class InfrahubPythonTransformUnitProcessItem(InfrahubPythonTransformItem):
-    def runtest(self) -> None:
-        input_data = self.test.spec.get_input_data()  # type: ignore[union-attr]
-        computed = self.run_transform(input_data)
-        differences = self.get_result_differences(computed)
-
-        if computed is not None and differences and self.test.expect == InfrahubTestExpectedResult.PASS:
-            raise OutputMatchError(name=self.name, message=differences)
-
-
-class InfrahubPythonTransformIntegrationItem(InfrahubPythonTransformItem):
-    def runtest(self) -> None:
-        input_data = self.session.infrahub_client.query_gql_query(  # type: ignore[attr-defined]
-            self.transform_instance.query,
-            variables=self.test.spec.get_variables_data(),  # type: ignore[union-attr]
-        )
-        computed = self.run_transform(input_data)
-        differences = self.get_result_differences(computed)
-
-        if computed is not None and differences and self.test.expect == InfrahubTestExpectedResult.PASS:
-            raise OutputMatchError(name=self.name, message=differences)
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/loader.py b/python_sdk/infrahub_sdk/pytest_plugin/loader.py
deleted file mode 100644
index e02803e156..0000000000
--- a/python_sdk/infrahub_sdk/pytest_plugin/loader.py
+++ /dev/null
@@ -1,111 +0,0 @@
-from __future__ import annotations
-
-from typing import Any, Iterable, Optional
-
-import pytest
-import yaml
-from pytest import Item
-
-from .exceptions import InvalidResourceConfigError
-from .items import (
-    InfrahubCheckIntegrationItem,
-    InfrahubCheckSmokeItem,
-    InfrahubCheckUnitProcessItem,
-    InfrahubGraphQLQueryIntegrationItem,
-    InfrahubGraphQLQuerySmokeItem,
-    InfrahubItem,
-    InfrahubJinja2TransformIntegrationItem,
-    InfrahubJinja2TransformSmokeItem,
-    InfrahubJinja2TransformUnitRenderItem,
-    InfrahubPythonTransformIntegrationItem,
-    InfrahubPythonTransformSmokeItem,
-    InfrahubPythonTransformUnitProcessItem,
-)
-from .models import InfrahubTestFileV1, InfrahubTestGroup
-
-MARKER_MAPPING = {
-    "Check": pytest.mark.infrahub_check,
-    "GraphQLQuery": pytest.mark.infrahub_graphql_query,
-    "Jinja2Transform": pytest.mark.infrahub_jinja2_transform,
-    "PythonTransform": pytest.mark.infrahub_python_transform,
-}
-CONFIG_MAPPING = {
-    "Check": "get_check_definition",
-    "GraphQLQuery": None,
-    "Jinja2Transform": "get_jinja2_transform",
-    "PythonTransform": "get_python_transform",
-}
-
-ITEMS_MAPPING = {
-    "check-smoke": InfrahubCheckSmokeItem,
-    "check-unit-process": InfrahubCheckUnitProcessItem,
-    "check-integration": InfrahubCheckIntegrationItem,
-    "graphql-query-smoke": InfrahubGraphQLQuerySmokeItem,
-    "graphql-query-integration": InfrahubGraphQLQueryIntegrationItem,
-    "jinja2-transform-smoke": InfrahubJinja2TransformSmokeItem,
-    "jinja2-transform-unit-render": InfrahubJinja2TransformUnitRenderItem,
-    "jinja2-transform-integration": InfrahubJinja2TransformIntegrationItem,
-    "python-transform-smoke": InfrahubPythonTransformSmokeItem,
-    "python-transform-unit-process": InfrahubPythonTransformUnitProcessItem,
-    "python-transform-integration": InfrahubPythonTransformIntegrationItem,
-}
-
-
-class InfrahubYamlFile(pytest.File):
-    def get_resource_config(self, group: InfrahubTestGroup) -> Optional[Any]:
-        """Retrieve the resource configuration to apply to all tests in a group."""
-        resource_config_function = CONFIG_MAPPING.get(group.resource)
-
-        resource_config = None
-        if resource_config_function is not None:
-            func = getattr(self.session.infrahub_repo_config, resource_config_function)  # type:ignore[attr-defined]
-            try:
-                resource_config = func(group.resource_name)
-            except KeyError:
-                # Ignore error and just return None
-                pass
-
-        return resource_config
-
-    def collect_group(self, group: InfrahubTestGroup) -> Iterable[Item]:
-        """Collect all items for a group."""
-        marker = MARKER_MAPPING[group.resource]
-        resource_config = self.get_resource_config(group)
-
-        for test in group.tests:
-            item_class: type[pytest.Item] = ITEMS_MAPPING[test.spec.kind]  # type: ignore[assignment]
-            item: InfrahubItem = item_class.from_parent(
-                name=f"{marker.markname}__{group.resource_name}__{test.name}",
-                parent=self,
-                resource_name=group.resource_name,
-                resource_config=resource_config,
-                test=test,
-            )  # type: ignore[assignment]
-
-            # If item does not pass validation, mark it to be skipped
-            try:
-                item.validate_resource_config()
-            except InvalidResourceConfigError as exc:
-                item.add_marker(pytest.mark.skip(reason=str(exc)))
-
-            item.add_marker(pytest.mark.infrahub)
-            item.add_marker(marker)
-            if "smoke" in test.spec.kind:
-                item.add_marker(pytest.mark.infrahub_smoke)
-            if "unit" in test.spec.kind:
-                item.add_marker(pytest.mark.infrahub_unit)
-            if "integration" in test.spec.kind:
-                item.add_marker(pytest.mark.infrahub_integration)
-
-            yield item
-
-    def collect(self) -> Iterable[Item]:
-        raw = yaml.safe_load(self.path.open(encoding="utf-8"))
-
-        if "infrahub_tests" not in raw:
-            return
-
-        content = InfrahubTestFileV1(**raw)
-
-        for test_group in content.infrahub_tests:
-            yield from self.collect_group(test_group)
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/models.py b/python_sdk/infrahub_sdk/pytest_plugin/models.py
deleted file mode 100644
index fd759c82c7..0000000000
--- a/python_sdk/infrahub_sdk/pytest_plugin/models.py
+++ /dev/null
@@ -1,203 +0,0 @@
-from __future__ import annotations
-
-from enum import Enum
-from pathlib import Path
-from typing import Any, Literal, Optional, Union
-
-import ujson
-import yaml
-from pydantic import BaseModel, ConfigDict, Field
-
-from .exceptions import DirectoryNotFoundError
-
-
-class InfrahubTestExpectedResult(str, Enum):
-    PASS = "PASS"
-    FAIL = "FAIL"
-
-
-class InfrahubTestResource(str, Enum):
-    CHECK = "Check"
-    JINJA2_TRANSFORM = "Jinja2Transform"
-    PYTHON_TRANSFORM = "PythonTransform"
-    GRAPHQL_QUERY = "GraphQLQuery"
-
-
-class InfrahubBaseTest(BaseModel):
-    """Basic Infrahub test model used as a common ground for all tests."""
-
-
-class InfrahubInputOutputTest(InfrahubBaseTest):
-    directory: Optional[Path] = Field(
-        None, description="Path to the directory where the input and output files are located"
-    )
-    input: Path = Field(
-        Path("input.json"),
-        description="Path to the file with the input data for the test, can be a relative path from the config file or from the directory.",
-    )
-    output: Optional[Path] = Field(
-        None,
-        description="Path to the file with the expected output for the test, can be a relative path from the config file or from the directory.",
-    )
-
-    @staticmethod
-    def parse_user_provided_data(path: Union[Path, None]) -> Any:
-        """Read and parse user provided data depending on a file extension.
-
-        This function handles JSON and YAML as they can be used to achieve the same goal. However some users may be more used to one format or
-        another. If the file extension isn't known, assume the content is plain text.
-        """
-        if path is None:
-            return None
-
-        suffix = path.suffix.lower()[1:] if path.suffix else ""
-        text = path.read_text()
-
-        if suffix and suffix == "json":
-            return ujson.loads(text)
-        if suffix in ("yml", "yaml"):
-            return yaml.safe_load(text)
-
-        return text
-
-    def update_paths(self, base_dir: Path) -> None:
-        if self.directory and not self.directory.is_absolute() and not self.directory.is_dir():
-            self.directory = Path(base_dir / self.directory)
-            if not self.directory.is_dir():
-                raise DirectoryNotFoundError(name=str(self.directory))
-        else:
-            self.directory = base_dir
-
-        if not self.input or not self.input.is_file():
-            search_input: Union[Path, str] = self.input or "input.*"
-            results = list(self.directory.rglob(str(search_input)))
-
-            if not results:
-                raise FileNotFoundError(self.input)
-            if len(results) != 1:
-                raise FileNotFoundError(
-                    f"Too many files are matching: {self.input}, please set the 'input' test key to the file to use."
-                )
-            self.input = results[0]
-
-        if not self.output or not self.output.is_file():
-            search_output: Union[Path, str] = self.output or "output.*"
-            results = list(self.directory.rglob(str(search_output)))
-
-            if results and len(results) != 1:
-                raise FileNotFoundError(
-                    f"Too many files are matching: {self.output}, please set the 'output' test key to the file to use."
-                )
-            if results:
-                self.output = results[0]
-
-    def get_input_data(self) -> Any:
-        return self.parse_user_provided_data(self.input)
-
-    def get_output_data(self) -> Any:
-        return self.parse_user_provided_data(self.output)
-
-
-class InfrahubIntegrationTest(InfrahubInputOutputTest):
-    variables: Union[Path, dict[str, Any]] = Field(
-        Path("variables.json"), description="Variables and corresponding values to pass to the GraphQL query"
-    )
-
-    def update_paths(self, base_dir: Path) -> None:
-        super().update_paths(base_dir)
-
-        if self.variables and not isinstance(self.variables, dict) and not self.variables.is_file():
-            search_variables: Union[Path, str] = self.variables or "variables.*"
-            results = list(self.directory.rglob(str(search_variables)))  # type: ignore[union-attr]
-
-            if not results:
-                raise FileNotFoundError(self.variables)
-            if len(results) != 1:
-                raise FileNotFoundError(
-                    f"Too many files are matching: {self.variables}, please set the 'variables' test key to the file to use."
-                )
-            self.variables = results[0]
-
-    def get_variables_data(self) -> dict[str, Any]:
-        if isinstance(self.variables, dict):
-            return self.variables
-        return self.parse_user_provided_data(self.variables)
-
-
-class InfrahubCheckSmokeTest(InfrahubBaseTest):
-    kind: Literal["check-smoke"]
-
-
-class InfrahubCheckUnitProcessTest(InfrahubInputOutputTest):
-    kind: Literal["check-unit-process"]
-
-
-class InfrahubCheckIntegrationTest(InfrahubIntegrationTest):
-    kind: Literal["check-integration"]
-
-
-class InfrahubGraphQLQuerySmokeTest(InfrahubBaseTest):
-    kind: Literal["graphql-query-smoke"]
-    path: Path = Field(..., description="Path to the file in which the GraphQL query is defined")
-
-
-class InfrahubGraphQLQueryIntegrationTest(InfrahubIntegrationTest):
-    kind: Literal["graphql-query-integration"]
-    query: str = Field(..., description="Name of a pre-defined GraphQL query to execute")
-
-
-class InfrahubJinja2TransformSmokeTest(InfrahubBaseTest):
-    kind: Literal["jinja2-transform-smoke"]
-
-
-class InfrahubJinja2TransformUnitRenderTest(InfrahubInputOutputTest):
-    kind: Literal["jinja2-transform-unit-render"]
-
-
-class InfrahubJinja2TransformIntegrationTest(InfrahubIntegrationTest):
-    kind: Literal["jinja2-transform-integration"]
-
-
-class InfrahubPythonTransformSmokeTest(InfrahubBaseTest):
-    kind: Literal["python-transform-smoke"]
-
-
-class InfrahubPythonTransformUnitProcessTest(InfrahubInputOutputTest):
-    kind: Literal["python-transform-unit-process"]
-
-
-class InfrahubPythonTransformIntegrationTest(InfrahubIntegrationTest):
-    kind: Literal["python-transform-integration"]
-
-
-class InfrahubTest(BaseModel):
-    name: str = Field(..., description="Name of the test, must be unique")
-    expect: InfrahubTestExpectedResult = Field(
-        InfrahubTestExpectedResult.PASS,
-        description="Expected outcome of the test, can be either PASS (default) or FAIL",
-    )
-    spec: Union[
-        InfrahubCheckSmokeTest,
-        InfrahubCheckUnitProcessTest,
-        InfrahubCheckIntegrationTest,
-        InfrahubGraphQLQuerySmokeTest,
-        InfrahubGraphQLQueryIntegrationTest,
-        InfrahubJinja2TransformSmokeTest,
-        InfrahubJinja2TransformUnitRenderTest,
-        InfrahubJinja2TransformIntegrationTest,
-        InfrahubPythonTransformSmokeTest,
-        InfrahubPythonTransformUnitProcessTest,
-        InfrahubPythonTransformIntegrationTest,
-    ] = Field(..., discriminator="kind")
-
-
-class InfrahubTestGroup(BaseModel):
-    resource: InfrahubTestResource
-    resource_name: str
-    tests: list[InfrahubTest]
-
-
-class InfrahubTestFileV1(BaseModel):
-    model_config = ConfigDict(extra="forbid")
-    version: Optional[str] = "1.0"
-    infrahub_tests: list[InfrahubTestGroup]
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/plugin.py b/python_sdk/infrahub_sdk/pytest_plugin/plugin.py
deleted file mode 100644
index 08d355c176..0000000000
--- a/python_sdk/infrahub_sdk/pytest_plugin/plugin.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import os
-from pathlib import Path
-from typing import Optional, Union
-
-from pytest import Collector, Config, Item, Parser, Session
-from pytest import exit as exit_test
-
-from infrahub_sdk import InfrahubClientSync
-from infrahub_sdk.utils import is_valid_url
-
-from .loader import InfrahubYamlFile
-from .utils import load_repository_config
-
-
-def pytest_addoption(parser: Parser) -> None:
-    group = parser.getgroup("pytest-infrahub")
-    group.addoption(
-        "--infrahub-repo-config",
-        action="store",
-        dest="infrahub_repo_config",
-        default=".infrahub.yml",
-        metavar="INFRAHUB_REPO_CONFIG_FILE",
-        help="Infrahub configuration file for the repository (default: %(default)s)",
-    )
-    group.addoption(
-        "--infrahub-address",
-        action="store",
-        dest="infrahub_address",
-        default="http://localhost:8000",
-        metavar="INFRAHUB_TESTS_ADDRESS",
-        help="Address of the Infrahub instance for live testing (default: %(default)s)",
-    )
-    group.addoption(
-        "--infrahub-key",
-        action="store",
-        dest="infrahub_key",
-        metavar="INFRAHUB_TESTS_API_KEY",
-        default=os.getenv("INFRAHUB_API_TOKEN"),
-        help="Key to use when querying the Infrahub instance for live testing",
-    )
-    group.addoption(
-        "--infrahub-username",
-        action="store",
-        dest="infrahub_username",
-        metavar="INFRAHUB_TESTS_USERNAME",
-        help="Username to use when authenticating on the Infrahub instance for live testing",
-    )
-    group.addoption(
-        "--infrahub-password",
-        action="store",
-        dest="infrahub_password",
-        metavar="INFRAHUB_TESTS_PASSWORD",
-        help="Password to use when authenticating on the Infrahub instance for live testing",
-    )
-    group.addoption(
-        "--infrahub-branch",
-        action="store",
-        dest="infrahub_branch",
-        default="main",
-        metavar="INFRAHUB_TESTS_BRANCH",
-        help="Branch to use when running integration tests with an Infrahub instance (default: %(default)s)",
-    )
-
-
-def pytest_sessionstart(session: Session) -> None:
-    session.infrahub_config_path = Path(session.config.option.infrahub_repo_config)  # type: ignore[attr-defined]
-
-    if session.infrahub_config_path.is_file():  # type: ignore[attr-defined]
-        session.infrahub_repo_config = load_repository_config(repo_config_file=session.infrahub_config_path)  # type: ignore[attr-defined]
-
-    if not is_valid_url(session.config.option.infrahub_address):
-        exit_test("Infrahub test instance address is not a valid URL", returncode=1)
-
-    client_config = {
-        "address": session.config.option.infrahub_address,
-        "default_branch": session.config.option.infrahub_branch,
-    }
-    if hasattr(session.config.option, "infrahub_key"):
-        client_config["api_token"] = session.config.option.infrahub_key
-    elif hasattr(session.config.option, "infrahub_username") and hasattr(session.config.option, "infrahub_password"):
-        client_config.pop("api_token")
-        client_config["username"] = session.config.option.infrahub_username
-        client_config["password"] = session.config.option.infrahub_password
-
-    infrahub_client = InfrahubClientSync(config=client_config)
-    session.infrahub_client = infrahub_client  # type: ignore[attr-defined]
-
-
-def pytest_collect_file(parent: Union[Collector, Item], file_path: Path) -> Optional[InfrahubYamlFile]:
-    if file_path.suffix in [".yml", ".yaml"] and file_path.name.startswith("test_"):
-        return InfrahubYamlFile.from_parent(parent, path=file_path)
-    return None
-
-
-def pytest_configure(config: Config) -> None:
-    config.addinivalue_line("markers", "infrahub: Infrahub test")
-    config.addinivalue_line("markers", "infrahub_smoke: Smoke test for an Infrahub resource")
-    config.addinivalue_line("markers", "infrahub_unit: Unit test for an Infrahub resource, works without dependencies")
-    config.addinivalue_line(
-        "markers",
-        "infrahub_integraton: Integation test for an Infrahub resource, depends on an Infrahub running instance",
-    )
-    config.addinivalue_line("markers", "infrahub_check: Test related to an Infrahub Check")
-    config.addinivalue_line("markers", "infrahub_graphql_query: Test related to an Infrahub GraphQL query")
-    config.addinivalue_line("markers", "infrahub_jinja2_transform: Test related to an Infrahub Jinja2 Transform")
-    config.addinivalue_line("markers", "infrahub_python_transform: Test related to an Infrahub Python Transform")
diff --git a/python_sdk/infrahub_sdk/pytest_plugin/utils.py b/python_sdk/infrahub_sdk/pytest_plugin/utils.py deleted file mode 100644 index 3923832b61..0000000000 --- a/python_sdk/infrahub_sdk/pytest_plugin/utils.py +++ /dev/null @@ -1,20 +0,0 @@ -from pathlib import Path - -import yaml - -from infrahub_sdk.schema import InfrahubRepositoryConfig - -from .exceptions import FileNotValidError - - -def load_repository_config(repo_config_file: Path) -> InfrahubRepositoryConfig: - if not repo_config_file.is_file(): - raise FileNotFoundError(repo_config_file) - - try: - yaml_data = repo_config_file.read_text() - data = yaml.safe_load(yaml_data) - except yaml.YAMLError as exc: - raise FileNotValidError(name=str(repo_config_file)) from exc - - return InfrahubRepositoryConfig(**data) diff --git a/python_sdk/infrahub_sdk/queries.py b/python_sdk/infrahub_sdk/queries.py deleted file mode 100644 index 75bc5935bf..0000000000 --- a/python_sdk/infrahub_sdk/queries.py +++ /dev/null @@ -1,44 +0,0 @@ -def get_commit_update_mutation(is_read_only: bool = False) -> str: - mutation_commit_update_base = """ - mutation ($repository_id: String!, $commit: String!) {{ - {repo_class}Update(data: {{ id: $repository_id, commit: {{ is_protected: true, source: $repository_id, value: $commit }} }}) {{ - ok - object {{ - commit {{ - value - }} - }} - }} - }} - """ - if is_read_only: - return mutation_commit_update_base.format(repo_class="CoreReadOnlyRepository") - return mutation_commit_update_base.format(repo_class="CoreRepository") - - -QUERY_RELATIONSHIPS = """ - query GetRelationships($relationship_identifiers: [String!]!) { - Relationship(ids: $relationship_identifiers) { - count - edges { - node { - identifier - peers { - id - kind - } - } - } - } - } -""" - -SCHEMA_HASH_SYNC_STATUS = """ -query { - InfrahubStatus { - summary { - schema_hash_synced - } - } -} -""" diff --git a/python_sdk/infrahub_sdk/query_groups.py b/python_sdk/infrahub_sdk/query_groups.py deleted file mode 100644 index 75002f80e2..0000000000 --- a/python_sdk/infrahub_sdk/query_groups.py +++ /dev/null @@ -1,291 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -from infrahub_sdk.constants import InfrahubClientMode -from infrahub_sdk.exceptions import NodeNotFoundError -from infrahub_sdk.utils import dict_hash - -if TYPE_CHECKING: - from infrahub_sdk.client import InfrahubClient, InfrahubClientSync - from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync, RelatedNodeBase - from infrahub_sdk.schema import MainSchemaTypes - - -class InfrahubGroupContextBase: - """Base class for InfrahubGroupContext and InfrahubGroupContextSync""" - - def __init__(self) -> None: - self.related_node_ids: list[str] = [] - self.related_group_ids: list[str] = [] - self.unused_member_ids: Optional[list[str]] = None - self.unused_child_ids: Optional[list[str]] = None - self.previous_members: Optional[list[RelatedNodeBase]] = None - self.previous_children: Optional[list[RelatedNodeBase]] = None - self.identifier: Optional[str] = None - self.params: dict[str, str] = {} - self.delete_unused_nodes: bool = False - self.group_type: str = "CoreStandardGroup" - - def set_properties( - self, - identifier: str, - params: Optional[dict[str, str]] = None, - delete_unused_nodes: bool = False, - group_type: Optional[str] = None, - ) -> None: - """Setter method to set the values of identifier and params. - - Args: - identifier: The new value for the identifier. - params: A dictionary with new values for the params. 
- """ - self.identifier = identifier - self.params = params or {} - self.delete_unused_nodes = delete_unused_nodes - self.group_type = group_type or self.group_type - - def _get_params_as_str(self) -> str: - """Convert the params in dict format, into a string""" - params_as_str: list[str] = [] - for key, value in self.params.items(): - params_as_str.append(f"{key}: {str(value)}") - return ", ".join(params_as_str) - - def _generate_group_name(self, suffix: Optional[str] = None) -> str: - group_name = self.identifier or "sdk" - - if suffix: - group_name += f"-{suffix}" - - if self.params: - group_name += f"-{dict_hash(self.params)}" - - return group_name - - def _generate_group_description(self, schema: MainSchemaTypes) -> str: - """Generate the description of the group from the params - and ensure it's not longer than the maximum length of the description field.""" - if not self.params: - return "" - - description_str = self._get_params_as_str() - description = schema.get_attribute(name="description") - if description and description.max_length and len(description_str) > description.max_length: - length = description.max_length - 5 - return description_str[:length] + "..." - - return description_str - - -class InfrahubGroupContext(InfrahubGroupContextBase): - """Represents a Infrahub GroupContext in an asynchronous context.""" - - def __init__(self, client: InfrahubClient) -> None: - super().__init__() - self.client = client - - async def get_group(self, store_peers: bool = False) -> Optional[InfrahubNode]: - group_name = self._generate_group_name() - try: - group = await self.client.get(kind=self.group_type, name__value=group_name, include=["members", "children"]) - except NodeNotFoundError: - return None - - if not store_peers: - return group - - self.previous_members = group.members.peers # type: ignore[attr-defined] - self.previous_children = group.children.peers # type: ignore[attr-defined] - return group - - async def delete_unused(self) -> None: - if self.previous_members and self.unused_member_ids: - for member in self.previous_members: - if member.id in self.unused_member_ids and member.typename: - await self.client.delete(kind=member.typename, id=member.id) - - if self.previous_children and self.unused_child_ids: - for child in self.previous_children: - if child.id in self.unused_child_ids and child.typename: - await self.client.delete(kind=child.typename, id=child.id) - - async def add_related_nodes(self, ids: list[str], update_group_context: Optional[bool] = None) -> None: - """ - Add related Nodes IDs to the context. - - Args: - ids (list[str]): List of node IDs to be added. - update_group_context (Optional[bool], optional): Flag to control whether to update the group context. - """ - if update_group_context is not False and ( - self.client.mode == InfrahubClientMode.TRACKING or self.client.update_group_context or update_group_context - ): - self.related_node_ids.extend(ids) - - async def add_related_groups(self, ids: list[str], update_group_context: Optional[bool] = None) -> None: - """ - Add related Groups IDs to the context. - - Args: - ids (list[str]): List of group IDs to be added. - update_group_context (Optional[bool], optional): Flag to control whether to update the group context. 
- """ - if update_group_context is not False and ( - self.client.mode == InfrahubClientMode.TRACKING or self.client.update_group_context or update_group_context - ): - self.related_group_ids.extend(ids) - - async def update_group(self) -> None: - """ - Create or update (using upsert) a CoreStandardGroup to store all the Nodes and Groups used during an execution. - """ - children: list[str] = [] - members: list[str] = [] - - if self.related_group_ids: - children = self.related_group_ids - if self.related_node_ids: - members = self.related_node_ids - - if not children and not members: - return - - group_name = self._generate_group_name() - schema = await self.client.schema.get(kind=self.group_type) - description = self._generate_group_description(schema=schema) - - existing_group = None - if self.delete_unused_nodes: - existing_group = await self.get_group(store_peers=True) - - group = await self.client.create( - kind=self.group_type, - name=group_name, - description=description, - members=members, - children=children, - ) - await group.save(allow_upsert=True, update_group_context=False) - - if not existing_group: - return - - # Calculate how many nodes should be deleted - self.unused_member_ids = set(existing_group.members.peer_ids) - set(members) # type: ignore - self.unused_child_ids = set(existing_group.children.peer_ids) - set(children) # type: ignore - - if not self.delete_unused_nodes: - return - - await self.delete_unused() - # TODO : create anoter "read" group. Could be based of the store items - # Need to filters the store items inherited from CoreGroup to add them as children - # Need to validate that it's UUIDas "key" if we want to implement other methods to store item - - -class InfrahubGroupContextSync(InfrahubGroupContextBase): - """Represents a Infrahub GroupContext in an synchronous context.""" - - def __init__(self, client: InfrahubClientSync) -> None: - super().__init__() - self.client = client - - def get_group(self, store_peers: bool = False) -> Optional[InfrahubNodeSync]: - group_name = self._generate_group_name() - try: - group = self.client.get(kind=self.group_type, name__value=group_name, include=["members", "children"]) - except NodeNotFoundError: - return None - - if not store_peers: - return group - - self.previous_members = group.members.peers # type: ignore[attr-defined] - self.previous_children = group.children.peers # type: ignore[attr-defined] - return group - - def delete_unused(self) -> None: - if self.previous_members and self.unused_member_ids: - for member in self.previous_members: - if member.id in self.unused_member_ids and member.typename: - self.client.delete(kind=member.typename, id=member.id) - - if self.previous_children and self.unused_child_ids: - for child in self.previous_children: - if child.id in self.unused_child_ids and child.typename: - self.client.delete(kind=child.typename, id=child.id) - - def add_related_nodes(self, ids: list[str], update_group_context: Optional[bool] = None) -> None: - """ - Add related Nodes IDs to the context. - - Args: - ids (list[str]): List of node IDs to be added. - update_group_context (Optional[bool], optional): Flag to control whether to update the group context. - """ - if update_group_context is not False and ( - self.client.mode == InfrahubClientMode.TRACKING or self.client.update_group_context or update_group_context - ): - self.related_node_ids.extend(ids) - - def add_related_groups(self, ids: list[str], update_group_context: Optional[bool] = None) -> None: - """ - Add related Groups IDs to the context. 
- - Args: - ids (list[str]): List of group IDs to be added. - update_group_context (Optional[bool], optional): Flag to control whether to update the group context. - """ - if update_group_context is not False and ( - self.client.mode == InfrahubClientMode.TRACKING or self.client.update_group_context or update_group_context - ): - self.related_group_ids.extend(ids) - - def update_group(self) -> None: - """ - Create or update (using upsert) a CoreStandardGroup to store all the Nodes and Groups used during an execution. - """ - children: list[str] = [] - members: list[str] = [] - - if self.related_group_ids: - children = self.related_group_ids - if self.related_node_ids: - members = self.related_node_ids - - if not children and not members: - return - - group_name = self._generate_group_name() - schema = self.client.schema.get(kind=self.group_type) - description = self._generate_group_description(schema=schema) - - existing_group = None - if self.delete_unused_nodes: - existing_group = self.get_group(store_peers=True) - - group = self.client.create( - kind=self.group_type, - name=group_name, - description=description, - members=members, - children=children, - ) - group.save(allow_upsert=True, update_group_context=False) - - if not existing_group: - return - - # Calculate which nodes should be deleted - self.unused_member_ids = set(existing_group.members.peer_ids) - set(members) # type: ignore - self.unused_child_ids = set(existing_group.children.peer_ids) - set(children) # type: ignore - - if not self.delete_unused_nodes: - return - - self.delete_unused() - - # TODO: create another "read" group. Could be based on the store items - # Need to filter the store items inherited from CoreGroup to add them as children - # Need to validate that the "key" is a UUID if we want to implement other methods to store items diff --git a/python_sdk/infrahub_sdk/recorder.py b/python_sdk/infrahub_sdk/recorder.py deleted file mode 100644 index 218cfcac05..0000000000 --- a/python_sdk/infrahub_sdk/recorder.py +++ /dev/null @@ -1,70 +0,0 @@ -from __future__ import annotations - -import enum -from pathlib import Path -from typing import Protocol, runtime_checkable - -import httpx -import ujson -from pydantic_settings import BaseSettings, SettingsConfigDict - -from infrahub_sdk.utils import generate_request_filename - - -class RecorderType(str, enum.Enum): - NONE = "none" - JSON = "json" - - -@runtime_checkable -class Recorder(Protocol): - def record(self, response: httpx.Response) -> None: - """Record the response from Infrahub""" - - -class NoRecorder: - @staticmethod - def record(response: httpx.Response) -> None: - """The NoRecorder just silently returns""" - - @classmethod - def default(cls) -> NoRecorder: - return cls() - - -class JSONRecorder(BaseSettings): - model_config = SettingsConfigDict(env_prefix="INFRAHUB_JSON_RECORDER_") - directory: str = "."
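Since JSONRecorder is a pydantic BaseSettings model, its fields can also be set through INFRAHUB_JSON_RECORDER_* environment variables; the Config(custom_recorder=...) hook shown below is an assumption about how the client consumes a Recorder implementation:

from infrahub_sdk import Config, InfrahubClient
from infrahub_sdk.recorder import JSONRecorder

# Assumption: Config accepts a custom Recorder implementation
recorder = JSONRecorder(directory="./recordings")  # or INFRAHUB_JSON_RECORDER_DIRECTORY=./recordings
client = InfrahubClient(config=Config(custom_recorder=recorder))
# Every HTTP exchange is then dumped to ./recordings/<generated name>.json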
- host: str = "" - - def record(self, response: httpx.Response) -> None: - self._set_url_host(response) - filename = generate_request_filename(response.request) - data = { - "status_code": response.status_code, - "method": response.request.method, - "url": str(response.request.url), - "headers": dict(response.request.headers), - "response_content": response.content.decode("utf-8"), - "request_content": response.request.content.decode("utf-8"), - } - - with Path(f"{self.directory}/{filename}.json").open(mode="w", encoding="utf-8") as fobj: - ujson.dump(data, fobj, indent=4, sort_keys=True) - - def _set_url_host(self, response: httpx.Response) -> None: - if not self.host: - return - original = str(response.request.url) - if response.request.url.port: - modified = original.replace( - f"{response.request.url.scheme}://{response.request.url.host}:", - f"{response.request.url.scheme}://{self.host}:", - ) - else: - modified = original.replace( - f"{response.request.url.scheme}://{response.request.url.host}/", - f"{response.request.url.scheme}://{self.host}/", - ) - - response.request.url = httpx.URL(url=modified) diff --git a/python_sdk/infrahub_sdk/schema.py b/python_sdk/infrahub_sdk/schema.py deleted file mode 100644 index 58dd06be25..0000000000 --- a/python_sdk/infrahub_sdk/schema.py +++ /dev/null @@ -1,964 +0,0 @@ -from __future__ import annotations - -from collections import defaultdict -from enum import Enum -from pathlib import Path -from typing import TYPE_CHECKING, Any, MutableMapping, Optional, TypedDict, TypeVar, Union -from urllib.parse import urlencode - -import httpx -from pydantic import BaseModel, ConfigDict, Field, field_validator -from typing_extensions import TypeAlias - -from infrahub_sdk._importer import import_module -from infrahub_sdk.exceptions import InvalidResponseError, ModuleImportError, SchemaNotFoundError, ValidationError -from infrahub_sdk.generator import InfrahubGenerator -from infrahub_sdk.graphql import Mutation -from infrahub_sdk.utils import duplicates - -if TYPE_CHECKING: - from infrahub_sdk.client import InfrahubClient, InfrahubClientSync - from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync - - InfrahubNodeTypes = Union[InfrahubNode, InfrahubNodeSync] - -# pylint: disable=redefined-builtin - - -class DropdownMutationOptionalArgs(TypedDict): - color: Optional[str] - description: Optional[str] - label: Optional[str] - - -ResourceClass = TypeVar("ResourceClass") - -# --------------------------------------------------------------------------------- -# Repository Configuration file -# --------------------------------------------------------------------------------- - - -class InfrahubRepositoryConfigElement(BaseModel): - """Class to regroup all elements of the infrahub configuration for a repository for typing purpose.""" - - -class InfrahubRepositoryArtifactDefinitionConfig(InfrahubRepositoryConfigElement): - model_config = ConfigDict(extra="forbid") - name: str = Field(..., description="The name of the artifact definition") - artifact_name: Optional[str] = Field(default=None, description="Name of the artifact created from this definition") - parameters: dict[str, Any] = Field(..., description="The input parameters required to render this artifact") - content_type: str = Field(..., description="The content type of the rendered artifact") - targets: str = Field(..., description="The group to target when creating artifacts") - transformation: str = Field(..., description="The transformation to use.") - - -class 
InfrahubJinja2TransformConfig(InfrahubRepositoryConfigElement): - model_config = ConfigDict(extra="forbid") - name: str = Field(..., description="The name of the transform") - query: str = Field(..., description="The name of the GraphQL Query") - template_path: Path = Field(..., description="The path within the repository of the template file") - description: Optional[str] = Field(default=None, description="Description for this transform") - - @property - def template_path_value(self) -> str: - return str(self.template_path) - - @property - def payload(self) -> dict[str, str]: - data = self.model_dump(exclude_none=True) - data["template_path"] = self.template_path_value - return data - - -class InfrahubCheckDefinitionConfig(InfrahubRepositoryConfigElement): - model_config = ConfigDict(extra="forbid") - name: str = Field(..., description="The name of the Check Definition") - file_path: Path = Field(..., description="The file within the repository with the check code.") - parameters: dict[str, Any] = Field( - default_factory=dict, description="The input parameters required to run this check" - ) - targets: Optional[str] = Field( - default=None, description="The group to target when running this check, leave blank for global checks" - ) - class_name: str = Field(default="Check", description="The name of the check class to run.") - - -class InfrahubGeneratorDefinitionConfig(InfrahubRepositoryConfigElement): - model_config = ConfigDict(extra="forbid") - name: str = Field(..., description="The name of the Generator Definition") - file_path: Path = Field(..., description="The file within the repository with the generator code.") - query: str = Field(..., description="The GraphQL query to use as input.") - parameters: dict[str, Any] = Field( - default_factory=dict, description="The input parameters required to run this check" - ) - targets: str = Field(..., description="The group to target when running this generator") - class_name: str = Field(default="Generator", description="The name of the generator class to run.") - convert_query_response: bool = Field( - default=False, - description="Decide if the generator should convert the result of the GraphQL query to SDK InfrahubNode objects.", - ) - - def load_class( - self, import_root: Optional[str] = None, relative_path: Optional[str] = None - ) -> type[InfrahubGenerator]: - module = import_module(module_path=self.file_path, import_root=import_root, relative_path=relative_path) - - if self.class_name not in dir(module): - raise ModuleImportError(message=f"The specified class {self.class_name} was not found within the module") - - generator_class = getattr(module, self.class_name) - - if not issubclass(generator_class, InfrahubGenerator): - raise ModuleImportError(message=f"The specified class {self.class_name} is not an Infrahub Generator") - - return generator_class - - -class InfrahubPythonTransformConfig(InfrahubRepositoryConfigElement): - model_config = ConfigDict(extra="forbid") - name: str = Field(..., description="The name of the Transform") - file_path: Path = Field(..., description="The file within the repository with the transform code.") - class_name: str = Field(default="Transform", description="The name of the transform class to run.") - - -class InfrahubRepositoryGraphQLConfig(InfrahubRepositoryConfigElement): - model_config = ConfigDict(extra="forbid") - name: str = Field(..., description="The name of the GraphQL Query") - file_path: Path = Field(..., description="The file within the repository with the query code.") - - def 
load_query(self, relative_path: str = ".") -> str: - file_name = Path(f"{relative_path}/{self.file_path}") - with file_name.open("r", encoding="UTF-8") as file: - return file.read() - - -RESOURCE_MAP: dict[Any, str] = { - InfrahubJinja2TransformConfig: "jinja2_transforms", - InfrahubCheckDefinitionConfig: "check_definitions", - InfrahubRepositoryArtifactDefinitionConfig: "artifact_definitions", - InfrahubPythonTransformConfig: "python_transforms", - InfrahubGeneratorDefinitionConfig: "generator_definitions", - InfrahubRepositoryGraphQLConfig: "queries", -} - - -class InfrahubRepositoryConfig(BaseModel): - model_config = ConfigDict(extra="forbid") - check_definitions: list[InfrahubCheckDefinitionConfig] = Field( - default_factory=list, description="User defined checks" - ) - schemas: list[Path] = Field(default_factory=list, description="Schema files") - jinja2_transforms: list[InfrahubJinja2TransformConfig] = Field( - default_factory=list, description="Jinja2 data transformations" - ) - artifact_definitions: list[InfrahubRepositoryArtifactDefinitionConfig] = Field( - default_factory=list, description="Artifact definitions" - ) - python_transforms: list[InfrahubPythonTransformConfig] = Field( - default_factory=list, description="Python data transformations" - ) - generator_definitions: list[InfrahubGeneratorDefinitionConfig] = Field( - default_factory=list, description="Generator definitions" - ) - queries: list[InfrahubRepositoryGraphQLConfig] = Field(default_factory=list, description="GraphQL Queries") - - @field_validator( - "check_definitions", - "jinja2_transforms", - "artifact_definitions", - "python_transforms", - "generator_definitions", - "queries", - ) - @classmethod - def unique_items(cls, v: list[Any]) -> list[Any]: - names = [item.name for item in v] - if dups := duplicates(names): - raise ValueError(f"Found multiple elements with the same name: {dups}") - return v - - def _has_resource(self, resource_id: str, resource_type: type[ResourceClass], resource_field: str = "name") -> bool: - for item in getattr(self, RESOURCE_MAP[resource_type]): - if getattr(item, resource_field) == resource_id: - return True - return False - - def _get_resource( - self, resource_id: str, resource_type: type[ResourceClass], resource_field: str = "name" - ) -> ResourceClass: - for item in getattr(self, RESOURCE_MAP[resource_type]): - if getattr(item, resource_field) == resource_id: - return item - raise KeyError(f"Unable to find {resource_id!r} in {RESOURCE_MAP[resource_type]!r}") - - def has_jinja2_transform(self, name: str) -> bool: - return self._has_resource(resource_id=name, resource_type=InfrahubJinja2TransformConfig) - - def get_jinja2_transform(self, name: str) -> InfrahubJinja2TransformConfig: - return self._get_resource(resource_id=name, resource_type=InfrahubJinja2TransformConfig) - - def has_check_definition(self, name: str) -> bool: - return self._has_resource(resource_id=name, resource_type=InfrahubCheckDefinitionConfig) - - def get_check_definition(self, name: str) -> InfrahubCheckDefinitionConfig: - return self._get_resource(resource_id=name, resource_type=InfrahubCheckDefinitionConfig) - - def has_artifact_definition(self, name: str) -> bool: - return self._has_resource(resource_id=name, resource_type=InfrahubRepositoryArtifactDefinitionConfig) - - def get_artifact_definition(self, name: str) -> InfrahubRepositoryArtifactDefinitionConfig: - return self._get_resource(resource_id=name, resource_type=InfrahubRepositoryArtifactDefinitionConfig) - - def has_generator_definition(self,
name: str) -> bool: - return self._has_resource(resource_id=name, resource_type=InfrahubGeneratorDefinitionConfig) - - def get_generator_definition(self, name: str) -> InfrahubGeneratorDefinitionConfig: - return self._get_resource(resource_id=name, resource_type=InfrahubGeneratorDefinitionConfig) - - def has_python_transform(self, name: str) -> bool: - return self._has_resource(resource_id=name, resource_type=InfrahubPythonTransformConfig) - - def get_python_transform(self, name: str) -> InfrahubPythonTransformConfig: - return self._get_resource(resource_id=name, resource_type=InfrahubPythonTransformConfig) - - def has_query(self, name: str) -> bool: - return self._has_resource(resource_id=name, resource_type=InfrahubRepositoryGraphQLConfig) - - def get_query(self, name: str) -> InfrahubRepositoryGraphQLConfig: - return self._get_resource(resource_id=name, resource_type=InfrahubRepositoryGraphQLConfig) - - -# --------------------------------------------------------------------------------- -# Main Infrahub Schema File -# --------------------------------------------------------------------------------- -class FilterSchema(BaseModel): - name: str - kind: str - description: Optional[str] = None - - -class RelationshipCardinality(str, Enum): - ONE = "one" - MANY = "many" - - -class BranchSupportType(str, Enum): - AWARE = "aware" - AGNOSTIC = "agnostic" - LOCAL = "local" - - -class RelationshipKind(str, Enum): - GENERIC = "Generic" - ATTRIBUTE = "Attribute" - COMPONENT = "Component" - PARENT = "Parent" - GROUP = "Group" - HIERARCHY = "Hierarchy" - PROFILE = "Profile" - - -class DropdownMutation(str, Enum): - add = "SchemaDropdownAdd" - remove = "SchemaDropdownRemove" - - -class EnumMutation(str, Enum): - add = "SchemaEnumAdd" - remove = "SchemaEnumRemove" - - -class SchemaState(str, Enum): - PRESENT = "present" - ABSENT = "absent" - - -class AttributeSchema(BaseModel): - id: Optional[str] = None - state: SchemaState = SchemaState.PRESENT - name: str - kind: str - label: Optional[str] = None - description: Optional[str] = None - default_value: Optional[Any] = None - inherited: bool = False - unique: bool = False - branch: Optional[BranchSupportType] = None - optional: bool = False - read_only: bool = False - choices: Optional[list[dict[str, Any]]] = None - enum: Optional[list[Union[str, int]]] = None - max_length: Optional[int] = None - min_length: Optional[int] = None - regex: Optional[str] = None - - -class RelationshipSchema(BaseModel): - id: Optional[str] = None - state: SchemaState = SchemaState.PRESENT - name: str - peer: str - kind: RelationshipKind = RelationshipKind.GENERIC - label: Optional[str] = None - description: Optional[str] = None - identifier: Optional[str] = None - inherited: bool = False - cardinality: str = "many" - branch: Optional[BranchSupportType] = None - optional: bool = True - read_only: bool = False - filters: list[FilterSchema] = Field(default_factory=list) - - -class BaseNodeSchema(BaseModel): - id: Optional[str] = None - state: SchemaState = SchemaState.PRESENT - name: str - label: Optional[str] = None - namespace: str - description: Optional[str] = None - attributes: list[AttributeSchema] = Field(default_factory=list) - relationships: list[RelationshipSchema] = Field(default_factory=list) - filters: list[FilterSchema] = Field(default_factory=list) - - @property - def kind(self) -> str: - return self.namespace + self.name - - def get_field(self, name: str, raise_on_error: bool = True) -> Union[AttributeSchema, RelationshipSchema, None]: - if attribute_field := 
self.get_attribute_or_none(name=name): - return attribute_field - - if relationship_field := self.get_relationship_or_none(name=name): - return relationship_field - - if not raise_on_error: - return None - - raise ValueError(f"Unable to find the field {name}") - - def get_attribute(self, name: str) -> AttributeSchema: - for item in self.attributes: - if item.name == name: - return item - raise ValueError(f"Unable to find the attribute {name}") - - def get_attribute_or_none(self, name: str) -> Optional[AttributeSchema]: - for item in self.attributes: - if item.name == name: - return item - return None - - def get_relationship(self, name: str) -> RelationshipSchema: - for item in self.relationships: - if item.name == name: - return item - raise ValueError(f"Unable to find the relationship {name}") - - def get_relationship_or_none(self, name: str) -> Optional[RelationshipSchema]: - for item in self.relationships: - if item.name == name: - return item - return None - - def get_relationship_by_identifier(self, id: str, raise_on_error: bool = True) -> Union[RelationshipSchema, None]: - for item in self.relationships: - if item.identifier == id: - return item - - if not raise_on_error: - return None - - raise ValueError(f"Unable to find the relationship {id}") - - @property - def attribute_names(self) -> list[str]: - return [item.name for item in self.attributes] - - @property - def relationship_names(self) -> list[str]: - return [item.name for item in self.relationships] - - @property - def mandatory_input_names(self) -> list[str]: - return self.mandatory_attribute_names + self.mandatory_relationship_names - - @property - def mandatory_attribute_names(self) -> list[str]: - return [item.name for item in self.attributes if not item.optional and item.default_value is None] - - @property - def mandatory_relationship_names(self) -> list[str]: - return [item.name for item in self.relationships if not item.optional] - - @property - def local_attributes(self) -> list[AttributeSchema]: - return [item for item in self.attributes if not item.inherited] - - @property - def local_relationships(self) -> list[RelationshipSchema]: - return [item for item in self.relationships if not item.inherited] - - @property - def unique_attributes(self) -> list[AttributeSchema]: - return [item for item in self.attributes if item.unique] - - -class GenericSchema(BaseNodeSchema): - """A Generic can be either an Interface or a Union depending if there are some Attributes or Relationships defined.""" - - used_by: list[str] = Field(default_factory=list) - - -class NodeSchema(BaseNodeSchema): - inherit_from: list[str] = Field(default_factory=list) - branch: Optional[BranchSupportType] = None - default_filter: Optional[str] = None - human_friendly_id: Optional[list[str]] = None - - -class ProfileSchema(BaseNodeSchema): - inherit_from: list[str] = Field(default_factory=list) - - -class NodeExtensionSchema(BaseModel): - name: Optional[str] = None - kind: str - description: Optional[str] = None - label: Optional[str] = None - inherit_from: list[str] = Field(default_factory=list) - branch: Optional[BranchSupportType] = None - default_filter: Optional[str] = None - attributes: list[AttributeSchema] = Field(default_factory=list) - relationships: list[RelationshipSchema] = Field(default_factory=list) - - -class SchemaRoot(BaseModel): - version: str - generics: list[GenericSchema] = Field(default_factory=list) - nodes: list[NodeSchema] = Field(default_factory=list) - profiles: list[ProfileSchema] = Field(default_factory=list) - # 
node_extensions: list[NodeExtensionSchema] = Field(default_factory=list) - - -MainSchemaTypes: TypeAlias = Union[NodeSchema, GenericSchema, ProfileSchema] - - -class InfrahubSchemaBase: - def validate(self, data: dict[str, Any]) -> None: - SchemaRoot(**data) - - def validate_data_against_schema(self, schema: MainSchemaTypes, data: dict) -> None: - for key in data.keys(): - if key not in schema.relationship_names + schema.attribute_names: - identifier = f"{schema.kind}" - raise ValidationError( - identifier=identifier, - message=f"{key} is not a valid value for {identifier}", - ) - - def generate_payload_create( - self, - schema: MainSchemaTypes, - data: dict, - source: Optional[str] = None, - owner: Optional[str] = None, - is_protected: Optional[bool] = None, - is_visible: Optional[bool] = None, - ) -> dict[str, Any]: - obj_data: dict[str, Any] = {} - item_metadata: dict[str, Any] = {} - if source: - item_metadata["source"] = str(source) - if owner: - item_metadata["owner"] = str(owner) - if is_protected is not None: - item_metadata["is_protected"] = is_protected - if is_visible is not None: - item_metadata["is_visible"] = is_visible - - for key, value in data.items(): - obj_data[key] = {} - if key in schema.attribute_names: - obj_data[key] = {"value": value} - obj_data[key].update(item_metadata) - elif key in schema.relationship_names: - rel = schema.get_relationship(name=key) - if rel: - if rel.cardinality == "one": - obj_data[key] = {"id": str(value)} - obj_data[key].update(item_metadata) - elif rel.cardinality == "many": - obj_data[key] = [{"id": str(item)} for item in value] - for item in obj_data[key]: - item.update(item_metadata) - - return obj_data - - @staticmethod - def _validate_load_schema_response(response: httpx.Response) -> SchemaLoadResponse: - if response.status_code == httpx.codes.OK: - status = response.json() - return SchemaLoadResponse(hash=status["hash"], previous_hash=status["previous_hash"]) - - if response.status_code == httpx.codes.BAD_REQUEST: - return SchemaLoadResponse(errors=response.json()) - - if response.status_code == httpx.codes.UNPROCESSABLE_ENTITY: - return SchemaLoadResponse(errors=response.json()) - - response.raise_for_status() - - raise InvalidResponseError(message=f"Invalid response received from server HTTP {response.status_code}") - - -class InfrahubSchema(InfrahubSchemaBase): - def __init__(self, client: InfrahubClient): - self.client = client - self.cache: dict = defaultdict(lambda: dict) - - async def get(self, kind: str, branch: Optional[str] = None, refresh: bool = False) -> MainSchemaTypes: - branch = branch or self.client.default_branch - - if refresh: - self.cache[branch] = await self.fetch(branch=branch) - - if branch in self.cache and kind in self.cache[branch]: - return self.cache[branch][kind] - - # Fetching the latest schema from the server if we didn't fetch it earlier - # because we couldn't find the object in the local cache - if not refresh: - self.cache[branch] = await self.fetch(branch=branch) - - if branch in self.cache and kind in self.cache[branch]: - return self.cache[branch][kind] - - raise SchemaNotFoundError(identifier=kind) - - async def all( - self, branch: Optional[str] = None, refresh: bool = False, namespaces: Optional[list[str]] = None - ) -> MutableMapping[str, MainSchemaTypes]: - """Retrieve the entire schema for a given branch.
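A small usage sketch of the caching behaviour described above, assuming a reachable server and the built-in BuiltinTag kind:

from infrahub_sdk import InfrahubClient

async def inspect_schema() -> None:
    client = InfrahubClient()
    tag_schema = await client.schema.get(kind="BuiltinTag")   # first call fetches and caches per branch
    print(tag_schema.attribute_names, tag_schema.mandatory_input_names)
    await client.schema.get(kind="BuiltinTag", refresh=True)  # forces a round-trip even on a warm cache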
- - if present in cache, the schema will be served from the cache, unless refresh is set to True - if the schema is not present in the cache, it will be fetched automatically from the server - - Args: - branch (str, optional): Name of the branch to query. Defaults to default_branch. - refresh (bool, optional): Force a refresh of the schema. Defaults to False. - - Returns: - dict[str, MainSchemaTypes]: Dictionary of all schema organized by kind - """ - branch = branch or self.client.default_branch - if refresh or branch not in self.cache: - self.cache[branch] = await self.fetch(branch=branch, namespaces=namespaces) - - return self.cache[branch] - - async def load(self, schemas: list[dict], branch: Optional[str] = None) -> SchemaLoadResponse: - branch = branch or self.client.default_branch - url = f"{self.client.address}/api/schema/load?branch={branch}" - response = await self.client._post( - url=url, timeout=max(120, self.client.default_timeout), payload={"schemas": schemas} - ) - - return self._validate_load_schema_response(response=response) - - async def check(self, schemas: list[dict], branch: Optional[str] = None) -> tuple[bool, Optional[dict]]: - branch = branch or self.client.default_branch - url = f"{self.client.address}/api/schema/check?branch={branch}" - response = await self.client._post( - url=url, timeout=max(120, self.client.default_timeout), payload={"schemas": schemas} - ) - - if response.status_code == httpx.codes.ACCEPTED: - return True, response.json() - - if response.status_code == httpx.codes.UNPROCESSABLE_ENTITY: - return False, response.json() - - response.raise_for_status() - return False, None - - async def _get_kind_and_attribute_schema( - self, kind: Union[str, InfrahubNodeTypes], attribute: str, branch: Optional[str] = None - ) -> tuple[str, AttributeSchema]: - node_kind: str = kind._schema.kind if not isinstance(kind, str) else kind - node_schema = await self.client.schema.get(kind=node_kind, branch=branch) - schema_attr = node_schema.get_attribute(name=attribute) - - if schema_attr is None: - raise ValueError(f"Unable to find attribute {attribute}") - - return node_kind, schema_attr - - async def _mutate_enum_attribute( - self, - mutation: EnumMutation, - kind: Union[str, InfrahubNodeTypes], - attribute: str, - option: Union[str, int], - branch: Optional[str] = None, - ) -> None: - node_kind, schema_attr = await self._get_kind_and_attribute_schema( - kind=kind, attribute=attribute, branch=branch - ) - - if schema_attr.enum is None: - raise ValueError(f"Attribute '{schema_attr.name}' is not of kind Enum") - - input_data = {"data": {"kind": node_kind, "attribute": schema_attr.name, "enum": option}} - - query = Mutation(mutation=mutation.value, input_data=input_data, query={"ok": None}) - await self.client.execute_graphql( - query=query.render(), - branch_name=branch, - tracker=f"mutation-{mutation.name}-add", - timeout=max(60, self.client.default_timeout), - ) - - async def add_enum_option( - self, kind: Union[str, InfrahubNodeTypes], attribute: str, option: Union[str, int], branch: Optional[str] = None - ) -> None: - await self._mutate_enum_attribute( - mutation=EnumMutation.add, kind=kind, attribute=attribute, option=option, branch=branch - ) - - async def remove_enum_option( - self, kind: Union[str, InfrahubNodeTypes], attribute: str, option: Union[str, int], branch: Optional[str] = None - ) -> None: - await self._mutate_enum_attribute( - mutation=EnumMutation.remove, kind=kind, attribute=attribute, option=option, branch=branch - ) - - async def 
_mutate_dropdown_attribute( - self, - mutation: DropdownMutation, - kind: Union[str, InfrahubNodeTypes], - attribute: str, - option: str, - branch: Optional[str] = None, - dropdown_optional_args: Optional[DropdownMutationOptionalArgs] = None, - ) -> None: - dropdown_optional_args = dropdown_optional_args or DropdownMutationOptionalArgs( - color="", description="", label="" - ) - - node_kind, schema_attr = await self._get_kind_and_attribute_schema( - kind=kind, attribute=attribute, branch=branch - ) - - if schema_attr.kind != "Dropdown": - raise ValueError(f"Attribute '{schema_attr.name}' is not of kind Dropdown") - - input_data: dict[str, Any] = { - "data": { - "kind": node_kind, - "attribute": schema_attr.name, - "dropdown": option, - } - } - if mutation == DropdownMutation.add: - input_data["data"].update(dropdown_optional_args) - - query = Mutation(mutation=mutation.value, input_data=input_data, query={"ok": None}) - await self.client.execute_graphql( - query=query.render(), - branch_name=branch, - tracker=f"mutation-{mutation.name}-remove", - timeout=max(60, self.client.default_timeout), - ) - - async def remove_dropdown_option( - self, kind: Union[str, InfrahubNodeTypes], attribute: str, option: str, branch: Optional[str] = None - ) -> None: - await self._mutate_dropdown_attribute( - mutation=DropdownMutation.remove, kind=kind, attribute=attribute, option=option, branch=branch - ) - - async def add_dropdown_option( - self, - kind: Union[str, InfrahubNodeTypes], - attribute: str, - option: str, - color: Optional[str] = "", - description: Optional[str] = "", - label: Optional[str] = "", - branch: Optional[str] = None, - ) -> None: - dropdown_optional_args = DropdownMutationOptionalArgs(color=color, description=description, label=label) - await self._mutate_dropdown_attribute( - mutation=DropdownMutation.add, - kind=kind, - attribute=attribute, - option=option, - branch=branch, - dropdown_optional_args=dropdown_optional_args, - ) - - async def fetch(self, branch: str, namespaces: Optional[list[str]] = None) -> MutableMapping[str, MainSchemaTypes]: - """Fetch the schema from the server for a given branch. - - Args: - branch (str): Name of the branch to fetch the schema for. - - Returns: - dict[str, MainSchemaTypes]: Dictionary of all schema organized by kind - """ - url_parts = [("branch", branch)] - if namespaces: - url_parts.extend([("namespaces", ns) for ns in namespaces]) - query_params = urlencode(url_parts) - url = f"{self.client.address}/api/schema?{query_params}" - - response = await self.client._get(url=url) - response.raise_for_status() - - data: MutableMapping[str, Any] = response.json() - - nodes: MutableMapping[str, MainSchemaTypes] = {} - for node_schema in data.get("nodes", []): - node = NodeSchema(**node_schema) - nodes[node.kind] = node - - for generic_schema in data.get("generics", []): - generic = GenericSchema(**generic_schema) - nodes[generic.kind] = generic - - for profile_schema in data.get("profiles", []): - profile = ProfileSchema(**profile_schema) - nodes[profile.kind] = profile - - return nodes - - -class InfrahubSchemaSync(InfrahubSchemaBase): - def __init__(self, client: InfrahubClientSync): - self.client = client - self.cache: dict = defaultdict(lambda: dict) - - def all( - self, branch: Optional[str] = None, refresh: bool = False, namespaces: Optional[list[str]] = None - ) -> MutableMapping[str, MainSchemaTypes]: - """Retrieve the entire schema for a given branch. 
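For example, adding a choice to a Dropdown attribute through the helper above; InfraDevice and its status attribute are hypothetical schema elements:

from infrahub_sdk import InfrahubClient

async def add_maintenance_status(client: InfrahubClient) -> None:
    await client.schema.add_dropdown_option(
        kind="InfraDevice",          # hypothetical node kind
        attribute="status",          # must be an attribute of kind Dropdown
        option="maintenance",
        color="#ffa500",
        description="Device is undergoing maintenance",
    )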
- - if present in cache, the schema will be served from the cache, unless refresh is set to True - if the schema is not present in the cache, it will be fetched automatically from the server - - Args: - branch (str, optional): Name of the branch to query. Defaults to default_branch. - refresh (bool, optional): Force a refresh of the schema. Defaults to False. - - Returns: - dict[str, MainSchemaTypes]: Dictionary of all schema organized by kind - """ - branch = branch or self.client.default_branch - if refresh or branch not in self.cache: - self.cache[branch] = self.fetch(branch=branch, namespaces=namespaces) - - return self.cache[branch] - - def get(self, kind: str, branch: Optional[str] = None, refresh: bool = False) -> MainSchemaTypes: - branch = branch or self.client.default_branch - - if refresh: - self.cache[branch] = self.fetch(branch=branch) - - if branch in self.cache and kind in self.cache[branch]: - return self.cache[branch][kind] - - # Fetching the latest schema from the server if we didn't fetch it earlier - # because we couldn't find the object in the local cache - if not refresh: - self.cache[branch] = self.fetch(branch=branch) - - if branch in self.cache and kind in self.cache[branch]: - return self.cache[branch][kind] - - raise SchemaNotFoundError(identifier=kind) - - def _get_kind_and_attribute_schema( - self, kind: Union[str, InfrahubNodeTypes], attribute: str, branch: Optional[str] = None - ) -> tuple[str, AttributeSchema]: - node_kind: str = kind._schema.kind if not isinstance(kind, str) else kind - node_schema = self.client.schema.get(kind=node_kind, branch=branch) - schema_attr = node_schema.get_attribute(name=attribute) - - if schema_attr is None: - raise ValueError(f"Unable to find attribute {attribute}") - - return node_kind, schema_attr - - def _mutate_enum_attribute( - self, - mutation: EnumMutation, - kind: Union[str, InfrahubNodeTypes], - attribute: str, - option: Union[str, int], - branch: Optional[str] = None, - ) -> None: - node_kind, schema_attr = self._get_kind_and_attribute_schema(kind=kind, attribute=attribute, branch=branch) - - if schema_attr.enum is None: - raise ValueError(f"Attribute '{schema_attr.name}' is not of kind Enum") - - input_data = {"data": {"kind": node_kind, "attribute": schema_attr.name, "enum": option}} - - query = Mutation(mutation=mutation.value, input_data=input_data, query={"ok": None}) - self.client.execute_graphql( - query=query.render(), - branch_name=branch, - tracker=f"mutation-{mutation.name}-add", - timeout=max(60, self.client.default_timeout), - ) - - def add_enum_option( - self, kind: Union[str, InfrahubNodeTypes], attribute: str, option: Union[str, int], branch: Optional[str] = None - ) -> None: - self._mutate_enum_attribute( - mutation=EnumMutation.add, kind=kind, attribute=attribute, option=option, branch=branch - ) - - def remove_enum_option( - self, kind: Union[str, InfrahubNodeTypes], attribute: str, option: Union[str, int], branch: Optional[str] = None - ) -> None: - self._mutate_enum_attribute( - mutation=EnumMutation.remove, kind=kind, attribute=attribute, option=option, branch=branch - ) - - def _mutate_dropdown_attribute( - self, - mutation: DropdownMutation, - kind: Union[str, InfrahubNodeTypes], - attribute: str, - option: str, - branch: Optional[str] = None, - dropdown_optional_args: Optional[DropdownMutationOptionalArgs] = None, - ) -> None: - dropdown_optional_args = dropdown_optional_args or DropdownMutationOptionalArgs( - color="", description="", label="" - ) - node_kind, schema_attr =
self._get_kind_and_attribute_schema(kind=kind, attribute=attribute, branch=branch) - - if schema_attr.kind != "Dropdown": - raise ValueError(f"Attribute '{schema_attr.name}' is not of kind Dropdown") - - input_data: dict[str, Any] = { - "data": { - "kind": node_kind, - "attribute": schema_attr.name, - "dropdown": option, - } - } - - if mutation == DropdownMutation.add: - input_data["data"].update(dropdown_optional_args) - - query = Mutation(mutation=mutation.value, input_data=input_data, query={"ok": None}) - self.client.execute_graphql( - query=query.render(), - branch_name=branch, - tracker=f"mutation-{mutation.name}-remove", - timeout=max(60, self.client.default_timeout), - ) - - def remove_dropdown_option( - self, kind: Union[str, InfrahubNodeTypes], attribute: str, option: str, branch: Optional[str] = None - ) -> None: - self._mutate_dropdown_attribute( - mutation=DropdownMutation.remove, kind=kind, attribute=attribute, option=option, branch=branch - ) - - def add_dropdown_option( - self, - kind: Union[str, InfrahubNodeTypes], - attribute: str, - option: str, - color: Optional[str] = "", - description: Optional[str] = "", - label: Optional[str] = "", - branch: Optional[str] = None, - ) -> None: - dropdown_optional_args = DropdownMutationOptionalArgs(color=color, description=description, label=label) - self._mutate_dropdown_attribute( - mutation=DropdownMutation.add, - kind=kind, - attribute=attribute, - option=option, - branch=branch, - dropdown_optional_args=dropdown_optional_args, - ) - - def fetch(self, branch: str, namespaces: Optional[list[str]] = None) -> MutableMapping[str, MainSchemaTypes]: - """Fetch the schema from the server for a given branch. - - Args: - branch (str): Name of the branch to fetch the schema for. - - Returns: - dict[str, MainSchemaTypes]: Dictionary of all schema organized by kind - """ - url_parts = [("branch", branch)] - if namespaces: - url_parts.extend([("namespaces", ns) for ns in namespaces]) - query_params = urlencode(url_parts) - url = f"{self.client.address}/api/schema?{query_params}" - - response = self.client._get(url=url) - response.raise_for_status() - - data: MutableMapping[str, Any] = response.json() - - nodes: MutableMapping[str, MainSchemaTypes] = {} - for node_schema in data.get("nodes", []): - node = NodeSchema(**node_schema) - nodes[node.kind] = node - - for generic_schema in data.get("generics", []): - generic = GenericSchema(**generic_schema) - nodes[generic.kind] = generic - - for profile_schema in data.get("profiles", []): - profile = ProfileSchema(**profile_schema) - nodes[profile.kind] = profile - - return nodes - - def load(self, schemas: list[dict], branch: Optional[str] = None) -> SchemaLoadResponse: - branch = branch or self.client.default_branch - url = f"{self.client.address}/api/schema/load?branch={branch}" - response = self.client._post( - url=url, timeout=max(120, self.client.default_timeout), payload={"schemas": schemas} - ) - - return self._validate_load_schema_response(response=response) - - def check(self, schemas: list[dict], branch: Optional[str] = None) -> tuple[bool, Optional[dict]]: - branch = branch or self.client.default_branch - url = f"{self.client.address}/api/schema/check?branch={branch}" - response = self.client._post( - url=url, timeout=max(120, self.client.default_timeout), payload={"schemas": schemas} - ) - - if response.status_code == httpx.codes.ACCEPTED: - return True, response.json() - - if response.status_code == httpx.codes.UNPROCESSABLE_ENTITY: - return False, response.json() - - 
response.raise_for_status() - return False, None - - -class SchemaLoadResponse(BaseModel): - hash: str = Field(default="", description="The new hash for the entire schema") - previous_hash: str = Field(default="", description="The previous hash for the entire schema") - errors: dict = Field(default_factory=dict, description="Errors reported by the server") - - @property - def schema_updated(self) -> bool: - if self.hash and self.previous_hash and self.hash != self.previous_hash: - return True - return False diff --git a/python_sdk/infrahub_sdk/store.py b/python_sdk/infrahub_sdk/store.py deleted file mode 100644 index 1384bbd80f..0000000000 --- a/python_sdk/infrahub_sdk/store.py +++ /dev/null @@ -1,123 +0,0 @@ -from __future__ import annotations - -from collections import defaultdict -from typing import TYPE_CHECKING, Any, Literal, Optional, Union, overload - -from infrahub_sdk.exceptions import NodeNotFoundError - -if TYPE_CHECKING: - from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync - - -class NodeStoreBase: - """Internal Store for InfrahubNode objects. - - Often while creating a lot of new objects, - we need to save them in order to reuse them later, to associate them with another node for example. - """ - - def __init__(self) -> None: - self._store: dict[str, dict] = defaultdict(dict) - self._store_by_hfid: dict[str, Any] = defaultdict(dict) - - def _set(self, node: Union[InfrahubNode, InfrahubNodeSync], key: Optional[str] = None) -> None: - hfid = node.get_human_friendly_id_as_string(include_kind=True) - - if not key and not hfid: - raise ValueError("Cannot store node without human friendly ID or key.") - - if key: - node_kind = node._schema.kind - self._store[node_kind][key] = node - - if hfid: - self._store_by_hfid[hfid] = node - - def _get(self, key: str, kind: Optional[str] = None, raise_when_missing: bool = True): # type: ignore[no-untyped-def] - if kind and kind not in self._store and key not in self._store[kind]: # type: ignore[attr-defined] - if not raise_when_missing: - return None - raise NodeNotFoundError( - node_type=kind, - identifier={"key": [key]}, - message="Unable to find the node in the Store", - ) - - if kind and kind in self._store and key in self._store[kind]: # type: ignore[attr-defined] - return self._store[kind][key] # type: ignore[attr-defined] - - for _, item in self._store.items(): # type: ignore[attr-defined] - if key in item: - return item[key] - - if not raise_when_missing: - return None - raise NodeNotFoundError( - node_type="n/a", - identifier={"key": [key]}, - message=f"Unable to find the node {key!r} in the store", - ) - - def _get_by_hfid(self, key: str, raise_when_missing: bool = True): # type: ignore[no-untyped-def] - try: - return self._store_by_hfid[key] - except KeyError as exc: - if raise_when_missing: - raise NodeNotFoundError( - node_type="n/a", - identifier={"key": [key]}, - message=f"Unable to find the node {key!r} in the store", - ) from exc - return None - - -class NodeStore(NodeStoreBase): - @overload - def get(self, key: str, kind: Optional[str] = None, raise_when_missing: Literal[True] = True) -> InfrahubNode: ... - - @overload - def get( - self, key: str, kind: Optional[str] = None, raise_when_missing: Literal[False] = False - ) -> Optional[InfrahubNode]: ...
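A short sketch of the store in use; the client.store attribute is an assumption about how InfrahubClient exposes a NodeStore:

from infrahub_sdk import InfrahubClient

async def create_and_reuse() -> None:
    client = InfrahubClient()
    site_tag = await client.create(kind="BuiltinTag", name="site-sfo1")
    await site_tag.save()
    client.store.set(node=site_tag, key="sfo1")                 # stash under an explicit key
    same_tag = client.store.get(key="sfo1", kind="BuiltinTag")  # retrieved without another server query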
- - def get(self, key: str, kind: Optional[str] = None, raise_when_missing: bool = True) -> Optional[InfrahubNode]: - return self._get(key=key, kind=kind, raise_when_missing=raise_when_missing) - - @overload - def get_by_hfid(self, key: str, raise_when_missing: Literal[True] = True) -> InfrahubNode: ... - - @overload - def get_by_hfid(self, key: str, raise_when_missing: Literal[False] = False) -> Optional[InfrahubNode]: ... - - def get_by_hfid(self, key: str, raise_when_missing: bool = True) -> Optional[InfrahubNode]: - return self._get_by_hfid(key=key, raise_when_missing=raise_when_missing) - - def set(self, node: InfrahubNode, key: Optional[str] = None) -> None: - return self._set(node=node, key=key) - - -class NodeStoreSync(NodeStoreBase): - @overload - def get( - self, key: str, kind: Optional[str] = None, raise_when_missing: Literal[True] = True - ) -> InfrahubNodeSync: ... - - @overload - def get( - self, key: str, kind: Optional[str] = None, raise_when_missing: Literal[False] = False - ) -> Optional[InfrahubNodeSync]: ... - - def get(self, key: str, kind: Optional[str] = None, raise_when_missing: bool = True) -> Optional[InfrahubNodeSync]: - return self._get(key=key, kind=kind, raise_when_missing=raise_when_missing) - - @overload - def get_by_hfid(self, key: str, raise_when_missing: Literal[True] = True) -> InfrahubNodeSync: ... - - @overload - def get_by_hfid(self, key: str, raise_when_missing: Literal[False] = False) -> Optional[InfrahubNodeSync]: ... - - def get_by_hfid(self, key: str, raise_when_missing: bool = True) -> Optional[InfrahubNodeSync]: - return self._get_by_hfid(key=key, raise_when_missing=raise_when_missing) - - def set(self, node: InfrahubNodeSync, key: Optional[str] = None) -> None: - return self._set(node=node, key=key) diff --git a/python_sdk/infrahub_sdk/task_report.py b/python_sdk/infrahub_sdk/task_report.py deleted file mode 100644 index 47b83056af..0000000000 --- a/python_sdk/infrahub_sdk/task_report.py +++ /dev/null @@ -1,210 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, Final, Optional, Protocol, TypedDict, Union, runtime_checkable - -from typing_extensions import Self - -from infrahub_sdk.uuidt import generate_uuid - -if TYPE_CHECKING: - from types import TracebackType - - from infrahub_sdk.client import InfrahubClient - - -class Log(TypedDict): - message: str - severity: str - - -TaskLogs = Union[list[Log], Log] - - -class TaskReport: - def __init__( - self, - client: InfrahubClient, - logger: InfrahubLogger, - related_node: str, - title: str, - task_id: Optional[str] = None, - created_by: Optional[str] = None, - create_with_context: bool = True, - ): - self.client = client - self.title = title - self.task_id: Final = task_id or generate_uuid() - self.related_node: Final = related_node - self.created_by: Final = created_by - self.has_failures: bool = False - self.finalized: bool = False - self.created: bool = False - self.create_with_context = create_with_context - self.log = logger - - async def __aenter__(self) -> Self: - if self.create_with_context: - await self.create() - return self - - async def __aexit__( - self, - exc_type: Optional[type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - if exc_type: - self.finalized = True - await self.update(conclusion="FAILURE", logs={"message": str(exc_value), "severity": "ERROR"}) - - if self.finalized or not self.created: - return - - conclusion = "FAILURE" if self.has_failures else "SUCCESS" - await 
self.update(conclusion=conclusion) - - async def create( - self, title: Optional[str] = None, conclusion: str = "UNKNOWN", logs: Optional[TaskLogs] = None - ) -> None: - variables: dict[str, Any] = { - "related_node": self.related_node, - "task_id": self.task_id, - "title": title or self.title, - "conclusion": conclusion, - } - if self.created_by: - variables["created_by"] = self.created_by - if logs: - variables["logs"] = logs - - await self.client.execute_graphql( - query=CREATE_TASK, - variables=variables, - ) - self.created = True - - async def info(self, event: str, *args: Any, **kw: Any) -> None: - self.log.info(event, *args, **kw) - await self.update(logs={"severity": "INFO", "message": event}) - - async def warning(self, event: str, *args: Any, **kw: Any) -> None: - self.log.warning(event, *args, **kw) - await self.update(logs={"severity": "WARNING", "message": event}) - - async def error(self, event: str, *args: Any, **kw: Any) -> None: - self.log.error(event, *args, **kw) - self.has_failures = True - await self.update(logs={"severity": "ERROR", "message": event}) - - async def critical(self, event: str, *args: Any, **kw: Any) -> None: - self.log.critical(event, *args, **kw) - self.has_failures = True - await self.update(logs={"severity": "CRITICAL", "message": event}) - - async def exception(self, event: str, *args: Any, **kw: Any) -> None: - self.log.critical(event, *args, **kw) - self.has_failures = True - await self.update(logs={"severity": "CRITICAL", "message": event}) - - async def finalise( - self, title: Optional[str] = None, conclusion: str = "SUCCESS", logs: Optional[TaskLogs] = None - ) -> None: - self.finalized = True - await self.update(title=title, conclusion=conclusion, logs=logs) - - async def update( - self, title: Optional[str] = None, conclusion: Optional[str] = None, logs: Optional[TaskLogs] = None - ) -> None: - if not self.created: - await self.create() - variables: dict[str, Any] = {"task_id": self.task_id} - if conclusion: - variables["conclusion"] = conclusion - if title: - variables["title"] = title - if logs: - variables["logs"] = logs - await self.client.execute_graphql(query=UPDATE_TASK, variables=variables) - - -class InfrahubLogger(Protocol): - def debug(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send a debug event""" - - def info(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send an info event""" - - def warning(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send a warning event""" - - def error(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send an error event.""" - - def critical(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send a critical event.""" - - def exception(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send an exception event.""" - - -@runtime_checkable -class InfrahubTaskReportLogger(Protocol): - async def info(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send an info event""" - - async def warning(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send a warning event""" - - async def error(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send an error event.""" - - async def critical(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send a critical event.""" - - async def exception(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send an exception event.""" - - -CREATE_TASK = 
""" -mutation CreateTask( - $conclusion: TaskConclusion!, - $title: String!, - $task_id: UUID, - $related_node: String!, - $created_by: String, - $logs: [RelatedTaskLogCreateInput] - ) { - InfrahubTaskCreate( - data: { - id: $task_id, - title: $title, - related_node: $related_node, - conclusion: $conclusion, - created_by: $created_by, - logs: $logs - } - ) { - ok - } -} -""" - -UPDATE_TASK = """ -mutation UpdateTask( - $conclusion: TaskConclusion, - $title: String, - $task_id: UUID!, - $logs: [RelatedTaskLogCreateInput] - ) { - InfrahubTaskUpdate( - data: { - id: $task_id, - title: $title, - conclusion: $conclusion, - logs: $logs - } - ) { - ok - } -} -""" diff --git a/python_sdk/infrahub_sdk/timestamp.py b/python_sdk/infrahub_sdk/timestamp.py deleted file mode 100644 index d85dcbed92..0000000000 --- a/python_sdk/infrahub_sdk/timestamp.py +++ /dev/null @@ -1,92 +0,0 @@ -from __future__ import annotations - -import re -from typing import Optional, Union - -import pendulum -from pendulum.datetime import DateTime - -REGEX_MAPPING = { - "seconds": r"(\d+)(s|sec|second|seconds)", - "minutes": r"(\d+)(m|min|minute|minutes)", - "hours": r"(\d+)(h|hour|hours)", -} - - -class TimestampFormatError(ValueError): ... - - -class Timestamp: - def __init__(self, value: Optional[Union[str, DateTime, Timestamp]] = None): - if value and isinstance(value, DateTime): - self.obj = value - elif value and isinstance(value, self.__class__): - self.obj = value.obj - elif isinstance(value, str): - self.obj = self._parse_string(value) - else: - self.obj = DateTime.now(tz="UTC") - - @classmethod - def _parse_string(cls, value: str) -> DateTime: - try: - parsed_date = pendulum.parse(value) - if isinstance(parsed_date, DateTime): - return parsed_date - except (pendulum.parsing.exceptions.ParserError, ValueError): - pass - - params = {} - for key, regex in REGEX_MAPPING.items(): - match = re.search(regex, value) - if match: - params[key] = int(match.group(1)) - - if not params: - raise TimestampFormatError(f"Invalid time format for {value}") - - return DateTime.now(tz="UTC").subtract(**params) - - def __repr__(self) -> str: - return f"Timestamp: {self.to_string()}" - - def to_string(self, with_z: bool = True) -> str: - iso8601_string = self.obj.to_iso8601_string() - if not with_z and iso8601_string[-1] == "Z": - iso8601_string = iso8601_string[:-1] + "+00:00" - return iso8601_string - - def to_timestamp(self) -> int: - return self.obj.int_timestamp - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Timestamp): - return NotImplemented - return self.obj == other.obj - - def __lt__(self, other: object) -> bool: - if not isinstance(other, Timestamp): - return NotImplemented - return self.obj < other.obj - - def __gt__(self, other: object) -> bool: - if not isinstance(other, Timestamp): - return NotImplemented - return self.obj > other.obj - - def __le__(self, other: object) -> bool: - if not isinstance(other, Timestamp): - return NotImplemented - return self.obj <= other.obj - - def __ge__(self, other: object) -> bool: - if not isinstance(other, Timestamp): - return NotImplemented - return self.obj >= other.obj - - def __hash__(self) -> int: - return hash(self.to_string()) - - def add_delta(self, hours: int = 0, minutes: int = 0, seconds: int = 0, microseconds: int = 0) -> Timestamp: - time = self.obj.add(hours=hours, minutes=minutes, seconds=seconds, microseconds=microseconds) - return Timestamp(time) diff --git a/python_sdk/infrahub_sdk/topological_sort.py b/python_sdk/infrahub_sdk/topological_sort.py 
deleted file mode 100644 index 5cf6b82035..0000000000 --- a/python_sdk/infrahub_sdk/topological_sort.py +++ /dev/null @@ -1,68 +0,0 @@ -from __future__ import annotations - -from itertools import chain -from typing import Any, Iterable, Mapping, Sequence - - -class DependencyCycleExistsError(Exception): - def __init__(self, cycles: Iterable[Sequence[str]], *args: tuple[Any]) -> None: - self.cycles = cycles - super().__init__(*args) - - def get_cycle_strings(self) -> list[str]: - return [" --> ".join([str(node) for node in cycle]) for cycle in self.cycles] - - def __repr__(self) -> str: - return f"{type(self).__name__}({self.get_cycle_strings()})" - - -def topological_sort(dependency_dict: Mapping[str, Iterable[str]]) -> list[set[str]]: - if not dependency_dict: - return [] - - missing_dependent_keys = set(chain(*dependency_dict.values())) - set(dependency_dict.keys()) - - dependency_dict_to_sort = {k: set(v) for k, v in dependency_dict.items()} - dependency_dict_to_sort.update({missing_key: set() for missing_key in missing_dependent_keys}) - - ordered = [] - while len(dependency_dict_to_sort) > 0: - nondependant_nodes = {key for key, dependencies in dependency_dict_to_sort.items() if len(dependencies) == 0} - dependency_dict_to_sort = { - k: v - nondependant_nodes for k, v in dependency_dict_to_sort.items() if k not in nondependant_nodes - } - ordered.append(nondependant_nodes) - - if len(nondependant_nodes) == 0 and len(dependency_dict_to_sort) != 0: - cycles = get_cycles(dependency_dict_to_sort) - raise DependencyCycleExistsError(cycles=cycles) - return ordered - - -def get_cycles(dependency_dict: Mapping[str, Iterable[str]]) -> list[list[str]]: - if not dependency_dict: - return [] - - dict_to_check = {**dependency_dict} - cycles = [] - - while dict_to_check: - start_path = list(dict_to_check.keys())[:1] - cycles += _get_cycles(dependency_dict=dict_to_check, path=start_path) - return cycles - - -def _get_cycles(dependency_dict: dict[str, Iterable[str]], path: list[str]) -> list[list[str]]: - try: - next_nodes = dependency_dict.pop(path[-1]) - except KeyError: - return [] - cycles = [] - for next_node in next_nodes: - if next_node in path: - cycles.append(path[path.index(next_node) :] + [next_node]) - else: - next_cycles = _get_cycles(dependency_dict, path + [next_node]) - if next_cycles: - cycles += next_cycles - return cycles diff --git a/python_sdk/infrahub_sdk/transfer/__init__.py b/python_sdk/infrahub_sdk/transfer/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/python_sdk/infrahub_sdk/transfer/constants.py b/python_sdk/infrahub_sdk/transfer/constants.py deleted file mode 100644 index 9565432e5c..0000000000 --- a/python_sdk/infrahub_sdk/transfer/constants.py +++ /dev/null @@ -1 +0,0 @@ -ILLEGAL_NAMESPACES = {"Internal", "Infrahub", "Schema"} diff --git a/python_sdk/infrahub_sdk/transfer/exceptions.py b/python_sdk/infrahub_sdk/transfer/exceptions.py deleted file mode 100644 index 9477b5483a..0000000000 --- a/python_sdk/infrahub_sdk/transfer/exceptions.py +++ /dev/null @@ -1,13 +0,0 @@ -class TransferError(Exception): ... - - -class FileAlreadyExistsError(TransferError): ... - - -class TransferFileNotFoundError(TransferError): ... - - -class InvalidNamespaceError(TransferError): ... - - -class SchemaImportError(TransferError): ... 
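A worked example of the topological_sort helper removed above: keys depend on the values, nodes with no remaining dependencies are emitted first, and a cycle raises DependencyCycleExistsError:

from infrahub_sdk.topological_sort import DependencyCycleExistsError, topological_sort

# "device" depends on "site" and "rack"; "rack" depends on "site"
assert topological_sort({"device": ["site", "rack"], "rack": ["site"]}) == [{"site"}, {"rack"}, {"device"}]

try:
    topological_sort({"a": ["b"], "b": ["a"]})
except DependencyCycleExistsError as exc:
    print(exc.get_cycle_strings())  # ['a --> b --> a']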
diff --git a/python_sdk/infrahub_sdk/transfer/exporter/__init__.py b/python_sdk/infrahub_sdk/transfer/exporter/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/python_sdk/infrahub_sdk/transfer/exporter/interface.py b/python_sdk/infrahub_sdk/transfer/exporter/interface.py deleted file mode 100644 index 3a46027ce4..0000000000 --- a/python_sdk/infrahub_sdk/transfer/exporter/interface.py +++ /dev/null @@ -1,10 +0,0 @@ -from abc import ABC, abstractmethod -from pathlib import Path -from typing import Optional - - -class ExporterInterface(ABC): - @abstractmethod - async def export( - self, export_directory: Path, namespaces: list[str], branch: str, exclude: Optional[list[str]] = None - ) -> None: ... diff --git a/python_sdk/infrahub_sdk/transfer/exporter/json.py b/python_sdk/infrahub_sdk/transfer/exporter/json.py deleted file mode 100644 index d35cf96138..0000000000 --- a/python_sdk/infrahub_sdk/transfer/exporter/json.py +++ /dev/null @@ -1,166 +0,0 @@ -from contextlib import contextmanager -from pathlib import Path -from typing import TYPE_CHECKING, Any, Generator, Optional - -import ujson -from rich.console import Console -from rich.progress import Progress - -from infrahub_sdk.client import InfrahubClient -from infrahub_sdk.queries import QUERY_RELATIONSHIPS -from infrahub_sdk.schema import MainSchemaTypes, NodeSchema - -from ..constants import ILLEGAL_NAMESPACES -from ..exceptions import FileAlreadyExistsError, InvalidNamespaceError -from .interface import ExporterInterface - -if TYPE_CHECKING: - from infrahub_sdk.node import InfrahubNode - - -class LineDelimitedJSONExporter(ExporterInterface): - def __init__(self, client: InfrahubClient, console: Optional[Console] = None): - self.client = client - self.console = console - - @contextmanager - def wrapped_task_output(self, start: str, end: str = "[green]done") -> Generator: - if self.console: - self.console.print(f"{start}", end="...") - yield - if self.console: - self.console.print(f"{end}") - - def identify_many_to_many_relationships( - self, node_schema_map: dict[str, MainSchemaTypes] - ) -> dict[tuple[str, str], str]: - # Identify many to many relationships by src/dst couples - many_relationship_identifiers: dict[tuple[str, str], str] = {} - - for node_schema in node_schema_map.values(): - for relationship in node_schema.relationships: - if ( - relationship.cardinality != "many" - or not relationship.optional - or not relationship.identifier - or relationship.peer not in node_schema_map - ): - continue - for peer_relationship in node_schema_map[relationship.peer].relationships: - if peer_relationship.cardinality != "many" or peer_relationship.peer != node_schema.kind: - continue - - forward = many_relationship_identifiers.get((node_schema.kind, relationship.peer)) - backward = many_relationship_identifiers.get((relationship.peer, node_schema.kind)) - - # Record the relationship only if it's not known in one way or another - if not forward and not backward: - many_relationship_identifiers[(node_schema.kind, relationship.peer)] = relationship.identifier - - return many_relationship_identifiers - - async def retrieve_many_to_many_relationships( - self, node_schema_map: dict[str, MainSchemaTypes], branch: str - ) -> list[dict[str, Any]]: - has_remaining_items = True - page_number = 1 - page_size = 50 - - many_relationship_identifiers = list(self.identify_many_to_many_relationships(node_schema_map).values()) - many_relationships: list[dict[str, Any]] = [] - - if not many_relationship_identifiers: - return [] - - 
while has_remaining_items: - offset = (page_number - 1) * page_size - - response = await self.client.execute_graphql( - QUERY_RELATIONSHIPS, - variables={ - "offset": offset, - "limit": page_size, - "relationship_identifiers": many_relationship_identifiers, - }, - branch_name=branch, - tracker=f"query-relationships-page{page_number}", - ) - many_relationships.extend(response["Relationship"]["edges"]) - - remaining_items = response["Relationship"]["count"] - (offset + page_size) - if remaining_items <= 0: - has_remaining_items = False - page_number += 1 - - return many_relationships - - # FIXME: Split in smaller functions - async def export( # pylint: disable=too-many-branches - self, export_directory: Path, namespaces: list[str], branch: str, exclude: Optional[list[str]] = None - ) -> None: - illegal_namespaces = set(ILLEGAL_NAMESPACES) - node_file = export_directory / "nodes.json" - relationship_file = export_directory / "relationships.json" - - for f in (node_file, relationship_file): - if f.exists(): - raise FileAlreadyExistsError(f"{f.resolve()} already exists") - if set(namespaces) & illegal_namespaces: - raise InvalidNamespaceError(f"namespaces cannot include {illegal_namespaces}") - - with self.wrapped_task_output("Retrieving schema to export"): - node_schema_map = await self.client.schema.all(branch=branch, namespaces=namespaces) - node_schema_map = { - kind: schema - for kind, schema in node_schema_map.items() - if isinstance(schema, NodeSchema) - and schema.namespace not in illegal_namespaces - and (not exclude or kind not in exclude) - } - retrieved_namespaces = {node_schema.namespace for node_schema in node_schema_map.values()} - - if namespaces: - invalid_namespaces = [ns for ns in namespaces if ns not in retrieved_namespaces] - if invalid_namespaces: - raise InvalidNamespaceError(f"these namespaces do not exist on branch {branch}: {invalid_namespaces}") - - with self.wrapped_task_output("Retrieving many-to-many relationships"): - many_relationships = await self.retrieve_many_to_many_relationships(node_schema_map, branch) - - schema_batch = await self.client.create_batch() - for node_schema in node_schema_map.values(): - schema_batch.add(node_schema.kind, task=self.client.all, branch=branch) - - all_nodes: list[InfrahubNode] = [] - if self.console: - progress = Progress() - progress.start() - progress_task = progress.add_task("Retrieving nodes...", total=schema_batch.num_tasks) - async for _, schema_nodes in schema_batch.execute(): - all_nodes.extend(schema_nodes) - if self.console: - progress.update(progress_task, advance=1) - if self.console: - progress.stop() - - with self.wrapped_task_output("Writing export"): - json_lines = [ - ujson.dumps( - { - "id": n.id, - "kind": n.get_kind(), - "graphql_json": ujson.dumps(n.get_raw_graphql_data()), - } - ) - for n in all_nodes - ] - file_content = "\n".join(json_lines) - - if not export_directory.exists(): - export_directory.mkdir() - - node_file.write_text(file_content) - relationship_file.write_text(ujson.dumps(many_relationships)) - - if self.console: - self.console.print(f"Export directory - {export_directory}") diff --git a/python_sdk/infrahub_sdk/transfer/importer/__init__.py b/python_sdk/infrahub_sdk/transfer/importer/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/python_sdk/infrahub_sdk/transfer/importer/interface.py b/python_sdk/infrahub_sdk/transfer/importer/interface.py deleted file mode 100644 index 885a2f4bb1..0000000000 --- a/python_sdk/infrahub_sdk/transfer/importer/interface.py +++ 
/dev/null @@ -1,7 +0,0 @@ -from abc import ABC, abstractmethod -from pathlib import Path - - -class ImporterInterface(ABC): - @abstractmethod - async def import_data(self, import_directory: Path, branch: str) -> None: ... diff --git a/python_sdk/infrahub_sdk/transfer/importer/json.py b/python_sdk/infrahub_sdk/transfer/importer/json.py deleted file mode 100644 index 20b1ac2a50..0000000000 --- a/python_sdk/infrahub_sdk/transfer/importer/json.py +++ /dev/null @@ -1,195 +0,0 @@ -from collections import defaultdict -from contextlib import contextmanager -from pathlib import Path -from typing import TYPE_CHECKING, Any, Generator, Mapping, Optional, Sequence - -import pyarrow.json as pa_json -import ujson -from rich.console import Console -from rich.progress import Progress - -from infrahub_sdk.batch import InfrahubBatch -from infrahub_sdk.client import InfrahubClient -from infrahub_sdk.exceptions import GraphQLError -from infrahub_sdk.node import InfrahubNode, RelatedNode, RelationshipManager -from infrahub_sdk.transfer.schema_sorter import InfrahubSchemaTopologicalSorter - -from ..exceptions import TransferFileNotFoundError -from .interface import ImporterInterface - -if TYPE_CHECKING: - from infrahub_sdk.schema import NodeSchema, RelationshipSchema - - -class LineDelimitedJSONImporter(ImporterInterface): - def __init__( - self, - client: InfrahubClient, - topological_sorter: InfrahubSchemaTopologicalSorter, - continue_on_error: bool = False, - console: Optional[Console] = None, - ): - self.client = client - self.topological_sorter = topological_sorter - self.continue_on_error = continue_on_error - self.console = console - self.all_nodes: dict[str, InfrahubNode] = {} - self.schemas_by_kind: Mapping[str, NodeSchema] = {} - # Map relationship schema by attribute of a node kind e.g. 
{"MyNodeKind": {"MyRelationship": RelationshipSchema}} - # This is used to resolve which relationships are many to many to prevent them from being re-imported like others as they'll get duplicated - self.optional_relationships_schemas_by_node_kind: dict[str, dict[str, RelationshipSchema]] = defaultdict(dict) - self.optional_relationships_by_node: dict[str, dict[str, Any]] = defaultdict(dict) - - @contextmanager - def wrapped_task_output(self, start: str, end: str = "[green]done") -> Generator: - if self.console: - self.console.print(f"{start}", end="...") - yield - if self.console: - self.console.print(f"{end}") - - async def import_data(self, import_directory: Path, branch: str) -> None: - node_file = import_directory / "nodes.json" - relationship_file = import_directory / "relationships.json" - for f in (node_file, relationship_file): - if not f.exists(): - raise TransferFileNotFoundError(f"{f.resolve()} does not exist") - with self.wrapped_task_output("Reading import directory"): - table = pa_json.read_json(node_file.resolve()) - - with self.wrapped_task_output("Analyzing import"): - import_nodes_by_kind = defaultdict(list) - for graphql_data, kind in zip(table.column("graphql_json"), table.column("kind")): - node = await InfrahubNode.from_graphql(self.client, branch, ujson.loads(str(graphql_data))) - import_nodes_by_kind[str(kind)].append(node) - self.all_nodes[node.id] = node - - schema_batch = await self.client.create_batch() - for kind in import_nodes_by_kind: - schema_batch.add(task=self.client.schema.get, kind=kind, branch=branch) - schemas = await self.execute_batches([schema_batch], "Retrieving schema") - - self.schemas_by_kind = {schema.kind: schema for schema in schemas} - - with self.wrapped_task_output("Ordering schema for import"): - ordered_schema_names = self.topological_sorter.get_sorted_node_schema(schemas) - - with self.wrapped_task_output("Preparing nodes for import"): - await self.remove_and_store_optional_relationships() - - with self.wrapped_task_output("Building import batches"): - save_batch = await self.client.create_batch(return_exceptions=True) - for group in ordered_schema_names: - for kind in group: - schema_import_nodes = import_nodes_by_kind[kind] - if not schema_import_nodes: - continue - for node in schema_import_nodes: - save_batch.add(task=node.create, node=node, allow_upsert=True) - - await self.execute_batches([save_batch], "Creating and/or updating nodes") - - if not self.optional_relationships_by_node: - return - - await self.update_optional_relationships() - await self.update_many_to_many_relationships(file=relationship_file) - - async def remove_and_store_optional_relationships(self) -> None: - for node in self.all_nodes.values(): - node_kind = node.get_kind() - - # Build a relationship name to relationship schema map, so we can retrieve the schema based on the name of a relationship later - for relationship_schema in self.schemas_by_kind[node_kind].relationships: - if relationship_schema.optional: - self.optional_relationships_schemas_by_node_kind[node_kind][relationship_schema.name] = ( - relationship_schema - ) - - for relationship_name in self.optional_relationships_schemas_by_node_kind[node_kind].keys(): - relationship_value = getattr(node, relationship_name) - if isinstance(relationship_value, RelationshipManager): - if relationship_value.peer_ids: - self.optional_relationships_by_node[node.id][relationship_name] = relationship_value - setattr(node, relationship_name, None) - elif isinstance(relationship_value, RelatedNode): - if 
relationship_value.id: - self.optional_relationships_by_node[node.id][relationship_name] = relationship_value - setattr(node, relationship_name, None) - - async def update_optional_relationships(self) -> None: - update_batch = await self.client.create_batch(return_exceptions=True) - for node in self.all_nodes.values(): - node_kind = node.get_kind() - if node.id not in self.optional_relationships_by_node: - continue - for relationship_attr, relationship_value in self.optional_relationships_by_node[node.id].items(): - ignore = False - relationship_schema = self.optional_relationships_schemas_by_node_kind[node_kind][relationship_attr] - - # Check if we are in a many-many relationship, ignore importing it if it is - if relationship_schema.cardinality == "many": - for peer_relationship in self.schemas_by_kind[relationship_schema.peer].relationships: - if peer_relationship.cardinality == "many" and peer_relationship.peer == node_kind: - ignore = True - - if not ignore: - setattr(node, relationship_attr, relationship_value) - update_batch.add(task=node.update, node=node) - await self.execute_batches([update_batch], "Adding optional relationships to nodes") - - async def update_many_to_many_relationships(self, file: Path) -> None: - relationships = ujson.loads(file.read_text()) - update_batch = await self.client.create_batch(return_exceptions=True) - - for relationship in relationships: - peers = relationship["node"]["peers"] - src_node = self.all_nodes[peers[0]["id"]] - dst_node = self.all_nodes[peers[1]["id"]] - - src_node_relationship = src_node._schema.get_relationship_by_identifier(relationship["node"]["identifier"]) - if src_node_relationship: - update_batch.add( - task=src_node.add_relationships, # type: ignore[arg-type] - node=src_node, - relation_to_update=src_node_relationship.name, - related_nodes=[dst_node.id], - ) - - await self.execute_batches([update_batch], "Adding many-to-many relationships to nodes") - - async def execute_batches( - self, batches: list[InfrahubBatch], progress_bar_message: str = "Executing batches" - ) -> Sequence[Any]: - if self.console: - task_count = sum((batch.num_tasks for batch in batches)) - progress = Progress() - progress.start() - progress_task = progress.add_task(f"{progress_bar_message}...", total=task_count) - exceptions, results = [], [] - for batch in batches: - async for result in batch.execute(): - if self.console: - progress.update(progress_task, advance=1) - if isinstance(result, Exception): - if not self.continue_on_error: - if self.console: - progress.stop() - raise result - if isinstance(result, GraphQLError): - error_name = type(result).__name__ - error_msgs = [err["message"] for err in result.errors] - error_str = f"{error_name}: {error_msgs}" - else: - error_str = str(result) - exceptions.append(error_str) - else: - results.append(result[1]) - if self.console: - progress.stop() - - if self.console and exceptions: - self.console.print(f"[red]{len(exceptions)} failures") - for exception_str in exceptions: - self.console.print(f"[red]{exception_str}") - return results diff --git a/python_sdk/infrahub_sdk/transfer/schema_sorter.py b/python_sdk/infrahub_sdk/transfer/schema_sorter.py deleted file mode 100644 index 2ef87ccae1..0000000000 --- a/python_sdk/infrahub_sdk/transfer/schema_sorter.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Optional, Sequence - -from infrahub_sdk.schema import BaseNodeSchema - -from ..topological_sort import DependencyCycleExistsError, topological_sort -from .exceptions import SchemaImportError - - -class 
InfrahubSchemaTopologicalSorter: - def get_sorted_node_schema( - self, - schemas: Sequence[BaseNodeSchema], - required_relationships_only: bool = True, - include: Optional[list[str]] = None, - ) -> list[set[str]]: - relationship_graph: dict[str, set[str]] = {} - for node_schema in schemas: - if include and node_schema.kind not in include: - continue - relationship_graph[node_schema.kind] = set() - for relationship_schema in node_schema.relationships: - if required_relationships_only and relationship_schema.optional: - continue - relationship_graph[node_schema.kind].add(relationship_schema.peer) - - try: - return topological_sort(relationship_graph) - except DependencyCycleExistsError as exc: - raise SchemaImportError("Cannot import nodes. There are cycles in the dependency graph.") from exc diff --git a/python_sdk/infrahub_sdk/transforms.py b/python_sdk/infrahub_sdk/transforms.py deleted file mode 100644 index f558ad2270..0000000000 --- a/python_sdk/infrahub_sdk/transforms.py +++ /dev/null @@ -1,115 +0,0 @@ -from __future__ import annotations - -import asyncio -import importlib -import os -from abc import abstractmethod -from typing import TYPE_CHECKING, Any, Optional - -from git import Repo - -from infrahub_sdk import InfrahubClient - -from .exceptions import InfrahubTransformNotFoundError - -if TYPE_CHECKING: - from pathlib import Path - - from .schema import InfrahubPythonTransformConfig - -INFRAHUB_TRANSFORM_VARIABLE_TO_IMPORT = "INFRAHUB_TRANSFORMS" - - -class InfrahubTransform: - name: Optional[str] = None - query: str - timeout: int = 10 - - def __init__(self, branch: str = "", root_directory: str = "", server_url: str = ""): - self.git: Repo - - self.branch = branch - - self.server_url = server_url or os.environ.get("INFRAHUB_URL", "http://127.0.0.1:8000") - self.root_directory = root_directory or os.getcwd() - - self.client: InfrahubClient - - if not self.name: - self.name = self.__class__.__name__ - - if not self.query: - raise ValueError("A query must be provided") - - @classmethod - async def init(cls, client: Optional[InfrahubClient] = None, *args: Any, **kwargs: Any) -> InfrahubTransform: - """Async init method, If an existing InfrahubClient client hasn't been provided, one will be created automatically.""" - - item = cls(*args, **kwargs) - - if client: - item.client = client - else: - item.client = InfrahubClient(address=item.server_url) - - return item - - @property - def branch_name(self) -> str: - """Return the name of the current git branch.""" - - if self.branch: - return self.branch - - if not self.git: - self.git = Repo(self.root_directory) - - self.branch = str(self.git.active_branch) - - return self.branch - - @abstractmethod - def transform(self, data: dict) -> Any: - pass - - async def collect_data(self) -> dict: - """Query the result of the GraphQL Query defined in self.query and return the result""" - - return await self.client.query_gql_query(name=self.query, branch_name=self.branch_name) - - async def run(self, data: Optional[dict] = None) -> Any: - """Execute the transformation after collecting the data from the GraphQL query. 
- The result of the check is determined based on the presence or not of ERROR log messages.""" - - if not data: - data = await self.collect_data() - unpacked = data.get("data") or data - - if asyncio.iscoroutinefunction(self.transform): - return await self.transform(data=unpacked) - - return self.transform(data=unpacked) - - -def get_transform_class_instance( - transform_config: InfrahubPythonTransformConfig, search_path: Optional[Path] = None -) -> InfrahubTransform: - if transform_config.file_path.is_absolute() or search_path is None: - search_location = transform_config.file_path - else: - search_location = search_path / transform_config.file_path - - try: - spec = importlib.util.spec_from_file_location(transform_config.class_name, search_location) - module = importlib.util.module_from_spec(spec) # type: ignore[arg-type] - spec.loader.exec_module(module) # type: ignore[union-attr] - - # Get the specified class from the module - transform_class = getattr(module, transform_config.class_name) - - # Create an instance of the class - transform_instance = transform_class() - except (FileNotFoundError, AttributeError) as exc: - raise InfrahubTransformNotFoundError(name=transform_config.name) from exc - - return transform_instance diff --git a/python_sdk/infrahub_sdk/types.py b/python_sdk/infrahub_sdk/types.py deleted file mode 100644 index 481103f6fb..0000000000 --- a/python_sdk/infrahub_sdk/types.py +++ /dev/null @@ -1,63 +0,0 @@ -import enum -from logging import Logger -from typing import Any, Optional, Protocol, Union, runtime_checkable - -import httpx - - -class HTTPMethod(str, enum.Enum): - GET = "get" - POST = "post" - - -class RequesterTransport(str, enum.Enum): - HTTPX = "httpx" - JSON = "json" - - -@runtime_checkable -class SyncRequester(Protocol): - def __call__( - self, - url: str, - method: HTTPMethod, - headers: dict[str, Any], - timeout: int, - payload: Optional[dict] = None, - ) -> httpx.Response: ... - - -@runtime_checkable -class AsyncRequester(Protocol): - async def __call__( - self, - url: str, - method: HTTPMethod, - headers: dict[str, Any], - timeout: int, - payload: Optional[dict] = None, - ) -> httpx.Response: ... 
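[Reviewer note] The SyncRequester/AsyncRequester protocols above are @runtime_checkable, so any callable with a matching signature can be swapped in as the HTTP transport (this is what request recording/playback builds on). A minimal sketch of a conforming SyncRequester, assuming HTTPMethod and SyncRequester stay importable from the external infrahub-sdk package; plain_requester is a hypothetical name used only for illustration:

    # Illustrative only: a callable that satisfies the SyncRequester protocol.
    from typing import Any, Optional

    import httpx

    # Assumed external import path; these names are defined in the module
    # being deleted above.
    from infrahub_sdk.types import HTTPMethod, SyncRequester

    def plain_requester(
        url: str,
        method: HTTPMethod,
        headers: dict[str, Any],
        timeout: int,
        payload: Optional[dict] = None,
    ) -> httpx.Response:
        # HTTPMethod is a str enum ("get"/"post"); httpx expects the method name.
        return httpx.request(method.value.upper(), url, headers=headers, timeout=timeout, json=payload)

    # Any plain function passes the runtime check, since the protocol's only
    # member is __call__:
    assert isinstance(plain_requester, SyncRequester)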
- - -@runtime_checkable -class InfrahubLogger(Protocol): - def debug(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send a debug event""" - - def info(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send an info event""" - - def warning(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send a warning event""" - - def error(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send an error event.""" - - def critical(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send a critical event.""" - - def exception(self, event: Optional[str] = None, *args: Any, **kw: Any) -> Any: - """Send an exception event.""" - - -InfrahubLoggers = Union[InfrahubLogger, Logger] diff --git a/python_sdk/infrahub_sdk/utils.py b/python_sdk/infrahub_sdk/utils.py deleted file mode 100644 index 0ae3e57607..0000000000 --- a/python_sdk/infrahub_sdk/utils.py +++ /dev/null @@ -1,337 +0,0 @@ -from __future__ import annotations - -import hashlib -import json -from itertools import groupby -from pathlib import Path -from typing import TYPE_CHECKING, Any, Optional, Union -from uuid import UUID, uuid4 - -import httpx -import ujson -from git.repo import Repo -from graphql import ( - FieldNode, - InlineFragmentNode, - SelectionSetNode, -) - -from infrahub_sdk.exceptions import JsonDecodeError - -if TYPE_CHECKING: - from graphql import GraphQLResolveInfo - - -def base36encode(number: int) -> str: - if not isinstance(number, (int)): - raise TypeError("number must be an integer") - is_negative = number < 0 - number = abs(number) - - alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" - base36 = "" - - while number: - number, i = divmod(number, 36) - base36 = alphabet[i] + base36 - if is_negative: - base36 = "-" + base36 - - return base36 or alphabet[0] - - -def base36decode(data: str) -> int: - return int(data, 36) - - -def base16decode(data: str) -> int: - return int(data, 16) - - -def base16encode(number: int) -> str: - if not isinstance(number, (int)): - raise TypeError("number must be an integer") - is_negative = number < 0 - number = abs(number) - - alphabet = "0123456789abcdef" - base16 = "" - - while number: - number, i = divmod(number, 16) - base16 = alphabet[i] + base16 - if is_negative: - base16 = "-" + base16 - - return base16 or alphabet[0] - - -def get_fixtures_dir() -> Path: - """Get the directory which stores fixtures that are common to multiple unit/integration tests.""" - here = Path(__file__).resolve().parent - return here.parent / "tests" / "fixtures" - - -def is_valid_uuid(value: Any) -> bool: - """Check if the input is a valid UUID.""" - try: - UUID(str(value)) - return True - except ValueError: - return False - - -def decode_json(response: httpx.Response) -> dict: - try: - return response.json() - except json.decoder.JSONDecodeError as exc: - raise JsonDecodeError(content=response.text, url=response.url) from exc - - -def generate_uuid() -> str: - return str(uuid4()) - - -def duplicates(input_list: list) -> list: - """Identify and return all the duplicates in a list.""" - - dups = [] - - clean_input_list = [item for item in input_list or [] if item is not None] - for x, y in groupby(sorted(clean_input_list)): - # list(y) returns all the occurences of item x - if len(list(y)) > 1: - dups.append(x) - - return dups - - -def intersection(list1: list[Any], list2: list[Any]) -> list: - """Calculate the intersection between 2 lists.""" - return list(set(list1) & set(list2)) - - -def compare_lists(list1: list[Any], 
list2: list[Any]) -> tuple[list[Any], list[Any], list[Any]]: - """Compare 2 lists and return : - - the intersection of both - - the item present only in list1 - - the item present only in list2 - """ - - in_both = intersection(list1=list1, list2=list2) - in_list_1 = list(set(list1) - set(in_both)) - in_list_2 = list(set(list2) - set(in_both)) - - return sorted(in_both), sorted(in_list_1), sorted(in_list_2) - - -def deep_merge_dict(dicta: dict, dictb: dict, path: Optional[list] = None) -> dict: - """Deep Merge Dictionary B into Dictionary A. - Code is inspired by https://stackoverflow.com/a/7205107 - """ - if path is None: - path = [] - for key in dictb: - if key in dicta: - if isinstance(dicta[key], dict) and isinstance(dictb[key], dict): - deep_merge_dict(dicta[key], dictb[key], path + [str(key)]) - elif dicta[key] == dictb[key]: - pass - else: - raise ValueError("Conflict at %s" % ".".join(path + [str(key)])) - else: - dicta[key] = dictb[key] - return dicta - - -def str_to_bool(value: str) -> bool: - """Convert a String to a Boolean""" - - if isinstance(value, bool): - return value - - if isinstance(value, int) and value in [0, 1]: - return bool(value) - - if not isinstance(value, str): - raise TypeError(f"{value} must be a string") - - MAP = { - "y": True, - "yes": True, - "t": True, - "true": True, - "on": True, - "1": True, - "n": False, - "no": False, - "f": False, - "false": False, - "off": False, - "0": False, - } - try: - return MAP[value.lower()] - except KeyError as exc: - raise ValueError(f"{value} can not be converted into a boolean") from exc - - -def get_flat_value(obj: Any, key: str, separator: str = "__") -> Any: - """Query recursively an value defined in a flat notation (string), on a hierarchy of objects - - Examples: - name__value - module.object.value - """ - if separator not in key: - return getattr(obj, key) - - first_part, remaining_part = key.split(separator, maxsplit=1) - sub_obj = getattr(obj, first_part) - if not sub_obj: - return None - return get_flat_value(obj=sub_obj, key=remaining_part, separator=separator) - - -def generate_request_filename(request: httpx.Request) -> str: - """Return a filename for a request sent to the Infrahub API - - This function is used when recording and playing back requests, as Infrahub is using a GraphQL - API it's not possible to rely on the URL endpoint alone to separate one request from another, - for this reason a hash of the payload is included in a filename. 
- """ - formatted = ( - str(request.url).replace(":", "_").replace("//", "").replace("/", "__").replace("?", "_q_").replace("&", "_a_") - ) - filename = f"{request.method}_{formatted}" - if request.content: - content_hash = hashlib.sha224(request.content) - filename += f"_{content_hash.hexdigest()}" - - return filename.lower() - - -def is_valid_url(url: str) -> bool: - if not isinstance(url, str): - return False - if "://" not in url and not url.startswith("/"): - return False - if "://" not in url: - url = "http://localhost" + url - - try: - parsed = httpx.URL(url) - return all([parsed.scheme, parsed.netloc]) - except TypeError: - return False - - -def find_files(extension: Union[str, list[str]], directory: Union[str, Path] = ".") -> list[Path]: - files: list[Path] = [] - - if isinstance(extension, str): - extension = [extension] - if isinstance(directory, str): - directory = Path(directory) - - for ext in extension: - files.extend(list(directory.glob(f"**/*.{ext}"))) - files.extend(list(directory.glob(f"**/.*.{ext}"))) - - return files - - -def get_branch(branch: Optional[str] = None, directory: Union[str, Path] = ".") -> str: - """If branch isn't provide, return the name of the local Git branch.""" - if branch: - return branch - - repo = Repo(directory) - return str(repo.active_branch) - - -def dict_hash(dictionary: dict[str, Any]) -> str: - """MD5 hash of a dictionary.""" - # We need to sort arguments so {'a': 1, 'b': 2} is - # the same as {'b': 2, 'a': 1} - encoded = ujson.dumps(dictionary, sort_keys=True).encode() - dhash = hashlib.md5(encoded, usedforsecurity=False) - return dhash.hexdigest() - - -def calculate_dict_depth(data: dict, level: int = 1) -> int: - """Calculate the depth of a nested Dictionary recursively.""" - if not isinstance(data, dict) or not data: - return level - return max(calculate_dict_depth(data=data[key], level=level + 1) for key in data) - - -def calculate_dict_height(data: dict, cnt: int = 0) -> int: - """Calculate the number of fields (height) in a nested Dictionary recursively.""" - for key in data: - if isinstance(data[key], dict): - cnt = calculate_dict_height(data=data[key], cnt=cnt + 1) - else: - cnt += 1 - return cnt - - -async def extract_fields(selection_set: Optional[SelectionSetNode]) -> Optional[dict[str, dict]]: - """This function extract all the requested fields in a tree of Dict from a SelectionSetNode - - The goal of this function is to limit the fields that we need to query from the backend. - - Currently the function support Fields and InlineFragments but in a combined tree where the fragments are merged together - This implementation may seam counter intuitive but in the current implementation - it's better to have slightly more information at time passed to the query manager. - - In the future we'll probably need to redesign how we read GraphQL queries to generate better Database query. 
- """ - - if not selection_set: - return None - - fields = {} - for node in selection_set.selections: - sub_selection_set = getattr(node, "selection_set", None) - if isinstance(node, FieldNode): - value = await extract_fields(sub_selection_set) - if node.name.value not in fields: - fields[node.name.value] = value - elif isinstance(fields[node.name.value], dict) and isinstance(value, dict): - fields[node.name.value].update(value) - - elif isinstance(node, InlineFragmentNode): - for sub_node in node.selection_set.selections: - sub_sub_selection_set = getattr(sub_node, "selection_set", None) - value = await extract_fields(sub_sub_selection_set) - if sub_node.name.value not in fields: - fields[sub_node.name.value] = await extract_fields(sub_sub_selection_set) - elif isinstance(fields[sub_node.name.value], dict) and isinstance(value, dict): - fields[sub_node.name.value].update(value) - - return fields - - -async def extract_fields_first_node(info: GraphQLResolveInfo) -> dict[str, dict]: - fields = None - if info.field_nodes: - fields = await extract_fields(info.field_nodes[0].selection_set) - - return fields or {} - - -def write_to_file(path: Path, value: Any) -> bool: - """Write a given value into a file and return if the operation was successful. - - If the file does not exist, the function will attempt to create it.""" - if not path.exists(): - path.touch() - - if path.is_dir(): - raise FileExistsError(f"{path} is a directory") - - to_write = str(value) - written = path.write_text(to_write) - - return written is not None diff --git a/python_sdk/infrahub_sdk/uuidt.py b/python_sdk/infrahub_sdk/uuidt.py deleted file mode 100644 index cd3d553d0a..0000000000 --- a/python_sdk/infrahub_sdk/uuidt.py +++ /dev/null @@ -1,65 +0,0 @@ -from __future__ import annotations - -import random -import socket -import time -from pathlib import Path -from typing import Optional -from uuid import UUID - -from infrahub_sdk.utils import base16encode - -BASE = 16 -DIVISOR = BASE - 1 -CHARACTERS = list("0123456789abcdefghijklmnopqrstuvwxyz")[:BASE] -HOSTNAME = socket.gethostname() -DEFAULT_NAMESPACE = str(Path(__file__).parent.resolve()) - -# Code inspired from https://github.com/isaacharrisholt/uuidt - - -def generate_uuid() -> str: - return str(UUIDT()) - - -def encode_number(number: int, min_length: int) -> str: - """Encode a number into a base16 string and ensure the result has a minimum size. 
- If the initial response produced doesn't match the min requirement, - random number will be used to fill the gap - """ - response = base16encode(number=number).lower() - if len(response) >= min_length: - return response - return response + "".join(random.choices(CHARACTERS, k=min_length - len(response))) - - -class UUIDT: - def __init__( - self, - namespace: Optional[str] = None, - timestamp: Optional[int] = None, - hostname: Optional[str] = None, - random_chars: Optional[str] = None, - ): - self.namespace = namespace or DEFAULT_NAMESPACE - self.timestamp = timestamp or time.time_ns() - self.hostname = hostname or HOSTNAME - self.random_chars = random_chars or "".join(random.choices(CHARACTERS, k=8)) - - def __str__(self) -> str: - hostname_enc = sum(self.hostname.encode("utf-8")) - namespace_enc = sum(self.namespace.encode("utf-8")) - - timestamp_str = encode_number(number=self.timestamp, min_length=16) - hostname_str = encode_number(number=hostname_enc, min_length=4) - namespace_str = encode_number(number=namespace_enc, min_length=4) - - return f"{timestamp_str[:8]}-{timestamp_str[8:12]}-{timestamp_str[-4:]}-{hostname_str[:4]}-{namespace_str[:4]}{self.random_chars[:8]}" - - def short(self) -> str: - """Return the last 8 digit of the UUID (the most random part)""" - return str(self)[-8:] - - @classmethod - def new(cls, namespace: Optional[str] = None) -> UUID: - return UUID(str(cls(namespace=namespace))) diff --git a/python_sdk/infrahub_sdk/yaml.py b/python_sdk/infrahub_sdk/yaml.py deleted file mode 100644 index aa6da1a1d1..0000000000 --- a/python_sdk/infrahub_sdk/yaml.py +++ /dev/null @@ -1,25 +0,0 @@ -from pathlib import Path -from typing import Optional - -import yaml -from pydantic import BaseModel - - -class SchemaFile(BaseModel): - identifier: Optional[str] = None - location: Path - content: Optional[dict] = None - valid: bool = True - error_message: Optional[str] = None - - def load_content(self) -> None: - try: - self.content = yaml.safe_load(self.location.read_text()) - except yaml.YAMLError: - self.error_message = "Invalid YAML/JSON file" - self.valid = False - return - - if not self.content: - self.error_message = "Empty YAML/JSON file" - self.valid = False diff --git a/python_sdk/poetry.lock b/python_sdk/poetry.lock deleted file mode 100644 index 31bfcfa40c..0000000000 --- a/python_sdk/poetry.lock +++ /dev/null @@ -1,1970 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
- -[[package]] -name = "annotated-types" -version = "0.6.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, -] - -[[package]] -name = "anyio" -version = "4.3.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "astroid" -version = "3.1.0" -description = "An abstract syntax tree for Python with inference support." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "astroid-3.1.0-py3-none-any.whl", hash = "sha256:951798f922990137ac090c53af473db7ab4e70c770e6d7fae0cec59f74411819"}, - {file = "astroid-3.1.0.tar.gz", hash = "sha256:ac248253bfa4bd924a0de213707e7ebeeb3138abeb48d798784ead1e56d419d4"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[package.dependencies] -six = ">=1.12.0" - -[package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -optional = false -python-versions = "*" -files = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] - -[[package]] -name = "certifi" -version = "2024.7.4" -description = "Python package for providing Mozilla's CA Bundle." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, -] - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, 
- {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = true -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "coverage" -version = "7.4.4" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, - {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, - {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, - {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, - {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = 
"sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, - {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, - {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, - {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, - {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, - {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, - {file = 
"coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, - {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, - {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, - {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, - {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "dill" -version = "0.3.8" -description = "serialize all of Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] - -[[package]] -name = "distlib" -version = "0.3.8" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] 
- -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "execnet" -version = "2.0.2" -description = "execnet: rapid multi-Python deployment" -optional = false -python-versions = ">=3.7" -files = [ - {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, - {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, -] - -[package.extras] -testing = ["hatch", "pre-commit", "pytest", "tox"] - -[[package]] -name = "executing" -version = "2.0.1" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.5" -files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "filelock" -version = "3.13.1" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "gitdb" -version = "4.0.11" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, - {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.43" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, - {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[package.extras] -doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] -test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] - -[[package]] -name = "graphql-core" -version = "3.2.3" -description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
-optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, - {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, -] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.4" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, - {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.25.0)"] - -[[package]] -name = "httpx" -version = "0.27.0" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "identify" -version = "2.5.35" -description = "File identification library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, - {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "ipython" -version = "8.12.3" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipython-8.12.3-py3-none-any.whl", hash = 
"sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, - {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -pickleshare = "*" -prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} - -[package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] -black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "jedi" -version = "0.19.1" -description = "An autocompletion tool for Python that can be used for text editors." -optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." 
-optional = true -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = true -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = 
"MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = 
"MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "matplotlib-inline" -version = "0.1.6" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.5" -files = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "mypy" -version = "1.9.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - 
{file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nodeenv" -version = "1.8.0" -description = "Node.js virtual environment builder" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" -files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, -] - -[package.dependencies] -setuptools = "*" - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = true -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = 
"numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "packaging" -version = "24.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, -] - -[[package]] -name = "parso" -version = "0.8.3" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = 
"sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] - -[package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "pendulum" -version = "3.0.0" -description = "Python datetimes made easy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = 
"sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = 
"pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, -] - -[package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." 
-optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -optional = false -python-versions = "*" -files = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] - -[[package]] -name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] - -[[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pprintpp" -version = "0.4.0" -description = "A drop-in replacement for pprint that's actually pretty" -optional = false -python-versions = "*" -files = [ - {file = "pprintpp-0.4.0-py2.py3-none-any.whl", hash = "sha256:b6b4dcdd0c0c0d75e4d7b2f21a9e933e5b2ce62b26e1a54537f9651ae5a5c01d"}, - {file = "pprintpp-0.4.0.tar.gz", hash = "sha256:ea826108e2c7f49dc6d66c752973c3fc9749142a798d6b254e1e301cfdbc6403"}, -] - -[[package]] -name = "pre-commit" -version = "2.21.0" -description = "A framework for managing and maintaining multi-language pre-commit hooks." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, - {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "prompt-toolkit" -version = "3.0.43" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.2" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "pyarrow" -version = "14.0.2" -description = "Python library for Apache Arrow" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"}, - {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"}, - {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"}, - {file = 
"pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"}, - {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"}, - {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"}, - {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"}, - {file = 
"pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"}, - {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"}, - {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"}, -] - -[package.dependencies] -numpy = ">=1.16.6" - -[[package]] -name = "pydantic" -version = "2.7.3" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.7.3-py3-none-any.whl", hash = "sha256:ea91b002777bf643bb20dd717c028ec43216b24a6001a280f83877fd2655d0b4"}, - {file = "pydantic-2.7.3.tar.gz", hash = "sha256:c46c76a40bb1296728d7a8b99aa73dd70a48c3510111ff290034f860c99c419e"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.18.4" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.18.4" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, - {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, - {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, - {file = 
"pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, - {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, - {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, - {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, - {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = 
"sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, - {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, - {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, - {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, - {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, - {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, - {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, - {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pydantic-settings" 
-version = "2.3.1" -description = "Settings management using Pydantic" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_settings-2.3.1-py3-none-any.whl", hash = "sha256:acb2c213140dfff9669f4fe9f8180d43914f51626db28ab2db7308a576cce51a"}, - {file = "pydantic_settings-2.3.1.tar.gz", hash = "sha256:e34bbd649803a6bb3e2f0f58fb0edff1f0c7f556849fda106cc21bcce12c30ab"}, -] - -[package.dependencies] -pydantic = ">=2.7.0" -python-dotenv = ">=0.21.0" - -[package.extras] -toml = ["tomli (>=2.0.1)"] -yaml = ["pyyaml (>=6.0.1)"] - -[[package]] -name = "pygments" -version = "2.17.2" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] - -[package.extras] -plugins = ["importlib-metadata"] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pylint" -version = "3.1.0" -description = "python code static checker" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "pylint-3.1.0-py3-none-any.whl", hash = "sha256:507a5b60953874766d8a366e8e8c7af63e058b26345cfcb5f91f89d987fd6b74"}, - {file = "pylint-3.1.0.tar.gz", hash = "sha256:6a69beb4a6f63debebaab0a3477ecd0f559aa726af4954fc948c51f7a2549e23"}, -] - -[package.dependencies] -astroid = ">=3.1.0,<=3.2.0-dev0" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, - {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, -] -isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" -mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - -[package.extras] -spelling = ["pyenchant (>=3.2,<4.0)"] -testutils = ["gitpython (>3)"] - -[[package]] -name = "pytest" -version = "8.1.1" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.4,<2.0" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.21.1" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, - {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, -] - -[package.dependencies] -pytest = ">=7.0.0" - 
-[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] - -[[package]] -name = "pytest-clarity" -version = "1.0.1" -description = "A plugin providing an alternative, colourful diff output for failing assertions." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pytest-clarity-1.0.1.tar.gz", hash = "sha256:505fe345fad4fe11c6a4187fe683f2c7c52c077caa1e135f3e483fe112db7772"}, -] - -[package.dependencies] -pprintpp = ">=0.4.0" -pytest = ">=3.5.0" -rich = ">=8.0.0" - -[[package]] -name = "pytest-cov" -version = "4.1.0" -description = "Pytest plugin for measuring coverage." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "pytest-httpx" -version = "0.30.0" -description = "Send responses to httpx." -optional = false -python-versions = ">=3.9" -files = [ - {file = "pytest-httpx-0.30.0.tar.gz", hash = "sha256:755b8edca87c974dd4f3605c374fda11db84631de3d163b99c0df5807023a19a"}, - {file = "pytest_httpx-0.30.0-py3-none-any.whl", hash = "sha256:6d47849691faf11d2532565d0c8e0e02b9f4ee730da31687feae315581d7520c"}, -] - -[package.dependencies] -httpx = "==0.27.*" -pytest = ">=7,<9" - -[package.extras] -testing = ["pytest-asyncio (==0.23.*)", "pytest-cov (==4.*)"] - -[[package]] -name = "pytest-xdist" -version = "3.5.0" -description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"}, - {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"}, -] - -[package.dependencies] -execnet = ">=1.1" -pytest = ">=6.2.0" - -[package.extras] -psutil = ["psutil (>=3.0)"] -setproctitle = ["setproctitle"] -testing = ["filelock"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "pyyaml" 
-version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = 
"PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "requests" -version = "2.32.2" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, - {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rich" -version = "13.7.1" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "ruff" -version = "0.5.0" -description = "An extremely fast Python linter and code formatter, written in Rust." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.5.0-py3-none-linux_armv6l.whl", hash = "sha256:ee770ea8ab38918f34e7560a597cc0a8c9a193aaa01bfbd879ef43cb06bd9c4c"}, - {file = "ruff-0.5.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:38f3b8327b3cb43474559d435f5fa65dacf723351c159ed0dc567f7ab735d1b6"}, - {file = "ruff-0.5.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7594f8df5404a5c5c8f64b8311169879f6cf42142da644c7e0ba3c3f14130370"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adc7012d6ec85032bc4e9065110df205752d64010bed5f958d25dbee9ce35de3"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d505fb93b0fabef974b168d9b27c3960714d2ecda24b6ffa6a87ac432905ea38"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dc5cfd3558f14513ed0d5b70ce531e28ea81a8a3b1b07f0f48421a3d9e7d80a"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:db3ca35265de239a1176d56a464b51557fce41095c37d6c406e658cf80bbb362"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1a321c4f68809fddd9b282fab6a8d8db796b270fff44722589a8b946925a2a8"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c4dfcd8d34b143916994b3876b63d53f56724c03f8c1a33a253b7b1e6bf2a7d"}, - {file = "ruff-0.5.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81e5facfc9f4a674c6a78c64d38becfbd5e4f739c31fcd9ce44c849f1fad9e4c"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e589e27971c2a3efff3fadafb16e5aef7ff93250f0134ec4b52052b673cf988d"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2ffbc3715a52b037bcb0f6ff524a9367f642cdc5817944f6af5479bbb2eb50e"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cd096e23c6a4f9c819525a437fa0a99d1c67a1b6bb30948d46f33afbc53596cf"}, - {file = "ruff-0.5.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:46e193b36f2255729ad34a49c9a997d506e58f08555366b2108783b3064a0e1e"}, - {file = "ruff-0.5.0-py3-none-win32.whl", hash = "sha256:49141d267100f5ceff541b4e06552e98527870eafa1acc9dec9139c9ec5af64c"}, - {file = "ruff-0.5.0-py3-none-win_amd64.whl", hash = "sha256:e9118f60091047444c1b90952736ee7b1792910cab56e9b9a9ac20af94cd0440"}, - {file = "ruff-0.5.0-py3-none-win_arm64.whl", hash = "sha256:ed5c4df5c1fb4518abcb57725b576659542bdbe93366f4f329e8f398c4b71178"}, - {file = "ruff-0.5.0.tar.gz", hash = "sha256:eb641b5873492cf9bd45bc9c5ae5320648218e04386a5f0c264ad6ccce8226a1"}, -] - -[[package]] -name = "setuptools" -version = "70.3.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"}, - {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", 
"ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "shellingham" -version = "1.5.4" -description = "Tool to Detect Surrounding Shell" -optional = true -python-versions = ">=3.7" -files = [ - {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, - {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, -] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = true -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = 
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tomlkit" -version = "0.12.4" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, - {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, -] - -[[package]] -name = "traitlets" -version = "5.14.2" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.2-py3-none-any.whl", hash = "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80"}, - {file = "traitlets-5.14.2.tar.gz", hash = "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.1)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "typer" -version = "0.12.3" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -optional = true -python-versions = ">=3.7" -files = [ - {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, - {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, -] - -[package.dependencies] -click = ">=8.0.0" -rich = ">=10.11.0" -shellingham = ">=1.3.0" -typing-extensions = ">=3.7.4.3" - -[[package]] -name = "types-python-slugify" -version = "8.0.2.20240310" -description = "Typing stubs for python-slugify" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-python-slugify-8.0.2.20240310.tar.gz", hash = "sha256:5157b508c7fed587520c70d77f62aea0fafdc6620893c2ec8972f13a1faf5560"}, - {file = "types_python_slugify-8.0.2.20240310-py3-none-any.whl", hash = "sha256:0efec18b802c69ebd22dcee55c91afaeaa80e1e40ddd66ccabf69fd42ce87b74"}, -] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.20240311" -description = "Typing stubs for PyYAML" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, - {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, -] - -[[package]] -name = "types-toml" -version = "0.10.8.20240310" -description = "Typing stubs for toml" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331"}, - {file = "types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d"}, -] - -[[package]] -name = "types-ujson" -version = "5.9.0.0" -description = "Typing stubs for ujson" -optional = false -python-versions = ">=3.7" -files = [ - {file = "types-ujson-5.9.0.0.tar.gz", hash = "sha256:7e7042454dc7cd7f31b09c420d7caf36b93d30bdf4b8db93791bd0561713d017"}, - {file = "types_ujson-5.9.0.0-py3-none-any.whl", hash = "sha256:f274fa604ed6317effcd1c424ef4cf292c3b0689cb118fb3180689d40ed1f4ed"}, -] - -[[package]] 
-name = "typing-extensions" -version = "4.10.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "ujson" -version = "5.9.0" -description = "Ultra fast JSON encoder and decoder for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ujson-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab71bf27b002eaf7d047c54a68e60230fbd5cd9da60de7ca0aa87d0bccead8fa"}, - {file = "ujson-5.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a365eac66f5aa7a7fdf57e5066ada6226700884fc7dce2ba5483538bc16c8c5"}, - {file = "ujson-5.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e015122b337858dba5a3dc3533af2a8fc0410ee9e2374092f6a5b88b182e9fcc"}, - {file = "ujson-5.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:779a2a88c53039bebfbccca934430dabb5c62cc179e09a9c27a322023f363e0d"}, - {file = "ujson-5.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10ca3c41e80509fd9805f7c149068fa8dbee18872bbdc03d7cca928926a358d5"}, - {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a566e465cb2fcfdf040c2447b7dd9718799d0d90134b37a20dff1e27c0e9096"}, - {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f833c529e922577226a05bc25b6a8b3eb6c4fb155b72dd88d33de99d53113124"}, - {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b68a0caab33f359b4cbbc10065c88e3758c9f73a11a65a91f024b2e7a1257106"}, - {file = "ujson-5.9.0-cp310-cp310-win32.whl", hash = "sha256:7cc7e605d2aa6ae6b7321c3ae250d2e050f06082e71ab1a4200b4ae64d25863c"}, - {file = "ujson-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:a6d3f10eb8ccba4316a6b5465b705ed70a06011c6f82418b59278fbc919bef6f"}, - {file = "ujson-5.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b23bbb46334ce51ddb5dded60c662fbf7bb74a37b8f87221c5b0fec1ec6454b"}, - {file = "ujson-5.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6974b3a7c17bbf829e6c3bfdc5823c67922e44ff169851a755eab79a3dd31ec0"}, - {file = "ujson-5.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5964ea916edfe24af1f4cc68488448fbb1ec27a3ddcddc2b236da575c12c8ae"}, - {file = "ujson-5.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba7cac47dd65ff88571eceeff48bf30ed5eb9c67b34b88cb22869b7aa19600d"}, - {file = "ujson-5.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bbd91a151a8f3358c29355a491e915eb203f607267a25e6ab10531b3b157c5e"}, - {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:829a69d451a49c0de14a9fecb2a2d544a9b2c884c2b542adb243b683a6f15908"}, - {file = 
"ujson-5.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a807ae73c46ad5db161a7e883eec0fbe1bebc6a54890152ccc63072c4884823b"}, - {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8fc2aa18b13d97b3c8ccecdf1a3c405f411a6e96adeee94233058c44ff92617d"}, - {file = "ujson-5.9.0-cp311-cp311-win32.whl", hash = "sha256:70e06849dfeb2548be48fdd3ceb53300640bc8100c379d6e19d78045e9c26120"}, - {file = "ujson-5.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:7309d063cd392811acc49b5016728a5e1b46ab9907d321ebbe1c2156bc3c0b99"}, - {file = "ujson-5.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:20509a8c9f775b3a511e308bbe0b72897ba6b800767a7c90c5cca59d20d7c42c"}, - {file = "ujson-5.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b28407cfe315bd1b34f1ebe65d3bd735d6b36d409b334100be8cdffae2177b2f"}, - {file = "ujson-5.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d302bd17989b6bd90d49bade66943c78f9e3670407dbc53ebcf61271cadc399"}, - {file = "ujson-5.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f21315f51e0db8ee245e33a649dd2d9dce0594522de6f278d62f15f998e050e"}, - {file = "ujson-5.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5635b78b636a54a86fdbf6f027e461aa6c6b948363bdf8d4fbb56a42b7388320"}, - {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82b5a56609f1235d72835ee109163c7041b30920d70fe7dac9176c64df87c164"}, - {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ca35f484622fd208f55041b042d9d94f3b2c9c5add4e9af5ee9946d2d30db01"}, - {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:829b824953ebad76d46e4ae709e940bb229e8999e40881338b3cc94c771b876c"}, - {file = "ujson-5.9.0-cp312-cp312-win32.whl", hash = "sha256:25fa46e4ff0a2deecbcf7100af3a5d70090b461906f2299506485ff31d9ec437"}, - {file = "ujson-5.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:60718f1720a61560618eff3b56fd517d107518d3c0160ca7a5a66ac949c6cf1c"}, - {file = "ujson-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d581db9db9e41d8ea0b2705c90518ba623cbdc74f8d644d7eb0d107be0d85d9c"}, - {file = "ujson-5.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ff741a5b4be2d08fceaab681c9d4bc89abf3c9db600ab435e20b9b6d4dfef12e"}, - {file = "ujson-5.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdcb02cabcb1e44381221840a7af04433c1dc3297af76fde924a50c3054c708c"}, - {file = "ujson-5.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e208d3bf02c6963e6ef7324dadf1d73239fb7008491fdf523208f60be6437402"}, - {file = "ujson-5.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4b3917296630a075e04d3d07601ce2a176479c23af838b6cf90a2d6b39b0d95"}, - {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0c4d6adb2c7bb9eb7c71ad6f6f612e13b264942e841f8cc3314a21a289a76c4e"}, - {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0b159efece9ab5c01f70b9d10bbb77241ce111a45bc8d21a44c219a2aec8ddfd"}, - {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0cb4a7814940ddd6619bdce6be637a4b37a8c4760de9373bac54bb7b229698b"}, - {file = "ujson-5.9.0-cp38-cp38-win32.whl", hash = "sha256:dc80f0f5abf33bd7099f7ac94ab1206730a3c0a2d17549911ed2cb6b7aa36d2d"}, - {file = "ujson-5.9.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:506a45e5fcbb2d46f1a51fead991c39529fc3737c0f5d47c9b4a1d762578fc30"}, - {file = "ujson-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0fd2eba664a22447102062814bd13e63c6130540222c0aa620701dd01f4be81"}, - {file = "ujson-5.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bdf7fc21a03bafe4ba208dafa84ae38e04e5d36c0e1c746726edf5392e9f9f36"}, - {file = "ujson-5.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f909bc08ce01f122fd9c24bc6f9876aa087188dfaf3c4116fe6e4daf7e194f"}, - {file = "ujson-5.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd4ea86c2afd41429751d22a3ccd03311c067bd6aeee2d054f83f97e41e11d8f"}, - {file = "ujson-5.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:63fb2e6599d96fdffdb553af0ed3f76b85fda63281063f1cb5b1141a6fcd0617"}, - {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:32bba5870c8fa2a97f4a68f6401038d3f1922e66c34280d710af00b14a3ca562"}, - {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:37ef92e42535a81bf72179d0e252c9af42a4ed966dc6be6967ebfb929a87bc60"}, - {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f69f16b8f1c69da00e38dc5f2d08a86b0e781d0ad3e4cc6a13ea033a439c4844"}, - {file = "ujson-5.9.0-cp39-cp39-win32.whl", hash = "sha256:3382a3ce0ccc0558b1c1668950008cece9bf463ebb17463ebf6a8bfc060dae34"}, - {file = "ujson-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:6adef377ed583477cf005b58c3025051b5faa6b8cc25876e594afbb772578f21"}, - {file = "ujson-5.9.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ffdfebd819f492e48e4f31c97cb593b9c1a8251933d8f8972e81697f00326ff1"}, - {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4eec2ddc046360d087cf35659c7ba0cbd101f32035e19047013162274e71fcf"}, - {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbb90aa5c23cb3d4b803c12aa220d26778c31b6e4b7a13a1f49971f6c7d088e"}, - {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0823cb70866f0d6a4ad48d998dd338dce7314598721bc1b7986d054d782dfd"}, - {file = "ujson-5.9.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4e35d7885ed612feb6b3dd1b7de28e89baaba4011ecdf995e88be9ac614765e9"}, - {file = "ujson-5.9.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b048aa93eace8571eedbd67b3766623e7f0acbf08ee291bef7d8106210432427"}, - {file = "ujson-5.9.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323279e68c195110ef85cbe5edce885219e3d4a48705448720ad925d88c9f851"}, - {file = "ujson-5.9.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ac92d86ff34296f881e12aa955f7014d276895e0e4e868ba7fddebbde38e378"}, - {file = "ujson-5.9.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6eecbd09b316cea1fd929b1e25f70382917542ab11b692cb46ec9b0a26c7427f"}, - {file = "ujson-5.9.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:473fb8dff1d58f49912323d7cb0859df5585cfc932e4b9c053bf8cf7f2d7c5c4"}, - {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f91719c6abafe429c1a144cfe27883eace9fb1c09a9c5ef1bcb3ae80a3076a4e"}, - {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7b1c0991c4fe256f5fdb19758f7eac7f47caac29a6c57d0de16a19048eb86bad"}, - {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ea0f55a1396708e564595aaa6696c0d8af532340f477162ff6927ecc46e21"}, - {file = "ujson-5.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:07e0cfdde5fd91f54cd2d7ffb3482c8ff1bf558abf32a8b953a5d169575ae1cd"}, - {file = "ujson-5.9.0.tar.gz", hash = "sha256:89cc92e73d5501b8a7f48575eeb14ad27156ad092c2e9fc7e3cf949f07e75532"}, -] - -[[package]] -name = "urllib3" -version = "2.2.2" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "virtualenv" -version = "20.25.1" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, - {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "yamllint" -version = "1.35.1" -description = "A linter for YAML files." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "yamllint-1.35.1-py3-none-any.whl", hash = "sha256:2e16e504bb129ff515b37823b472750b36b6de07963bd74b307341ef5ad8bdc3"}, - {file = "yamllint-1.35.1.tar.gz", hash = "sha256:7a003809f88324fd2c877734f2d575ee7881dd9043360657cc8049c809eba6cd"}, -] - -[package.dependencies] -pathspec = ">=0.5.3" -pyyaml = "*" - -[package.extras] -dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] - -[extras] -all = ["Jinja2", "numpy", "numpy", "pyarrow", "pytest", "pyyaml", "rich", "toml", "typer"] -ctl = ["Jinja2", "numpy", "numpy", "pyarrow", "pyyaml", "rich", "toml", "typer"] -tests = ["Jinja2", "pytest", "pyyaml", "rich"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9" -content-hash = "eb3e409b617c9427e3958b14314fb34141acd9c922fc038a9ec222bce2a79f31" diff --git a/python_sdk/pyproject.toml b/python_sdk/pyproject.toml deleted file mode 100644 index 86a564a301..0000000000 --- a/python_sdk/pyproject.toml +++ /dev/null @@ -1,366 +0,0 @@ -[tool.poetry] -name = "infrahub-sdk" -version = "0.13.0" -description = "Python Client to interact with Infrahub" -authors = ["OpsMill "] -readme = "README.md" -license = "Apache-2.0" -homepage = "https://opsmill.com" -repository = "https://github.com/opsmill/infrahub" -documentation = "https://docs.infrahub.app/python-sdk/" -packages = [{ include = "infrahub_sdk" }] -classifiers = [ - "Intended Audience :: Developers", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", -] - -[tool.poetry.dependencies] -python = "^3.9" -pydantic = ">=2.0.0,!=2.0.1,!=2.1.0,<3.0.0" -pydantic-settings = ">=2.0" -graphql-core = ">=3.1,<3.3" -httpx = [ - { version = ">=0.20", python = ">=3.9,<3.11" }, - { version = ">=0.23", python = ">=3.11" }, -] -pendulum = [ - { version = ">=2", python = ">=3.9,<3.12" }, - { version = ">=3", python = ">=3.12" }, -] -gitpython = "^3" -ujson = "^5" -Jinja2 = { version = "^3", optional = true } -numpy = [ - { version = "^1.24.2", optional = true, python = ">=3.9,<3.12" }, - { version = "^1.26.2", optional = true, python = ">=3.12" }, -] -pyarrow = { version = "^14", optional = true } -rich = { version = "^13", optional = true } -toml = { version = "^0.10", optional = true } -typer = { version = "^0.12.3", optional = true } -pytest = { version = "*", optional = true } -pyyaml = { version = "^6", optional = true } - -[tool.poetry.group.dev.dependencies] -pytest = "*" -pytest-asyncio = "<0.23" -pytest-clarity = "^1.0.1" -pytest-cov = "^4.0.0" -pytest-httpx = ">=0.30" -yamllint = "*" -pylint = "*" -mypy = "*" -ipython = "*" -requests = "*" -pre-commit = "^2.20.0" -types-toml = "*" -types-ujson = "*" -types-pyyaml = "*" -ruff = "0.5.0" -pytest-xdist = "^3.3.1" -types-python-slugify = "^8.0.0.3" - -[tool.poetry.extras] -ctl = ["Jinja2", "numpy", "pyarrow", "pyyaml", "rich", "toml", "typer"] -tests = ["Jinja2", "pytest", "pyyaml", "rich"] -all = [ - "Jinja2", - "numpy", - "pyarrow", - "pytest", - "pyyaml", - "rich", - "toml", - "typer", -] - -[tool.poetry.scripts] -infrahubctl = "infrahub_sdk.ctl.cli:app" - -[tool.poetry.plugins."pytest11"] -"pytest-infrahub" = "infrahub_sdk.pytest_plugin.plugin" - -[tool.coverage.run] -branch = true - -[tool.coverage.report] -exclude_lines = ["if TYPE_CHECKING:", "raise NotImplementedError()"] - -[tool.pylint.general] -extension-pkg-whitelist = ["pydantic", "ujson"] - 
-[tool.pylint.format] -disable = "logging-fstring-interpolation" - -[tool.pylint.basic] -# No docstrings required for private methods (Pylint default), or for test_ functions. -no-docstring-rgx = "^(_|test_)" - -[tool.pylint.messages_control] -# Line length is enforced by Black, so pylint doesn't need to check it. -# Pylint and Black disagree about how to format multi-line arrays; Black wins. -# Rules already covered by RUFF -# - too-many-statements -disable = """, - line-too-long, - missing-module-docstring, - missing-function-docstring, - missing-class-docstring, - consider-using-from-import, - invalid-name, - too-many-arguments, - too-many-locals, - keyword-arg-before-vararg, - too-few-public-methods, - too-many-instance-attributes, - too-many-statements, - fixme, - consider-using-f-string, - protected-access, - import-self, - wrong-import-order, - multiple-statements, - """ - -[tool.pylint.miscellaneous] -notes = """, - FIXME, - XXX, - """ - -[tool.pylint.similarities] -min-similarity-lines = 20 - -[tool.pytest.ini_options] -asyncio_mode = "auto" -testpaths = ["tests"] -filterwarnings = [ - "ignore:Module already imported so cannot be rewritten", - "ignore:Deprecated call to", -] -addopts = "-vs --cov-report term-missing --cov-report xml --dist loadscope" - -[tool.mypy] -pretty = true -ignore_missing_imports = true -disallow_untyped_defs = true - -[[tool.mypy.overrides]] -module = "infrahub_sdk.ctl.check" -ignore_errors = true - -[[tool.mypy.overrides]] -module = "infrahub_sdk.ctl.cli_commands" -ignore_errors = true - -[[tool.mypy.overrides]] -module = "infrahub_sdk.ctl.exporter" -ignore_errors = true - -[[tool.mypy.overrides]] -module = "infrahub_sdk.ctl.generator" -ignore_errors = true - -[[tool.mypy.overrides]] -module = "infrahub_sdk.ctl.importer" -ignore_errors = true - -[[tool.mypy.overrides]] -module = "infrahub_sdk.ctl.schema" -ignore_errors = true - -[[tool.mypy.overrides]] -module = "infrahub_sdk.ctl.utils" -ignore_errors = true - -[[tool.mypy.overrides]] -module = "infrahub_sdk.utils" -ignore_errors = true - -[tool.ruff] -line-length = 120 - -exclude = [ - ".git", - ".tox", - ".venv", - "env", - "_build", - "build", - "dist", - "examples", -] - - -[tool.ruff.lint] -preview = true - -task-tags = ["FIXME", "TODO", "XXX"] - -select = [ - "ANN", # flake8-annotations - "ASYNC", # flake8-async - "B", # flake8-bugbear - "C4", # flake8-comprehensions - "C90", # mccabe complexity - "DJ", # flake8-django - "DTZ", # flake8-datetimez - "E", # pycodestyle errors - "EXE", # flake8-executable - "F", # pyflakes - "I", # isort-like checks - "ICN", # flake8-import-conventions - "INP", # flake8-no-pep420 - "N", # pep8-naming - "PERF", # Perflint - "PIE", # flake8-pie - "PL", # pylint - "PTH", # flake8-use-pathlib - "PYI", # flake8-pyi - "Q", # flake8-quotes - "RET", # flake8-return - "S", # flake8-bandit - "TCH", # flake8-type-checking - "T10", # flake8-debugger - "UP", # pyupgrade - "W", # pycodestyle warnings - "YTT", # flake8-2020 -] - -ignore = [ - ################################################################################################## - # The ignored rules below should be removed once the code has been updated, they are included # - # like this so that we can reactivate them one by one. Alternatively ignored after further # - # investigation if they are deemed to not make sense. 
# - ################################################################################################## - "B007", # Loop control variable `result` not used within loop body - "B008", # Do not perform function call `typer.Option` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable - "B904", # Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... from None` to distinguish them from errors in exception handling - "B018", # Found useless attribute access. Either assign it to a variable or remove it. - "C408", # Unnecessary `dict` call (rewrite as a literal) - "C414", # Unnecessary `list` call within `sorted()` - "N802", # Function name should be lowercase - "N806", # Variable in function should be lowercase - "PERF102", # When using only the values of a dict use the `values()` method - "PERF203", # `try`-`except` within a loop incurs performance overhead - "PERF401", # Use a list comprehension to create a transformed list - "PLC0206", # Extracting value from dictionary without calling `.items()` - "PLR0912", # Too many branches - "PLR0913", # Too many arguments in function definition - "PLR0917", # Too many positional arguments - "PLR2004", # Magic value used in comparison - "PLR6201", # Use a `set` literal when testing for membership - "PLR6301", # Method could be a function, class method, or static method - "PLW0603", # Using the global statement to update `SETTINGS` is discouraged - "PLW1641", # Object does not implement `__hash__` method - "PTH100", # `os.path.abspath()` should be replaced by `Path.resolve()` - "PTH109", # `os.getcwd()` should be replaced by `Path.cwd()` - "RET504", # Unnecessary assignment to `data` before `return` statement - "S105", # Possible hardcoded password assigned to: "PASS" - "S108", # Probable insecure usage of temporary file or directory - "S311", # Standard pseudo-random generators are not suitable for cryptographic purposes - "S701", # By default, jinja2 sets `autoescape` to `False`. 
Consider using `autoescape=True` - "UP007", # Use X | Y for type annotations - "UP031", # Use format specifiers instead of percent format - "UP034", # Avoid extraneous parentheses -] - - -#https://docs.astral.sh/ruff/formatter/black/ -[tool.ruff.format] -quote-style = "double" -indent-style = "space" -skip-magic-trailing-comma = false -line-ending = "auto" - -[tool.ruff.lint.isort] -known-first-party = ["infrahub_sdk", "infrahub_ctl"] - -[tool.ruff.lint.pycodestyle] -max-line-length = 150 - -[tool.ruff.lint.mccabe] -# Target max-complexity=10 -max-complexity = 17 - -[tool.ruff.lint.per-file-ignores] - -"infrahub_sdk/**/*.py" = [ - ################################################################################################## - # Review and change the below later # - ################################################################################################## - "ANN001", # Missing type annotation for function argument - "ANN201", # ANN201 Missing return type annotation for public function - "ANN202", # Missing return type annotation for private function - "ANN204", # Missing return type annotation for special method - "ANN401", # Dynamically typed expressions (typing.Any) are disallowed -] - - -"tests/**/*.py" = [ - "PLR2004", # Magic value used in comparison - "S101", # Use of assert detected - "S106", # Possible hardcoded password assigned to variable - "S106", # Possible hardcoded password assigned to argument - - ################################################################################################## - # Review and change the below later # - ################################################################################################## - "ANN001", # Missing type annotation for function argument - "ANN201", # ANN201 Missing return type annotation for public function - "ANN202", # Missing return type annotation for private function - "ANN204", # Missing return type annotation for special method -] - -"tests/unit/sdk/test_client.py" = [ - "W293", # Blank line contains whitespace (used within output check) -] - -[tool.towncrier] - -package = "infrahub_sdk" -directory = "changelog" -filename = "CHANGELOG.md" -start_string = "\n" -underlines = ["", "", ""] -title_format = "## [{version}](https://github.com/opsmill/infrahub/tree/v{version}) - {project_date}" -issue_format = "[#{issue}](https://github.com/opsmill/infrahub/issues/{issue})" -orphan_prefix = "+" - -[[tool.towncrier.type]] -directory = "security" -name = "Security" -showcontent = true - -[[tool.towncrier.type]] -directory = "removed" -name = "Removed" -showcontent = true - -[[tool.towncrier.type]] -directory = "deprecated" -name = "Deprecated" -showcontent = true - -[[tool.towncrier.type]] -directory = "added" -name = "Added" -showcontent = true - -[[tool.towncrier.type]] -directory = "changed" -name = "Changed" -showcontent = true - -[[tool.towncrier.type]] -directory = "fixed" -name = "Fixed" -showcontent = true - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/python_sdk/tests/__init__.py b/python_sdk/tests/__init__.py deleted file mode 100644 index 9c48bcf96d..0000000000 --- a/python_sdk/tests/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -import builtins - -from rich import print as rprint - -builtins.rprint = rprint # type: ignore diff --git a/python_sdk/tests/adapters b/python_sdk/tests/adapters deleted file mode 120000 index 78aa5c90ce..0000000000 --- a/python_sdk/tests/adapters +++ /dev/null @@ -1 +0,0 @@ -../../backend/tests/adapters \ No newline at end 
diff --git a/python_sdk/tests/conftest.py b/python_sdk/tests/conftest.py
deleted file mode 100644
index 841eb8058c..0000000000
--- a/python_sdk/tests/conftest.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import asyncio
-
-import pytest
-
-from infrahub_sdk.ctl import config
-
-pytest_plugins = ["pytester"]
-
-
-@pytest.fixture(scope="session")
-def event_loop():
-    """Overrides pytest default function scoped event loop"""
-    policy = asyncio.get_event_loop_policy()
-    loop = policy.new_event_loop()
-    yield loop
-    loop.close()
-
-
-@pytest.fixture(scope="session", autouse=True)
-def execute_before_any_test():
-    config.SETTINGS.load_and_exit()
-    config.SETTINGS.active.server_address = "http://mock"
diff --git a/python_sdk/tests/constants b/python_sdk/tests/constants
deleted file mode 120000
index 294159abe4..0000000000
--- a/python_sdk/tests/constants
+++ /dev/null
@@ -1 +0,0 @@
-../../backend/tests/constants
\ No newline at end of file
diff --git a/python_sdk/tests/fixtures/models/empty.json b/python_sdk/tests/fixtures/models/empty.json
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/python_sdk/tests/fixtures/models/non_valid_json_01.json b/python_sdk/tests/fixtures/models/non_valid_json_01.json
deleted file mode 100644
index cbe0e1a17a..0000000000
--- a/python_sdk/tests/fixtures/models/non_valid_json_01.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{ "version": "1.0",
-    "nodes": [
-        "name": "device",
-        "attributes": [
-            {
-                "name": "description",
-                "kind": "String",
-                "optional": true
-            },
-            {
-                "name": "type",
-                "kind": "String"
-            }
-        ]
-    }
-    ]
-}
\ No newline at end of file
diff --git a/python_sdk/tests/fixtures/models/non_valid_model_01.json b/python_sdk/tests/fixtures/models/non_valid_model_01.json
deleted file mode 100644
index 016ea27731..0000000000
--- a/python_sdk/tests/fixtures/models/non_valid_model_01.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{ "version": "1.0",
-    "nodes": [
-        {
-            "name": "device",
-            "attributes": [
-                {
-                    "name": "description",
-                    "kind": "String",
-                    "optional": true
-                },
-                {
-                    "name": "type",
-                    "kind": "String"
-                }
-            ]
-        }
-    ]
-}
\ No newline at end of file
diff --git a/python_sdk/tests/fixtures/models/non_valid_namespace.json b/python_sdk/tests/fixtures/models/non_valid_namespace.json
deleted file mode 100644
index 88de68a193..0000000000
--- a/python_sdk/tests/fixtures/models/non_valid_namespace.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{ "version": "1.0",
-    "nodes": [
-        {
-            "name": "Device",
-            "namespace": "OuT",
-            "default_filter": "name__value",
-            "branch": "aware",
-            "attributes": [
-                {
-                    "name": "name",
-                    "kind": "Text",
-                    "unique": true
-                },
-                {
-                    "name": "description",
-                    "kind": "Text",
-                    "optional": true
-                },
-                {
-                    "name": "type",
-                    "kind": "Text"
-                }
-            ]
-        }
-    ]
-}
\ No newline at end of file
diff --git a/python_sdk/tests/fixtures/models/valid_model_01.json b/python_sdk/tests/fixtures/models/valid_model_01.json
deleted file mode 100644
index eacb51a94a..0000000000
--- a/python_sdk/tests/fixtures/models/valid_model_01.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{ "version": "1.0",
-    "nodes": [
-        {
-            "name": "Device",
-            "namespace": "Infra",
-            "default_filter": "name__value",
-            "branch": "aware",
-            "attributes": [
-                {
-                    "name": "name",
-                    "kind": "Text",
-                    "unique": true
-                },
-                {
-                    "name": "description",
-                    "kind": "Text",
-                    "optional": true
-                },
-                {
-                    "name": "type",
-                    "kind": "Text"
-                }
-            ]
-        }
-    ]
-}
\ No newline at end of file
diff --git a/python_sdk/tests/fixtures/models/valid_schemas/contract.yml b/python_sdk/tests/fixtures/models/valid_schemas/contract.yml
deleted file mode 100644 index 398f2bd7f2..0000000000 --- a/python_sdk/tests/fixtures/models/valid_schemas/contract.yml +++ /dev/null @@ -1,35 +0,0 @@ ---- -version: '1.0' -nodes: - - name: Contract - namespace: Procurement - description: "Generic Contract" - label: "Contract" - display_labels: - - contract_ref__value - order_by: - - contract_ref__value - attributes: - - name: contract_ref - label: Contract Reference - kind: Text - unique: true - - name: description - kind: Text - optional: true - relationships: - - name: Organization - peer: TestOrganization - optional: false - cardinality: one - kind: Attribute - -extensions: - nodes: - - kind: TestOrganization - relationships: - - name: contract - peer: ProcurementContract - optional: true - cardinality: many - kind: Component diff --git a/python_sdk/tests/fixtures/models/valid_schemas/rack.yml b/python_sdk/tests/fixtures/models/valid_schemas/rack.yml deleted file mode 100644 index aaec5e566f..0000000000 --- a/python_sdk/tests/fixtures/models/valid_schemas/rack.yml +++ /dev/null @@ -1,39 +0,0 @@ ---- -version: '1.0' -nodes: - - name: Rack - namespace: Infra - description: "A Rack represents a physical two- or four-post equipment rack in which devices can be installed." - label: "Rack" - default_filter: name__value - display_labels: - - name__value - attributes: - - name: name - kind: Text - unique: true - - name: description - kind: Text - optional: true - - name: height - kind: Text - relationships: - - name: location - peer: BuiltinLocation - optional: false - cardinality: one - kind: Attribute - - name: tags - peer: BuiltinTag - optional: true - cardinality: many - kind: Attribute -extensions: - nodes: - - kind: BuiltinLocation - relationships: - - name: racks - peer: InfraRack - optional: true - cardinality: many - kind: Generic diff --git a/python_sdk/tests/fixtures/schema_01.json b/python_sdk/tests/fixtures/schema_01.json deleted file mode 100644 index 460afadd24..0000000000 --- a/python_sdk/tests/fixtures/schema_01.json +++ /dev/null @@ -1,404 +0,0 @@ -{ - "nodes": [ - { - "name": "GraphQLQuery", - "namespace": "Core", - "description": null, - "attributes": [ - { - "name": "query", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false - }, - { - "name": "description", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "name", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": true, - "branch": "aware", - "optional": false - } - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "label": null, - "description": null, - "identifier": "graphqlquery__tag", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "id", - "kind": "String", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - } - ] - } - ], - "label": null, - "inherit_from": [], - "branch": "aware", - "default_filter": "name__value", - "filters": [ - { - "name": "ids", - "kind": "List", - "description": null - }, - { - "name": "query__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - 
"description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - } - ] - }, - { - "name": "Repository", - "namespace": "Core", - "description": null, - "attributes": [ - { - "name": "username", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "type", - "kind": "String", - "label": null, - "description": null, - "default_value": "LOCAL", - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false - }, - { - "name": "name", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": true, - "branch": "aware", - "optional": false - }, - { - "name": "description", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "commit", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "location", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false - }, - { - "name": "password", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "default_branch", - "kind": "String", - "label": null, - "description": null, - "default_value": "main", - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false - } - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "label": null, - "description": null, - "identifier": "repository__tag", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "id", - "kind": "String", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - } - ] - }, - { - "name": "queries", - "peer": "CoreGraphQLQuery", - "label": null, - "description": null, - "identifier": "graphqlquery__repository", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "id", - "kind": "String", - "description": null - }, - { - "name": "query__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - } - ] - } - ], - "label": null, - "inherit_from": [ - "DataOwner", - "DataSource" - ], - "branch": "aware", - "default_filter": "name__value", - "human_friendly_id": ["name__value"], - "filters": [ - { - "name": "ids", - "kind": "List", - "description": null - }, - { - "name": "hfid", - "kind": "List", - "description": null - }, - { - "name": "username__value", - "kind": "String", - "description": null - }, - { - "name": "type__value", - "kind": "String", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - }, - { - "name": "commit__value", - "kind": "String", - "description": null - }, - { - "name": 
"location__value", - "kind": "String", - "description": null - }, - { - "name": "password__value", - "kind": "String", - "description": null - }, - { - "name": "default_branch__value", - "kind": "String", - "description": null - } - ] - }, - { - "name": "Tag", - "namespace": "Builtin", - "description": null, - "attributes": [ - { - "name": "name", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": true, - "branch": "aware", - "optional": false - }, - { - "name": "description", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "status", - "kind": "Dropdown", - "branch": "aware", - "optional": true, - "choices": [ - { - "name": "active", - "label": "Active", - "description": "A status", - "color": "#ffffff" - } - ] - }, - { - "name": "mode", - "kind": "Text", - "branch": "aware", - "optional": true, - "enum": ["easy"] - } - ], - "relationships": [], - "label": null, - "inherit_from": [], - "branch": "aware", - "default_filter": "name__value", - "filters": [ - { - "name": "ids", - "kind": "List", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - } - ] - }, - { - "name": "Location", - "namespace": "Builtin", - "default_filter": "name__value", - "attributes": [ - {"name": "name", "kind": "String", "unique": true}, - {"name": "description", "kind": "String", "optional": true}, - {"name": "type", "kind": "String"} - ], - "relationships": [ - {"name": "tags", "peer": "BuiltinTag", "optional": true, "cardinality": "many"}, - {"name": "primary_tag", "peer": "BuiltinTag", "optional": true, "cardinality": "one"} - ] - } - ] -} diff --git a/python_sdk/tests/fixtures/schema_02.json b/python_sdk/tests/fixtures/schema_02.json deleted file mode 100644 index e6619110e3..0000000000 --- a/python_sdk/tests/fixtures/schema_02.json +++ /dev/null @@ -1,1055 +0,0 @@ -{ - "generics": [ - { - "name": "Node", - "namespace": "Core", - "description": "Base Node in Infrahub.", - "default_filter": null, - "branch": "aware", - "order_by": null, - "display_labels": null, - "attributes": [], - "relationships": [], - "label": "Node", - "used_by": ["BuiltinTag", "BuiltinLocation"], - "kind": "CoreNode" - }, - { - "name": "GenericRepository", - "namespace": "Core", - "description": "A Git Repository integrated with Infrahub", - "default_filter": "name__value", - "branch": "agnostic", - "order_by": ["name__value"], - "display_labels": ["name__value"], - "attributes": [ - { - "name": "name", - "kind": "Text", - "label": "Name", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": true, - "branch": "aware", - "optional": false, - "order_weight": 1000, - "choices": null - }, - { - "name": "description", - "kind": "Text", - "label": "Description", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 2000, - "choices": null - }, - { - "name": "location", - "kind": "Text", - "label": "Location", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - 
"max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": true, - "branch": "aware", - "optional": false, - "order_weight": 3000, - "choices": null - }, - { - "name": "internal_status", - "kind": "Text", - "label": "Admin Status", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false, - "order_weight": 3000, - "choices": null - }, - { - "name": "username", - "kind": "Text", - "label": "Username", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 4000, - "choices": null - }, - { - "name": "password", - "kind": "Password", - "label": "Password", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 5000, - "choices": null - } - ], - "relationships": [ - { - "name": "account", - "peer": "CoreAccount", - "kind": "Attribute", - "direction": "bidirectional", - "label": "Account", - "description": null, - "identifier": "coreaccount__coregenericrepository", - "inherited": false, - "cardinality": "one", - "branch": "agnostic", - "optional": true, - "order_weight": 6000 - }, - { - "name": "tags", - "peer": "BuiltinTag", - "kind": "Attribute", - "direction": "bidirectional", - "label": "Tags", - "description": null, - "identifier": "builtintag__coregenericrepository", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 7000 - }, - { - "name": "transformations", - "peer": "CoreTransformation", - "kind": "Generic", - "direction": "bidirectional", - "label": "Transformations", - "description": null, - "identifier": "repository__transformation", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 8000 - }, - { - "name": "queries", - "peer": "CoreGraphQLQuery", - "kind": "Generic", - "direction": "bidirectional", - "label": "Queries", - "description": null, - "identifier": "graphql_query__repository", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 9000 - }, - { - "name": "checks", - "peer": "CoreCheckDefinition", - "kind": "Generic", - "direction": "bidirectional", - "label": "Checks", - "description": null, - "identifier": "check_definition__repository", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 10000 - } - ], - "label": "Git Repository", - "used_by": [ - "CoreReadOnlyRepository", - "CoreRepository" - ], - "kind": "CoreGenericRepository" - } - ], - "nodes": [ - { - "name": "Repository", - "namespace": "Core", - "description": "A Git Repository integrated with Infrahub", - "default_filter": "name__value", - "branch": "aware", - "order_by": ["name__value"], - "display_labels": ["name__value"], - "attributes": [ - { - "name": "default_branch", - "kind": "Text", - "label": "Default Branch", - "description": null, - "default_value": "main", - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": 
"aware", - "optional": true, - "order_weight": 1000, - "choices": null - }, - { - "name": "commit", - "kind": "Text", - "label": "Commit", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "local", - "optional": true, - "order_weight": 2000, - "choices": null - }, - { - "name": "name", - "kind": "Text", - "label": "Name", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": true, - "unique": true, - "branch": "aware", - "optional": false, - "order_weight": 3000, - "choices": null - }, - { - "name": "description", - "kind": "Text", - "label": "Description", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": true, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 4000, - "choices": null - }, - { - "name": "location", - "kind": "Text", - "label": "Location", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": true, - "unique": true, - "branch": "aware", - "optional": false, - "order_weight": 5000, - "choices": null - }, - { - "name": "username", - "kind": "Text", - "label": "Username", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": true, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 6000, - "choices": null - }, - { - "name": "password", - "kind": "Password", - "label": "Password", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": true, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 7000, - "choices": null - } - ], - "relationships": [ - { - "name": "account", - "peer": "CoreAccount", - "kind": "Attribute", - "direction": "bidirectional", - "label": "Account", - "description": null, - "identifier": "coreaccount__coregenericrepository", - "inherited": true, - "cardinality": "one", - "branch": "agnostic", - "optional": true, - "order_weight": 8000 - }, - { - "name": "tags", - "peer": "BuiltinTag", - "kind": "Attribute", - "direction": "bidirectional", - "label": "Tags", - "description": null, - "identifier": "builtintag__coregenericrepository", - "inherited": true, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 9000 - }, - { - "name": "transformations", - "peer": "CoreTransformation", - "kind": "Generic", - "direction": "bidirectional", - "label": "Transformations", - "description": null, - "identifier": "repository__transformation", - "inherited": true, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 10000 - }, - { - "name": "queries", - "peer": "CoreGraphQLQuery", - "kind": "Generic", - "direction": "bidirectional", - "label": "Queries", - "description": null, - "identifier": "graphql_query__repository", - "inherited": true, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 11000 - }, - { - "name": "checks", - "peer": "CoreCheckDefinition", - "kind": "Generic", - "direction": "bidirectional", - "label": 
"Checks", - "description": null, - "identifier": "check_definition__repository", - "inherited": true, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 12000 - } - ], - "label": "Repository", - "inherit_from": ["CoreGenericRepository"], - "kind": "CoreRepository" - }, - { - "id": "17a73306-2b51-07fb-43e3-16777efa5bfe", - "name": "ReadOnlyRepository", - "namespace": "Core", - "description": "A Git Repository integrated with Infrahub, Git-side will not be updated", - "default_filter": "name__value", - "branch": "aware", - "order_by": ["name__value"], - "display_labels": ["name__value"], - "attributes": [ - { - "id": "17a73306-2d08-182e-43e6-1677aaae0c06", - "name": "branch", - "kind": "Text", - "label": "Branch", - "description": null, - "default_value": "main", - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 1000, - "choices": null - }, - { - "name": "commit", - "kind": "Text", - "label": "Commit", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 2000, - "choices": null - }, - { - "id": "17a73306-2f1c-9428-43ee-1677c8719359", - "name": "name", - "kind": "Text", - "label": "Name", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": true, - "unique": true, - "branch": "aware", - "optional": false, - "order_weight": 2000, - "choices": null - }, - { - "id": "17a73306-3133-8030-43e7-167700a9a3c0", - "name": "description", - "kind": "Text", - "label": "Description", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": true, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 3000, - "choices": null - }, - { - "id": "17a73306-3336-cb49-43e0-1677c460b0f3", - "name": "location", - "kind": "Text", - "label": "Location", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": true, - "unique": true, - "branch": "aware", - "optional": false, - "order_weight": 4000, - "choices": null - }, - { - "name": "internal_status", - "kind": "Text", - "label": "Admin Status", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": true, - "unique": true, - "branch": "aware", - "optional": false, - "order_weight": 3000, - "choices": null - }, - { - "id": "17a73306-353a-f864-43e9-1677c172ed4e", - "name": "username", - "kind": "Text", - "label": "Username", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": true, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 5000, - "choices": null - }, - { - "id": "17a73306-3741-3641-43e3-16778b790687", - "name": "password", - "kind": "Password", - "label": "Password", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - 
"inherited": true, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 6000, - "choices": null - } - ], - "relationships": [ - { - "name": "account", - "peer": "CoreAccount", - "kind": "Attribute", - "direction": "bidirectional", - "label": "Account", - "description": null, - "identifier": "coreaccount__coregenericrepository", - "inherited": true, - "cardinality": "one", - "branch": "agnostic", - "optional": true, - "order_weight": 8000 - }, - { - "name": "tags", - "peer": "BuiltinTag", - "kind": "Attribute", - "direction": "bidirectional", - "label": "Tags", - "description": null, - "identifier": "builtintag__coregenericrepository", - "inherited": true, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 9000 - }, - { - "name": "transformations", - "peer": "CoreTransformation", - "kind": "Generic", - "direction": "bidirectional", - "label": "Transformations", - "description": null, - "identifier": "repository__transformation", - "inherited": true, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 10000 - }, - { - "name": "queries", - "peer": "CoreGraphQLQuery", - "kind": "Generic", - "direction": "bidirectional", - "label": "Queries", - "description": null, - "identifier": "graphql_query__repository", - "inherited": true, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 11000 - }, - { - "name": "checks", - "peer": "CoreCheckDefinition", - "kind": "Generic", - "direction": "bidirectional", - "label": "Checks", - "description": null, - "identifier": "check_definition__repository", - "inherited": true, - "cardinality": "many", - "branch": "aware", - "optional": true, - "order_weight": 12000 - } - ] - }, - { - "name": "GraphQLQuery", - "namespace": "Core", - "description": null, - "attributes": [ - { - "name": "query", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false - }, - { - "name": "description", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "name", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": true, - "branch": "aware", - "optional": false - } - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "label": null, - "description": null, - "identifier": "graphqlquery__tag", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "id", - "kind": "String", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - } - ] - } - ], - "label": null, - "inherit_from": [], - "branch": "aware", - "default_filter": "name__value", - "filters": [ - { - "name": "ids", - "kind": "List", - "description": null - }, - { - "name": "query__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - } - ] - }, - { - "name": "Repository", - "namespace": "Core", - "description": null, - "attributes": [ - { - "name": "username", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - 
"inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "type", - "kind": "String", - "label": null, - "description": null, - "default_value": "LOCAL", - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false - }, - { - "name": "name", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": true, - "branch": "aware", - "optional": false - }, - { - "name": "description", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "commit", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "location", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false - }, - { - "name": "password", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - }, - { - "name": "default_branch", - "kind": "String", - "label": null, - "description": null, - "default_value": "main", - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false - }, - { - "name": "internal_status", - "kind": "Text", - "label": "Admin Status", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false, - "order_weight": 3100, - "choices": null - } - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "label": null, - "description": null, - "identifier": "repository__tag", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "id", - "kind": "String", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - } - ] - }, - { - "name": "queries", - "peer": "CoreGraphQLQuery", - "label": null, - "description": null, - "identifier": "graphqlquery__repository", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "id", - "kind": "String", - "description": null - }, - { - "name": "query__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - } - ] - } - ], - "label": null, - "inherit_from": [ - "DataOwner", - "DataSource" - ], - "branch": "aware", - "default_filter": "name__value", - "filters": [ - { - "name": "ids", - "kind": "List", - "description": null - }, - { - "name": "username__value", - "kind": "String", - "description": null - }, - { - "name": "type__value", - "kind": "String", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - }, - { - "name": "commit__value", - "kind": "String", - "description": null - }, - { - "name": "location__value", - "kind": "String", - "description": null - }, - { - 
"name": "password__value", - "kind": "String", - "description": null - }, - { - "name": "default_branch__value", - "kind": "String", - "description": null - } - ] - }, - { - "name": "Tag", - "namespace": "Builtin", - "description": null, - "attributes": [ - { - "name": "name", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": true, - "branch": "aware", - "optional": false - }, - { - "name": "description", - "kind": "String", - "label": null, - "description": null, - "default_value": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true - } - ], - "relationships": [], - "label": null, - "inherit_from": ["CoreNode"], - "branch": "aware", - "default_filter": "name__value", - "filters": [ - { - "name": "ids", - "kind": "List", - "description": null - }, - { - "name": "name__value", - "kind": "String", - "description": null - }, - { - "name": "description__value", - "kind": "String", - "description": null - } - ] - }, - { - "name": "Location", - "namespace": "Builtin", - "default_filter": "name__value", - "inherit_from": ["CoreNode"], - "attributes": [ - {"name": "name", "kind": "String", "unique": true}, - {"name": "description", "kind": "String", "optional": true}, - {"name": "type", "kind": "String"} - ], - "relationships": [ - {"name": "tags", "peer": "BuiltinTag", "optional": true, "cardinality": "many"}, - {"name": "primary_tag", "peer": "BuiltinTag", "optional": true, "cardinality": "one"} - ] - } - ] -} \ No newline at end of file diff --git a/python_sdk/tests/fixtures/schema_03.json b/python_sdk/tests/fixtures/schema_03.json deleted file mode 100644 index 924dd420ad..0000000000 --- a/python_sdk/tests/fixtures/schema_03.json +++ /dev/null @@ -1,974 +0,0 @@ -{ - "nodes": [ - { - "id": "1799f63a-56b4-7f6c-304e-c510849dcb58", - "name": "Artifact", - "namespace": "Core", - "description": null, - "default_filter": "name__value", - "branch": "local", - "order_by": [ - "name__value" - ], - "display_labels": [ - "name__value" - ], - "attributes": [ - { - "id": "1799f63a-5709-60c0-3049-c517825cf4c9", - "name": "name", - "kind": "Text", - "namespace": "Attribute", - "label": "Name", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "inherited": false, - "unique": false, - "branch": "local", - "optional": false, - "order_weight": 1000 - }, - { - "id": "1799f63a-5766-dcb8-304d-c51e643fd5aa", - "name": "status", - "kind": "Text", - "namespace": "Attribute", - "label": "Status", - "description": null, - "default_value": null, - "enum": [ - "Error", - "Pending", - "Processing", - "Ready" - ], - "regex": null, - "max_length": null, - "min_length": null, - "inherited": false, - "unique": false, - "branch": "local", - "optional": false, - "order_weight": 2000 - }, - { - "id": "1799f63a-57c7-bcd9-3041-c51b45223d54", - "name": "content_type", - "kind": "Text", - "namespace": "Attribute", - "label": "Content Type", - "description": null, - "default_value": null, - "enum": [ - "application/json", - "text/plain" - ], - "regex": null, - "max_length": null, - "min_length": null, - "inherited": false, - "unique": false, - "branch": "local", - "optional": false, - "order_weight": 3000 - }, - { - "id": "1799f63a-582f-ec8f-304c-c51ff4f83720", - "name": "checksum", - "kind": "Text", - "namespace": "Attribute", - "label": "Checksum", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - 
"min_length": null, - "inherited": false, - "unique": false, - "branch": "local", - "optional": true, - "order_weight": 4000 - }, - { - "id": "1799f63a-5898-86b8-304e-c5143fc3cb31", - "name": "storage_id", - "kind": "Text", - "namespace": "Attribute", - "label": "Storage Id", - "description": "ID of the file in the object store", - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "inherited": false, - "unique": false, - "branch": "local", - "optional": true, - "order_weight": 5000 - }, - { - "id": "1799f63a-5900-ba56-304e-c515eef9ed77", - "name": "parameters", - "kind": "JSON", - "namespace": "Attribute", - "label": "Parameters", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "inherited": false, - "unique": false, - "branch": "local", - "optional": true, - "order_weight": 6000 - } - ], - "relationships": [ - { - "id": "1799f63a-595e-7d97-304d-c51df7daa9a4", - "name": "object", - "peer": "CoreNode", - "kind": "Attribute", - "label": "Object", - "description": null, - "identifier": "artifact__node", - "inherited": false, - "cardinality": "one", - "branch": "local", - "optional": false, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 7000 - }, - { - "id": "1799f63a-59b1-183d-3041-c5191c3136f6", - "name": "definition", - "peer": "CoreArtifactDefinition", - "kind": "Attribute", - "label": "Definition", - "description": null, - "identifier": "artifact__artifact_definition", - "inherited": false, - "cardinality": "one", - "branch": "local", - "optional": false, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - 
}, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__value", - "kind": "Text", - "enum": [ - "application/json", - "text/plain" - ], - "object_kind": null, - "description": null - }, - { - "name": "content_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 8000 - }, - { - "id": "1799f63a-5a06-60c2-304b-c513a9abcdd8", - "name": "member_of_groups", - "peer": "CoreGroup", - "kind": "Group", - "label": "Member Of Groups", - "description": null, - "identifier": "group_member", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 9000 - }, - { - "id": "1799f63a-5a59-c417-3044-c51c9c8e5131", - "name": "subscriber_of_groups", - "peer": "CoreGroup", - "kind": "Group", - "label": "Subscriber Of Groups", - "description": null, - "identifier": "group_subscriber", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", 
- "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 10000 - } - ], - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "status__value", - "kind": "Text", - "enum": [ - "Error", - "Pending", - "Processing", - "Ready" - ], - "object_kind": null, - "description": null - }, - { - "name": "status__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "status__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "status__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "status__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__value", - "kind": "Text", - "enum": [ - "application/json", - "text/plain" - ], - "object_kind": null, - "description": null - }, - { - "name": "content_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__is_protected", - "kind": "Boolean", - "enum": 
null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "checksum__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "checksum__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "checksum__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "checksum__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "checksum__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "storage_id__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "storage_id__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "storage_id__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "storage_id__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "storage_id__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "object__ids", - "kind": "Text", - "enum": null, - "object_kind": "CoreNode", - "description": null - }, - { - "name": "definition__ids", - "kind": "Text", - "enum": null, - "object_kind": "CoreArtifactDefinition", - "description": null - }, - { - "name": "definition__name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__artifact_name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__artifact_name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__artifact_name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__artifact_name__source__id", - "kind": "Text", - "enum": null, - 
"object_kind": null, - "description": null - }, - { - "name": "definition__artifact_name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__content_type__value", - "kind": "Text", - "enum": [ - "application/json", - "text/plain" - ], - "object_kind": null, - "description": null - }, - { - "name": "definition__content_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__content_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__content_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__content_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "include_in_menu": false, - "menu_placement": null, - "icon": null, - "label": "Artifact", - "inherit_from": [], - "kind": "CoreArtifact" - } - ], - "generics": [] -} diff --git a/python_sdk/tests/fixtures/schema_04.json b/python_sdk/tests/fixtures/schema_04.json deleted file mode 100644 index 92b6d58f32..0000000000 --- a/python_sdk/tests/fixtures/schema_04.json +++ /dev/null @@ -1,2097 +0,0 @@ -{ - "nodes": [ - { - "id": "179be3f5-eab5-9d05-301d-c51f20da7fbd", - "name": "Artifact", - "namespace": "Core", - "description": null, - "default_filter": "name__value", - "branch": "local", - "order_by": [ - "name__value" - ], - "display_labels": [ - "name__value" - ], - "attributes": [ - { - "id": "179be3f5-eb1c-bd16-301a-c511fdc69f85", - "name": "name", - "kind": "Text", - "namespace": "Attribute", - "label": "Name", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "local", - "optional": false, - "order_weight": 1000, - "choices": null - }, - { - "id": "179be3f5-eb8d-c1dd-3017-c514f5cc3594", - "name": "status", - "kind": "Text", - "namespace": "Attribute", - "label": "Status", - "description": null, - "default_value": null, - "enum": [ - "Error", - "Pending", - "Processing", - "Ready" - ], - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "local", - "optional": false, - "order_weight": 2000, - "choices": null - }, - { - "id": "179be3f5-ec02-7296-3016-c510fbc0122f", - "name": "content_type", - "kind": "Text", - "namespace": "Attribute", - "label": "Content Type", - "description": null, - "default_value": null, - "enum": [ - "application/json", - "text/plain" - ], - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - 
"branch": "local", - "optional": false, - "order_weight": 3000, - "choices": null - }, - { - "id": "179be3f5-ec73-ae37-301d-c51b20302e14", - "name": "checksum", - "kind": "Text", - "namespace": "Attribute", - "label": "Checksum", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "local", - "optional": true, - "order_weight": 4000, - "choices": null - }, - { - "id": "179be3f5-ece7-c18c-3011-c51a0e3e273e", - "name": "storage_id", - "kind": "Text", - "namespace": "Attribute", - "label": "Storage Id", - "description": "ID of the file in the object store", - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "local", - "optional": true, - "order_weight": 5000, - "choices": null - }, - { - "id": "179be3f5-ed5e-53b7-3011-c5143efc850e", - "name": "parameters", - "kind": "JSON", - "namespace": "Attribute", - "label": "Parameters", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "read_only": false, - "inherited": false, - "unique": false, - "branch": "local", - "optional": true, - "order_weight": 6000, - "choices": null - } - ], - "relationships": [ - { - "id": "179be3f5-edcd-7f06-3011-c51a4a621a7e", - "name": "object", - "peer": "CoreNode", - "kind": "Attribute", - "label": "Object", - "description": null, - "identifier": "artifact__node", - "inherited": false, - "cardinality": "one", - "branch": "local", - "optional": false, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 7000 - }, - { - "id": "179be3f5-ee2d-1349-3013-c517a643565b", - "name": "definition", - "peer": "CoreArtifactDefinition", - "kind": "Attribute", - "label": "Definition", - "description": null, - "identifier": "artifact__artifact_definition", - "inherited": false, - "cardinality": "one", - "branch": "local", - "optional": false, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - 
"kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__value", - "kind": "Text", - "enum": [ - "application/json", - "text/plain" - ], - "object_kind": null, - "description": null - }, - { - "name": "content_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 8000 - }, - { - "id": "179be3f5-ee86-fd31-301c-c51594ed3876", - "name": "member_of_groups", - "peer": "CoreGroup", - "kind": "Group", - "label": "Member Of Groups", - "description": null, - "identifier": "group_member", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 9000 - 
}, - { - "id": "179be3f5-eee9-ca52-301d-c5166012d5ce", - "name": "subscriber_of_groups", - "peer": "CoreGroup", - "kind": "Group", - "label": "Subscriber Of Groups", - "description": null, - "identifier": "group_subscriber", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 10000 - } - ], - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "status__value", - "kind": "Text", - "enum": [ - "Error", - "Pending", - "Processing", - "Ready" - ], - "object_kind": null, - "description": null - }, - { - "name": "status__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "status__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "status__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": 
"status__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__value", - "kind": "Text", - "enum": [ - "application/json", - "text/plain" - ], - "object_kind": null, - "description": null - }, - { - "name": "content_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "checksum__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "checksum__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "checksum__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "checksum__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "checksum__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "storage_id__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "storage_id__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "storage_id__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "storage_id__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "storage_id__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "object__ids", - "kind": "Text", - "enum": null, - "object_kind": "CoreNode", - "description": null - }, - { - "name": "definition__ids", - "kind": "Text", - "enum": null, - "object_kind": "CoreArtifactDefinition", - "description": null - }, - { - "name": "definition__name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__artifact_name__value", 
- "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__artifact_name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__artifact_name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__artifact_name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__artifact_name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__content_type__value", - "kind": "Text", - "enum": [ - "application/json", - "text/plain" - ], - "object_kind": null, - "description": null - }, - { - "name": "definition__content_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__content_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__content_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "definition__content_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "include_in_menu": false, - "menu_placement": null, - "icon": null, - "label": "Artifact", - "inherit_from": [], - "kind": "CoreArtifact", - "hash": "eecebc6d23a92ea79a54f8676352ebb5" - }, - { - "id": "1799f63a-5b6b-cb21-304a-c51964ff356c", - "name": "ArtifactDefinition", - "namespace": "Core", - "description": null, - "default_filter": "name__value", - "branch": "aware", - "order_by": [ - "name__value" - ], - "display_labels": [ - "name__value" - ], - "attributes": [ - { - "id": "1799f63a-5bc6-0329-304f-c5133b0a3e09", - "name": "name", - "kind": "Text", - "namespace": "Attribute", - "label": "Name", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "inherited": false, - "unique": true, - "branch": "aware", - "optional": false, - "order_weight": 1000 - }, - { - "id": "1799f63a-5c26-749d-304a-c5130edc5d10", - "name": "artifact_name", - "kind": "Text", - "namespace": "Attribute", - "label": "Artifact Name", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false, - "order_weight": 2000 - }, - { - "id": "1799f63a-5c85-969e-3048-c5102fe0d7d0", - "name": "description", - "kind": "Text", - "namespace": "Attribute", - "label": "Description", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, 
- "min_length": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": true, - "order_weight": 3000 - }, - { - "id": "1799f63a-5ce2-84fb-304a-c51541e59ee2", - "name": "parameters", - "kind": "JSON", - "namespace": "Attribute", - "label": "Parameters", - "description": null, - "default_value": null, - "enum": null, - "regex": null, - "max_length": null, - "min_length": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false, - "order_weight": 4000 - }, - { - "id": "1799f63a-5d69-d3fa-3049-c51efd44dcbf", - "name": "content_type", - "kind": "Text", - "namespace": "Attribute", - "label": "Content Type", - "description": null, - "default_value": null, - "enum": [ - "application/json", - "text/plain" - ], - "regex": null, - "max_length": null, - "min_length": null, - "inherited": false, - "unique": false, - "branch": "aware", - "optional": false, - "order_weight": 5000 - } - ], - "relationships": [ - { - "id": "1799f63a-5dd2-3fc0-3046-c51b909b6a36", - "name": "targets", - "peer": "CoreGroup", - "kind": "Attribute", - "label": "Targets", - "description": null, - "identifier": "artifact_definition___group", - "inherited": false, - "cardinality": "one", - "branch": "aware", - "optional": false, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 6000 - }, - { - "id": "1799f63a-5e27-e84c-3046-c514bd181b05", - "name": "transformation", - "peer": "CoreTransformation", - "kind": "Attribute", - "label": "Transformation", - "description": null, - "identifier": "artifact_definition___transformation", - "inherited": false, - "cardinality": "one", - "branch": "aware", - 
"optional": false, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "timeout__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "timeout__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "timeout__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "timeout__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "timeout__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 7000 - }, - { - "id": "1799f63a-5e7d-5a28-3048-c51eb5055587", - "name": "member_of_groups", - "peer": "CoreGroup", - "kind": "Group", - "label": "Member Of Groups", - "description": null, - "identifier": "group_member", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": 
null, - "object_kind": null, - "description": null - }, - { - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "order_weight": 8000 - }, - { - "id": "1799f63a-5ed0-c577-3048-c512f9ff312a", - "name": "subscriber_of_groups", - "peer": "CoreGroup", - "kind": "Group", - "label": "Subscriber Of Groups", - "description": null, - "identifier": "group_subscriber", - "inherited": false, - "cardinality": "many", - "branch": "aware", - "optional": true, - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - 
} - ], - "order_weight": 9000 - } - ], - "filters": [ - { - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "artifact_name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__value", - "kind": "Text", - "enum": [ - "application/json", - "text/plain" - ], - "object_kind": null, - "description": null - }, - { - "name": "content_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "content_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "any__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__ids", - "kind": "Text", - "enum": null, - "object_kind": "CoreGroup", - "description": null - }, - { - "name": "targets__name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": 
"targets__name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "targets__description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__ids", - "kind": "Text", - "enum": null, - "object_kind": "CoreTransformation", - "description": null - }, - { - "name": "transformation__name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, 
- "description": null - }, - { - "name": "transformation__description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__timeout__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__timeout__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__timeout__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__timeout__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "name": "transformation__timeout__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "include_in_menu": false, - "menu_placement": null, - "icon": null, - "label": "Artifact Definition", - "inherit_from": [], - "kind": "CoreArtifactDefinition" - } - ], - "generics": [] -} diff --git a/python_sdk/tests/fixtures/schema_ipam.json b/python_sdk/tests/fixtures/schema_ipam.json deleted file mode 100644 index 5f0226254b..0000000000 --- a/python_sdk/tests/fixtures/schema_ipam.json +++ /dev/null @@ -1,4733 +0,0 @@ -{ - "nodes": [ - { - "id": "17d9bd6f-64d1-2310-2786-179fb702390c", - "state": "present", - "name": "IPPrefix", - "namespace": "Ipam", - "description": "IPv4 or IPv6 network", - "label": "IP Prefix", - "branch": "aware", - "default_filter": "prefix__value", - "human_friendly_id": null, - "display_labels": [ - "prefix__value" - ], - "include_in_menu": false, - "menu_placement": null, - "icon": "mdi:ip-network", - "order_by": [ - "prefix__version", - "prefix__binary_address", - "prefix__prefixlen" - ], - "uniqueness_constraints": null, - "documentation": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "prefix__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__value", - "kind": "Text", - "enum": null, - "object_kind": null, - 
"description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__value", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__value", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, 
- { - "id": null, - "state": "present", - "name": "utilization__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - 
"state": "present", - "name": "broadcast_address__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "any__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "any__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "any__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "any__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "any__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "attributes": [ - { - "id": "17d9bd6f-6523-88d0-2788-179f06f96513", - "state": "present", - "name": "prefix", - "kind": "IPNetwork", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Prefix", - "description": null, - "read_only": false, - "unique": false, - "optional": false, - "branch": "aware", - "order_weight": 1000, - "default_value": null, - "inherited": true, - "allow_override": "any" - }, - { - "id": "17d9bd6f-656d-9da8-2781-179fb3e12549", - "state": "present", - "name": "description", - "kind": "Text", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Description", - "description": null, - "read_only": false, - "unique": false, - "optional": true, - "branch": "aware", - "order_weight": 2000, - "default_value": null, - "inherited": true, - "allow_override": "any" - }, - { - "id": "17d9bd6f-65bc-5dd0-278f-179f9ae80795", - "state": "present", - "name": "member_type", - "kind": "Dropdown", - "enum": null, - "choices": [ - { - "id": null, - "state": "present", - "name": "prefix", - "description": "Prefix serves as container for other prefixes", - "color": "#ed6a5a", - "label": "Prefix" - }, - { - "id": null, - "state": "present", - "name": "address", - "description": "Prefix serves as subnet for IP addresses", - "color": "#f4f1bb", - "label": "Address" - } - ], - "regex": null, - "max_length": null, - "min_length": null, - "label": "Member Type", - "description": null, - "read_only": false, - "unique": false, - "optional": true, - "branch": "aware", - "order_weight": 3000, - "default_value": "address", - "inherited": true, - "allow_override": "any" - }, - { - "id": "17d9bd6f-6609-5108-2787-179fc10e797a", - "state": "present", - "name": "is_pool", - "kind": "Boolean", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Is Pool", - "description": "All IP addresses within this prefix are considered usable", - "read_only": false, - "unique": false, - "optional": true, - "branch": "aware", - "order_weight": 4000, - "default_value": false, - "inherited": true, - "allow_override": "any" - }, - { - "id": "17d9bd6f-6657-9fe8-278b-179f7fcff935", - "state": "present", - "name": "is_top_level", - 
"kind": "Boolean", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Is Top Level", - "description": null, - "read_only": true, - "unique": false, - "optional": true, - "branch": "aware", - "order_weight": 5000, - "default_value": null, - "inherited": true, - "allow_override": "none" - }, - { - "id": "17d9bd6f-66a4-54a0-2782-179fd2a0a7a2", - "state": "present", - "name": "utilization", - "kind": "Number", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Utilization", - "description": null, - "read_only": true, - "unique": false, - "optional": true, - "branch": "aware", - "order_weight": 6000, - "default_value": null, - "inherited": true, - "allow_override": "none" - }, - { - "id": "17d9bd6f-6700-b8f8-278c-179f3b309048", - "state": "present", - "name": "netmask", - "kind": "Text", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Netmask", - "description": null, - "read_only": true, - "unique": false, - "optional": true, - "branch": "aware", - "order_weight": 7000, - "default_value": null, - "inherited": true, - "allow_override": "none" - }, - { - "id": "17d9bd6f-675c-5a00-278e-179fdb740ee3", - "state": "present", - "name": "hostmask", - "kind": "Text", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Hostmask", - "description": null, - "read_only": true, - "unique": false, - "optional": true, - "branch": "aware", - "order_weight": 8000, - "default_value": null, - "inherited": true, - "allow_override": "none" - }, - { - "id": "17d9bd6f-67ae-7d58-2780-179f5690fd6e", - "state": "present", - "name": "network_address", - "kind": "Text", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Network Address", - "description": null, - "read_only": true, - "unique": false, - "optional": true, - "branch": "aware", - "order_weight": 9000, - "default_value": null, - "inherited": true, - "allow_override": "none" - }, - { - "id": "17d9bd6f-67fd-1e40-2782-179f3743d578", - "state": "present", - "name": "broadcast_address", - "kind": "Text", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Broadcast Address", - "description": null, - "read_only": true, - "unique": false, - "optional": true, - "branch": "aware", - "order_weight": 10000, - "default_value": null, - "inherited": true, - "allow_override": "none" - } - ], - "relationships": [ - { - "id": "17d9bd6f-6848-d510-278d-179fdede0e92", - "state": "present", - "name": "ip_namespace", - "peer": "BuiltinIPNamespace", - "kind": "Generic", - "label": "IP Namespace", - "description": null, - "identifier": "ip_namespace__ip_prefix", - "cardinality": "one", - "min_count": 0, - "max_count": 1, - "order_weight": 11000, - "optional": true, - "branch": "aware", - "inherited": true, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - 
"name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "none", - "read_only": false - }, - { - "id": "17d9bd6f-6897-5aa0-2783-179ffc129ffd", - "state": "present", - "name": "ip_addresses", - "peer": "BuiltinIPAddress", - "kind": "Generic", - "label": "IP Addresses", - "description": null, - "identifier": "ip_prefix__ip_address", - "cardinality": "many", - "min_count": 0, - "max_count": 0, - "order_weight": 12000, - "optional": true, - "branch": "aware", - "inherited": true, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "none", - "read_only": true - }, - { - "id": "17d9bd6f-68e6-5560-278f-179fe7a245e5", - "state": "present", - "name": "member_of_groups", - "peer": "CoreGroup", - "kind": "Group", - "label": "Member Of Groups", - "description": null, - "identifier": "group_member", - "cardinality": "many", - "min_count": 
0, - "max_count": 0, - "order_weight": 13000, - "optional": true, - "branch": "aware", - "inherited": true, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "any", - "read_only": false - }, - { - "id": "17d9bd6f-6936-26f8-278a-179fc817723d", - "state": "present", - "name": "subscriber_of_groups", - "peer": "CoreGroup", - "kind": "Group", - "label": "Subscriber Of Groups", - "description": null, - "identifier": "group_subscriber", - "cardinality": "many", - "min_count": 0, - "max_count": 0, - "order_weight": 14000, - 
"optional": true, - "branch": "aware", - "inherited": true, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "any", - "read_only": false - }, - { - "id": "17d9bd6f-6989-2128-278c-179f2b57e70f", - "state": "present", - "name": "parent", - "peer": "BuiltinIPPrefix", - "kind": "Hierarchy", - "label": "Parent", - "description": null, - "identifier": "parent__child", - "cardinality": "one", - "min_count": 0, - "max_count": 1, - "order_weight": 15000, - "optional": true, - "branch": "aware", - "inherited": true, - 
"direction": "outbound", - "hierarchical": "BuiltinIPPrefix", - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__value", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__value", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - 
"description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - 
"state": "present", - "name": "network_address__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "any", - "read_only": true - }, - { - "id": "17d9bd6f-69e3-c060-278c-179fdbe91d53", - "state": "present", - "name": "children", - "peer": "BuiltinIPPrefix", - "kind": "Hierarchy", - "label": "Children", - "description": null, - "identifier": "parent__child", - "cardinality": "many", - "min_count": 0, - "max_count": 0, - "order_weight": 16000, - "optional": true, - "branch": "aware", - "inherited": true, - "direction": "inbound", - "hierarchical": "BuiltinIPPrefix", - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__value", - "kind": "Text", - "enum": 
null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__value", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__value", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__source__id", - "kind": "Text", - "enum": null, - "object_kind": 
null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": 
null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "any", - "read_only": true - }, - { - "id": "17d9bd6f-6a36-35c0-278b-179f6cb47e47", - "state": "present", - "name": "profiles", - "peer": "ProfileIpamIPPrefix", - "kind": "Profile", - "label": "Profiles", - "description": null, - "identifier": "node__profile", - "cardinality": "many", - "min_count": 0, - "max_count": 0, - "order_weight": 17000, - "optional": true, - "branch": "aware", - "inherited": false, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": 
"Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__value", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "any", - "read_only": false - } - ], - "inherit_from": [ - "BuiltinIPPrefix" - ], - "generate_profile": true, - "hierarchy": "BuiltinIPPrefix", - "parent": "BuiltinIPPrefix", - "children": "BuiltinIPPrefix", - "kind": "IpamIPPrefix", - "hash": "e32f68b67633af054d23e35b4dd1a5e6" - }, - { - "id": "17d9bd6f-6185-cc88-278c-179fd6360c8d", - "state": "present", - "name": "IPAddress", - "namespace": "Ipam", - "description": "IP Address", - "label": "IP Address", - "branch": "aware", - "default_filter": "address__value", - "human_friendly_id": null, - "display_labels": [ - "address__value" - ], - "include_in_menu": false, - "menu_placement": null, - "icon": "mdi:ip-outline", - "order_by": [ - "address__version", - "address__binary_address" - ], - "uniqueness_constraints": null, - "documentation": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": 
"description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "any__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "any__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "any__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "any__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "any__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "attributes": [ - { - "id": "17d9bd6f-61e5-1710-2785-179f38d20caf", - "state": "present", - "name": "address", - "kind": "IPHost", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Address", - "description": null, - "read_only": false, - "unique": false, - "optional": false, - "branch": "aware", - "order_weight": 1000, - "default_value": null, - "inherited": true, - "allow_override": "any" - }, - { - "id": "17d9bd6f-6240-9cc0-2783-179f8eb17542", - "state": "present", - "name": "description", - "kind": "Text", - "enum": null, - "choices": null, - "regex": null, - "max_length": null, - "min_length": null, - "label": "Description", - "description": null, - "read_only": false, - "unique": false, - "optional": true, - "branch": "aware", - "order_weight": 2000, - "default_value": null, - "inherited": true, - "allow_override": "any" - } - ], - "relationships": [ - { - "id": "17d9bd6f-6295-2178-2783-179f5df1a9f6", - "state": "present", - "name": "interface", - "peer": "InfraInterfaceL3", - "kind": "Generic", - "label": "Interface", - "description": null, - "identifier": "infrainterfacel3__ipamipaddress", - "cardinality": "one", - "min_count": 0, - "max_count": 1, - "order_weight": 3000, - "optional": true, - "branch": "aware", - "inherited": false, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_rate__value", - "kind": "Text", - "enum": [ - "Fast", - "Normal" - ], - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_rate__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - 
"id": null, - "state": "present", - "name": "lacp_rate__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_rate__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_rate__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_rate__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_priority__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_priority__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_priority__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_priority__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_priority__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "lacp_priority__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": 
"speed__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "speed__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "speed__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "speed__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "speed__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "speed__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "mtu__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "mtu__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "mtu__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "mtu__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "mtu__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "mtu__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "enabled__value", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "enabled__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "enabled__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "enabled__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "enabled__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "enabled__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "status__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "status__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "status__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "status__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "status__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": 
"present", - "name": "status__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "role__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "role__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "role__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "role__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "role__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "role__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "any", - "read_only": false - }, - { - "id": "17d9bd6f-62f4-25b0-278b-179faf71de7c", - "state": "present", - "name": "ip_namespace", - "peer": "BuiltinIPNamespace", - "kind": "Generic", - "label": "IP Namespace", - "description": null, - "identifier": "ip_namespace__ip_address", - "cardinality": "one", - "min_count": 0, - "max_count": 1, - "order_weight": 3000, - "optional": true, - "branch": "aware", - "inherited": true, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": 
"Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "none", - "read_only": false - }, - { - "id": "17d9bd6f-634e-5f58-2782-179fcb00ad84", - "state": "present", - "name": "ip_prefix", - "peer": "BuiltinIPPrefix", - "kind": "Generic", - "label": "IP Prefix", - "description": null, - "identifier": "ip_prefix__ip_address", - "cardinality": "one", - "min_count": 0, - "max_count": 1, - "order_weight": 4000, - "optional": true, - "branch": "aware", - "inherited": true, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "member_type__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__value", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_pool__owner__id", - "kind": 
"Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__value", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "is_top_level__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "utilization__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "netmask__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": 
null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "hostmask__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "network_address__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "broadcast_address__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "none", - "read_only": true - }, - { - "id": "17d9bd6f-63ae-78e8-2780-179f0999b0c2", - "state": "present", - "name": "member_of_groups", - "peer": "CoreGroup", - "kind": "Group", - "label": "Member Of Groups", - "description": null, - "identifier": "group_member", - "cardinality": "many", - "min_count": 0, - "max_count": 0, - "order_weight": 5000, - "optional": true, - "branch": "aware", - "inherited": true, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_protected", - "kind": 
"Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "any", - "read_only": false - }, - { - "id": "17d9bd6f-6407-faf8-278b-179f4ed42503", - "state": "present", - "name": "subscriber_of_groups", - "peer": "CoreGroup", - "kind": "Group", - "label": "Subscriber Of Groups", - "description": null, - "identifier": "group_subscriber", - "cardinality": "many", - "min_count": 0, - "max_count": 0, - "order_weight": 6000, - "optional": true, - "branch": "aware", - "inherited": true, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - 
"description": null - }, - { - "id": null, - "state": "present", - "name": "name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "label__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "any", - "read_only": false - }, - { - "id": "17d9bd6f-6455-1928-278c-179f27ce0bb0", - "state": "present", - "name": "profiles", - "peer": "ProfileIpamIPAddress", - "kind": "Profile", - "label": "Profiles", - "description": null, - "identifier": "node__profile", - "cardinality": "many", - "min_count": 0, - "max_count": 0, - "order_weight": 8000, - "optional": true, - "branch": "aware", - "inherited": false, - "direction": "bidirectional", - "hierarchical": null, - "filters": [ - { - "id": null, - "state": "present", - "name": "ids", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - 
"id": null, - "state": "present", - "name": "profile_name__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_name__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__value", - "kind": "Number", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "profile_priority__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__value", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__values", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_visible", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__is_protected", - "kind": "Boolean", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__source__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - }, - { - "id": null, - "state": "present", - "name": "description__owner__id", - "kind": "Text", - "enum": null, - "object_kind": null, - "description": null - } - ], - "on_delete": "no-action", - "allow_override": "any", - "read_only": false - } - ], - "inherit_from": [ - "BuiltinIPAddress" - ], - "generate_profile": true, - "hierarchy": null, - "parent": null, - "children": null, - "kind": "IpamIPAddress", - "hash": "146b308b5998170e248732d4c8bb582f" - } - ] -} \ No newline at end of file diff --git a/python_sdk/tests/helpers b/python_sdk/tests/helpers deleted file mode 120000 index 98accb20fd..0000000000 --- a/python_sdk/tests/helpers +++ /dev/null @@ -1 +0,0 @@ -../../backend/tests/helpers \ No newline at end of file diff --git a/python_sdk/tests/integration/__init__.py b/python_sdk/tests/integration/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/python_sdk/tests/integration/conftest.py b/python_sdk/tests/integration/conftest.py deleted file mode 100644 index 1c8797e637..0000000000 --- a/python_sdk/tests/integration/conftest.py +++ /dev/null @@ -1,517 +0,0 @@ -import asyncio -import os -from typing import Any, Optional - -import httpx -import pytest -import ujson -from fastapi.testclient import TestClient -from infrahub import config -from infrahub.components import ComponentType -from infrahub.core.initialization import first_time_initialization, initialization -from 
infrahub.core.node import Node -from infrahub.core.utils import delete_all_nodes -from infrahub.database import InfrahubDatabase, get_db -from infrahub.lock import initialize_lock -from infrahub.message_bus import InfrahubMessage -from infrahub.message_bus.types import MessageTTL -from infrahub.services.adapters.message_bus import InfrahubMessageBus - -from infrahub_sdk.schema import NodeSchema, SchemaRoot -from infrahub_sdk.types import HTTPMethod -from infrahub_sdk.utils import str_to_bool - -BUILD_NAME = os.environ.get("INFRAHUB_BUILD_NAME", "infrahub") -TEST_IN_DOCKER = str_to_bool(os.environ.get("INFRAHUB_TEST_IN_DOCKER", "false")) - - -@pytest.fixture(scope="session", autouse=True) -def add_tracker(): - os.environ["PYTEST_RUNNING"] = "true" - - -# pylint: disable=redefined-outer-name -class InfrahubTestClient(TestClient): - def _request( - self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: Optional[dict] = None - ) -> httpx.Response: - content = None - if payload: - content = str(ujson.dumps(payload)).encode("UTF-8") - with self as client: - return client.request( - method=method.value, - url=url, - headers=headers, - timeout=timeout, - content=content, - ) - - async def async_request( - self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: Optional[dict] = None - ) -> httpx.Response: - return self._request(url=url, method=method, headers=headers, timeout=timeout, payload=payload) - - def sync_request( - self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: Optional[dict] = None - ) -> httpx.Response: - return self._request(url=url, method=method, headers=headers, timeout=timeout, payload=payload) - - -@pytest.fixture(scope="session") -def event_loop(): - """Overrides pytest default function scoped event loop""" - policy = asyncio.get_event_loop_policy() - loop = policy.new_event_loop() - yield loop - loop.close() - - -@pytest.fixture(scope="module", autouse=True) -def execute_before_any_test(worker_id, tmpdir_factory): - config.load_and_exit() - - config.SETTINGS.storage.driver = config.StorageDriver.FileSystemStorage - - if TEST_IN_DOCKER: - try: - db_id = int(worker_id[2]) + 1 - except (ValueError, IndexError): - db_id = 1 - config.SETTINGS.cache.address = f"{BUILD_NAME}-cache-1" - config.SETTINGS.database.address = f"{BUILD_NAME}-database-{db_id}" - config.SETTINGS.storage.local = config.FileSystemStorageSettings(path="/opt/infrahub/storage") - else: - storage_dir = tmpdir_factory.mktemp("storage") - config.SETTINGS.storage.local.path_ = str(storage_dir) - - config.SETTINGS.broker.enable = False - config.SETTINGS.cache.enable = True - config.SETTINGS.miscellaneous.start_background_runner = False - config.SETTINGS.security.secret_key = "4e26b3d9-b84f-42c9-a03f-fee3ada3b2fa" - config.SETTINGS.main.internal_address = "http://mock" - config.OVERRIDE.message_bus = BusRecorder() - - initialize_lock() - - -@pytest.fixture(scope="module") -async def db() -> InfrahubDatabase: - driver = InfrahubDatabase(driver=await get_db(retry=1)) - - yield driver - - await driver.close() - - -@pytest.fixture(scope="module") -async def init_db_base(db: InfrahubDatabase): - await delete_all_nodes(db=db) - await first_time_initialization(db=db) - await initialization(db=db) - - -@pytest.fixture(scope="module") -async def builtin_org_schema() -> SchemaRoot: - SCHEMA = { - "version": "1.0", - "nodes": [ - { - "name": "Organization", - "namespace": "Test", - "description": "An organization represent a legal entity, a 
company.", - "include_in_menu": True, - "label": "Organization", - "icon": "mdi:domain", - "default_filter": "name__value", - "order_by": ["name__value"], - "display_labels": ["label__value"], - "branch": "aware", - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "label", "kind": "Text", "optional": True}, - {"name": "description", "kind": "Text", "optional": True}, - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "kind": "Attribute", - "optional": True, - "cardinality": "many", - }, - ], - }, - { - "name": "Status", - "namespace": "Builtin", - "description": "Represent the status of an object: active, maintenance", - "include_in_menu": True, - "icon": "mdi:list-status", - "label": "Status", - "default_filter": "name__value", - "order_by": ["name__value"], - "display_labels": ["label__value"], - "branch": "aware", - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "label", "kind": "Text", "optional": True}, - {"name": "description", "kind": "Text", "optional": True}, - ], - }, - { - "name": "Role", - "namespace": "Builtin", - "description": "Represent the role of an object", - "include_in_menu": True, - "icon": "mdi:ballot", - "label": "Role", - "default_filter": "name__value", - "order_by": ["name__value"], - "display_labels": ["label__value"], - "branch": "aware", - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "label", "kind": "Text", "optional": True}, - {"name": "description", "kind": "Text", "optional": True}, - ], - }, - { - "name": "Location", - "namespace": "Builtin", - "description": "A location represent a physical element: a building, a site, a city", - "include_in_menu": True, - "icon": "mdi:map-marker-radius-outline", - "label": "Location", - "default_filter": "name__value", - "order_by": ["name__value"], - "display_labels": ["name__value"], - "branch": "aware", - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "description", "kind": "Text", "optional": True}, - {"name": "type", "kind": "Text"}, - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "kind": "Attribute", - "optional": True, - "cardinality": "many", - }, - ], - }, - { - "name": "Criticality", - "namespace": "Builtin", - "description": "Level of criticality expressed from 1 to 10.", - "include_in_menu": True, - "icon": "mdi:alert-octagon-outline", - "label": "Criticality", - "default_filter": "name__value", - "order_by": ["name__value"], - "display_labels": ["name__value"], - "branch": "aware", - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "level", "kind": "Number", "enum": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]}, - {"name": "description", "kind": "Text", "optional": True}, - ], - }, - ], - } - - return SCHEMA - - -@pytest.fixture -async def location_schema() -> NodeSchema: - data = { - "name": "Location", - "namespace": "Builtin", - "default_filter": "name__value", - "attributes": [ - {"name": "name", "kind": "String", "unique": True}, - {"name": "description", "kind": "String", "optional": True}, - {"name": "type", "kind": "String"}, - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "optional": True, - "cardinality": "many", - }, - { - "name": "primary_tag", - "peer": "BultinTag", - "optional": True, - "cardinality": "one", - }, - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def location_cdg(db: InfrahubDatabase, tag_blue: Node, tag_red: Node) -> Node: - obj = 
await Node.init(schema="BuiltinLocation", db=db) - await obj.new(db=db, name="cdg01", type="SITE", tags=[tag_blue, tag_red]) - await obj.save(db=db) - return obj - - -@pytest.fixture -async def tag_blue(db: InfrahubDatabase) -> Node: - obj = await Node.init(schema="BuiltinTag", db=db) - await obj.new(db=db, name="Blue") - await obj.save(db=db) - return obj - - -@pytest.fixture -async def tag_red(db: InfrahubDatabase) -> Node: - obj = await Node.init(schema="BuiltinTag", db=db) - await obj.new(db=db, name="Red") - await obj.save(db=db) - return obj - - -@pytest.fixture -async def tag_green(db: InfrahubDatabase) -> Node: - obj = await Node.init(schema="BuiltinTag", db=db) - await obj.new(db=db, name="Green") - await obj.save(db=db) - return obj - - -@pytest.fixture -async def first_account(db: InfrahubDatabase) -> Node: - obj = await Node.init(db=db, schema="CoreAccount") - await obj.new(db=db, name="First Account", account_type="Git", password="TestPassword123") - await obj.save(db=db) - return obj - - -@pytest.fixture -async def second_account(db: InfrahubDatabase) -> Node: - obj = await Node.init(db=db, schema="CoreAccount") - await obj.new(db=db, name="Second Account", account_type="Git", password="TestPassword123") - await obj.save(db=db) - return obj - - -@pytest.fixture -async def repo01(db: InfrahubDatabase) -> Node: - obj = await Node.init(db=db, schema="CoreRepository") - await obj.new(db=db, name="repo01", location="https://github.com/my/repo.git") - await obj.save(db=db) - return obj - - -@pytest.fixture -async def repo99(db: InfrahubDatabase) -> Node: - obj = await Node.init(db=db, schema="CoreRepository") - await obj.new(db=db, name="repo99", location="https://github.com/my/repo99.git") - await obj.save(db=db) - return obj - - -@pytest.fixture -async def gqlquery01(db: InfrahubDatabase) -> Node: - obj = await Node.init(db=db, schema="CoreGraphQLQuery") - await obj.new(db=db, name="query01", query="query { device { name { value }}}") - await obj.save(db=db) - return obj - - -@pytest.fixture -async def gqlquery02(db: InfrahubDatabase, repo01: Node, tag_blue: Node, tag_red: Node) -> Node: - obj = await Node.init(db=db, schema="CoreGraphQLQuery") - await obj.new( - db=db, - name="query02", - query="query { CoreRepository { edges { node { name { value }}}}}", - repository=repo01, - tags=[tag_blue, tag_red], - ) - await obj.save(db=db) - return obj - - -@pytest.fixture -async def gqlquery03(db: InfrahubDatabase, repo01: Node, tag_blue: Node, tag_red: Node) -> Node: - obj = await Node.init(db=db, schema="CoreGraphQLQuery") - await obj.new( - db=db, - name="query03", - query="query { CoreRepository { edges { node { name { value }}}}}", - repository=repo01, - tags=[tag_blue, tag_red], - ) - await obj.save(db=db) - return obj - - -@pytest.fixture -async def schema_extension_01() -> dict[str, Any]: - return { - "version": "1.0", - "nodes": [ - { - "name": "Rack", - "namespace": "Infra", - "description": "A Rack represents a physical two- or four-post equipment rack in which devices can be installed.", - "label": "Rack", - "default_filter": "name__value", - "display_labels": ["name__value"], - "attributes": [ - {"name": "name", "kind": "Text"}, - {"name": "description", "kind": "Text", "optional": True}, - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "optional": True, - "cardinality": "many", - "kind": "Attribute", - }, - ], - } - ], - "extensions": { - "nodes": [ - { - "kind": "BuiltinTag", - "relationships": [ - { - "name": "racks", - "peer": "InfraRack", - 
"optional": True, - "cardinality": "many", - "kind": "Generic", - } - ], - } - ] - }, - } - - -@pytest.fixture -async def schema_extension_02() -> dict[str, Any]: - return { - "version": "1.0", - "nodes": [ - { - "name": "Contract", - "namespace": "Procurement", - "description": "Generic Contract", - "label": "Contract", - "display_labels": ["contract_ref__value"], - "order_by": ["contract_ref__value"], - "attributes": [ - { - "name": "contract_ref", - "label": "Contract Reference", - "kind": "Text", - "unique": True, - }, - {"name": "description", "kind": "Text", "optional": True}, - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "optional": True, - "cardinality": "many", - "kind": "Attribute", - }, - ], - } - ], - "extensions": { - "nodes": [ - { - "kind": "BuiltinTag", - "relationships": [ - { - "name": "contracts", - "peer": "ProcurementContract", - "optional": True, - "cardinality": "many", - "kind": "Generic", - } - ], - } - ] - }, - } - - -@pytest.fixture(scope="module") -async def ipam_schema() -> SchemaRoot: - SCHEMA = { - "version": "1.0", - "nodes": [ - { - "name": "IPPrefix", - "namespace": "Ipam", - "include_in_menu": False, - "inherit_from": ["BuiltinIPPrefix"], - "description": "IPv4 or IPv6 network", - "icon": "mdi:ip-network", - "label": "IP Prefix", - }, - { - "name": "IPAddress", - "namespace": "Ipam", - "include_in_menu": False, - "inherit_from": ["BuiltinIPAddress"], - "description": "IP Address", - "icon": "mdi:ip-outline", - "label": "IP Address", - }, - { - "name": "Device", - "namespace": "Infra", - "label": "Device", - "human_friendly_id": ["name__value"], - "order_by": ["name__value"], - "display_labels": ["name__value"], - "attributes": [{"name": "name", "kind": "Text", "unique": True}], - "relationships": [ - { - "name": "primary_address", - "peer": "IpamIPAddress", - "label": "Primary IP Address", - "optional": True, - "cardinality": "one", - "kind": "Attribute", - } - ], - }, - ], - } - - return SCHEMA - - -class BusRecorder(InfrahubMessageBus): - def __init__(self, component_type: Optional[ComponentType] = None): - self.messages: list[InfrahubMessage] = [] - self.messages_per_routing_key: dict[str, list[InfrahubMessage]] = {} - - async def publish( - self, message: InfrahubMessage, routing_key: str, delay: Optional[MessageTTL] = None, is_retry: bool = False - ) -> None: - self.messages.append(message) - if routing_key not in self.messages_per_routing_key: - self.messages_per_routing_key[routing_key] = [] - self.messages_per_routing_key[routing_key].append(message) - - @property - def seen_routing_keys(self) -> list[str]: - return list(self.messages_per_routing_key.keys()) diff --git a/python_sdk/tests/integration/test_export_import.py b/python_sdk/tests/integration/test_export_import.py deleted file mode 100644 index d66038166b..0000000000 --- a/python_sdk/tests/integration/test_export_import.py +++ /dev/null @@ -1,542 +0,0 @@ -from pathlib import Path -from typing import Any, Dict - -import pytest -import ujson - -from infrahub_sdk import InfrahubClient -from infrahub_sdk.ctl.exporter import LineDelimitedJSONExporter -from infrahub_sdk.ctl.importer import LineDelimitedJSONImporter -from infrahub_sdk.exceptions import SchemaNotFoundError -from infrahub_sdk.transfer.exceptions import TransferFileNotFoundError -from infrahub_sdk.transfer.schema_sorter import InfrahubSchemaTopologicalSorter -from tests.helpers.test_app import TestInfrahubApp - -PERSON_KIND = "TestingPerson" -POOL_KIND = "TestingPool" -CAR_KIND = "TestingCar" 
-MANUFACTURER_KIND = "TestingManufacturer" -TAG_KIND = "TestingTag" - -# pylint: disable=unused-argument - - -class TestSchemaExportImportBase(TestInfrahubApp): - @pytest.fixture(scope="class") - def temporary_directory(self, tmp_path_factory) -> Path: - return tmp_path_factory.mktemp("infrahub-integration-tests") - - @pytest.fixture(scope="class") - def schema_person_base(self) -> Dict[str, Any]: - return { - "name": "Person", - "namespace": "Testing", - "include_in_menu": True, - "label": "Person", - "attributes": [ - {"name": "name", "kind": "Text"}, - {"name": "description", "kind": "Text", "optional": True}, - {"name": "height", "kind": "Number", "optional": True}, - ], - "relationships": [ - {"name": "cars", "kind": "Generic", "optional": True, "peer": "TestingCar", "cardinality": "many"} - ], - } - - @pytest.fixture(scope="class") - def schema_car_base(self) -> Dict[str, Any]: - return { - "name": "Car", - "namespace": "Testing", - "include_in_menu": True, - "label": "Car", - "attributes": [ - {"name": "name", "kind": "Text"}, - {"name": "description", "kind": "Text", "optional": True}, - {"name": "color", "kind": "Text"}, - ], - "relationships": [ - { - "name": "owner", - "kind": "Attribute", - "optional": False, - "peer": "TestingPerson", - "cardinality": "one", - }, - { - "name": "manufacturer", - "kind": "Attribute", - "optional": False, - "peer": "TestingManufacturer", - "cardinality": "one", - "identifier": "car__manufacturer", - }, - ], - } - - @pytest.fixture(scope="class") - def schema_manufacturer_base(self) -> Dict[str, Any]: - return { - "name": "Manufacturer", - "namespace": "Testing", - "include_in_menu": True, - "label": "Manufacturer", - "attributes": [{"name": "name", "kind": "Text"}, {"name": "description", "kind": "Text", "optional": True}], - "relationships": [ - { - "name": "cars", - "kind": "Generic", - "optional": True, - "peer": "TestingCar", - "cardinality": "many", - "identifier": "car__manufacturer", - }, - { - "name": "customers", - "kind": "Generic", - "optional": True, - "peer": "TestingPerson", - "cardinality": "many", - "identifier": "person__manufacturer", - }, - ], - } - - @pytest.fixture(scope="class") - def schema_tag_base(self) -> Dict[str, Any]: - return { - "name": "Tag", - "namespace": "Testing", - "include_in_menu": True, - "label": "Testing Tag", - "attributes": [{"name": "name", "kind": "Text"}], - "relationships": [ - {"name": "cars", "kind": "Generic", "optional": True, "peer": "TestingCar", "cardinality": "many"}, - { - "name": "persons", - "kind": "Generic", - "optional": True, - "peer": "TestingPerson", - "cardinality": "many", - }, - ], - } - - @pytest.fixture(scope="class") - def schema(self, schema_car_base, schema_person_base, schema_manufacturer_base, schema_tag_base) -> Dict[str, Any]: - return { - "version": "1.0", - "nodes": [schema_person_base, schema_car_base, schema_manufacturer_base, schema_tag_base], - } - - @pytest.fixture(scope="class") - async def initial_dataset(self, client: InfrahubClient, schema): - await client.schema.load(schemas=[schema]) - - john = await client.create( - kind=PERSON_KIND, data=dict(name="John", height=175, description="The famous Joe Doe") - ) - await john.save() - - jane = await client.create( - kind=PERSON_KIND, data=dict(name="Jane", height=165, description="The famous Jane Doe") - ) - await jane.save() - - honda = await client.create(kind=MANUFACTURER_KIND, data=dict(name="honda", description="Honda Motor Co., Ltd")) - await honda.save() - - renault = await client.create( - 
kind=MANUFACTURER_KIND, - data=dict(name="renault", description="Groupe Renault is a French multinational automobile manufacturer"), - ) - await renault.save() - - accord = await client.create( - kind=CAR_KIND, - data=dict(name="accord", description="Honda Accord", color="#3443eb", manufacturer=honda, owner=jane), - ) - await accord.save() - - civic = await client.create( - kind=CAR_KIND, - data=dict(name="civic", description="Honda Civic", color="#c9eb34", manufacturer=honda, owner=jane), - ) - await civic.save() - - megane = await client.create( - kind=CAR_KIND, - data=dict(name="Megane", description="Renault Megane", color="#c93420", manufacturer=renault, owner=john), - ) - await megane.save() - - blue = await client.create(kind=TAG_KIND, data=dict(name="blue", cars=[accord, civic], persons=[jane])) - await blue.save() - - red = await client.create(kind=TAG_KIND, data=dict(name="red", persons=[john])) - await red.save() - - objs = { - "john": john.id, - "jane": jane.id, - "honda": honda.id, - "renault": renault.id, - "accord": accord.id, - "civic": civic.id, - "megane": megane.id, - "blue": blue.id, - "red": red.id, - } - - return objs - - def reset_export_directory(self, temporary_directory: Path): - for file in temporary_directory.iterdir(): - if file.is_file(): - file.unlink() - - async def test_step01_export_no_schema(self, client: InfrahubClient, temporary_directory: Path): - exporter = LineDelimitedJSONExporter(client=client) - await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[]) - - nodes_file = temporary_directory / "nodes.json" - relationships_file = temporary_directory / "relationships.json" - - # Export should create files even if they do not really hold any data - assert nodes_file.exists() - assert relationships_file.exists() - - # Verify that only the admin account has been exported - admin_account_node_dump = ujson.loads(nodes_file.read_text()) - assert admin_account_node_dump - assert admin_account_node_dump["kind"] == "CoreAccount" - assert ujson.loads(admin_account_node_dump["graphql_json"])["name"]["value"] == "admin" - - relationships_dump = ujson.loads(relationships_file.read_text()) - assert not relationships_dump - - async def test_step02_import_no_schema(self, client: InfrahubClient, temporary_directory: Path): - importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) - await importer.import_data(import_directory=temporary_directory, branch="main") - - # Schema should not be present - for kind in (PERSON_KIND, CAR_KIND, MANUFACTURER_KIND, TAG_KIND): - with pytest.raises(SchemaNotFoundError): - await client.all(kind=kind) - - # Cleanup for next tests - self.reset_export_directory(temporary_directory) - - async def test_step03_export_empty_dataset(self, client: InfrahubClient, temporary_directory: Path, schema): - await client.schema.load(schemas=[schema]) - - exporter = LineDelimitedJSONExporter(client=client) - await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[]) - - nodes_file = temporary_directory / "nodes.json" - relationships_file = temporary_directory / "relationships.json" - - # Export should create files even if they do not really hold any data - assert nodes_file.exists() - assert relationships_file.exists() - - # Verify that only the admin account has been exported - admin_account_node_dump = ujson.loads(nodes_file.read_text()) - assert admin_account_node_dump - assert admin_account_node_dump["kind"] == "CoreAccount" - assert 
ujson.loads(admin_account_node_dump["graphql_json"])["name"]["value"] == "admin"
-
-        relationships_dump = ujson.loads(relationships_file.read_text())
-        assert not relationships_dump
-
-    async def test_step04_import_empty_dataset(self, client: InfrahubClient, temporary_directory: Path, schema):
-        await client.schema.load(schemas=[schema])
-
-        importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter())
-        await importer.import_data(import_directory=temporary_directory, branch="main")
-
-        # No data for any kind should be retrieved
-        for kind in (PERSON_KIND, CAR_KIND, MANUFACTURER_KIND, TAG_KIND):
-            assert not await client.all(kind=kind)
-
-        # Cleanup for next tests
-        self.reset_export_directory(temporary_directory)
-
-    async def test_step05_export_initial_dataset(
-        self, client: InfrahubClient, temporary_directory: Path, initial_dataset
-    ):
-        exporter = LineDelimitedJSONExporter(client=client)
-        await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[])
-
-        nodes_file = temporary_directory / "nodes.json"
-        relationships_file = temporary_directory / "relationships.json"
-
-        # Export should create files
-        assert nodes_file.exists()
-        assert relationships_file.exists()
-
-        # Verify that nodes have been exported
-        nodes_dump = []
-        with nodes_file.open() as reader:
-            while line := reader.readline():
-                nodes_dump.append(ujson.loads(line))
-        assert len(nodes_dump) == len(initial_dataset) + 1
-
-        relationships_dump = ujson.loads(relationships_file.read_text())
-        assert not relationships_dump
-
-    async def test_step06_import_initial_dataset(self, client: InfrahubClient, temporary_directory: Path, schema):
-        await client.schema.load(schemas=[schema])
-
-        importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter())
-        await importer.import_data(import_directory=temporary_directory, branch="main")
-
-        # Each kind must have nodes
-        for kind in (PERSON_KIND, CAR_KIND, MANUFACTURER_KIND, TAG_KIND):
-            assert await client.all(kind=kind)
-
-    async def test_step07_import_initial_dataset_with_existing_data(
-        self, client: InfrahubClient, temporary_directory: Path, initial_dataset
-    ):
-        # Count existing nodes
-        counters: Dict[str, int] = {}
-        for kind in (PERSON_KIND, CAR_KIND, MANUFACTURER_KIND, TAG_KIND):
-            nodes = await client.all(kind=kind)
-            counters[kind] = len(nodes)
-
-        importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter())
-        await importer.import_data(import_directory=temporary_directory, branch="main")
-
-        # Nodes must not be duplicated
-        for kind in (PERSON_KIND, CAR_KIND, MANUFACTURER_KIND, TAG_KIND):
-            nodes = await client.all(kind=kind)
-            assert len(nodes) == counters[kind]
-
-        # Cleanup for next tests
-        self.reset_export_directory(temporary_directory)
-
-    async def test_step99_import_wrong_directory(self, client: InfrahubClient):
-        importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter())
-        # Using a directory that does not exist should lead to an exception
-        with pytest.raises(TransferFileNotFoundError):
-            await importer.import_data(import_directory=Path("this_directory_does_not_exist"), branch="main")
-
-
-class TestSchemaExportImportManyRelationships(TestInfrahubApp):
-    @pytest.fixture(scope="class")
-    def temporary_directory(self, tmp_path_factory) -> Path:
-        return tmp_path_factory.mktemp("infrahub-integration-tests")
-
-    @pytest.fixture(scope="class")
-    def 
schema_pool_base(self) -> Dict[str, Any]: - return { - "name": "Pool", - "namespace": "Testing", - "include_in_menu": True, - "label": "Pool", - "attributes": [{"name": "name", "kind": "Text"}, {"name": "description", "kind": "Text", "optional": True}], - "relationships": [ - { - "name": "cars", - "kind": "Attribute", - "optional": True, - "peer": "TestingCar", - "cardinality": "many", - "identifier": "car__pool", - } - ], - } - - @pytest.fixture(scope="class") - def schema_car_base(self) -> Dict[str, Any]: - return { - "name": "Car", - "namespace": "Testing", - "include_in_menu": True, - "label": "Car", - "attributes": [ - {"name": "name", "kind": "Text"}, - {"name": "description", "kind": "Text", "optional": True}, - {"name": "color", "kind": "Text"}, - ], - "relationships": [ - { - "name": "pools", - "kind": "Attribute", - "optional": True, - "peer": "TestingPool", - "cardinality": "many", - }, - { - "name": "manufacturer", - "kind": "Attribute", - "optional": False, - "peer": "TestingManufacturer", - "cardinality": "one", - "identifier": "car__manufacturer", - }, - ], - } - - @pytest.fixture(scope="class") - def schema_manufacturer_base(self) -> Dict[str, Any]: - return { - "name": "Manufacturer", - "namespace": "Testing", - "include_in_menu": True, - "label": "Manufacturer", - "attributes": [{"name": "name", "kind": "Text"}, {"name": "description", "kind": "Text", "optional": True}], - "relationships": [ - { - "name": "cars", - "kind": "Generic", - "optional": True, - "peer": "TestingCar", - "cardinality": "many", - "identifier": "car__manufacturer", - } - ], - } - - @pytest.fixture(scope="class") - def schema(self, schema_car_base, schema_pool_base, schema_manufacturer_base) -> Dict[str, Any]: - return { - "version": "1.0", - "nodes": [schema_pool_base, schema_car_base, schema_manufacturer_base], - } - - @pytest.fixture(scope="class") - async def initial_dataset(self, client: InfrahubClient, schema): # noqa: PLR0914 - await client.schema.load(schemas=[schema]) - - bmw = await client.create( - kind=MANUFACTURER_KIND, - data=dict( - name="BMW", - description="Bayerische Motoren Werke AG is a German multinational manufacturer of luxury vehicles and motorcycles", - ), - ) - await bmw.save() - - fiat = await client.create( - kind=MANUFACTURER_KIND, - data=dict(name="Fiat", description="Fiat Automobiles S.p.A. 
is an Italian automobile manufacturer"), - ) - await fiat.save() - - five_series = await client.create( - kind=CAR_KIND, data=dict(name="5 series", description="BMW 5 series", color="#000000", manufacturer=bmw) - ) - await five_series.save() - - five_hundred = await client.create( - kind=CAR_KIND, data=dict(name="500", description="Fiat 500", color="#540302", manufacturer=fiat) - ) - await five_hundred.save() - - premium = await client.create( - kind=POOL_KIND, data=dict(name="Premium", description="Premium cars", cars=[five_series]) - ) - await premium.save() - - compact = await client.create( - kind=POOL_KIND, data=dict(name="Compact", description="Compact cars", cars=[five_hundred]) - ) - await compact.save() - - sedan = await client.create( - kind=POOL_KIND, data=dict(name="Sedan", description="Sedan cars", cars=[five_series]) - ) - await sedan.save() - - city_cars = await client.create( - kind=POOL_KIND, data=dict(name="City", description="City cars", cars=[five_hundred]) - ) - await city_cars.save() - - objs = { - "bmw": bmw.id, - "fiat": fiat.id, - "5series": five_series.id, - "500": five_hundred.id, - "premium": premium.id, - "compact": compact.id, - "sedan": sedan.id, - "city_cars": city_cars.id, - } - - return objs - - def reset_export_directory(self, temporary_directory: Path): - for file in temporary_directory.iterdir(): - if file.is_file(): - file.unlink() - - async def test_step01_export_initial_dataset( - self, client: InfrahubClient, temporary_directory: Path, initial_dataset - ): - exporter = LineDelimitedJSONExporter(client=client) - await exporter.export(export_directory=temporary_directory, branch="main", namespaces=[]) - - nodes_file = temporary_directory / "nodes.json" - relationships_file = temporary_directory / "relationships.json" - - # Export should create files - assert nodes_file.exists() - assert relationships_file.exists() - - # Verify that nodes have been exported - nodes_dump = [] - with nodes_file.open() as reader: - while line := reader.readline(): - nodes_dump.append(ujson.loads(line)) - assert len(nodes_dump) == len(initial_dataset) + 1 - - # Make sure there are as many relationships as there are in the database - relationship_count = 0 - for node in await client.all(kind=POOL_KIND): - await node.cars.fetch() - relationship_count += len(node.cars.peers) - relationships_dump = ujson.loads(relationships_file.read_text()) - assert len(relationships_dump) == relationship_count - - async def test_step02_import_initial_dataset(self, client: InfrahubClient, temporary_directory: Path, schema): - await client.schema.load(schemas=[schema]) - - importer = LineDelimitedJSONImporter(client=client, topological_sorter=InfrahubSchemaTopologicalSorter()) - await importer.import_data(import_directory=temporary_directory, branch="main") - - # Each kind must have nodes - for kind in (POOL_KIND, CAR_KIND, MANUFACTURER_KIND): - assert await client.all(kind=kind) - - # Make sure relationships were properly imported - relationship_count = 0 - for node in await client.all(kind=POOL_KIND): - await node.cars.fetch() - relationship_count += len(node.cars.peers) - relationships_file = temporary_directory / "relationships.json" - relationships_dump = ujson.loads(relationships_file.read_text()) - assert len(relationships_dump) == relationship_count - - async def test_step03_import_initial_dataset_with_existing_data( - self, client: InfrahubClient, temporary_directory: Path, initial_dataset - ): - importer = LineDelimitedJSONImporter(client=client, 
topological_sorter=InfrahubSchemaTopologicalSorter()) - await importer.import_data(import_directory=temporary_directory, branch="main") - - # Each kind must have nodes - for kind in (POOL_KIND, CAR_KIND, MANUFACTURER_KIND): - assert await client.all(kind=kind) - - # Make sure relationships were properly imported - relationship_count = 0 - for node in await client.all(kind=POOL_KIND): - await node.cars.fetch() - relationship_count += len(node.cars.peers) - relationships_file = temporary_directory / "relationships.json" - relationships_dump = ujson.loads(relationships_file.read_text()) - assert len(relationships_dump) == relationship_count - - # Cleanup for next tests - self.reset_export_directory(temporary_directory) diff --git a/python_sdk/tests/integration/test_infrahub_client.py b/python_sdk/tests/integration/test_infrahub_client.py deleted file mode 100644 index b07d79e93a..0000000000 --- a/python_sdk/tests/integration/test_infrahub_client.py +++ /dev/null @@ -1,285 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -import pytest -from infrahub.core import registry -from infrahub.core.initialization import create_branch -from infrahub.core.node import Node -from infrahub.server import app - -from infrahub_sdk import Config, InfrahubClient -from infrahub_sdk.constants import InfrahubClientMode -from infrahub_sdk.exceptions import BranchNotFoundError -from infrahub_sdk.node import InfrahubNode -from infrahub_sdk.playback import JSONPlayback -from infrahub_sdk.recorder import JSONRecorder -from infrahub_sdk.schema import ProfileSchema - -from .conftest import InfrahubTestClient - -if TYPE_CHECKING: - from pathlib import Path - - from infrahub.database import InfrahubDatabase - - -# pylint: disable=unused-argument - - -class TestInfrahubClient: - @pytest.fixture(scope="class") - async def test_client(self) -> InfrahubTestClient: - registry.delete_all() - - return InfrahubTestClient(app) - - @pytest.fixture - def client(self, test_client: InfrahubTestClient) -> InfrahubClient: - config = Config(username="admin", password="infrahub", requester=test_client.async_request) - return InfrahubClient(config=config) - - @pytest.fixture(scope="class") - async def base_dataset(self, db: InfrahubDatabase, test_client: InfrahubTestClient, builtin_org_schema): - config = Config(username="admin", password="infrahub", requester=test_client.async_request) - client = InfrahubClient(config=config) - response = await client.schema.load(schemas=[builtin_org_schema]) - assert not response.errors - - await create_branch(branch_name="branch01", db=db) - - query_string = """ - query { - branch { - id - name - } - } - """ - obj1 = await Node.init(schema="CoreGraphQLQuery", db=db) - await obj1.new(db=db, name="test_query2", description="test query", query=query_string) - await obj1.save(db=db) - - obj2 = await Node.init(schema="CoreRepository", db=db) - await obj2.new( - db=db, - name="repository1", - description="test repository", - location="git@github.com:mock/test.git", - ) - await obj2.save(db=db) - - obj3 = await Node.init(schema="CoreTransformJinja2", db=db) - await obj3.new( - db=db, - name="rfile1", - description="test rfile", - template_path="mytemplate.j2", - repository=obj2, - query=obj1, - ) - await obj3.save(db=db) - - obj4 = await Node.init(schema="CoreTransformPython", db=db) - await obj4.new( - db=db, - name="transform01", - description="test transform01", - file_path="mytransformation.py", - class_name="Transform01", - query=obj1, - repository=obj2, - ) - await 
obj4.save(db=db) - - async def test_query_branches(self, client: InfrahubClient, init_db_base, base_dataset): - branches = await client.branch.all() - main = await client.branch.get(branch_name="main") - - with pytest.raises(BranchNotFoundError): - await client.branch.get(branch_name="not-found") - - assert main.name == "main" - assert "main" in branches - assert "branch01" in branches - - async def test_branch_delete(self, client: InfrahubClient, init_db_base, base_dataset, db): - async_branch = "async-delete-branch" - await create_branch(branch_name=async_branch, db=db) - pre_delete = await client.branch.all() - await client.branch.delete(async_branch) - post_delete = await client.branch.all() - assert async_branch in pre_delete.keys() - assert async_branch not in post_delete.keys() - - async def test_get_all(self, client: InfrahubClient, init_db_base, base_dataset): - obj1 = await client.create(kind="BuiltinLocation", name="jfk1", description="new york", type="site") - await obj1.save() - - obj2 = await client.create(kind="BuiltinLocation", name="sfo1", description="san francisco", type="site") - await obj2.save() - - nodes = await client.all(kind="BuiltinLocation") - assert len(nodes) == 2 - assert isinstance(nodes[0], InfrahubNode) - assert sorted([node.name.value for node in nodes]) == ["jfk1", "sfo1"] # type: ignore[attr-defined] - - async def test_get_one(self, client: InfrahubClient, init_db_base, base_dataset): - obj1 = await client.create(kind="BuiltinLocation", name="jfk2", description="new york", type="site") - await obj1.save() - - obj2 = await client.create(kind="BuiltinLocation", name="sfo2", description="san francisco", type="site") - await obj2.save() - - node1 = await client.get(kind="BuiltinLocation", id=obj1.id) - assert isinstance(node1, InfrahubNode) - assert node1.name.value == "jfk2" # type: ignore[attr-defined] - - node2 = await client.get(kind="BuiltinLocation", id="jfk2") - assert isinstance(node2, InfrahubNode) - assert node2.name.value == "jfk2" # type: ignore[attr-defined] - - async def test_filters_partial_match(self, client: InfrahubClient, init_db_base, base_dataset): - nodes = await client.filters(kind="BuiltinLocation", name__value="jfk") - assert not nodes - - nodes = await client.filters(kind="BuiltinLocation", partial_match=True, name__value="jfk") - assert len(nodes) == 2 - assert isinstance(nodes[0], InfrahubNode) - assert sorted([node.name.value for node in nodes]) == ["jfk1", "jfk2"] # type: ignore[attr-defined] - - async def test_get_generic(self, client: InfrahubClient, db: InfrahubDatabase, init_db_base): - nodes = await client.all(kind="CoreNode") - assert len(nodes) - - async def test_get_generic_fragment(self, client: InfrahubClient, db: InfrahubDatabase, init_db_base): - nodes = await client.all(kind="CoreGenericAccount", fragment=True, exclude=["type"]) - assert len(nodes) - assert nodes[0].typename == "CoreAccount" - assert nodes[0].name.value is not None # type: ignore[attr-defined] - - async def test_get_generic_filter_source(self, client: InfrahubClient, db: InfrahubDatabase, init_db_base): - admin = await client.get(kind="CoreAccount", name__value="admin") - - obj1 = await client.create( - kind="BuiltinLocation", name={"value": "jfk3", "source": admin.id}, description="new york", type="site" - ) - await obj1.save() - - nodes = await client.filters(kind="CoreNode", any__source__id=admin.id) - assert len(nodes) == 1 - assert nodes[0].typename == "BuiltinLocation" - assert nodes[0].id == obj1.id - - async def test_get_related_nodes(self, 
client: InfrahubClient, db: InfrahubDatabase, init_db_base):
-        nodes = await client.all(kind="CoreRepository")
-        assert len(nodes) == 1
-        repo = nodes[0]
-
-        assert repo.transformations.peers == []  # type: ignore[attr-defined]
-        await repo.transformations.fetch()  # type: ignore[attr-defined]
-        assert len(repo.transformations.peers) == 2  # type: ignore[attr-defined]
-
-    async def test_tracking_mode(self, client: InfrahubClient, db: InfrahubDatabase, init_db_base, base_dataset):
-        tag_names = ["BLUE", "RED", "YELLOW"]
-        orgname = "Acme"
-
-        async def create_org_with_tag(clt: InfrahubClient, nbr_tags: int):
-            tags = []
-            for idx in range(nbr_tags):
-                obj = await clt.create(kind="BuiltinTag", name=f"tracking-{tag_names[idx]}")
-                await obj.save(allow_upsert=True)
-                tags.append(obj)
-
-            org = await clt.create(kind="TestOrganization", name=orgname, tags=tags)
-            await org.save(allow_upsert=True)
-
-        # First execution, we create one org with 3 tags
-        nbr_tags = 3
-        async with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-            await create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-
-        assert client.mode == InfrahubClientMode.DEFAULT
-        group = await client.get(
-            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-        )
-        assert len(group.members.peers) == 4
-        tags = await client.all(kind="BuiltinTag")
-        assert len(tags) == 3
-
-        # Second execution, we create one org with 2 tags but we don't delete the third one
-        nbr_tags = 2
-        async with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=False) as clt:
-            await create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-
-        assert client.mode == InfrahubClientMode.DEFAULT
-        group = await client.get(
-            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-        )
-        assert len(group.members.peers) == 3
-        tags = await client.all(kind="BuiltinTag")
-        assert len(tags) == 3
-
-        # Third execution, we create one org with 1 tag and we delete the second one
-        nbr_tags = 1
-        async with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-            await create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-
-        assert client.mode == InfrahubClientMode.DEFAULT
-        group = await client.get(
-            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-        )
-        assert len(group.members.peers) == 2
-
-        tags = await client.all(kind="BuiltinTag")
-        assert len(tags) == 2
-
-        # Fourth execution, validate that the group will not be updated if there is an exception
-        nbr_tags = 3
-        with pytest.raises(ValueError):
-            async with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-                await create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-                raise ValueError("something happened")
-
-        assert client.mode == InfrahubClientMode.DEFAULT
-        group = await client.get(
-            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-        )
-        assert len(group.members.peers) == 2
-
-    async def test_recorder_with_playback_rewrite_host(
-        self, client: InfrahubClient, db: InfrahubDatabase, init_db_base, base_dataset, tmp_path: Path
-    ):
-        client.config.custom_recorder = JSONRecorder(host="recorder-test", directory=str(tmp_path))
-        nodes = await client.all(kind="CoreRepository")
-
-        playback_config = JSONPlayback(directory=str(tmp_path))
-        config = Config(
-            address="http://recorder-test:8000",
-            requester=playback_config.async_request,
-        )
-        
playback = InfrahubClient(config=config) - recorded_nodes = await playback.all(kind="CoreRepository") - - assert len(nodes) == 1 - assert nodes == recorded_nodes - assert recorded_nodes[0].name.value == "repository1" - - async def test_profile(self, client: InfrahubClient, db: InfrahubDatabase, init_db_base, base_dataset): - profile_schema_kind = "ProfileBuiltinStatus" - profile_schema = await client.schema.get(kind=profile_schema_kind) - assert isinstance(profile_schema, ProfileSchema) - - profile1 = await client.create( - kind=profile_schema_kind, - profile_name="profile1", - profile_priority=1000, - description="description in profile", - ) - await profile1.save() - - obj = await client.create(kind="BuiltinStatus", name="planned", profiles=[profile1]) - await obj.save() - - obj1 = await client.get(kind="BuiltinStatus", id=obj.id) - assert obj1.description.value == "description in profile" diff --git a/python_sdk/tests/integration/test_infrahub_client_sync.py b/python_sdk/tests/integration/test_infrahub_client_sync.py deleted file mode 100644 index 5be96418b3..0000000000 --- a/python_sdk/tests/integration/test_infrahub_client_sync.py +++ /dev/null @@ -1,287 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -import pytest -from infrahub.core.initialization import create_branch -from infrahub.core.node import Node -from infrahub.server import app - -from infrahub_sdk import Config, InfrahubClientSync -from infrahub_sdk.constants import InfrahubClientMode -from infrahub_sdk.exceptions import BranchNotFoundError -from infrahub_sdk.node import InfrahubNodeSync -from infrahub_sdk.playback import JSONPlayback -from infrahub_sdk.recorder import JSONRecorder -from infrahub_sdk.schema import ProfileSchema - -from .conftest import InfrahubTestClient - -if TYPE_CHECKING: - from pathlib import Path - - from infrahub.database import InfrahubDatabase - - -# pylint: disable=unused-argument - - -class TestInfrahubClientSync: - @pytest.fixture(scope="class") - async def test_client(self) -> InfrahubTestClient: - return InfrahubTestClient(app) - - @pytest.fixture - def client(self, test_client: InfrahubTestClient): - config = Config( - username="admin", - password="infrahub", - sync_requester=test_client.sync_request, - ) - return InfrahubClientSync(config=config) - - @pytest.fixture(scope="class") - async def base_dataset(self, db: InfrahubDatabase, test_client: InfrahubTestClient, builtin_org_schema): - config = Config(username="admin", password="infrahub", sync_requester=test_client.sync_request) - client = InfrahubClientSync(config=config) - response = client.schema.load(schemas=[builtin_org_schema]) - assert not response.errors - - await create_branch(branch_name="branch01", db=db) - - query_string = """ - query { - branch { - id - name - } - } - """ - obj1 = await Node.init(schema="CoreGraphQLQuery", db=db) - await obj1.new(db=db, name="test_query2", description="test query", query=query_string) - await obj1.save(db=db) - - obj2 = await Node.init(schema="CoreRepository", db=db) - await obj2.new( - db=db, - name="repository1", - description="test repository", - location="git@github.com:mock/test.git", - ) - await obj2.save(db=db) - - obj3 = await Node.init(schema="CoreTransformJinja2", db=db) - await obj3.new( - db=db, - name="rfile1", - description="test rfile", - template_path="mytemplate.j2", - repository=obj2, - query=obj1, - ) - await obj3.save(db=db) - - obj4 = await Node.init(schema="CoreTransformPython", db=db) - await obj4.new( - db=db, - name="transform01", - 
description="test transform01", - file_path="mytransformation.py", - class_name="Transform01", - query=obj1, - repository=obj2, - ) - await obj4.save(db=db) - - async def test_query_branches(self, client: InfrahubClientSync, init_db_base, base_dataset): - branches = client.branch.all() - main = client.branch.get(branch_name="main") - - with pytest.raises(BranchNotFoundError): - client.branch.get(branch_name="not-found") - - assert main.name == "main" - assert "main" in branches - assert "branch01" in branches - - async def test_branch_delete(self, client: InfrahubClientSync, init_db_base, base_dataset, db): - async_branch = "async-delete-branch" - await create_branch(branch_name=async_branch, db=db) - - pre_delete = client.branch.all() - client.branch.delete(async_branch) - post_delete = client.branch.all() - assert async_branch in pre_delete.keys() - assert async_branch not in post_delete.keys() - - async def test_get_all(self, client: InfrahubClientSync, init_db_base, base_dataset): - obj1 = client.create(kind="BuiltinLocation", name="jfk1", description="new york", type="site") - obj1.save() - - obj2 = client.create(kind="BuiltinLocation", name="sfo1", description="san francisco", type="site") - obj2.save() - - nodes = client.all(kind="BuiltinLocation") - assert len(nodes) == 2 - assert isinstance(nodes[0], InfrahubNodeSync) - assert sorted([node.name.value for node in nodes]) == ["jfk1", "sfo1"] # type: ignore[attr-defined] - - async def test_get_one(self, client: InfrahubClientSync, init_db_base, base_dataset): - obj1 = client.create(kind="BuiltinLocation", name="jfk2", description="new york", type="site") - obj1.save() - - obj2 = client.create(kind="BuiltinLocation", name="sfo2", description="san francisco", type="site") - obj2.save() - - node1 = client.get(kind="BuiltinLocation", id=obj1.id) - assert isinstance(node1, InfrahubNodeSync) - assert node1.name.value == "jfk2" # type: ignore[attr-defined] - - node2 = client.get(kind="BuiltinLocation", id="jfk2") - assert isinstance(node2, InfrahubNodeSync) - assert node2.name.value == "jfk2" # type: ignore[attr-defined] - - async def test_filters_partial_match(self, client: InfrahubClientSync, init_db_base, base_dataset): - nodes = client.filters(kind="BuiltinLocation", name__value="jfk") - assert not nodes - - nodes = client.filters(kind="BuiltinLocation", partial_match=True, name__value="jfk") - assert len(nodes) == 2 - assert isinstance(nodes[0], InfrahubNodeSync) - assert sorted([node.name.value for node in nodes]) == ["jfk1", "jfk2"] # type: ignore[attr-defined] - - async def test_get_generic(self, client: InfrahubClientSync, init_db_base): - nodes = client.all(kind="CoreNode") - assert len(nodes) - - async def test_get_generic_fragment(self, client: InfrahubClientSync, init_db_base, base_dataset): - nodes = client.all(kind="CoreGenericAccount", fragment=True, exclude=["type"]) - assert len(nodes) - assert nodes[0].typename == "CoreAccount" - assert nodes[0].name.value is not None # type: ignore[attr-defined] - - async def test_get_generic_filter_source(self, client: InfrahubClientSync, init_db_base, base_dataset): - admin = client.get(kind="CoreAccount", name__value="admin") - - obj1 = client.create( - kind="BuiltinLocation", name={"value": "jfk3", "source": admin.id}, description="new york", type="site" - ) - obj1.save() - - nodes = client.filters(kind="CoreNode", any__source__id=admin.id) - assert len(nodes) == 1 - assert nodes[0].typename == "BuiltinLocation" - assert nodes[0].id == obj1.id - - async def 
test_get_related_nodes(self, client: InfrahubClientSync, init_db_base, base_dataset):
-        nodes = client.all(kind="CoreRepository")
-        assert len(nodes) == 1
-        repo = nodes[0]
-
-        assert repo.transformations.peers == []  # type: ignore[attr-defined]
-        repo.transformations.fetch()  # type: ignore[attr-defined]
-        assert len(repo.transformations.peers) == 2  # type: ignore[attr-defined]
-
-    def test_tracking_mode(self, client: InfrahubClientSync, db: InfrahubDatabase, init_db_base, base_dataset):
-        tag_names = ["BLUE", "RED", "YELLOW"]
-        orgname = "Acme"
-
-        def create_org_with_tag(clt: InfrahubClientSync, nbr_tags: int):
-            tags = []
-            for idx in range(nbr_tags):
-                obj = clt.create(kind="BuiltinTag", name=f"tracking-{tag_names[idx]}")
-                obj.save(allow_upsert=True)
-                tags.append(obj)
-
-            org = clt.create(kind="TestOrganization", name=orgname, tags=tags)
-            org.save(allow_upsert=True)
-
-        # First execution, we create one org with 3 tags
-        nbr_tags = 3
-        with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-            create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-
-        assert client.mode == InfrahubClientMode.DEFAULT
-        group = client.get(
-            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-        )
-        assert len(group.members.peers) == 4
-        tags = client.all(kind="BuiltinTag")
-        assert len(tags) == 3
-
-        # Second execution, we create one org with 2 tags but we don't delete the third one
-        nbr_tags = 2
-        with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=False) as clt:
-            create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-
-        assert client.mode == InfrahubClientMode.DEFAULT
-        group = client.get(
-            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-        )
-        assert len(group.members.peers) == 3
-        tags = client.all(kind="BuiltinTag")
-        assert len(tags) == 3
-
-        # Third execution, we create one org with 1 tag and we delete the second one
-        nbr_tags = 1
-        with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-            create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-
-        assert client.mode == InfrahubClientMode.DEFAULT
-        group = client.get(
-            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-        )
-        assert len(group.members.peers) == 2
-
-        tags = client.all(kind="BuiltinTag")
-        assert len(tags) == 2
-
-        # Fourth execution, validate that the group will not be updated if there is an exception
-        nbr_tags = 3
-        with pytest.raises(ValueError):
-            with client.start_tracking(params={"orgname": orgname}, delete_unused_nodes=True) as clt:
-                create_org_with_tag(clt=clt, nbr_tags=nbr_tags)
-                raise ValueError("something happened")
-
-        assert client.mode == InfrahubClientMode.DEFAULT
-        group = client.get(
-            kind="CoreStandardGroup", name__value=client.group_context._generate_group_name(), include=["members"]
-        )
-        assert len(group.members.peers) == 2
-
-    def test_recorder_with_playback(
-        self, client: InfrahubClientSync, db: InfrahubDatabase, init_db_base, base_dataset, tmp_path: Path
-    ):
-        client.config.custom_recorder = JSONRecorder(directory=str(tmp_path))
-        nodes = client.all(kind="CoreRepository")
-
-        playback_config = JSONPlayback(directory=str(tmp_path))
-        config = Config(
-            address=client.config.address,
-            sync_requester=playback_config.sync_request,
-        )
-        playback = InfrahubClientSync(config=config)
-        recorded_nodes = playback.all(kind="CoreRepository")
-
-        assert len(nodes) == 1
-        assert nodes == 
recorded_nodes - assert recorded_nodes[0].name.value == "repository1" - - def test_profile(self, client: InfrahubClientSync, db: InfrahubDatabase, init_db_base, base_dataset): - profile_schema_kind = "ProfileBuiltinStatus" - profile_schema = client.schema.get(kind=profile_schema_kind) - assert isinstance(profile_schema, ProfileSchema) - - profile1 = client.create( - kind=profile_schema_kind, - profile_name="profile1", - profile_priority=1000, - description="description in profile", - ) - profile1.save() - - obj = client.create(kind="BuiltinStatus", name="planned", profiles=[profile1]) - obj.save() - - obj1 = client.get(kind="BuiltinStatus", id=obj.id) - assert obj1.description.value == "description in profile" diff --git a/python_sdk/tests/integration/test_node.py b/python_sdk/tests/integration/test_node.py deleted file mode 100644 index b03100dcdf..0000000000 --- a/python_sdk/tests/integration/test_node.py +++ /dev/null @@ -1,403 +0,0 @@ -import pytest -from infrahub.core.manager import NodeManager -from infrahub.core.node import Node -from infrahub.database import InfrahubDatabase -from infrahub.server import app - -from infrahub_sdk import Config, InfrahubClient -from infrahub_sdk.exceptions import NodeNotFoundError, UninitializedError -from infrahub_sdk.node import InfrahubNode - -from .conftest import InfrahubTestClient - -# pylint: disable=unused-argument - - -class TestInfrahubNode: - @pytest.fixture(scope="class") - async def test_client(self): - return InfrahubTestClient(app) - - @pytest.fixture - async def client(self, test_client): - config = Config(username="admin", password="infrahub", requester=test_client.async_request) - return InfrahubClient(config=config) - - @pytest.fixture(scope="class") - async def load_builtin_schema(self, db: InfrahubDatabase, test_client: InfrahubTestClient, builtin_org_schema): - config = Config(username="admin", password="infrahub", requester=test_client.async_request) - client = InfrahubClient(config=config) - response = await client.schema.load(schemas=[builtin_org_schema]) - assert not response.errors - - @pytest.fixture(scope="class") - async def load_ipam_schema(self, db: InfrahubDatabase, test_client: InfrahubTestClient, ipam_schema) -> None: - config = Config(username="admin", password="infrahub", requester=test_client.async_request) - client = InfrahubClient(config=config) - response = await client.schema.load(schemas=[ipam_schema]) - assert not response.errors - - @pytest.fixture - async def default_ipam_namespace(self, client: InfrahubClient) -> InfrahubNode: - return await client.get(kind="IpamNamespace", name__value="default") - - async def test_node_create(self, client: InfrahubClient, init_db_base, load_builtin_schema, location_schema): - data = { - "name": {"value": "JFK1"}, - "description": {"value": "JFK Airport"}, - "type": {"value": "SITE"}, - } - node = InfrahubNode(client=client, schema=location_schema, data=data) - await node.save() - - assert node.id is not None - - async def test_node_delete_client( - self, - db: InfrahubDatabase, - client: InfrahubClient, - init_db_base, - load_builtin_schema, - location_schema, - ): - data = { - "name": {"value": "ARN"}, - "description": {"value": "Arlanda Airport"}, - "type": {"value": "SITE"}, - } - node = InfrahubNode(client=client, schema=location_schema, data=data) - await node.save() - nodedb_pre_delete = await NodeManager.get_one(id=node.id, db=db, include_owner=True, include_source=True) - - await node.delete() - nodedb_post_delete = await NodeManager.get_one(id=node.id, db=db, 
include_owner=True, include_source=True)
-        assert nodedb_pre_delete
-        assert nodedb_pre_delete.id
-        assert not nodedb_post_delete
-
-    async def test_node_delete_node(
-        self,
-        db: InfrahubDatabase,
-        client: InfrahubClient,
-        init_db_base,
-        load_builtin_schema,
-        location_schema,
-    ):
-        obj = await Node.init(db=db, schema="CoreAccount")
-        await obj.new(db=db, name="delete-my-account", account_type="Git", password="delete-my-password")
-        await obj.save(db=db)
-        node_pre_delete = await client.get(kind="CoreAccount", name__value="delete-my-account")
-        assert node_pre_delete
-        assert node_pre_delete.id
-        await node_pre_delete.delete()
-        with pytest.raises(NodeNotFoundError):
-            await client.get(kind="CoreAccount", name__value="delete-my-account")
-
-    async def test_node_create_with_relationships(
-        self,
-        db: InfrahubDatabase,
-        client: InfrahubClient,
-        init_db_base,
-        load_builtin_schema,
-        tag_blue: Node,
-        tag_red: Node,
-        repo01: Node,
-        gqlquery01: Node,
-    ):
-        data = {
-            "name": {"value": "rfile01"},
-            "template_path": {"value": "mytemplate.j2"},
-            "query": gqlquery01.id,
-            "repository": {"id": repo01.id},
-            "tags": [tag_blue.id, tag_red.id],
-        }
-
-        node = await client.create(kind="CoreTransformJinja2", data=data)
-        await node.save()
-
-        assert node.id is not None
-
-        nodedb = await NodeManager.get_one(id=node.id, db=db, include_owner=True, include_source=True)
-        assert nodedb.name.value == node.name.value  # type: ignore[attr-defined]
-        querydb = await nodedb.query.get_peer(db=db)
-        assert node.query.id == querydb.id  # type: ignore[attr-defined]
-
-    async def test_node_update_payload_with_relationships(
-        self,
-        db: InfrahubDatabase,
-        client: InfrahubClient,
-        init_db_base,
-        load_builtin_schema,
-        tag_blue: Node,
-        tag_red: Node,
-        repo01: Node,
-        gqlquery01: Node,
-    ):
-        data = {
-            "name": "rfile10",
-            "template_path": "mytemplate.j2",
-            "query": gqlquery01.id,
-            "repository": repo01.id,
-            "tags": [tag_blue.id, tag_red.id],
-        }
-        schema = await client.schema.get(kind="CoreTransformJinja2", branch="main")
-        create_payload = client.schema.generate_payload_create(
-            schema=schema, data=data, source=repo01.id, is_protected=True
-        )
-        obj = await client.create(kind="CoreTransformJinja2", branch="main", **create_payload)
-        await obj.save()
-
-        assert obj.id is not None
-        nodedb = await client.get(kind="CoreTransformJinja2", id=str(obj.id))
-
-        input_data = nodedb._generate_input_data()["data"]["data"]
-        assert input_data["name"]["value"] == "rfile10"
-        # Validate that the source isn't a dictionary but a reference to the repo
-        assert input_data["name"]["source"] == repo01.id
-
-    async def test_node_create_with_properties(
-        self,
-        db: InfrahubDatabase,
-        client: InfrahubClient,
-        init_db_base,
-        load_builtin_schema,
-        tag_blue: Node,
-        tag_red: Node,
-        repo01: Node,
-        gqlquery01: Node,
-        first_account: Node,
-    ):
-        data = {
-            "name": {
-                "value": "rfile02",
-                "is_protected": True,
-                "source": first_account.id,
-                "owner": first_account.id,
-            },
-            "template_path": {"value": "mytemplate.j2"},
-            "query": {"id": gqlquery01.id},  # "source": first_account.id, "owner": first_account.id},
-            "repository": {"id": repo01.id},  # "source": first_account.id, "owner": first_account.id},
-            "tags": [tag_blue.id, tag_red.id],
-        }
-
-        node = await client.create(kind="CoreTransformJinja2", data=data)
-        await node.save()
-
-        assert node.id is not None
-
-        nodedb = await NodeManager.get_one(id=node.id, db=db, include_owner=True, include_source=True)
-        assert nodedb.name.value == node.name.value  # type: 
ignore[attr-defined] - assert nodedb.name.is_protected is True - - async def test_node_update( - self, - db: InfrahubDatabase, - client: InfrahubClient, - init_db_base, - load_builtin_schema, - tag_blue: Node, - tag_red: Node, - repo99: Node, - ): - node = await client.get(kind="CoreRepository", name__value="repo99") - assert node.id is not None - - node.name.value = "repo95" # type: ignore[attr-defined] - node.tags.add(tag_blue.id) # type: ignore[attr-defined] - node.tags.add(tag_red.id) # type: ignore[attr-defined] - await node.save() - - nodedb = await NodeManager.get_one(id=node.id, db=db, include_owner=True, include_source=True) - assert nodedb.name.value == "repo95" - tags = await nodedb.tags.get(db=db) - assert len(tags) == 2 - - async def test_node_update_2( - self, - db: InfrahubDatabase, - client: InfrahubClient, - init_db_base, - load_builtin_schema, - tag_green: Node, - tag_red: Node, - tag_blue: Node, - gqlquery02: Node, - repo99: Node, - ): - node = await client.get(kind="CoreGraphQLQuery", name__value="query02") - assert node.id is not None - - node.name.value = "query021" # type: ignore[attr-defined] - node.repository = repo99.id # type: ignore[attr-defined] - node.tags.add(tag_green.id) # type: ignore[attr-defined] - node.tags.remove(tag_red.id) # type: ignore[attr-defined] - await node.save() - - nodedb = await NodeManager.get_one(id=node.id, db=db, include_owner=True, include_source=True) - repodb = await nodedb.repository.get_peer(db=db) - assert repodb.id == repo99.id - - tags = await nodedb.tags.get(db=db) - assert sorted([tag.peer_id for tag in tags]) == sorted([tag_green.id, tag_blue.id]) - - async def test_node_update_3_idempotency( - self, - db: InfrahubDatabase, - client: InfrahubClient, - init_db_base, - load_builtin_schema, - tag_green: Node, - tag_red: Node, - tag_blue: Node, - gqlquery03: Node, - repo99: Node, - ): - node = await client.get(kind="CoreGraphQLQuery", name__value="query03") - assert node.id is not None - - updated_query = f"\n\n{node.query.value}" # type: ignore[attr-defined] - node.name.value = "query031" # type: ignore[attr-defined] - node.query.value = updated_query # type: ignore[attr-defined] - first_update = node._generate_input_data(exclude_unmodified=True) - await node.save() - nodedb = await NodeManager.get_one(id=node.id, db=db, include_owner=True, include_source=True) - - node = await client.get(kind="CoreGraphQLQuery", name__value="query031") - - node.name.value = "query031" # type: ignore[attr-defined] - node.query.value = updated_query # type: ignore[attr-defined] - - second_update = node._generate_input_data(exclude_unmodified=True) - - assert nodedb.query.value == updated_query # type: ignore[attr-defined] - assert "query" in first_update["data"]["data"] - assert "value" in first_update["data"]["data"]["query"] - assert first_update["variables"] - assert "query" not in second_update["data"]["data"] - assert not second_update["variables"] - - async def test_convert_node( - self, - db: InfrahubDatabase, - client: InfrahubClient, - location_schema, - init_db_base, - load_builtin_schema, - location_cdg: Node, - ): - data = await location_cdg.to_graphql(db=db) - node = InfrahubNode(client=client, schema=location_schema, data=data) - - # pylint: disable=no-member - assert node.name.value == "cdg01" # type: ignore[attr-defined] - - async def test_relationship_manager_errors_without_fetch(self, client: InfrahubClient, load_builtin_schema): - organization = await client.create("TestOrganization", name="organization-1") - await 
organization.save() - tag = await client.create("BuiltinTag", name="blurple") - await tag.save() - - with pytest.raises(UninitializedError, match=r"Must call fetch"): - organization.tags.add(tag) - - await organization.tags.fetch() - organization.tags.add(tag) - await organization.save() - - organization = await client.get("TestOrganization", name__value="organization-1") - assert [t.id for t in organization.tags.peers] == [tag.id] - - async def test_relationships_not_overwritten( - self, client: InfrahubClient, load_builtin_schema, schema_extension_01 - ): - await client.schema.load(schemas=[schema_extension_01]) - rack = await client.create("InfraRack", name="rack-1") - await rack.save() - tag = await client.create("BuiltinTag", name="blizzow") - # TODO: is it a bug that we need to save the object and fetch the tags before adding to a RelationshipManager now? - await tag.save() - await tag.racks.fetch() - tag.racks.add(rack) - await tag.save() - tag_2 = await client.create("BuiltinTag", name="blizzow2") - await tag_2.save() - - # the "rack" object has no link to the "tag" object here - # rack.tags.peers is empty - rack.name.value = "New Rack Name" - await rack.save() - - # assert that the above rack.save() did not overwrite the existing Rack-Tag relationship - refreshed_rack = await client.get("InfraRack", id=rack.id) - await refreshed_rack.tags.fetch() - assert [t.id for t in refreshed_rack.tags.peers] == [tag.id] - - # check that we can purposefully remove a tag - refreshed_rack.tags.remove(tag.id) - await refreshed_rack.save() - rack_without_tag = await client.get("InfraRack", id=rack.id) - await rack_without_tag.tags.fetch() - assert rack_without_tag.tags.peers == [] - - # check that we can purposefully add a tag - rack_without_tag.tags.add(tag_2) - await rack_without_tag.save() - refreshed_rack_with_tag = await client.get("InfraRack", id=rack.id) - await refreshed_rack_with_tag.tags.fetch() - assert [t.id for t in refreshed_rack_with_tag.tags.peers] == [tag_2.id] - - async def test_node_create_from_pool( - self, db: InfrahubDatabase, client: InfrahubClient, init_db_base, default_ipam_namespace, load_ipam_schema - ): - ip_prefix = await client.create(kind="IpamIPPrefix", prefix="192.0.2.0/24") - await ip_prefix.save() - - ip_pool = await client.create( - kind="CoreIPAddressPool", - name="Core loopbacks 1", - default_address_type="IpamIPAddress", - default_prefix_length=32, - ip_namespace=default_ipam_namespace, - resources=[ip_prefix], - ) - await ip_pool.save() - - devices = [] - for i in range(1, 5): - d = await client.create(kind="InfraDevice", name=f"core0{i}", primary_address=ip_pool) - await d.save() - devices.append(d) - - assert [str(device.primary_address.peer.address.value) for device in devices] == [ - "192.0.2.1/32", - "192.0.2.2/32", - "192.0.2.3/32", - "192.0.2.4/32", - ] - - async def test_node_update_from_pool( - self, db: InfrahubDatabase, client: InfrahubClient, init_db_base, default_ipam_namespace, load_ipam_schema - ): - starter_ip_address = await client.create(kind="IpamIPAddress", address="10.0.0.1/32") - await starter_ip_address.save() - - ip_prefix = await client.create(kind="IpamIPPrefix", prefix="192.168.0.0/24") - await ip_prefix.save() - - ip_pool = await client.create( - kind="CoreIPAddressPool", - name="Core loopbacks 2", - default_address_type="IpamIPAddress", - default_prefix_length=32, - ip_namespace=default_ipam_namespace, - resources=[ip_prefix], - ) - await ip_pool.save() - - device = await client.create(kind="InfraDevice", name="core05", 
primary_address=starter_ip_address) - await device.save() - - device.primary_address = ip_pool - await device.save() - - assert str(device.primary_address.peer.address.value) == "192.168.0.1/32" diff --git a/python_sdk/tests/integration/test_object_store.py b/python_sdk/tests/integration/test_object_store.py deleted file mode 100644 index 3da9d57f84..0000000000 --- a/python_sdk/tests/integration/test_object_store.py +++ /dev/null @@ -1,19 +0,0 @@ -from infrahub_sdk import InfrahubClient -from tests.helpers.test_app import TestInfrahubApp - -FILE_CONTENT_01 = """ - any content - another content - """ - - -class TestObjectStore(TestInfrahubApp): - async def test_upload_and_get(self, client: InfrahubClient): - response = await client.object_store.upload(content=FILE_CONTENT_01) - - assert sorted(list(response.keys())) == ["checksum", "identifier"] - assert response["checksum"] == "aa19b96860ec59a73906dd8660bb3bad" - assert response["identifier"] - - content = await client.object_store.get(identifier=response["identifier"]) - assert content == FILE_CONTENT_01 diff --git a/python_sdk/tests/integration/test_schema.py b/python_sdk/tests/integration/test_schema.py deleted file mode 100644 index 1f193127d7..0000000000 --- a/python_sdk/tests/integration/test_schema.py +++ /dev/null @@ -1,57 +0,0 @@ -import pytest -from infrahub.core.schema import core_models -from infrahub.server import app - -from infrahub_sdk import Config, InfrahubClient -from infrahub_sdk.schema import NodeSchema - -from .conftest import InfrahubTestClient - -# pylint: disable=unused-argument - - -class TestInfrahubSchema: - @pytest.fixture(scope="class") - async def client(self): - return InfrahubTestClient(app) - - async def test_schema_all(self, client, init_db_base): - config = Config(requester=client.async_request) - ifc = InfrahubClient(config=config) - schema_nodes = await ifc.schema.all() - - nodes = [node for node in core_models["nodes"] if node["namespace"] != "Internal"] - generics = [node for node in core_models["generics"] if node["namespace"] != "Internal"] - - profiles = [node for node in schema_nodes.values() if node.namespace == "Profile"] - assert profiles - - assert len(schema_nodes) == len(nodes) + len(generics) + len(profiles) - assert "BuiltinTag" in schema_nodes - assert isinstance(schema_nodes["BuiltinTag"], NodeSchema) - - async def test_schema_get(self, client, init_db_base): - config = Config(username="admin", password="infrahub", requester=client.async_request) - ifc = InfrahubClient(config=config) - schema_node = await ifc.schema.get(kind="BuiltinTag") - - assert isinstance(schema_node, NodeSchema) - assert ifc.default_branch in ifc.schema.cache - nodes = [node for node in core_models["nodes"] if node["namespace"] != "Internal"] - generics = [node for node in core_models["generics"] if node["namespace"] != "Internal"] - - schema_without_profiles = [ - node for node in ifc.schema.cache[ifc.default_branch].values() if node.namespace != "Profile" - ] - assert len(schema_without_profiles) == len(nodes) + len(generics) - - async def test_schema_load_many(self, client, init_db_base, schema_extension_01, schema_extension_02): - config = Config(username="admin", password="infrahub", requester=client.async_request) - ifc = InfrahubClient(config=config) - response = await ifc.schema.load(schemas=[schema_extension_01, schema_extension_02]) - - assert response.schema_updated - - schema_nodes = await ifc.schema.all(refresh=True) - assert "InfraRack" in schema_nodes.keys() - assert "ProcurementContract" in 
schema_nodes.keys() diff --git a/python_sdk/tests/unit/__init__.py b/python_sdk/tests/unit/__init__.py deleted file mode 100644 index 9c48bcf96d..0000000000 --- a/python_sdk/tests/unit/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -import builtins - -from rich import print as rprint - -builtins.rprint = rprint # type: ignore diff --git a/python_sdk/tests/unit/ctl/__init__.py b/python_sdk/tests/unit/ctl/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/python_sdk/tests/unit/ctl/conftest.py b/python_sdk/tests/unit/ctl/conftest.py deleted file mode 100644 index 17c0ecce68..0000000000 --- a/python_sdk/tests/unit/ctl/conftest.py +++ /dev/null @@ -1,106 +0,0 @@ -import pytest -from pytest_httpx import HTTPXMock - - -@pytest.fixture -async def mock_branches_list_query(httpx_mock: HTTPXMock) -> HTTPXMock: - response = { - "data": { - "Branch": [ - { - "id": "eca306cf-662e-4e03-8180-2b788b191d3c", - "name": "main", - "sync_with_git": True, - "is_default": True, - "origin_branch": "main", - "branched_from": "2023-02-17T09:30:17.811719Z", - "has_schema_changes": False, - }, - { - "id": "7d9f817a-b958-4e76-8528-8afd0c689ada", - "name": "cr1234", - "sync_with_git": False, - "is_default": False, - "origin_branch": "main", - "branched_from": "2023-02-17T09:30:17.811719Z", - "has_schema_changes": True, - }, - ] - } - } - - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-branch-all"}, - ) - return httpx_mock - - -@pytest.fixture -async def authentication_error_payload(): - response = { - "data": None, - "errors": [ - { - "message": "Authentication is required to perform this operation", - "extensions": {"code": 401}, - } - ], - } - - return response - - -@pytest.fixture -async def mock_branch_create_error(httpx_mock: HTTPXMock) -> HTTPXMock: - response = { - "data": {"BranchCreate": None}, - "errors": [ - { - "message": 'invalid field name: string does not match regex "^[a-z][a-z0-9\\-]+$"', - "locations": [{"line": 2, "column": 3}], - "path": ["BranchCreate"], - } - ], - } - - httpx_mock.add_response( - status_code=200, - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "mutation-branch-create"}, - ) - return httpx_mock - - -@pytest.fixture -async def mock_repositories_query(httpx_mock: HTTPXMock) -> HTTPXMock: - response1 = { - "data": { - "repository": [ - { - "id": "9486cfce-87db-479d-ad73-07d80ba96a0f", - "name": {"value": "infrahub-demo-edge"}, - "location": {"value": "git@github.com:dgarros/infrahub-demo-edge.git"}, - "commit": {"value": "aaaaaaaaaaaaaaaaaaaa"}, - } - ] - } - } - response2 = { - "data": { - "repository": [ - { - "id": "9486cfce-87db-479d-ad73-07d80ba96a0f", - "name": {"value": "infrahub-demo-edge"}, - "location": {"value": "git@github.com:dgarros/infrahub-demo-edge.git"}, - "commit": {"value": "bbbbbbbbbbbbbbbbbbbb"}, - } - ] - } - } - - httpx_mock.add_response(method="POST", url="http://mock/graphql/main", json=response1) - httpx_mock.add_response(method="POST", url="http://mock/graphql/cr1234", json=response2) - return httpx_mock diff --git a/python_sdk/tests/unit/ctl/test_branch_app.py b/python_sdk/tests/unit/ctl/test_branch_app.py deleted file mode 100644 index 1b21a1a38b..0000000000 --- a/python_sdk/tests/unit/ctl/test_branch_app.py +++ /dev/null @@ -1,33 +0,0 @@ -from pytest_httpx import HTTPXMock -from typer.testing import CliRunner - -from infrahub_sdk.ctl.branch import app - -runner = CliRunner() - -# pylint: disable=unused-argument - - -def 
test_branch_list(mock_branches_list_query): - result = runner.invoke(app=app, args=["list"]) - assert result.exit_code == 0 - assert "cr1234" in result.stdout - - -def test_branch_create_no_auth(httpx_mock: HTTPXMock, authentication_error_payload): - httpx_mock.add_response( - status_code=401, - method="POST", - json=authentication_error_payload, - match_headers={"X-Infrahub-Tracker": "mutation-branch-create"}, - ) - result = runner.invoke(app=app, args=["create", "branch2"]) - assert result.exit_code == 1 - assert "Authentication is required" in result.stdout - - -def test_branch_create_wrong_name(mock_branch_create_error): - result = runner.invoke(app=app, args=["create", "branch2"]) - - assert result.exit_code == 1 - assert "invalid field name: string does not match regex" in result.stdout.replace("\n", "") diff --git a/python_sdk/tests/unit/ctl/test_cli.py b/python_sdk/tests/unit/ctl/test_cli.py deleted file mode 100644 index 8132305b37..0000000000 --- a/python_sdk/tests/unit/ctl/test_cli.py +++ /dev/null @@ -1,23 +0,0 @@ -from typer.testing import CliRunner - -from infrahub_sdk.ctl.cli import app - -runner = CliRunner() - - -def test_main_app(): - result = runner.invoke(app, ["--help"]) - assert result.exit_code == 0 - assert "[OPTIONS] COMMAND [ARGS]" in result.stdout - - -def test_validate_all_commands_have_names(): - assert app.registered_commands - for command in app.registered_commands: - assert command.name - - -def test_validate_all_groups_have_names(): - assert app.registered_groups - for group in app.registered_groups: - assert group.name diff --git a/python_sdk/tests/unit/ctl/test_schema_app.py b/python_sdk/tests/unit/ctl/test_schema_app.py deleted file mode 100644 index f3db9cae20..0000000000 --- a/python_sdk/tests/unit/ctl/test_schema_app.py +++ /dev/null @@ -1,130 +0,0 @@ -import yaml -from pytest_httpx import HTTPXMock -from typer.testing import CliRunner - -from infrahub_sdk.ctl.schema import app -from infrahub_sdk.ctl.utils import get_fixtures_dir -from tests.helpers.cli import remove_ansi_color - -runner = CliRunner() - - -def test_schema_load_empty(httpx_mock: HTTPXMock): - fixture_file = get_fixtures_dir() / "models" / "empty.json" - result = runner.invoke(app=app, args=["load", str(fixture_file)]) - - assert result.exit_code == 1 - assert "Empty YAML/JSON file" in result.stdout - - -def test_schema_load_one_valid(httpx_mock: HTTPXMock): - fixture_file = get_fixtures_dir() / "models" / "valid_model_01.json" - - httpx_mock.add_response( - method="POST", - url="http://mock/api/schema/load?branch=main", - status_code=200, - json={ - "hash": "497c17fbe915062c8c5a698be62130e4", - "previous_hash": "d3f7f4e7161f0ae6538a01d5a42dc661", - "diff": { - "added": {"InfraDevice": {"added": {}, "changed": {}, "removed": {}}}, - "changed": {}, - "removed": {}, - }, - "schema_updated": True, - }, - ) - result = runner.invoke(app=app, args=["load", str(fixture_file)]) - - assert result.exit_code == 0 - assert f"schema '{fixture_file}' loaded successfully" in remove_ansi_color(result.stdout.replace("\n", "")) - - content = httpx_mock.get_requests()[0].content.decode("utf8") - content_json = yaml.safe_load(content) - fixture_file_content = yaml.safe_load( - fixture_file.read_text(encoding="utf-8"), - ) - assert content_json == {"schemas": [fixture_file_content]} - - -def test_schema_load_multiple(httpx_mock: HTTPXMock): - fixture_file1 = get_fixtures_dir() / "models" / "valid_schemas" / "contract.yml" - fixture_file2 = get_fixtures_dir() / "models" / "valid_schemas" / "rack.yml" - - 
httpx_mock.add_response( - method="POST", - url="http://mock/api/schema/load?branch=main", - status_code=200, - json={ - "hash": "497c17fbe915062c8c5a698be62130e4", - "previous_hash": "d3f7f4e7161f0ae6538a01d5a42dc661", - "diff": { - "added": {"InfraDevice": {"added": {}, "changed": {}, "removed": {}}}, - "changed": {}, - "removed": {}, - }, - "schema_updated": True, - }, - ) - result = runner.invoke(app=app, args=["load", str(fixture_file1), str(fixture_file2)]) - - assert result.exit_code == 0 - clean_output = remove_ansi_color(result.stdout.replace("\n", "")) - assert f"schema '{fixture_file1}' loaded successfully" in clean_output - assert f"schema '{fixture_file2}' loaded successfully" in clean_output - - content = httpx_mock.get_requests()[0].content.decode("utf8") - content_json = yaml.safe_load(content) - fixture_file1_content = yaml.safe_load(fixture_file1.read_text(encoding="utf-8")) - fixture_file2_content = yaml.safe_load(fixture_file2.read_text(encoding="utf-8")) - assert content_json == {"schemas": [fixture_file1_content, fixture_file2_content]} - - -def test_schema_load_notvalid_namespace(httpx_mock: HTTPXMock): - fixture_file = get_fixtures_dir() / "models" / "non_valid_namespace.json" - - httpx_mock.add_response( - method="POST", - url="http://mock/api/schema/load?branch=main", - status_code=422, - json={ - "detail": [ - { - "type": "string_pattern_mismatch", - "loc": ["body", "schemas", 0, "nodes", 0, "namespace"], - "msg": "String should match pattern '^[A-Z][a-z0-9]+$'", - "input": "OuT", - "ctx": {"pattern": "^[A-Z][a-z0-9]+$"}, - "url": "https://errors.pydantic.dev/2.7/v/string_pattern_mismatch", - }, - { - "type": "value_error", - "loc": ["body", "schemas", 0, "nodes", 0, "attributes", 0, "kind"], - "msg": "Value error, Only valid Attribute Kind are : ['ID', 'Dropdown'] ", - "input": "NotValid", - "ctx": {"error": {}}, - "url": "https://errors.pydantic.dev/2.7/v/value_error", - }, - ] - }, - ) - result = runner.invoke(app=app, args=["load", str(fixture_file)]) - - assert result.exit_code == 1 - - clean_output = remove_ansi_color(result.stdout.replace("\n", "")) - expected_result = ( - "Unable to load the schema: Node: OuTDevice | " - "namespace (OuT) | String should match pattern '^[A-Z]+$' (string_pattern_mismatch) " - " Node: OuTDevice | Attribute: name (NotValid) | Value error, Only valid Attribute Kind " - "are : ['ID', 'Dropdown'] (value_error)" - ) - assert expected_result == clean_output - - content = httpx_mock.get_requests()[0].content.decode("utf8") - content_json = yaml.safe_load(content) - fixture_file_content = yaml.safe_load( - fixture_file.read_text(encoding="utf-8"), - ) - assert content_json == {"schemas": [fixture_file_content]} diff --git a/python_sdk/tests/unit/ctl/test_validate_app.py b/python_sdk/tests/unit/ctl/test_validate_app.py deleted file mode 100644 index 36ce078b65..0000000000 --- a/python_sdk/tests/unit/ctl/test_validate_app.py +++ /dev/null @@ -1,40 +0,0 @@ -import pytest -from typer.testing import CliRunner - -from infrahub_sdk.ctl.utils import get_fixtures_dir -from infrahub_sdk.ctl.validate import app - -runner = CliRunner() - - -def test_validate_schema_valid(): - fixture_file = get_fixtures_dir() / "models" / "valid_model_01.json" - - result = runner.invoke(app=app, args=["schema", str(fixture_file)]) - assert result.exit_code == 0 - assert "Schema is valid" in result.stdout - - -def test_validate_schema_empty(): - fixture_file = get_fixtures_dir() / "models" / "empty.json" - - result = runner.invoke(app=app, args=["schema", 
str(fixture_file)]) - assert result.exit_code == 1 - assert "'version' | Field required (missing)" in result.stdout - - -def test_validate_schema_non_valid(): - fixture_file = get_fixtures_dir() / "models" / "non_valid_model_01.json" - - result = runner.invoke(app=app, args=["schema", str(fixture_file)]) - assert result.exit_code == 1 - assert "Schema not valid" in result.stdout - - -@pytest.mark.xfail(reason="FIXME: Currently not catching the proper exception") -def test_validate_schema_json_non_valid(): - fixture_file = get_fixtures_dir() / "models" / "non_valid_json_01.json" - - result = runner.invoke(app=app, args=["schema", str(fixture_file)]) - assert result.exit_code == 1 - assert "Invalid JSON file" in result.stdout diff --git a/python_sdk/tests/unit/pytest_plugin/__init__.py b/python_sdk/tests/unit/pytest_plugin/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/python_sdk/tests/unit/pytest_plugin/test_plugin.py b/python_sdk/tests/unit/pytest_plugin/test_plugin.py deleted file mode 100644 index 5d3368207f..0000000000 --- a/python_sdk/tests/unit/pytest_plugin/test_plugin.py +++ /dev/null @@ -1,233 +0,0 @@ -def test_help_message(pytester): - """Make sure that the plugin is loaded by capturing an option it adds in the help message.""" - result = pytester.runpytest("--help") - result.stdout.fnmatch_lines(["*Infrahub configuration file for the repository*"]) - - -def test_without_config(pytester): - """Make sure 0 tests run when test file is not found.""" - result = pytester.runpytest() - result.assert_outcomes() - - -def test_emptyconfig(pytester): - """Make sure that the plugin load the test file properly.""" - pytester.makefile( - ".yml", - test_empty=""" - --- - version: "1.0" - infrahub_tests: [] - """, - ) - - result = pytester.runpytest() - result.assert_outcomes() - - -def test_jinja2_transform_config_missing_directory(pytester): - """Make sure tests raise errors if directories are not found.""" - pytester.makefile( - ".yml", - test_jinja2_transform=""" - --- - version: "1.0" - infrahub_tests: - - resource: "Jinja2Transform" - resource_name: "bgp_config" - tests: - - name: "base" - expect: PASS - spec: - kind: "jinja2-transform-unit-render" - directory: bgp_config/base - """, - ) - pytester.makefile( - ".yml", - infrahub_config=""" - --- - schemas: - - schemas/demo_edge_fabric.yml - - jinja2_transforms: - - name: bgp_config - description: "Template for BGP config base" - query: "bgp_sessions" - template_path: "templates/bgp_config.j2" - - """, - ) - - result = pytester.runpytest("--infrahub-repo-config=infrahub_config.yml") - result.assert_outcomes(errors=1) - - -def test_jinja2_transform_config_missing_input(pytester): - """Make sure tests raise errors if no inputs are provided.""" - pytester.makefile( - ".yml", - test_jinja2_transform=""" - --- - version: "1.0" - infrahub_tests: - - resource: "Jinja2Transform" - resource_name: "bgp_config" - tests: - - name: "base" - expect: PASS - spec: - kind: "jinja2-transform-unit-render" - directory: bgp_config/base - """, - ) - pytester.makefile( - ".yml", - infrahub_config=""" - --- - schemas: - - schemas/demo_edge_fabric.yml - - jinja2_transforms: - - name: bgp_config - description: "Template for BGP config base" - query: "bgp_sessions" - template_path: "templates/bgp_config.j2" - - """, - ) - - pytester.mkdir("bgp_config") - pytester.mkdir("bgp_config/base") - - result = pytester.runpytest("--infrahub-repo-config=infrahub_config.yml") - result.assert_outcomes(errors=1) - - -def 
test_jinja2_transform_no_expected_output(pytester): - """Make sure tests succeed if no expect outputs are provided.""" - pytester.makefile( - ".yml", - test_jinja2_transform=""" - --- - version: "1.0" - infrahub_tests: - - resource: "Jinja2Transform" - resource_name: "bgp_config" - tests: - - name: "base" - expect: PASS - spec: - kind: "jinja2-transform-unit-render" - directory: bgp_config/base - """, - ) - pytester.makefile( - ".yml", - infrahub_config=""" - --- - schemas: - - schemas/demo_edge_fabric.yml - - jinja2_transforms: - - name: bgp_config - description: "Template for BGP config base" - query: "bgp_sessions" - template_path: "templates/bgp_config.j2" - - """, - ) - - pytester.mkdir("bgp_config") - test_dir = pytester.mkdir("bgp_config/base") - test_input = pytester.makefile(".json", input='{"data": {}}') - pytester.run("mv", test_input, test_dir) - - template_dir = pytester.mkdir("templates") - template = pytester.makefile( - ".j2", - bgp_config=""" - protocols { - bgp { - log-up-down; - bgp-error-tolerance; - } - } - """, - ) - pytester.run("mv", template, template_dir) - - result = pytester.runpytest("--infrahub-repo-config=infrahub_config.yml") - result.assert_outcomes(passed=1) - - -def test_jinja2_transform_unexpected_output(pytester): - """Make sure tests fail if the expected and computed outputs don't match.""" - pytester.makefile( - ".yml", - test_jinja2_transform=""" - --- - version: "1.0" - infrahub_tests: - - resource: "Jinja2Transform" - resource_name: "bgp_config" - tests: - - name: "base" - expect: PASS - spec: - kind: "jinja2-transform-unit-render" - directory: bgp_config/base - """, - ) - pytester.makefile( - ".yml", - infrahub_config=""" - --- - schemas: - - schemas/demo_edge_fabric.yml - - jinja2_transforms: - - name: bgp_config - description: "Template for BGP config base" - query: "bgp_sessions" - template_path: "templates/bgp_config.j2" - - """, - ) - - pytester.mkdir("bgp_config") - test_dir = pytester.mkdir("bgp_config/base") - test_input = pytester.makefile(".json", input='{"data": {}}') - test_output = pytester.makefile( - ".txt", - output=""" - protocols { - bgp { - group ipv4-ibgp { - peer-as 64545; - } - log-up-down; - bgp-error-tolerance; - } - } - """, - ) - pytester.run("mv", test_input, test_dir) - pytester.run("mv", test_output, test_dir) - - template_dir = pytester.mkdir("templates") - template = pytester.makefile( - ".j2", - bgp_config=""" - protocols { - bgp { - log-up-down; - bgp-error-tolerance; - } - } - """, - ) - pytester.run("mv", template, template_dir) - - result = pytester.runpytest("--infrahub-repo-config=infrahub_config.yml") - result.assert_outcomes(failed=1) diff --git a/python_sdk/tests/unit/sdk/__init__.py b/python_sdk/tests/unit/sdk/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/python_sdk/tests/unit/sdk/checks/__init__.py b/python_sdk/tests/unit/sdk/checks/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/python_sdk/tests/unit/sdk/checks/conftest.py b/python_sdk/tests/unit/sdk/checks/conftest.py deleted file mode 100644 index 8aada87d5b..0000000000 --- a/python_sdk/tests/unit/sdk/checks/conftest.py +++ /dev/null @@ -1,21 +0,0 @@ -import pytest -from pytest_httpx import HTTPXMock - -from infrahub_sdk import Config, InfrahubClient - - -@pytest.fixture -async def client() -> InfrahubClient: - return InfrahubClient(config=Config(address="http://mock", insert_tracker=True)) - - -@pytest.fixture -async def mock_gql_query_my_query(httpx_mock: HTTPXMock) -> HTTPXMock: - 
response = {"data": {"mock": []}} - - httpx_mock.add_response( - method="POST", - json=response, - url="http://localhost:8000/api/query/my_query?branch=main&update_group=false", - ) - return httpx_mock diff --git a/python_sdk/tests/unit/sdk/checks/test_checks.py b/python_sdk/tests/unit/sdk/checks/test_checks.py deleted file mode 100644 index 6dcfbea421..0000000000 --- a/python_sdk/tests/unit/sdk/checks/test_checks.py +++ /dev/null @@ -1,64 +0,0 @@ -import pytest - -from infrahub_sdk import InfrahubClient -from infrahub_sdk.checks import InfrahubCheck - - -async def test_class_init(): - class IFCheckNoQuery(InfrahubCheck): - pass - - class IFCheckWithName(InfrahubCheck): - name = "my_check" - query = "my_query" - - class IFCheckNoName(InfrahubCheck): - query = "my_query" - - with pytest.raises(ValueError) as exc: - check = IFCheckNoQuery() - - assert "A query must be provided" in str(exc.value) - - check = IFCheckWithName() - assert check.name == "my_check" - assert check.root_directory is not None - - check = IFCheckNoName() - assert check.name == "IFCheckNoName" - - check = IFCheckWithName(root_directory="/tmp") - assert check.name == "my_check" - assert check.root_directory == "/tmp" - - -async def test_async_init(client): - class IFCheck(InfrahubCheck): - query = "my_query" - - check = await IFCheck.init() - assert isinstance(check.client, InfrahubClient) - - -async def test_validate_sync_async(mock_gql_query_my_query): - class IFCheckAsync(InfrahubCheck): - query = "my_query" - - async def validate(self, data: dict): - self.log_error("Not valid") - - class IFCheckSync(InfrahubCheck): - query = "my_query" - - def validate(self, data: dict): - self.log_error("Not valid") - - check = await IFCheckAsync.init(branch="main") - await check.run() - - assert check.passed is False - - check = await IFCheckSync.init(branch="main") - await check.run() - - assert check.passed is False diff --git a/python_sdk/tests/unit/sdk/conftest.py b/python_sdk/tests/unit/sdk/conftest.py deleted file mode 100644 index a870cfdbab..0000000000 --- a/python_sdk/tests/unit/sdk/conftest.py +++ /dev/null @@ -1,2335 +0,0 @@ -import re -import sys -from dataclasses import dataclass -from inspect import Parameter -from io import StringIO -from typing import AsyncGenerator, Mapping, Optional - -import pytest -import ujson -from pytest_httpx import HTTPXMock - -from infrahub_sdk import Config, InfrahubClient, InfrahubClientSync -from infrahub_sdk.schema import BranchSupportType, NodeSchema -from infrahub_sdk.utils import get_fixtures_dir - -# pylint: disable=redefined-outer-name,unused-argument - - -@dataclass -class BothClients: - sync: InfrahubClientSync - standard: InfrahubClient - stdout: Optional[StringIO] = None - - -@pytest.fixture -async def client() -> InfrahubClient: - return InfrahubClient(config=Config(address="http://mock", insert_tracker=True, pagination_size=3)) - - -@pytest.fixture -async def clients() -> BothClients: - both = BothClients( - standard=InfrahubClient(config=Config(address="http://mock", insert_tracker=True, pagination_size=3)), - sync=InfrahubClientSync(config=Config(address="http://mock", insert_tracker=True, pagination_size=3)), - ) - return both - - -@pytest.fixture -async def echo_clients(clients: BothClients) -> AsyncGenerator[BothClients, None]: - clients.standard.config.echo_graphql_queries = True - clients.sync.config.echo_graphql_queries = True - clients.stdout = StringIO() - backup_stdout = sys.stdout - sys.stdout = clients.stdout - - yield clients - - sys.stdout = backup_stdout - - 
clients.standard.config.echo_graphql_queries = False - clients.sync.config.echo_graphql_queries = False - - -@pytest.fixture -def replace_async_return_annotation(): - """Allows for comparison between sync and async return annotations.""" - - def replace_annotation(annotation: str) -> str: - replacements = { - "InfrahubClient": "InfrahubClientSync", - "InfrahubNode": "InfrahubNodeSync", - "list[InfrahubNode]": "list[InfrahubNodeSync]", - "Optional[InfrahubNode]": "Optional[InfrahubNodeSync]", - } - return replacements.get(annotation) or annotation - - return replace_annotation - - -@pytest.fixture -def replace_async_parameter_annotations(replace_async_return_annotation): - """Allows for comparison between sync and async parameter annotations.""" - - def replace_annotations(parameters: Mapping[str, Parameter]) -> tuple[str, str]: - parameter_tuples = [] - for name, parameter in parameters.items(): - parameter_tuples.append((name, replace_async_return_annotation(parameter.annotation))) - - return parameter_tuples - - return replace_annotations - - -@pytest.fixture -def replace_sync_return_annotation() -> str: - """Allows for comparison between sync and async return annotations.""" - - def replace_annotation(annotation: str) -> str: - replacements = { - "InfrahubClientSync": "InfrahubClient", - "InfrahubNodeSync": "InfrahubNode", - "list[InfrahubNodeSync]": "list[InfrahubNode]", - "Optional[InfrahubNodeSync]": "Optional[InfrahubNode]", - } - return replacements.get(annotation) or annotation - - return replace_annotation - - -@pytest.fixture -def replace_sync_parameter_annotations(replace_sync_return_annotation): - """Allows for comparison between sync and async parameter annotations.""" - - def replace_annotations(parameters: Mapping[str, Parameter]) -> tuple[str, str]: - parameter_tuples = [] - for name, parameter in parameters.items(): - parameter_tuples.append((name, replace_sync_return_annotation(parameter.annotation))) - - return parameter_tuples - - return replace_annotations - - -@pytest.fixture -async def location_schema() -> NodeSchema: - data = { - "name": "Location", - "namespace": "Builtin", - "default_filter": "name__value", - "attributes": [ - {"name": "name", "kind": "String", "unique": True}, - {"name": "description", "kind": "String", "optional": True}, - {"name": "type", "kind": "String"}, - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "optional": True, - "cardinality": "many", - }, - { - "name": "primary_tag", - "peer": "BuiltinTag", - "optional": True, - "cardinality": "one", - }, - { - "name": "member_of_groups", - "peer": "CoreGroup", - "optional": True, - "cardinality": "many", - "kind": "Group", - }, - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def schema_with_hfid() -> dict[str, NodeSchema]: - data = { - "location": { - "name": "Location", - "namespace": "Builtin", - "default_filter": "name__value", - "human_friendly_id": ["name__value"], - "attributes": [ - {"name": "name", "kind": "String", "unique": True}, - {"name": "description", "kind": "String", "optional": True}, - {"name": "type", "kind": "String"}, - ], - "relationships": [ - { - "name": "tags", - "peer": "BuiltinTag", - "optional": True, - "cardinality": "many", - }, - { - "name": "primary_tag", - "peer": "BuiltinTag", - "optional": True, - "cardinality": "one", - }, - { - "name": "member_of_groups", - "peer": "CoreGroup", - "optional": True, - "cardinality": "many", - "kind": "Group", - }, - ], - }, - "rack": { - "name": "Rack", - "namespace": 
"Builtin", - "default_filter": "facility_id__value", - "human_friendly_id": ["facility_id__value", "location__name__value"], - "attributes": [ - {"name": "facility_id", "kind": "String", "unique": True}, - {"name": "description", "kind": "String", "optional": True}, - ], - "relationships": [ - {"name": "location", "peer": "BuiltinLocation", "cardinality": "one"}, - { - "name": "tags", - "peer": "BuiltinTag", - "optional": True, - "cardinality": "many", - }, - { - "name": "member_of_groups", - "peer": "CoreGroup", - "optional": True, - "cardinality": "many", - "kind": "Group", - }, - ], - }, - } - return {k: NodeSchema(**v) for k, v in data.items()} # type: ignore - - -@pytest.fixture -async def std_group_schema() -> NodeSchema: - data = { - "name": "StandardGroup", - "namespace": "Core", - "default_filter": "name__value", - "attributes": [ - {"name": "name", "kind": "String", "unique": True}, - {"name": "description", "kind": "String", "optional": True}, - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def location_data01_no_pagination(): - data = { - "__typename": "BuiltinLocation", - "id": "llllllll-llll-llll-llll-llllllllllll", - "display_label": "dfw1", - "name": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": None, - "value": "DFW", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - "type": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": None, - "value": "SITE", - }, - "primary_tag": { - "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", - "display_label": "red", - "__typename": "RelatedTag", - "_relation__is_protected": True, - "_relation__is_visible": True, - "_relation__owner": None, - "_relation__source": None, - }, - "tags": [ - { - "id": "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", - "display_label": "blue", - "__typename": "RelatedTag", - "_relation__is_protected": True, - "_relation__is_visible": True, - "_relation__owner": None, - "_relation__source": None, - } - ], - } - - return data - - -@pytest.fixture -async def location_data02_no_pagination(): - data = { - "__typename": "BuiltinLocation", - "id": "llllllll-llll-llll-llll-llllllllllll", - "display_label": "dfw1", - "name": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "dfw1", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - "type": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "SITE", - }, - "primary_tag": { - "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", - "display_label": "red", - "__typename": "RelatedTag", - "_relation__is_protected": True, - "_relation__is_visible": True, - "_relation__owner": None, - "_relation__source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - }, - "tags": [ - { - "id": "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", - "display_label": "blue", - "__typename": "RelatedTag", - "_relation__is_protected": True, - "_relation__is_visible": True, - "_relation__owner": None, - "_relation__source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - } - ], - } 
- - return data - - -@pytest.fixture -async def location_data01(): - data = { - "node": { - "__typename": "BuiltinLocation", - "id": "llllllll-llll-llll-llll-llllllllllll", - "display_label": "dfw1", - "name": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": None, - "value": "DFW", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - "type": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": None, - "value": "SITE", - }, - "primary_tag": { - "properties": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": None, - }, - "node": { - "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", - "display_label": "red", - "__typename": "BuiltinTag", - }, - }, - "tags": { - "count": 1, - "edges": [ - { - "properties": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": None, - }, - "node": { - "id": "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", - "display_label": "blue", - "__typename": "BuiltinTag", - }, - } - ], - }, - } - } - - return data - - -@pytest.fixture -async def location_data02(): - data = { - "node": { - "__typename": "BuiltinLocation", - "id": "llllllll-llll-llll-llll-llllllllllll", - "display_label": "dfw1", - "name": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "dfw1", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - "type": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "SITE", - }, - "primary_tag": { - "properties": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - }, - "node": { - "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", - "display_label": "red", - "__typename": "BuiltinTag", - }, - }, - "tags": { - "count": 1, - "edges": [ - { - "properties": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - }, - "node": { - "id": "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", - "display_label": "blue", - "__typename": "BuiltinTag", - }, - } - ], - }, - } - } - - return data - - -@pytest.fixture -async def tag_schema() -> NodeSchema: - data = { - "name": "Tag", - "namespace": "Builtin", - "default_filter": "name__value", - "attributes": [ - {"name": "name", "kind": "String", "unique": True}, - {"name": "description", "kind": "String", "optional": True}, - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def tag_blue_data_no_pagination(): - data = { - "__typename": "BuiltinTag", - "id": "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", - "display_label": "blue", - "name": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "blue", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - } - return data - - -@pytest.fixture -async def tag_red_data_no_pagination(): 
- data = { - "__typename": "BuiltinTag", - "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", - "display_label": "red", - "name": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "red", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - } - return data - - -@pytest.fixture -async def tag_green_data_no_pagination(): - data = { - "__typename": "BuiltinTag", - "id": "gggggggg-gggg-gggg-gggg-gggggggggggg", - "display_label": "green", - "name": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "green", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - } - return data - - -@pytest.fixture -async def tag_blue_data(): - data = { - "node": { - "__typename": "BuiltinTag", - "id": "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", - "display_label": "blue", - "name": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "blue", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - } - } - return data - - -@pytest.fixture -async def tag_red_data(): - data = { - "node": { - "__typename": "BuiltinTag", - "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", - "display_label": "red", - "name": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "red", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - } - } - return data - - -@pytest.fixture -async def tag_green_data(): - data = { - "node": { - "__typename": "BuiltinTag", - "id": "gggggggg-gggg-gggg-gggg-gggggggggggg", - "display_label": "green", - "name": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "green", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - } - } - return data - - -@pytest.fixture -async def rfile_schema() -> NodeSchema: - data = { - "name": "TransformJinja2", - "namespace": "Core", - "default_filter": "name__value", - "display_label": ["label__value"], - "branch": BranchSupportType.AWARE.value, - "attributes": [ - {"name": "name", "kind": "String", "unique": True}, - {"name": "description", "kind": "String", "optional": True}, - {"name": "template_path", "kind": "String"}, - ], - "relationships": [ - { - "name": "repository", - "peer": "CoreRepository", - "kind": "Attribute", - "identifier": "jinja2__repository", - "cardinality": "one", - "optional": False, - }, - { - "name": "query", - "peer": "CoreGraphQLQuery", - "kind": "Attribute", - "cardinality": "one", - "optional": False, - }, - { - "name": "tags", - "peer": "BuiltinTag", - "optional": True, - "cardinality": "many", - }, - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture 
-async def ipaddress_schema() -> NodeSchema: - data = { - "name": "IPAddress", - "namespace": "Infra", - "default_filter": "address__value", - "display_labels": ["address_value"], - "order_by": ["address_value"], - "attributes": [ - {"name": "address", "kind": "IPHost"}, - ], - "relationships": [ - { - "name": "interface", - "peer": "InfraInterfaceL3", - "optional": True, - "cardinality": "one", - "kind": "Parent", - } - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def ipnetwork_schema() -> NodeSchema: - data = { - "name": "IPNetwork", - "namespace": "Infra", - "default_filter": "network__value", - "display_labels": ["network_value"], - "order_by": ["network_value"], - "attributes": [ - {"name": "network", "kind": "IPNetwork"}, - ], - "relationships": [ - { - "name": "site", - "peer": "BuiltinLocation", - "optional": True, - "cardinality": "one", - "kind": "Parent", - } - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def ipam_ipprefix_schema() -> NodeSchema: - data = { - "name": "IPNetwork", - "namespace": "Ipam", - "default_filter": "prefix__value", - "display_labels": ["prefix_value"], - "order_by": ["prefix_value"], - "inherit_from": ["BuiltinIPAddress"], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def simple_device_schema() -> NodeSchema: - data = { - "name": "Device", - "namespace": "Infra", - "label": "Device", - "default_filter": "name__value", - "order_by": ["name__value"], - "display_labels": ["name__value"], - "attributes": [{"name": "name", "kind": "Text", "unique": True}], - "relationships": [ - { - "name": "primary_address", - "peer": "IpamIPAddress", - "label": "Primary IP Address", - "optional": True, - "cardinality": "one", - "kind": "Attribute", - } - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def ipam_ipprefix_data(): - data = { - "node": { - "__typename": "IpamIPPrefix", - "id": "llllllll-llll-llll-llll-llllllllllll", - "display_label": "192.0.2.0/24", - "prefix": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "192.0.2.0/24", - }, - "description": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": None, - }, - "member_type": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": "address", - }, - "is_pool": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - "value": False, - }, - "ip_namespace": { - "properties": { - "is_protected": True, - "is_visible": True, - "owner": None, - "source": { - "__typename": "Account", - "display_label": "CRM", - "id": "cccccccc-cccc-cccc-cccc-cccccccccccc", - }, - }, - "node": { - "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", - "display_label": "default", - "__typename": "IpamNamespace", - }, - }, - } - } - - return data - - -@pytest.fixture -async def ipaddress_pool_schema() -> NodeSchema: - data = { - "name": "IPAddressPool", - "namespace": "Core", - "description": "A pool of IP address resources", - "label": "IP Address Pool", - "default_filter": "name__value", - "order_by": ["name__value"], - "display_labels": ["name__value"], - 
"include_in_menu": False, - "branch": BranchSupportType.AGNOSTIC.value, - "inherit_from": ["CoreResourcePool"], - "attributes": [ - { - "name": "default_address_type", - "kind": "Text", - "optional": False, - "description": "The object type to create when reserving a resource in the pool", - }, - { - "name": "default_prefix_length", - "kind": "Number", - "optional": True, - }, - ], - "relationships": [ - { - "name": "resources", - "peer": "BuiltinIPPrefix", - "kind": "Attribute", - "identifier": "ipaddresspool__resource", - "cardinality": "many", - "optional": False, - "order_weight": 4000, - }, - { - "name": "ip_namespace", - "peer": "BuiltinIPNamespace", - "kind": "Attribute", - "identifier": "ipaddresspool__ipnamespace", - "cardinality": "one", - "optional": False, - "order_weight": 5000, - }, - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def ipprefix_pool_schema() -> NodeSchema: - data = { - "name": "IPPrefixPool", - "namespace": "Core", - "description": "A pool of IP prefix resources", - "label": "IP Prefix Pool", - "include_in_menu": False, - "branch": BranchSupportType.AGNOSTIC.value, - "inherit_from": ["CoreResourcePool"], - "attributes": [ - { - "name": "default_prefix_length", - "kind": "Number", - "description": "The default prefix length as an integer for prefixes allocated from this pool.", - "optional": True, - "order_weight": 5000, - }, - { - "name": "default_member_type", - "kind": "Text", - "enum": ["prefix", "address"], - "default_value": "prefix", - "optional": True, - "order_weight": 3000, - }, - { - "name": "default_prefix_type", - "kind": "Text", - "optional": True, - "order_weight": 4000, - }, - ], - "relationships": [ - { - "name": "resources", - "peer": "BuiltinIPPrefix", - "kind": "Attribute", - "identifier": "prefixpool__resource", - "cardinality": "many", - "branch": BranchSupportType.AGNOSTIC.value, - "optional": False, - "order_weight": 6000, - }, - { - "name": "ip_namespace", - "peer": "BuiltinIPNamespace", - "kind": "Attribute", - "identifier": "prefixpool__ipnamespace", - "cardinality": "one", - "branch": BranchSupportType.AGNOSTIC.value, - "optional": False, - "order_weight": 7000, - }, - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def address_schema() -> NodeSchema: - data = { - "name": "Address", - "namespace": "Infra", - "default_filter": "network__value", - "display_labels": ["network_value"], - "order_by": ["network_value"], - "attributes": [ - {"name": "street_number", "kind": "String", "optional": True}, - {"name": "street_name", "kind": "String", "optional": True}, - {"name": "postal_code", "kind": "String", "optional": True}, - {"name": "computed_address", "kind": "String", "optional": True, "read_only": True}, - ], - "relationships": [], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def address_data(): - data = { - "node": { - "__typename": "Address", - "id": "d5994b18-b25e-4261-9e63-17c2844a0b45", - "display_label": "test_address", - "street_number": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": "1234", - }, - "street_name": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": "Fake Street", - }, - "postal_code": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": "123ABC", - }, - "computed_address": { - "is_protected": False, - "is_visible": True, - "owner": None, - "source": None, - "value": "1234 Fake Street 
123ABC", - }, - } - } - return data - - -@pytest.fixture -async def device_schema() -> NodeSchema: - data = { - "name": "Device", - "namespace": "Infra", - "label": "Device", - "default_filter": "name__value", - "inherit_from": ["CoreArtifactTarget"], - "order_by": ["name__value"], - "display_labels": ["name__value"], - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "description", "kind": "Text", "optional": True}, - {"name": "type", "kind": "Text"}, - ], - "relationships": [ - {"name": "site", "peer": "BuiltinLocation", "optional": False, "cardinality": "one", "kind": "Attribute"}, - {"name": "status", "peer": "BuiltinStatus", "optional": False, "cardinality": "one", "kind": "Attribute"}, - {"name": "role", "peer": "BuiltinRole", "optional": False, "cardinality": "one", "kind": "Attribute"}, - { - "name": "interfaces", - "peer": "InfraInterface", - "identifier": "device__interface", - "optional": True, - "cardinality": "many", - "kind": "Component", - }, - { - "name": "asn", - "peer": "InfraAutonomousSystem", - "optional": True, - "cardinality": "one", - "kind": "Attribute", - }, - {"name": "tags", "peer": "BuiltinTag", "optional": True, "cardinality": "many", "kind": "Attribute"}, - { - "name": "primary_address", - "peer": "InfraIPAddress", - "label": "Primary IP Address", - "optional": True, - "cardinality": "one", - "kind": "Attribute", - }, - {"name": "platform", "peer": "InfraPlatform", "optional": True, "cardinality": "one", "kind": "Attribute"}, - {"name": "artifacts", "peer": "CoreArtifact", "optional": True, "cardinality": "many", "kind": "Generic"}, - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def device_data(): - data = { - "node": { - "id": "1799f647-203c-cd41-3409-c51d55097213", - "display_label": "atl1-edge1", - "__typename": "InfraDevice", - "name": { - "value": "atl1-edge1", - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799f644-d5eb-8e37-3403-c512518ae06a", - "display_label": "Pop-Builder", - "__typename": "CoreAccount", - }, - "owner": None, - }, - "description": {"value": None, "is_visible": True, "is_protected": False, "source": None, "owner": None}, - "type": { - "value": "7280R3", - "is_visible": True, - "is_protected": False, - "source": { - "id": "1799f644-d5eb-8e37-3403-c512518ae06a", - "display_label": "Pop-Builder", - "__typename": "CoreAccount", - }, - "owner": None, - }, - "site": { - "node": { - "id": "1799f646-fa2c-29d0-3406-c5101365ec3a", - "display_label": "atl1", - "__typename": "BuiltinLocation", - }, - "properties": { - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799f644-d5eb-8e37-3403-c512518ae06a", - "display_label": "Pop-Builder", - "__typename": "CoreAccount", - }, - "owner": None, - }, - }, - "status": { - "node": { - "id": "1799f646-c1b3-2ed5-3406-c5102132e63b", - "display_label": "Active", - "__typename": "BuiltinStatus", - }, - "properties": { - "is_visible": True, - "is_protected": None, - "source": None, - "owner": { - "id": "1799f645-a5c5-e0ac-3403-c512c9cff168", - "display_label": "Operation Team", - "__typename": "CoreAccount", - }, - }, - }, - "role": { - "node": { - "id": "1799f646-c1af-2bd0-3407-c51069f6bdae", - "display_label": "Edge", - "__typename": "BuiltinRole", - }, - "properties": { - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799f644-d5eb-8e37-3403-c512518ae06a", - "display_label": "Pop-Builder", - "__typename": "CoreAccount", - }, - "owner": { - "id": "1799f645-b916-a9e8-3407-c51370cacbd0", - 
"display_label": "Engineering Team", - "__typename": "CoreAccount", - }, - }, - }, - "asn": { - "node": { - "id": "1799f646-6d88-e77f-340d-c51ca48eb24e", - "display_label": "AS64496 64496", - "__typename": "InfraAutonomousSystem", - }, - "properties": { - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799f644-d5eb-8e37-3403-c512518ae06a", - "display_label": "Pop-Builder", - "__typename": "CoreAccount", - }, - "owner": { - "id": "1799f645-b916-a9e8-3407-c51370cacbd0", - "display_label": "Engineering Team", - "__typename": "CoreAccount", - }, - }, - }, - "tags": { - "count": 2, - "edges": [ - { - "node": { - "id": "1799f646-c1b4-c4eb-340f-c51512957ddc", - "display_label": "green", - "__typename": "BuiltinTag", - }, - "properties": {"is_visible": True, "is_protected": None, "source": None, "owner": None}, - }, - { - "node": { - "id": "1799f646-c1b5-123b-3408-c51ed097b328", - "display_label": "red", - "__typename": "BuiltinTag", - }, - "properties": {"is_visible": True, "is_protected": None, "source": None, "owner": None}, - }, - ], - }, - "primary_address": { - "node": { - "id": "1799f647-7d80-0a4b-340f-c511da489224", - "display_label": "172.20.20.20/24", - "__typename": "InfraIPAddress", - }, - "properties": {"is_visible": True, "is_protected": None, "source": None, "owner": None}, - }, - "platform": { - "node": { - "id": "1799f645-e041-134d-3406-c515c08b15fc", - "display_label": "Arista EOS", - "__typename": "InfraPlatform", - }, - "properties": { - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799f644-d5eb-8e37-3403-c512518ae06a", - "display_label": "Pop-Builder", - "__typename": "CoreAccount", - }, - "owner": None, - }, - }, - } - } - return data - - -@pytest.fixture -async def artifact_definition_schema() -> NodeSchema: - data = { - "name": "ArtifactDefinition", - "namespace": "Core", - "label": "Artifact Definition", - "default_filter": "name__value", - "inherit_from": [], - "display_labels": ["name__value"], - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "artifact_name", "kind": "Text"}, - ], - } - return NodeSchema(**data) # type: ignore - - -@pytest.fixture -async def artifact_definition_data(): - data = { - "node": { - "id": "1799fd6e-cc5d-219f-3371-c514ed70bf23", - "display_label": "Startup Config for Edge devices", - "__typename": "CoreArtifactDefinition", - "name": { - "value": "Startup Config for Edge devices", - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799fd6b-f0a9-9d23-304d-c51b05d142c5", - "display_label": "infrahub-demo-edge", - "__typename": "CoreRepository", - }, - "owner": None, - }, - "artifact_name": { - "value": "startup-config", - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799fd6b-f0a9-9d23-304d-c51b05d142c5", - "display_label": "infrahub-demo-edge", - "__typename": "CoreRepository", - }, - "owner": None, - }, - } - } - return data - - -@pytest.fixture -async def mock_branches_list_query(httpx_mock: HTTPXMock) -> HTTPXMock: - response = { - "data": { - "Branch": [ - { - "id": "eca306cf-662e-4e03-8180-2b788b191d3c", - "name": "main", - "sync_with_git": True, - "is_default": True, - "origin_branch": "main", - "branched_from": "2023-02-17T09:30:17.811719Z", - "has_schema_changes": False, - }, - { - "id": "7d9f817a-b958-4e76-8528-8afd0c689ada", - "name": "cr1234", - "sync_with_git": False, - "is_default": False, - "origin_branch": "main", - "branched_from": "2023-02-17T09:30:17.811719Z", - "has_schema_changes": True, - }, - ] - } - } - - 
httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-branch-all"}, - ) - return httpx_mock - - -@pytest.fixture -async def mock_repositories_query_no_pagination(httpx_mock: HTTPXMock) -> HTTPXMock: - response1 = { - "data": { - "repository": [ - { - "__typename": "CoreRepository", - "id": "9486cfce-87db-479d-ad73-07d80ba96a0f", - "name": {"value": "infrahub-demo-edge"}, - "location": {"value": "git@github.com:dgarros/infrahub-demo-edge.git"}, - "commit": {"value": "aaaaaaaaaaaaaaaaaaaa"}, - } - ] - } - } - response2 = { - "data": { - "repository": [ - { - "__typename": "CoreRepository", - "id": "9486cfce-87db-479d-ad73-07d80ba96a0f", - "name": {"value": "infrahub-demo-edge"}, - "location": {"value": "git@github.com:dgarros/infrahub-demo-edge.git"}, - "commit": {"value": "bbbbbbbbbbbbbbbbbbbb"}, - } - ] - } - } - - httpx_mock.add_response(method="POST", url="http://mock/graphql/main", json=response1) - httpx_mock.add_response(method="POST", url="http://mock/graphql/cr1234", json=response2) - return httpx_mock - - -@pytest.fixture -async def mock_query_repository_all_01_no_pagination( - httpx_mock: HTTPXMock, client: InfrahubClient, mock_schema_query_01 -) -> HTTPXMock: - response = { - "data": { - "repository": [ - { - "__typename": "CoreRepository", - "id": "9486cfce-87db-479d-ad73-07d80ba96a0f", - "name": {"value": "infrahub-demo-edge"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-edge.git"}, - "commit": {"value": "aaaaaaaaaaaaaaaaaaaa"}, - }, - { - "__typename": "CoreRepository", - "id": "bfae43e8-5ebb-456c-a946-bf64e930710a", - "name": {"value": "infrahub-demo-core"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-core.git"}, - "commit": {"value": "bbbbbbbbbbbbbbbbbbbb"}, - }, - ] - } - } - - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-repository-all"}, - ) - return httpx_mock - - -@pytest.fixture -async def mock_repositories_query(httpx_mock: HTTPXMock) -> HTTPXMock: - response1 = { - "data": { - "CoreGenericRepository": { - "count": 2, - "edges": [ - { - "node": { - "__typename": "CoreRepository", - "id": "9486cfce-87db-479d-ad73-07d80ba96a0f", - "name": {"value": "infrahub-demo-edge"}, - "location": {"value": "git@github.com:dgarros/infrahub-demo-edge.git"}, - "commit": {"value": "aaaaaaaaaaaaaaaaaaaa"}, - "internal_status": {"value": "active"}, - } - }, - { - "node": { - "__typename": "CoreReadOnlyRepository", - "id": "aeff0feb-6a49-406e-b395-de7b7856026d", - "name": {"value": "infrahub-demo-edge-read-only"}, - "location": {"value": "git@github.com:dgarros/infrahub-demo-edge-read-only.git"}, - "commit": {"value": "cccccccccccccccccccc"}, - "internal_status": {"value": "active"}, - } - }, - ], - } - } - } - response2 = { - "data": { - "CoreGenericRepository": { - "count": 1, - "edges": [ - { - "node": { - "__typename": "CoreRepository", - "id": "9486cfce-87db-479d-ad73-07d80ba96a0f", - "name": {"value": "infrahub-demo-edge"}, - "location": {"value": "git@github.com:dgarros/infrahub-demo-edge.git"}, - "commit": {"value": "bbbbbbbbbbbbbbbbbbbb"}, - "internal_status": {"value": "active"}, - } - }, - { - "node": { - "__typename": "CoreReadOnlyRepository", - "id": "aeff0feb-6a49-406e-b395-de7b7856026d", - "name": {"value": "infrahub-demo-edge-read-only"}, - "location": {"value": "git@github.com:dgarros/infrahub-demo-edge-read-only.git"}, - "commit": {"value": "dddddddddddddddddddd"}, - "internal_status": {"value": "active"}, - } - }, - ], - } - 
} - } - - httpx_mock.add_response(method="POST", url="http://mock/graphql/main", json=response1) - httpx_mock.add_response(method="POST", url="http://mock/graphql/cr1234", json=response2) - return httpx_mock - - -@pytest.fixture -async def mock_query_repository_page1_1( - httpx_mock: HTTPXMock, client: InfrahubClient, mock_schema_query_01 -) -> HTTPXMock: - response = { - "data": { - "CoreRepository": { - "count": 2, - "edges": [ - { - "node": { - "__typename": "CoreRepository", - "id": "9486cfce-87db-479d-ad73-07d80ba96a0f", - "name": {"value": "infrahub-demo-edge"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-edge.git"}, - "commit": {"value": "aaaaaaaaaaaaaaaaaaaa"}, - }, - }, - { - "node": { - "__typename": "CoreRepository", - "id": "bfae43e8-5ebb-456c-a946-bf64e930710a", - "name": {"value": "infrahub-demo-core"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-core.git"}, - "commit": {"value": "bbbbbbbbbbbbbbbbbbbb"}, - } - }, - ], - } - } - } - - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-corerepository-page1"}, - ) - return httpx_mock - - -@pytest.fixture -async def mock_query_corenode_page1_1(httpx_mock: HTTPXMock, client: InfrahubClient, mock_schema_query_02) -> HTTPXMock: - response = { - "data": { - "CoreNode": { - "count": 2, - "edges": [ - { - "node": { - "__typename": "BuiltinTag", - "id": "179068dd-210a-8278-7532-18f23abfdd04", - "display_label": "RED", - } - }, - { - "node": { - "__typename": "BuiltinLocation", - "id": "179068dd-21e7-f5e0-7531-18f2477f33dc", - "display_label": "MyLocation", - } - }, - ], - } - } - } - - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-corenode-page1"}, - ) - return httpx_mock - - -@pytest.fixture -async def mock_query_repository_page1_empty( - httpx_mock: HTTPXMock, client: InfrahubClient, mock_schema_query_01 -) -> HTTPXMock: - response: dict = {"data": {"CoreRepository": {"edges": []}}} - - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-corerepository-page1"}, - ) - return httpx_mock - - -@pytest.fixture -async def mock_query_repository_page1_2( - httpx_mock: HTTPXMock, client: InfrahubClient, mock_schema_query_01 -) -> HTTPXMock: - response = { - "data": { - "CoreRepository": { - "count": 5, - "edges": [ - { - "node": { - "__typename": "CoreRepository", - "id": "9486cfce-87db-479d-ad73-07d80ba96a0f", - "name": {"value": "infrahub-demo-edge"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-edge.git"}, - "commit": {"value": "aaaaaaaaaaaaaaaaaaaa"}, - }, - }, - { - "node": { - "__typename": "CoreRepository", - "id": "bfae43e8-5ebb-456c-a946-bf64e930710a", - "name": {"value": "infrahub-demo-core"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-core.git"}, - "commit": {"value": "bbbbbbbbbbbbbbbbbbbb"}, - } - }, - { - "node": { - "__typename": "CoreRepository", - "id": "cccccccc-5ebb-456c-a946-bf64e930710a", - "name": {"value": "infrahub-demo-core"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-core.git"}, - "commit": {"value": "ccccccccccccccccccccccccccccccc"}, - } - }, - ], - } - } - } - - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-corerepository-page1"}, - ) - return httpx_mock - - -@pytest.fixture -async def mock_query_repository_page2_2( - httpx_mock: HTTPXMock, client: InfrahubClient, mock_schema_query_01 -) -> HTTPXMock: - 
response = { - "data": { - "CoreRepository": { - "count": 5, - "edges": [ - { - "node": { - "__typename": "CoreRepository", - "id": "dddddddd-87db-479d-ad73-07d80ba96a0f", - "name": {"value": "infrahub-demo-edge"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-edge.git"}, - "commit": {"value": "dddddddddddddddddddd"}, - }, - }, - { - "node": { - "__typename": "CoreRepository", - "id": "eeeeeeee-5ebb-456c-a946-bf64e930710a", - "name": {"value": "infrahub-demo-core"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-core.git"}, - "commit": {"value": "eeeeeeeeeeeeeeeeeeeeee"}, - } - }, - ], - } - } - } - - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-corerepository-page2"}, - ) - return httpx_mock - - -@pytest.fixture -async def mock_schema_query_01(httpx_mock: HTTPXMock) -> HTTPXMock: - response_text = (get_fixtures_dir() / "schema_01.json").read_text(encoding="UTF-8") - - httpx_mock.add_response( - method="GET", - url="http://mock/api/schema?branch=main", - json=ujson.loads(response_text), - ) - return httpx_mock - - -@pytest.fixture -async def mock_schema_query_02(httpx_mock: HTTPXMock) -> HTTPXMock: - response_text = (get_fixtures_dir() / "schema_02.json").read_text(encoding="UTF-8") - httpx_mock.add_response( - method="GET", url=re.compile(r"^http://mock/api/schema\?branch=(main|cr1234)"), json=ujson.loads(response_text) - ) - return httpx_mock - - -@pytest.fixture -async def mock_rest_api_artifact_definition_generate(httpx_mock: HTTPXMock) -> HTTPXMock: - httpx_mock.add_response(method="POST", url=re.compile(r"^http://mock/api/artifact/generate/.*")) - return httpx_mock - - -@pytest.fixture -async def mock_rest_api_artifact_fetch(httpx_mock: HTTPXMock) -> HTTPXMock: - schema_response = (get_fixtures_dir() / "schema_03.json").read_text(encoding="UTF-8") - - httpx_mock.add_response( - method="GET", - url="http://mock/api/schema?branch=main", - json=ujson.loads(schema_response), - ) - - graphql_response = { - "data": { - "CoreArtifact": { - "edges": [ - { - "id": "1799fd71-488b-84e8-3378-c5181c5ee9af", - "display_label": "Startup Config for Edge devices", - "__typename": "CoreArtifact", - "name": { - "value": "Startup Config for Edge devices", - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "status": { - "value": "Ready", - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "content_type": { - "value": "text/plain", - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "checksum": { - "value": "58d949c1a1c0fcd06e79bc032be8373a", - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "storage_id": { - "value": "1799fd71-950c-5a85-3041-c515082800ff", - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "parameters": { - "value": None, - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "object": { - "node": { - "id": "1799f647-203c-cd41-3409-c51d55097213", - "display_label": "atl1-edge1", - "__typename": "InfraDevice", - }, - "properties": {"is_visible": True, "is_protected": None, "source": None, "owner": None}, - }, - "definition": { - "node": { - "id": "1799fd6e-cc5d-219f-3371-c514ed70bf23", - "display_label": "Startup Config for Edge devices", - "__typename": "CoreArtifactDefinition", - }, - "properties": {"is_visible": True, "is_protected": None, "source": None, "owner": None}, - }, - 
}, - ] - } - } - } - - httpx_mock.add_response(method="POST", url="http://mock/graphql/main", json=graphql_response) - - artifact_content = """!device startup config -ip name-server 1.1.1.1 -""" - - httpx_mock.add_response(method="GET", url=re.compile(r"^http://mock/api/storage/object/.*"), text=artifact_content) - return httpx_mock - - -@pytest.fixture -async def mock_rest_api_artifact_generate(httpx_mock: HTTPXMock) -> HTTPXMock: - schema_response = (get_fixtures_dir() / "schema_04.json").read_text(encoding="UTF-8") - - httpx_mock.add_response( - method="GET", - url="http://mock/api/schema?branch=main", - json=ujson.loads(schema_response), - ) - - artifact_graphql_response = { - "data": { - "CoreArtifact": { - "edges": [ - { - "id": "1799fd71-488b-84e8-3378-c5181c5ee9af", - "display_label": "Startup Config for Edge devices", - "__typename": "CoreArtifact", - "name": { - "value": "Startup Config for Edge devices", - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "status": { - "value": "Ready", - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "content_type": { - "value": "text/plain", - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "checksum": { - "value": "58d949c1a1c0fcd06e79bc032be8373a", - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "storage_id": { - "value": "1799fd71-950c-5a85-3041-c515082800ff", - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "parameters": { - "value": None, - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "object": { - "node": { - "id": "1799f647-203c-cd41-3409-c51d55097213", - "display_label": "atl1-edge1", - "__typename": "InfraDevice", - }, - "properties": {"is_visible": True, "is_protected": None, "source": None, "owner": None}, - }, - "definition": { - "node": { - "id": "1799fd6e-cc5d-219f-3371-c514ed70bf23", - "display_label": "Startup Config for Edge devices", - "__typename": "CoreArtifactDefinition", - }, - "properties": {"is_visible": True, "is_protected": None, "source": None, "owner": None}, - }, - }, - ] - }, - } - } - httpx_mock.add_response(method="POST", url="http://mock/graphql/main", json=artifact_graphql_response) - - artifact_definition_graphql_response = { - "data": { - "CoreArtifactDefinition": { - "count": 1, - "edges": [ - { - "node": { - "id": "1799fd6e-cc5d-219f-3371-c514ed70bf23", - "display_label": "Startup Config for Edge devices", - "__typename": "CoreArtifactDefinition", - "name": { - "value": "Startup Config for Edge devices", - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799fd6b-f0a9-9d23-304d-c51b05d142c5", - "display_label": "infrahub-demo-edge", - "__typename": "CoreRepository", - }, - "owner": None, - }, - "artifact_name": { - "value": "startup-config", - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799fd6b-f0a9-9d23-304d-c51b05d142c5", - "display_label": "infrahub-demo-edge", - "__typename": "CoreRepository", - }, - "owner": None, - }, - "description": { - "value": None, - "is_visible": True, - "is_protected": False, - "source": None, - "owner": None, - }, - "parameters": { - "value": {"device": "name__value"}, - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799fd6b-f0a9-9d23-304d-c51b05d142c5", - "display_label": "infrahub-demo-edge", - "__typename": "CoreRepository", - }, - "owner": None, - }, - "content_type": { - 
"value": "text/plain", - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799fd6b-f0a9-9d23-304d-c51b05d142c5", - "display_label": "infrahub-demo-edge", - "__typename": "CoreRepository", - }, - "owner": None, - }, - "targets": { - "node": { - "id": "1799f645-e03b-0bae-3400-c51c3f21895c", - "display_label": "edge_router", - "__typename": "CoreStandardGroup", - }, - "properties": { - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799fd6b-f0a9-9d23-304d-c51b05d142c5", - "display_label": "infrahub-demo-edge", - "__typename": "CoreRepository", - }, - "owner": None, - }, - }, - "transformation": { - "node": { - "id": "1799fd6e-791b-c12c-337d-c51ec00bba63", - "display_label": "device_startup", - "__typename": "CoreRFile", - }, - "properties": { - "is_visible": True, - "is_protected": True, - "source": { - "id": "1799fd6b-f0a9-9d23-304d-c51b05d142c5", - "display_label": "infrahub-demo-edge", - "__typename": "CoreRepository", - }, - "owner": None, - }, - }, - } - } - ], - } - } - } - httpx_mock.add_response(method="POST", url="http://mock/graphql/main", json=artifact_definition_graphql_response) - httpx_mock.add_response(method="POST", url=re.compile(r"^http://mock/api/artifact/generate/.*")) - - -@pytest.fixture -async def mock_query_mutation_schema_dropdown_add(httpx_mock: HTTPXMock) -> None: - response = {"data": {"SchemaDropdownAdd": {"ok": True}}} - httpx_mock.add_response(method="POST", url="http://mock/graphql", json=response) - - -@pytest.fixture -async def mock_query_mutation_schema_dropdown_remove(httpx_mock: HTTPXMock) -> None: - response = {"data": {"SchemaDropdownRemove": {"ok": True}}} - httpx_mock.add_response(method="POST", url="http://mock/graphql", json=response) - - -@pytest.fixture -async def mock_query_mutation_schema_enum_add(httpx_mock: HTTPXMock) -> None: - response = {"data": {"SchemaEnumAdd": {"ok": True}}} - httpx_mock.add_response(method="POST", url="http://mock/graphql", json=response) - - -@pytest.fixture -async def mock_query_mutation_schema_enum_remove(httpx_mock: HTTPXMock) -> None: - response = {"data": {"SchemaEnumRemove": {"ok": True}}} - httpx_mock.add_response(method="POST", url="http://mock/graphql", json=response) - - -@pytest.fixture -async def mock_query_mutation_location_create_failed(httpx_mock: HTTPXMock) -> HTTPXMock: - response1 = { - "data": {"BuiltinLocationCreate": {"ok": True, "object": {"id": "17aec828-9814-ce00-3f20-1a053670f1c8"}}} - } - response2 = { - "data": {"BuiltinLocationCreate": None}, - "errors": [ - { - "message": "An object already exist with this value: name: JFK1 at name", - "locations": [{"line": 2, "column": 5}], - "path": ["BuiltinLocationCreate"], - } - ], - } - url_regex = re.compile(r"http://mock/graphql/main") - httpx_mock.add_response(method="POST", url=url_regex, json=response1) - httpx_mock.add_response(method="POST", url=url_regex, json=response2) - return httpx_mock - - -@pytest.fixture -def query_01() -> str: - """Simple query with one document""" - query = """ - query { - TestPerson { - edges { - node { - name { - value - } - cars { - edges { - node { - name { - value - } - } - } - } - } - } - } - } - """ - return query - - -@pytest.fixture -def query_02() -> str: - query = """ - query { - TestPerson { - edges { - node { - name { - value - } - - cars { - edges { - node { - name { - value - } - ... on TestElectricCar { - nbr_engine { - value - } - member_of_groups { - edges { - node { - id - } - } - } - } - ... 
on TestGazCar { - mpg { - value - is_protected - } - } - } - } - } - } - } - } - } - """ - return query - - -@pytest.fixture -def query_03() -> str: - """Advanced Query with 2 documents""" - query = """ - query FirstQuery { - TestPerson { - edges { - node { - name { - value - } - cars { - edges { - node { - name { - value - } - } - } - } - } - } - } - } - mutation FirstMutation { - TestPersonCreate( - data: { - name: { value: "person1"} - } - ){ - ok - object { - id - } - } - } - """ - return query - - -@pytest.fixture -def query_04() -> str: - """Simple query with variables""" - query = """ - query ($person: String!){ - TestPerson(name__value: $person) { - edges { - node { - name { - value - } - } - } - } - } - """ - return query - - -@pytest.fixture -def query_05() -> str: - query = """ - query MyQuery { - CoreRepository { - edges { - node { - name { - value - } - tags { - edges { - node { - id - } - } - } - } - } - } - } - mutation MyMutation($myvar: String) { - CoreRepositoryCreate (data: { - name: { value: $myvar}, - location: { value: "location1"}, - }) { - ok - } - } - """ - - return query - - -@pytest.fixture -def query_06() -> str: - """Simple query with variables""" - query = """ - query ( - $str1: String, - $str2: String = "default2", - $str3: String! - $int1: Int, - $int2: Int = 12, - $int3: Int! - $bool1: Boolean, - $bool2: Boolean = true, - $bool3: Boolean! - ){ - TestPerson(name__value: $person) { - edges { - node { - name { - value - } - } - } - } - } - """ - return query - - -@pytest.fixture -def bad_query_01() -> str: - query = """ - query { - TestPerson { - edges { - node { - name { - value - } - cars { - edges { - node { - name { - value - } - } - } - } - } - } - """ - return query - - -@pytest.fixture -def query_introspection() -> str: - query = """ - query IntrospectionQuery { - __schema { - queryType { - name - } - mutationType { - name - } - subscriptionType { - name - } - types { - ...FullType - } - directives { - name - description - locations - args { - ...InputValue - } - } - } - } - - fragment FullType on __Type { - kind - name - description - fields(includeDeprecated: true) { - name - description - args { - ...InputValue - } - type { - ...TypeRef - } - isDeprecated - deprecationReason - } - inputFields { - ...InputValue - } - interfaces { - ...TypeRef - } - enumValues(includeDeprecated: true) { - name - description - isDeprecated - deprecationReason - } - possibleTypes { - ...TypeRef - } - } - - fragment InputValue on __InputValue { - name - description - type { - ...TypeRef - } - defaultValue - } - - fragment TypeRef on __Type { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - } - } - } - } - } - } - } - } - """ - return query - - -@pytest.fixture -async def mock_schema_query_ipam(httpx_mock: HTTPXMock) -> HTTPXMock: - response_text = (get_fixtures_dir() / "schema_ipam.json").read_text(encoding="UTF-8") - - httpx_mock.add_response(method="GET", url="http://mock/api/schema?branch=main", json=ujson.loads(response_text)) - return httpx_mock diff --git a/python_sdk/tests/unit/sdk/test_artifact.py b/python_sdk/tests/unit/sdk/test_artifact.py deleted file mode 100644 index 56d16f8e2f..0000000000 --- a/python_sdk/tests/unit/sdk/test_artifact.py +++ /dev/null @@ -1,90 +0,0 @@ -import pytest - -from infrahub_sdk.exceptions import FeatureNotSupportedError -from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync - 
-client_types = ["standard", "sync"] - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_artifact_generate_raise_featurenotsupported(client, client_type, location_schema, location_data01): - # node does not inherit from CoreArtifactTarget - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=location_data01) - with pytest.raises(FeatureNotSupportedError): - await node.artifact_generate("artifact_definition") - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=location_data01) - with pytest.raises(FeatureNotSupportedError): - node.artifact_generate("artifact_definition") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_artifact_fetch_raise_featurenotsupported(client, client_type, location_schema, location_data01): - # node does not inherit from CoreArtifactTarget - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=location_data01) - with pytest.raises(FeatureNotSupportedError): - await node.artifact_fetch("artifact_definition") - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=location_data01) - with pytest.raises(FeatureNotSupportedError): - node.artifact_fetch("artifact_definition") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_generate_raise_featurenotsupported(client, client_type, location_schema, location_data01): - # node not of kind CoreArtifactDefinition - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=location_data01) - with pytest.raises(FeatureNotSupportedError): - await node.generate("artifact_definition") - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=location_data01) - with pytest.raises(FeatureNotSupportedError): - node.generate("artifact_definition") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_artifact_definition_generate( - clients, - client_type, - mock_rest_api_artifact_definition_generate, - artifact_definition_schema, - artifact_definition_data, -): - if client_type == "standard": - node = InfrahubNode(client=clients.standard, schema=artifact_definition_schema, data=artifact_definition_data) - await node.generate() - else: - node = InfrahubNodeSync(client=clients.sync, schema=artifact_definition_schema, data=artifact_definition_data) - node.generate() - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_artifact_fetch(clients, client_type, mock_rest_api_artifact_fetch, device_schema, device_data): - if client_type == "standard": - node = InfrahubNode(client=clients.standard, schema=device_schema, data=device_data) - artifact_content = await node.artifact_fetch("startup-config") - else: - node = InfrahubNodeSync(client=clients.sync, schema=device_schema, data=device_data) - artifact_content = node.artifact_fetch("startup-config") - - assert ( - artifact_content - == """!device startup config -ip name-server 1.1.1.1 -""" - ) - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_artifact_generate( - clients, client_type, mock_rest_api_artifact_generate, device_schema, device_data -): - if client_type == "standard": - node = InfrahubNode(client=clients.standard, schema=device_schema, data=device_data) - await node.artifact_generate("startup-config") - else: - node = InfrahubNodeSync(client=clients.sync, schema=device_schema, data=device_data) - 
node.artifact_generate("startup-config") diff --git a/python_sdk/tests/unit/sdk/test_batch.py b/python_sdk/tests/unit/sdk/test_batch.py deleted file mode 100644 index a6cba3a5af..0000000000 --- a/python_sdk/tests/unit/sdk/test_batch.py +++ /dev/null @@ -1,50 +0,0 @@ -import pytest -from pytest_httpx import HTTPXMock - -from infrahub_sdk.exceptions import GraphQLError - - -async def test_batch_return_exception( - httpx_mock: HTTPXMock, mock_query_mutation_location_create_failed, mock_schema_query_01, clients -): # pylint: disable=unused-argument - batch = await clients.standard.create_batch(return_exceptions=True) - locations = ["JFK1", "JFK1"] - results = [] - for location_name in locations: - data = {"name": {"value": location_name, "is_protected": True}} - obj = await clients.standard.create(kind="BuiltinLocation", data=data) - batch.add(task=obj.save, node=obj) - results.append(obj) - - result_iter = batch.execute() - # Assert first node success - node, result = await result_iter.__anext__() - assert node == results[0] - assert not isinstance(result, Exception) - - # Assert second node failure - node, result = await result_iter.__anext__() - assert node == results[1] - assert isinstance(result, GraphQLError) - assert "An error occurred while executing the GraphQL Query" in str(result) - - -async def test_batch_exception( - httpx_mock: HTTPXMock, mock_query_mutation_location_create_failed, mock_schema_query_01, clients -): # pylint: disable=unused-argument - batch = await clients.standard.create_batch(return_exceptions=False) - locations = ["JFK1", "JFK1"] - for location_name in locations: - data = {"name": {"value": location_name, "is_protected": True}} - obj = await clients.standard.create(kind="BuiltinLocation", data=data) - batch.add(task=obj.save, node=obj) - - with pytest.raises(GraphQLError) as exc: - async for node, result in batch.execute(): - pass - assert "An error occurred while executing the GraphQL Query" in str(exc.value) - - -async def test_batch_not_implemented_sync(clients): - with pytest.raises(NotImplementedError): - clients.sync.create_batch() diff --git a/python_sdk/tests/unit/sdk/test_branch.py b/python_sdk/tests/unit/sdk/test_branch.py deleted file mode 100644 index df08a5f99e..0000000000 --- a/python_sdk/tests/unit/sdk/test_branch.py +++ /dev/null @@ -1,41 +0,0 @@ -import inspect - -import pytest - -from infrahub_sdk.branch import ( - BranchData, - InfrahubBranchManager, - InfrahubBranchManagerSync, -) - -async_branch_methods = [method for method in dir(InfrahubBranchManager) if not method.startswith("_")] -sync_branch_methods = [method for method in dir(InfrahubBranchManagerSync) if not method.startswith("_")] - -client_types = ["standard", "sync"] - - -def test_method_sanity(): - """Validate that there is at least one public method and that both clients look the same.""" - assert async_branch_methods - assert async_branch_methods == sync_branch_methods - - -@pytest.mark.parametrize("method", async_branch_methods) -def test_validate_method_signature(method): - async_method = getattr(InfrahubBranchManager, method) - sync_method = getattr(InfrahubBranchManagerSync, method) - async_sig = inspect.signature(async_method) - sync_sig = inspect.signature(sync_method) - assert async_sig.parameters == sync_sig.parameters - assert async_sig.return_annotation == sync_sig.return_annotation - - -@pytest.mark.parametrize("client_type", client_types) -async def test_get_branches(clients, mock_branches_list_query, client_type): # pylint: disable=unused-argument - if client_type 
== "standard": - branches = await clients.standard.branch.all() - else: - branches = clients.sync.branch.all() - - assert len(branches) == 2 - assert isinstance(branches["main"], BranchData) diff --git a/python_sdk/tests/unit/sdk/test_client.py b/python_sdk/tests/unit/sdk/test_client.py deleted file mode 100644 index 50dc0b34c0..0000000000 --- a/python_sdk/tests/unit/sdk/test_client.py +++ /dev/null @@ -1,672 +0,0 @@ -import inspect - -import pytest -from pytest_httpx import HTTPXMock - -from infrahub_sdk import InfrahubClient, InfrahubClientSync -from infrahub_sdk.exceptions import FilterNotFoundError, NodeNotFoundError -from infrahub_sdk.node import InfrahubNode, InfrahubNodeSync - -async_client_methods = [method for method in dir(InfrahubClient) if not method.startswith("_")] -sync_client_methods = [method for method in dir(InfrahubClientSync) if not method.startswith("_")] - -client_types = ["standard", "sync"] - - -async def test_method_sanity(): - """Validate that there is at least one public method and that both clients look the same.""" - assert async_client_methods - assert async_client_methods == sync_client_methods - - -@pytest.mark.parametrize("method", async_client_methods) -async def test_validate_method_signature( - method, - replace_async_return_annotation, - replace_sync_return_annotation, - replace_async_parameter_annotations, - replace_sync_parameter_annotations, -): - async_method = getattr(InfrahubClient, method) - sync_method = getattr(InfrahubClientSync, method) - async_sig = inspect.signature(async_method) - sync_sig = inspect.signature(sync_method) - - assert replace_async_parameter_annotations(async_sig.parameters) == replace_async_parameter_annotations( - sync_sig.parameters - ) - assert replace_sync_parameter_annotations(async_sig.parameters) == replace_sync_parameter_annotations( - sync_sig.parameters - ) - assert async_sig.return_annotation == replace_sync_return_annotation(sync_sig.return_annotation) - assert replace_async_return_annotation(async_sig.return_annotation) == sync_sig.return_annotation - - -def test_init_with_invalid_address(): - with pytest.raises(ValueError) as exc: - InfrahubClient(address="missing-schema") - - assert "The configured address is not a valid url" in str(exc.value) - - -async def test_get_repositories( - client: InfrahubClient, mock_branches_list_query, mock_schema_query_02, mock_repositories_query -): # pylint: disable=unused-argument - repos = await client.get_list_repositories() - - assert len(repos) == 2 - assert repos["infrahub-demo-edge"].repository.get_kind() == "CoreRepository" - assert repos["infrahub-demo-edge"].repository.id == "9486cfce-87db-479d-ad73-07d80ba96a0f" - assert repos["infrahub-demo-edge"].branches == {"cr1234": "bbbbbbbbbbbbbbbbbbbb", "main": "aaaaaaaaaaaaaaaaaaaa"} - assert repos["infrahub-demo-edge-read-only"].repository.get_kind() == "CoreReadOnlyRepository" - assert repos["infrahub-demo-edge-read-only"].repository.id == "aeff0feb-6a49-406e-b395-de7b7856026d" - assert repos["infrahub-demo-edge-read-only"].branches == { - "cr1234": "dddddddddddddddddddd", - "main": "cccccccccccccccccccc", - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_all_with_limit(clients, mock_query_repository_page1_2, client_type): # pylint: disable=unused-argument - if client_type == "standard": - repos = await clients.standard.all(kind="CoreRepository", limit=3) - assert not clients.standard.store._store["CoreRepository"] - - repos = await clients.standard.all(kind="CoreRepository", 
populate_store=True, limit=3) - assert len(clients.standard.store._store["CoreRepository"]) == 3 - else: - repos = clients.sync.all(kind="CoreRepository", limit=3) - assert not clients.sync.store._store["CoreRepository"] - - repos = clients.sync.all(kind="CoreRepository", populate_store=True, limit=3) - assert len(clients.sync.store._store["CoreRepository"]) == 3 - - assert len(repos) == 3 - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_all_multiple_pages( - clients, mock_query_repository_page1_2, mock_query_repository_page2_2, client_type -): # pylint: disable=unused-argument - if client_type == "standard": - repos = await clients.standard.all(kind="CoreRepository") - assert not clients.standard.store._store["CoreRepository"] - - repos = await clients.standard.all(kind="CoreRepository", populate_store=True) - assert len(clients.standard.store._store["CoreRepository"]) == 5 - else: - repos = clients.sync.all(kind="CoreRepository") - assert not clients.sync.store._store["CoreRepository"] - - repos = clients.sync.all(kind="CoreRepository", populate_store=True) - assert len(clients.sync.store._store["CoreRepository"]) == 5 - - assert len(repos) == 5 - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_all_single_page(clients, mock_query_repository_page1_1, client_type): # pylint: disable=unused-argument - if client_type == "standard": - repos = await clients.standard.all(kind="CoreRepository") - assert not clients.standard.store._store["CoreRepository"] - - repos = await clients.standard.all(kind="CoreRepository", populate_store=True) - assert len(clients.standard.store._store["CoreRepository"]) == 2 - else: - repos = clients.sync.all(kind="CoreRepository") - assert not clients.sync.store._store["CoreRepository"] - - repos = clients.sync.all(kind="CoreRepository", populate_store=True) - assert len(clients.sync.store._store["CoreRepository"]) == 2 - - assert len(repos) == 2 - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_all_generic(clients, mock_query_corenode_page1_1, client_type): # pylint: disable=unused-argument - if client_type == "standard": - nodes = await clients.standard.all(kind="CoreNode") - else: - nodes = clients.sync.all(kind="CoreNode") - - assert len(nodes) == 2 - assert nodes[0].typename == "BuiltinTag" - assert nodes[1].typename == "BuiltinLocation" - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_get_by_id(httpx_mock: HTTPXMock, clients, mock_schema_query_01, client_type): # pylint: disable=unused-argument - response = { - "data": { - "CoreRepository": { - "edges": [ - { - "node": { - "__typename": "CoreRepository", - "id": "bfae43e8-5ebb-456c-a946-bf64e930710a", - "name": {"value": "infrahub-demo-core"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-core.git"}, - "commit": {"value": "bbbbbbbbbbbbbbbbbbbb"}, - } - } - ] - } - } - } - - response_id = "bfae43e8-5ebb-456c-a946-bf64e930710a" - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-corerepository-page1"}, - ) - - if client_type == "standard": - repo = await clients.standard.get(kind="CoreRepository", id=response_id) - assert isinstance(repo, InfrahubNode) - with pytest.raises(NodeNotFoundError): - assert clients.standard.store.get(key=response_id) - - repo = await clients.standard.get(kind="CoreRepository", id=response_id, populate_store=True) - assert clients.standard.store.get(key=response_id) - else: - repo = 
clients.sync.get(kind="CoreRepository", id=response_id) - assert isinstance(repo, InfrahubNodeSync) - with pytest.raises(NodeNotFoundError): - assert clients.sync.store.get(key=response_id) - - repo = clients.sync.get(kind="CoreRepository", id=response_id, populate_store=True) - assert clients.sync.store.get(key=response_id) - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_get_by_hfid(httpx_mock: HTTPXMock, clients, mock_schema_query_01, client_type): # pylint: disable=unused-argument - response = { - "data": { - "CoreRepository": { - "edges": [ - { - "node": { - "__typename": "CoreRepository", - "id": "bfae43e8-5ebb-456c-a946-bf64e930710a", - "hfid": ["infrahub-demo-core"], - "name": {"value": "infrahub-demo-core"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-core.git"}, - "commit": {"value": "bbbbbbbbbbbbbbbbbbbb"}, - } - } - ] - } - } - } - - response_id = "bfae43e8-5ebb-456c-a946-bf64e930710a" - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-corerepository-page1"}, - ) - - if client_type == "standard": - repo = await clients.standard.get(kind="CoreRepository", hfid=["infrahub-demo-core"]) - assert isinstance(repo, InfrahubNode) - with pytest.raises(NodeNotFoundError): - assert clients.standard.store.get(key=response_id) - - repo = await clients.standard.get(kind="CoreRepository", hfid=["infrahub-demo-core"], populate_store=True) - assert clients.standard.store.get(key=response_id) - else: - repo = clients.sync.get(kind="CoreRepository", hfid=["infrahub-demo-core"]) - assert isinstance(repo, InfrahubNodeSync) - with pytest.raises(NodeNotFoundError): - assert clients.sync.store.get(key="infrahub-demo-core") - - repo = clients.sync.get(kind="CoreRepository", hfid=["infrahub-demo-core"], populate_store=True) - assert clients.sync.store.get(key=response_id) - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_get_by_default_filter(httpx_mock: HTTPXMock, clients, mock_schema_query_01, client_type): # pylint: disable=unused-argument - response = { - "data": { - "CoreRepository": { - "edges": [ - { - "node": { - "__typename": "CoreRepository", - "id": "bfae43e8-5ebb-456c-a946-bf64e930710a", - "name": {"value": "infrahub-demo-core"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-core.git"}, - "commit": {"value": "bbbbbbbbbbbbbbbbbbbb"}, - } - } - ] - } - } - } - - response_id = "bfae43e8-5ebb-456c-a946-bf64e930710a" - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-corerepository-page1"}, - ) - - if client_type == "standard": - repo = await clients.standard.get(kind="CoreRepository", id="infrahub-demo-core") - assert isinstance(repo, InfrahubNode) - with pytest.raises(NodeNotFoundError): - assert clients.standard.store.get(key=response_id) - - repo = await clients.standard.get(kind="CoreRepository", id="infrahub-demo-core", populate_store=True) - assert clients.standard.store.get(key=response_id) - else: - repo = clients.sync.get(kind="CoreRepository", id="infrahub-demo-core") - assert isinstance(repo, InfrahubNodeSync) - with pytest.raises(NodeNotFoundError): - assert clients.sync.store.get(key="infrahub-demo-core") - - repo = clients.sync.get(kind="CoreRepository", id="infrahub-demo-core", populate_store=True) - assert clients.sync.store.get(key=response_id) - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_get_by_name(httpx_mock: HTTPXMock, clients, 
mock_schema_query_01, client_type): # pylint: disable=unused-argument - response = { - "data": { - "CoreRepository": { - "edges": [ - { - "node": { - "__typename": "CoreRepository", - "id": "bfae43e8-5ebb-456c-a946-bf64e930710a", - "name": {"value": "infrahub-demo-core"}, - "location": {"value": "git@github.com:opsmill/infrahub-demo-core.git"}, - "commit": {"value": "bbbbbbbbbbbbbbbbbbbb"}, - } - } - ] - } - } - } - - httpx_mock.add_response( - method="POST", - json=response, - match_headers={"X-Infrahub-Tracker": "query-corerepository-page1"}, - ) - - if client_type == "standard": - repo = await clients.standard.get(kind="CoreRepository", name__value="infrahub-demo-core") - assert isinstance(repo, InfrahubNode) - else: - repo = clients.sync.get(kind="CoreRepository", name__value="infrahub-demo-core") - assert isinstance(repo, InfrahubNodeSync) - assert repo.id == "bfae43e8-5ebb-456c-a946-bf64e930710a" - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_get_not_found(httpx_mock: HTTPXMock, clients, mock_query_repository_page1_empty, client_type): # pylint: disable=unused-argument - with pytest.raises(NodeNotFoundError): - if client_type == "standard": - await clients.standard.get(kind="CoreRepository", name__value="infrahub-demo-core") - else: - clients.sync.get(kind="CoreRepository", name__value="infrahub-demo-core") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_get_found_many( - httpx_mock: HTTPXMock, - clients, - mock_schema_query_01, - mock_query_repository_page1_1, - client_type, -): # pylint: disable=unused-argument - with pytest.raises(IndexError): - if client_type == "standard": - await clients.standard.get(kind="CoreRepository", id="bfae43e8-5ebb-456c-a946-bf64e930710a") - else: - clients.sync.get(kind="CoreRepository", id="bfae43e8-5ebb-456c-a946-bf64e930710a") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_get_invalid_filter(httpx_mock: HTTPXMock, clients, mock_schema_query_01, client_type): # pylint: disable=unused-argument - with pytest.raises(FilterNotFoundError) as excinfo: - if client_type == "standard": - await clients.standard.get(kind="CoreRepository", name__name="infrahub-demo-core") - else: - clients.sync.get(kind="CoreRepository", name__name="infrahub-demo-core") - assert isinstance(excinfo.value.message, str) - assert "'name__name' is not a valid filter for 'CoreRepository'" in excinfo.value.message - assert "default_branch__value" in excinfo.value.message - assert "default_branch__value" in excinfo.value.filters - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_filters_many(httpx_mock: HTTPXMock, clients, mock_query_repository_page1_1, client_type): # pylint: disable=unused-argument - if client_type == "standard": - repos = await clients.standard.filters( - kind="CoreRepository", - ids=[ - "bfae43e8-5ebb-456c-a946-bf64e930710a", - "9486cfce-87db-479d-ad73-07d80ba96a0f", - ], - ) - assert len(repos) == 2 - assert not clients.standard.store._store["CoreRepository"] - - repos = await clients.standard.filters( - kind="CoreRepository", - ids=[ - "bfae43e8-5ebb-456c-a946-bf64e930710a", - "9486cfce-87db-479d-ad73-07d80ba96a0f", - ], - populate_store=True, - ) - assert len(clients.standard.store._store["CoreRepository"]) == 2 - assert len(repos) == 2 - else: - repos = clients.sync.filters( - kind="CoreRepository", - ids=[ - "bfae43e8-5ebb-456c-a946-bf64e930710a", - "9486cfce-87db-479d-ad73-07d80ba96a0f", - ], - ) - assert len(repos) == 2 - 
assert not clients.sync.store._store["CoreRepository"] - - repos = clients.sync.filters( - kind="CoreRepository", - ids=[ - "bfae43e8-5ebb-456c-a946-bf64e930710a", - "9486cfce-87db-479d-ad73-07d80ba96a0f", - ], - populate_store=True, - ) - assert len(clients.sync.store._store["CoreRepository"]) == 2 - assert len(repos) == 2 - - -@pytest.mark.parametrize("client_type", client_types) -async def test_method_filters_empty(httpx_mock: HTTPXMock, clients, mock_query_repository_page1_empty, client_type): # pylint: disable=unused-argument - if client_type == "standard": - repos = await clients.standard.filters( - kind="CoreRepository", - ids=[ - "bfae43e8-5ebb-456c-a946-bf64e930710a", - "9486cfce-87db-479d-ad73-07d80ba96a0f", - ], - ) - else: - repos = clients.sync.filters( - kind="CoreRepository", - ids=[ - "bfae43e8-5ebb-456c-a946-bf64e930710a", - "9486cfce-87db-479d-ad73-07d80ba96a0f", - ], - ) - assert len(repos) == 0 - - -@pytest.mark.parametrize("client_type", client_types) -async def test_allocate_next_ip_address( - httpx_mock: HTTPXMock, - mock_schema_query_ipam: HTTPXMock, - clients, - ipaddress_pool_schema, - ipam_ipprefix_schema, - ipam_ipprefix_data, - client_type, -): - httpx_mock.add_response( - method="POST", - json={ - "data": { - "IPAddressPoolGetResource": { - "ok": True, - "node": { - "id": "17da1246-54f1-a9c0-2784-179f0ec5b128", - "kind": "IpamIPAddress", - "identifier": "test", - "display_label": "192.0.2.0/32", - }, - } - } - }, - match_headers={"X-Infrahub-Tracker": "allocate-ip-loopback"}, - ) - httpx_mock.add_response( - method="POST", - json={ - "data": { - "IpamIPAddress": { - "count": 1, - "edges": [ - { - "node": { - "id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb", - "__typename": "IpamIPAddress", - "address": {"value": "192.0.2.0/32"}, - "description": {"value": "test"}, - } - } - ], - } - } - }, - match_headers={"X-Infrahub-Tracker": "query-ipamipaddress-page1"}, - ) - - if client_type == "standard": - ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data) - ip_pool = InfrahubNode( - client=clients.standard, - schema=ipaddress_pool_schema, - data={ - "id": "pppppppp-pppp-pppp-pppp-pppppppppppp", - "name": "Core loopbacks", - "default_address_type": "IpamIPAddress", - "default_prefix_length": 32, - "ip_namespace": "ip_namespace", - "resources": [ip_prefix], - }, - ) - ip_address = await clients.standard.allocate_next_ip_address( - resource_pool=ip_pool, - identifier="test", - prefix_length=32, - address_type="IpamIPAddress", - data={"description": "test"}, - tracker="allocate-ip-loopback", - ) - else: - ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data) - ip_pool = InfrahubNodeSync( - client=clients.sync, - schema=ipaddress_pool_schema, - data={ - "id": "pppppppp-pppp-pppp-pppp-pppppppppppp", - "name": "Core loopbacks", - "default_address_type": "IpamIPAddress", - "default_prefix_length": 32, - "ip_namespace": "ip_namespace", - "resources": [ip_prefix], - }, - ) - ip_address = clients.sync.allocate_next_ip_address( - resource_pool=ip_pool, - identifier="test", - prefix_length=32, - address_type="IpamIPAddress", - data={"description": "test"}, - tracker="allocate-ip-loopback", - ) - - assert ip_address - assert str(ip_address.address.value) == "192.0.2.0/32" - assert ip_address.description.value == "test" - - -@pytest.mark.parametrize("client_type", client_types) -async def test_allocate_next_ip_prefix( - httpx_mock: HTTPXMock, - mock_schema_query_ipam: HTTPXMock, - 
clients, - ipprefix_pool_schema, - ipam_ipprefix_schema, - ipam_ipprefix_data, - client_type, -): - httpx_mock.add_response( - method="POST", - json={ - "data": { - "IPPrefixPoolGetResource": { - "ok": True, - "node": { - "id": "7d9bd8d-8fc2-70b0-278a-179f425e25cb", - "kind": "IpamIPPrefix", - "identifier": "test", - "display_label": "192.0.2.0/31", - }, - } - } - }, - match_headers={"X-Infrahub-Tracker": "allocate-ip-interco"}, - ) - httpx_mock.add_response( - method="POST", - json={ - "data": { - "IpamIPPrefix": { - "count": 1, - "edges": [ - { - "node": { - "id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb", - "__typename": "IpamIPPrefix", - "prefix": {"value": "192.0.2.0/31"}, - "description": {"value": "test"}, - } - } - ], - } - } - }, - match_headers={"X-Infrahub-Tracker": "query-ipamipprefix-page1"}, - ) - - if client_type == "standard": - ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data) - ip_pool = InfrahubNode( - client=clients.standard, - schema=ipprefix_pool_schema, - data={ - "id": "pppppppp-pppp-pppp-pppp-pppppppppppp", - "name": "Core intercos", - "default_prefix_type": "IpamIPPrefix", - "default_prefix_length": 31, - "ip_namespace": "ip_namespace", - "resources": [ip_prefix], - }, - ) - ip_prefix = await clients.standard.allocate_next_ip_prefix( - resource_pool=ip_pool, - identifier="test", - prefix_length=31, - prefix_type="IpamIPPrefix", - data={"description": "test"}, - tracker="allocate-ip-interco", - ) - else: - ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data) - ip_pool = InfrahubNodeSync( - client=clients.sync, - schema=ipprefix_pool_schema, - data={ - "id": "pppppppp-pppp-pppp-pppp-pppppppppppp", - "name": "Core intercos", - "default_prefix_type": "IpamIPPrefix", - "default_prefix_length": 31, - "ip_namespace": "ip_namespace", - "resources": [ip_prefix], - }, - ) - ip_prefix = clients.sync.allocate_next_ip_prefix( - resource_pool=ip_pool, - identifier="test", - prefix_length=31, - prefix_type="IpamIPPrefix", - data={"description": "test"}, - tracker="allocate-ip-interco", - ) - - assert ip_prefix - assert str(ip_prefix.prefix.value) == "192.0.2.0/31" - assert ip_prefix.description.value == "test" - - -EXPECTED_ECHO = """URL: http://mock/graphql -QUERY: - - query GetTags($name: String!) { - BuiltinTag(name__value: $name) { - edges { - node { - id - display_label - } - } - } - } - -VARIABLES: -{ - "name": "red" -} - -""" - - -@pytest.mark.parametrize("client_type", client_types) -async def test_query_echo(httpx_mock: HTTPXMock, echo_clients, client_type): # pylint: disable=unused-argument - httpx_mock.add_response( - method="POST", - json={"data": {"BuiltinTag": {"edges": []}}}, - ) - - query = """ - query GetTags($name: String!) 
{ - BuiltinTag(name__value: $name) { - edges { - node { - id - display_label - } - } - } - } -""" - - variables = {"name": "red"} - - if client_type == "standard": - response = await echo_clients.standard.execute_graphql(query=query, variables=variables) - else: - response = echo_clients.sync.execute_graphql(query=query, variables=variables) - - assert response == {"BuiltinTag": {"edges": []}} - assert echo_clients.stdout.getvalue().splitlines() == EXPECTED_ECHO.splitlines() - - -@pytest.mark.parametrize("client_type", client_types) -async def test_clone(clients, client_type): - if client_type == "standard": - clone = clients.standard.clone() - assert clone.config == clients.standard.config - assert isinstance(clone, InfrahubClient) - else: - clone = clients.sync.clone() - assert clone.config == clients.sync.config - assert isinstance(clone, InfrahubClientSync) diff --git a/python_sdk/tests/unit/sdk/test_config.py b/python_sdk/tests/unit/sdk/test_config.py deleted file mode 100644 index bccc7147e0..0000000000 --- a/python_sdk/tests/unit/sdk/test_config.py +++ /dev/null @@ -1,41 +0,0 @@ -import pytest -from pydantic import ValidationError - -from infrahub_sdk.config import Config - - -def test_combine_authentications(): - with pytest.raises(ValidationError) as exc: - Config(api_token="testing", username="test", password="testpassword") - - assert "Unable to combine password with token based authentication" in str(exc.value) - - -def test_missing_password(): - with pytest.raises(ValidationError) as exc: - Config(username="test") - - assert "Both 'username' and 'password' needs to be set" in str(exc.value) - - -def test_password_authentication(): - config = Config(username="test", password="test-password") - assert config.password_authentication - - -def test_not_password_authentication(): - config = Config() - assert not config.password_authentication - - -def test_config_address(): - address = "http://localhost:8000" - - config = Config(address=address + "/") - assert config.address == address - - config = Config(address=address + "//") - assert config.address == address - - config = Config(address=address) - assert config.address == address diff --git a/python_sdk/tests/unit/sdk/test_graphql.py b/python_sdk/tests/unit/sdk/test_graphql.py deleted file mode 100644 index b9e88b055c..0000000000 --- a/python_sdk/tests/unit/sdk/test_graphql.py +++ /dev/null @@ -1,472 +0,0 @@ -import pytest - -from infrahub_sdk.graphql import Mutation, Query, render_input_block, render_query_block - -# pylint: disable=redefined-outer-name - - -@pytest.fixture -def query_data_no_filter(): - data = { - "device": { - "name": {"value": None}, - "description": {"value": None}, - "interfaces": {"name": {"value": None}}, - } - } - - return data - - -@pytest.fixture -def query_data_alias(): - data = { - "device": { - "name": {"@alias": "new_name", "value": None}, - "description": {"value": {"@alias": "myvalue"}}, - "interfaces": {"@alias": "myinterfaces", "name": {"value": None}}, - } - } - - return data - - -@pytest.fixture -def query_data_fragment(): - data = { - "device": { - "name": {"value": None}, - "...on Builtin": { - "description": {"value": None}, - "interfaces": {"name": {"value": None}}, - }, - } - } - - return data - - -@pytest.fixture -def query_data_empty_filter(): - data = { - "device": { - "@filters": {}, - "name": {"value": None}, - "description": {"value": None}, - "interfaces": {"name": {"value": None}}, - } - } - - return data - - -@pytest.fixture -def query_data_filters_01(): - data = { - "device": 
{ - "@filters": {"name__value": "$name"}, - "name": {"value": None}, - "description": {"value": None}, - "interfaces": { - "@filters": {"enabled__value": "$enabled"}, - "name": {"value": None}, - }, - } - } - return data - - -@pytest.fixture -def query_data_filters_02(): - data = { - "device": { - "@filters": {"name__value": "myname", "integer__value": 44}, - "name": {"value": None}, - "interfaces": { - "@filters": {"enabled__value": True}, - "name": {"value": None}, - }, - } - } - return data - - -@pytest.fixture -def input_data_01(): - data = { - "data": { - "name": {"value": "$name"}, - "some_number": {"value": 88}, - "some_bool": {"value": True}, - "some_list": {"value": ["value1", 33]}, - "query": {"value": "my_query"}, - } - } - return data - - -def test_render_query_block(query_data_no_filter): - lines = render_query_block(data=query_data_no_filter) - - expected_lines = [ - " device {", - " name {", - " value", - " }", - " description {", - " value", - " }", - " interfaces {", - " name {", - " value", - " }", - " }", - " }", - ] - - assert lines == expected_lines - - # Render the query block with an indentation of 2 - lines = render_query_block(data=query_data_no_filter, offset=2, indentation=2) - - expected_lines = [ - " device {", - " name {", - " value", - " }", - " description {", - " value", - " }", - " interfaces {", - " name {", - " value", - " }", - " }", - " }", - ] - - assert lines == expected_lines - - -def test_render_query_block_alias(query_data_alias): - lines = render_query_block(data=query_data_alias) - - expected_lines = [ - " device {", - " new_name: name {", - " value", - " }", - " description {", - " myvalue: value", - " }", - " myinterfaces: interfaces {", - " name {", - " value", - " }", - " }", - " }", - ] - - assert lines == expected_lines - - -def test_render_query_block_fragment(query_data_fragment): - lines = render_query_block(data=query_data_fragment) - - expected_lines = [ - " device {", - " name {", - " value", - " }", - " ...on Builtin {", - " description {", - " value", - " }", - " interfaces {", - " name {", - " value", - " }", - " }", - " }", - " }", - ] - - assert lines == expected_lines - - -def test_render_input_block(input_data_01): - lines = render_input_block(data=input_data_01) - - expected_lines = [ - " data: {", - " name: {", - " value: $name", - " }", - " some_number: {", - " value: 88", - " }", - " some_bool: {", - " value: true", - " }", - " some_list: {", - " value: [", - ' "value1",', - " 33,", - " ]", - " }", - " query: {", - ' value: "my_query"', - " }", - " }", - ] - assert lines == expected_lines - - # Render the input block with an indentation of 2 - lines = render_input_block(data=input_data_01, offset=2, indentation=2) - - expected_lines = [ - " data: {", - " name: {", - " value: $name", - " }", - " some_number: {", - " value: 88", - " }", - " some_bool: {", - " value: true", - " }", - " some_list: {", - " value: [", - ' "value1",', - " 33,", - " ]", - " }", - " query: {", - ' value: "my_query"', - " }", - " }", - ] - assert lines == expected_lines - - -def test_query_rendering_no_vars(query_data_no_filter): - query = Query(query=query_data_no_filter) - - expected_query = """ -query { - device { - name { - value - } - description { - value - } - interfaces { - name { - value - } - } - } -} -""" - assert query.render_first_line() == "query {" - assert query.render() == expected_query - - -def test_query_rendering_empty_filter(query_data_empty_filter): - query = Query(query=query_data_empty_filter) - - expected_query = """ -query 
{ - device { - name { - value - } - description { - value - } - interfaces { - name { - value - } - } - } -} -""" - assert query.render_first_line() == "query {" - assert query.render() == expected_query - - -def test_query_rendering_with_filters_and_vars(query_data_filters_01): - query = Query(query=query_data_filters_01, variables={"name": str, "enabled": bool}) - - expected_query = """ -query ($name: String!, $enabled: Boolean!) { - device(name__value: $name) { - name { - value - } - description { - value - } - interfaces(enabled__value: $enabled) { - name { - value - } - } - } -} -""" - assert query.render_first_line() == "query ($name: String!, $enabled: Boolean!) {" - assert query.render() == expected_query - - -def test_query_rendering_with_filters(query_data_filters_02): - query = Query(query=query_data_filters_02) - - expected_query = """ -query { - device(name__value: "myname", integer__value: 44) { - name { - value - } - interfaces(enabled__value: true) { - name { - value - } - } - } -} -""" - assert query.render() == expected_query - - -def test_mutation_rendering_no_vars(input_data_01): - query_data = {"ok": None, "object": {"id": None}} - - query = Mutation(mutation="myobject_create", query=query_data, input_data=input_data_01) - - expected_query = """ -mutation { - myobject_create( - data: { - name: { - value: $name - } - some_number: { - value: 88 - } - some_bool: { - value: true - } - some_list: { - value: [ - "value1", - 33, - ] - } - query: { - value: "my_query" - } - } - ){ - ok - object { - id - } - } -} -""" - assert query.render_first_line() == "mutation {" - assert query.render() == expected_query - - -def test_mutation_rendering_many_relationships(): - query_data = {"ok": None, "object": {"id": None}} - input_data = { - "data": { - "description": {"value": "JFK Airport"}, - "name": {"value": "JFK1"}, - "tags": [ - {"id": "b44c6a7d-3b9c-466a-b6e3-a547b0ecc965"}, - {"id": "c5dffab1-e3f1-4039-9a1e-c0df1705d612"}, - ], - } - } - - query = Mutation(mutation="myobject_create", query=query_data, input_data=input_data) - - expected_query = """ -mutation { - myobject_create( - data: { - description: { - value: "JFK Airport" - } - name: { - value: "JFK1" - } - tags: [ - { - id: "b44c6a7d-3b9c-466a-b6e3-a547b0ecc965" - }, - { - id: "c5dffab1-e3f1-4039-9a1e-c0df1705d612" - }, - ] - } - ){ - ok - object { - id - } - } -} -""" - assert query.render_first_line() == "mutation {" - assert query.render() == expected_query - - -def test_mutation_rendering_with_vars(input_data_01): - query_data = {"ok": None, "object": {"id": None}} - variables = {"name": str, "description": str, "number": int} - query = Mutation( - mutation="myobject_create", - query=query_data, - input_data=input_data_01, - variables=variables, - ) - - expected_query = """ -mutation ($name: String!, $description: String!, $number: Int!) { - myobject_create( - data: { - name: { - value: $name - } - some_number: { - value: 88 - } - some_bool: { - value: true - } - some_list: { - value: [ - "value1", - 33, - ] - } - query: { - value: "my_query" - } - } - ){ - ok - object { - id - } - } -} -""" - assert query.render_first_line() == "mutation ($name: String!, $description: String!, $number: Int!) 
{" - assert query.render() == expected_query diff --git a/python_sdk/tests/unit/sdk/test_group_context.py b/python_sdk/tests/unit/sdk/test_group_context.py deleted file mode 100644 index 684bcc8958..0000000000 --- a/python_sdk/tests/unit/sdk/test_group_context.py +++ /dev/null @@ -1,79 +0,0 @@ -import inspect - -import pytest - -from infrahub_sdk.query_groups import InfrahubGroupContext, InfrahubGroupContextBase, InfrahubGroupContextSync - -async_methods = [method for method in dir(InfrahubGroupContext) if not method.startswith("_")] -sync_methods = [method for method in dir(InfrahubGroupContextSync) if not method.startswith("_")] - -client_types = ["standard", "sync"] - - -async def test_method_sanity(): - """Validate that there is at least one public method and that both clients look the same.""" - assert async_methods - assert async_methods == sync_methods - - -@pytest.mark.parametrize("method", async_methods) -async def test_validate_method_signature(method, replace_sync_return_annotation, replace_async_return_annotation): - async_method = getattr(InfrahubGroupContext, method) - sync_method = getattr(InfrahubGroupContextSync, method) - async_sig = inspect.signature(async_method) - sync_sig = inspect.signature(sync_method) - assert async_sig.parameters == sync_sig.parameters - assert async_sig.return_annotation == replace_sync_return_annotation(sync_sig.return_annotation) - assert replace_async_return_annotation(async_sig.return_annotation) == sync_sig.return_annotation - - -def test_set_properties(): - context = InfrahubGroupContextBase() - context.set_properties(identifier="MYID") - assert context.identifier == "MYID" - - context = InfrahubGroupContextBase() - context.set_properties(identifier="MYID", params={"one": 1, "two": "two"}, delete_unused_nodes=True) - assert context.identifier == "MYID" - assert context.params == {"one": 1, "two": "two"} - assert context.delete_unused_nodes is True - - -def test_get_params_as_str(): - context = InfrahubGroupContextBase() - context.set_properties(identifier="MYID", params={"one": 1, "two": "two"}) - assert context._get_params_as_str() == "one: 1, two: two" - - context = InfrahubGroupContextBase() - context.set_properties(identifier="MYID") - assert not context._get_params_as_str() - - -def test_generate_group_name(): - context = InfrahubGroupContextBase() - context.set_properties(identifier="MYID") - assert context._generate_group_name() == "MYID" - - context = InfrahubGroupContextBase() - context.set_properties(identifier="MYID", params={"one": 1, "two": "two"}) - assert context._generate_group_name() == "MYID-11aaec5206c3dca37cbbcaaabf121550" - - context = InfrahubGroupContextBase() - context.set_properties(identifier="MYID", params={"one": 1, "two": "two"}) - assert context._generate_group_name(suffix="xxx") == "MYID-xxx-11aaec5206c3dca37cbbcaaabf121550" - - -def test_generate_group_description(std_group_schema): - context = InfrahubGroupContextBase() - context.set_properties(identifier="MYID") - assert not context._generate_group_description(schema=std_group_schema) - - context = InfrahubGroupContextBase() - context.set_properties(identifier="MYID", params={"one": 1, "two": "two"}) - assert context._generate_group_description(schema=std_group_schema) == "one: 1, two: two" - - assert std_group_schema.attributes[1].name == "description" - std_group_schema.attributes[1].max_length = 20 - context = InfrahubGroupContextBase() - context.set_properties(identifier="MYID", params={"one": "xxxxxxxxxxx", "two": "yyyyyyyyyyy"}) - assert 
context._generate_group_description(schema=std_group_schema) == "one: xxxxxxxxxx..."
diff --git a/python_sdk/tests/unit/sdk/test_node.py b/python_sdk/tests/unit/sdk/test_node.py
deleted file mode 100644
index 5749052d2e..0000000000
--- a/python_sdk/tests/unit/sdk/test_node.py
+++ /dev/null
@@ -1,1725 +0,0 @@
-import inspect
-import ipaddress
-from typing import TYPE_CHECKING
-
-import pytest
-from pytest_httpx import HTTPXMock
-
-from infrahub_sdk.exceptions import NodeNotFoundError
-from infrahub_sdk.node import (
-    SAFE_VALUE,
-    InfrahubNode,
-    InfrahubNodeBase,
-    InfrahubNodeSync,
-    RelatedNodeBase,
-    RelationshipManagerBase,
-)
-
-if TYPE_CHECKING:
-    from infrahub_sdk.client import InfrahubClient, InfrahubClientSync
-    from infrahub_sdk.schema import GenericSchema
-
-# pylint: disable=no-member,too-many-lines
-# type: ignore[attr-defined]
-
-async_node_methods = [
-    method for method in dir(InfrahubNode) if not method.startswith("_") and method not in ("hfid", "hfid_str")
-]
-sync_node_methods = [
-    method for method in dir(InfrahubNodeSync) if not method.startswith("_") and method not in ("hfid", "hfid_str")
-]
-
-client_types = ["standard", "sync"]
-
-SAFE_GRAPHQL_VALUES = [
-    pytest.param("", id="allow-empty"),
-    pytest.param("user1", id="allow-normal"),
-    pytest.param("User Lastname", id="allow-space"),
-    pytest.param("020a1c39-6071-4bf8-9336-ffb7a001e665", id="allow-uuid"),
-    pytest.param("user.lastname", id="allow-dots"),
-    pytest.param("/opt/repos/backbone-links", id="allow-filepaths"),
-    pytest.param("https://github.com/opsmill/infrahub-demo-edge", id="allow-urls"),
-]
-
-UNSAFE_GRAPHQL_VALUES = [
-    pytest.param('No "quote"', id="disallow-quotes"),
-    pytest.param("Line \n break", id="disallow-linebreaks"),
-]
-
-
-async def test_method_sanity():
-    """Validate that there is at least one public method and that both clients look the same."""
-    assert async_node_methods
-    assert async_node_methods == sync_node_methods
-
-
-@pytest.mark.parametrize("value", SAFE_GRAPHQL_VALUES)
-def test_validate_graphql_value(value: str) -> None:
-    """All these values are safe and should not be converted"""
-    assert SAFE_VALUE.match(value)
-
-
-@pytest.mark.parametrize("value", UNSAFE_GRAPHQL_VALUES)
-def test_identify_unsafe_graphql_value(value: str) -> None:
-    """All these values are unsafe and should be converted"""
-    assert not SAFE_VALUE.match(value)
-
-
-@pytest.mark.parametrize("method", async_node_methods)
-async def test_validate_method_signature(
-    method,
-    replace_async_parameter_annotations,
-    replace_sync_parameter_annotations,
-    replace_async_return_annotation,
-    replace_sync_return_annotation,
-):
-    EXCLUDE_PARAMETERS = ["client"]
-    async_method = getattr(InfrahubNode, method)
-    sync_method = getattr(InfrahubNodeSync, method)
-    async_sig = inspect.signature(async_method)
-    sync_sig = inspect.signature(sync_method)
-
-    # Extract the parameter names and exclude some, like client, from the comparison
-    async_params_name = async_sig.parameters.keys()
-    sync_params_name = sync_sig.parameters.keys()
-    async_params = {key: value for key, value in async_sig.parameters.items() if key not in EXCLUDE_PARAMETERS}
-    sync_params = {key: value for key, value in sync_sig.parameters.items() if key not in EXCLUDE_PARAMETERS}
-
-    assert async_params_name == sync_params_name
-    assert replace_sync_parameter_annotations(async_params) == replace_sync_parameter_annotations(sync_params)
-    assert replace_async_parameter_annotations(async_params) == replace_async_parameter_annotations(sync_params)
-    assert
replace_sync_return_annotation(async_sig.return_annotation) == replace_sync_return_annotation( - sync_sig.return_annotation - ) - assert replace_async_return_annotation(async_sig.return_annotation) == replace_async_return_annotation( - sync_sig.return_annotation - ) - - -@pytest.mark.parametrize("client_type", client_types) -async def test_init_node_no_data(client, location_schema, client_type): - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema) - else: - node = InfrahubNodeSync(client=client, schema=location_schema) - assert sorted(node._attributes) == ["description", "name", "type"] - - assert hasattr(node, "name") - assert hasattr(node, "description") - assert hasattr(node, "type") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_hfid(client, schema_with_hfid, client_type): - location_data = {"name": {"value": "JFK1"}, "description": {"value": "JFK Airport"}, "type": {"value": "SITE"}} - if client_type == "standard": - location = InfrahubNode(client=client, schema=schema_with_hfid["location"], data=location_data) - else: - location = InfrahubNodeSync(client=client, schema=schema_with_hfid["location"], data=location_data) - - assert location.hfid == [location.name.value] - assert location.get_human_friendly_id_as_string() == "JFK1" - assert location.hfid_str == "BuiltinLocation__JFK1" - - rack_data = {"facility_id": {"value": "RACK1"}, "location": location} - if client_type == "standard": - rack = InfrahubNode(client=client, schema=schema_with_hfid["rack"], data=rack_data) - else: - rack = InfrahubNodeSync(client=client, schema=schema_with_hfid["rack"], data=rack_data) - - assert rack.hfid == [rack.facility_id.value, rack.location.get().name.value] - assert rack.get_human_friendly_id_as_string() == "RACK1__JFK1" - assert rack.hfid_str == "BuiltinRack__RACK1__JFK1" - - -@pytest.mark.parametrize("client_type", client_types) -async def test_init_node_data_user(client, location_schema, client_type): - data = { - "name": {"value": "JFK1"}, - "description": {"value": "JFK Airport"}, - "type": {"value": "SITE"}, - } - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=data) - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=data) - - assert node.name.value == "JFK1" - assert node.name.is_protected is None - assert node.description.value == "JFK Airport" - assert node.type.value == "SITE" - - -@pytest.mark.parametrize("client_type", client_types) -async def test_init_node_data_user_with_relationships(client, location_schema, client_type): - data = { - "name": {"value": "JFK1"}, - "description": {"value": "JFK Airport"}, - "type": {"value": "SITE"}, - "primary_tag": "pppppppp", - "tags": [{"id": "aaaaaa"}, {"id": "bbbb"}], - } - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=data) - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=data) - - assert node.name.value == "JFK1" - assert node.name.is_protected is None - assert node.description.value == "JFK Airport" - assert node.type.value == "SITE" - - assert isinstance(node.tags, RelationshipManagerBase) - assert len(node.tags.peers) == 2 - assert isinstance(node.tags.peers[0], RelatedNodeBase) - assert isinstance(node.primary_tag, RelatedNodeBase) - assert node.primary_tag.id == "pppppppp" - - -@pytest.mark.parametrize("client_type", client_types) -async def test_init_node_data_graphql(client, location_schema, location_data01, 
client_type): - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=location_data01) - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=location_data01) - - assert node.name.value == "DFW" - assert node.name.is_protected is True - assert node.description.value is None - assert node.type.value == "SITE" - - assert isinstance(node.tags, RelationshipManagerBase) - assert len(node.tags.peers) == 1 - assert isinstance(node.tags.peers[0], RelatedNodeBase) - assert isinstance(node.primary_tag, RelatedNodeBase) - assert node.primary_tag.id == "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr" - assert node.primary_tag.typename == "BuiltinTag" - - -@pytest.mark.parametrize("client_type", client_types) -async def test_query_data_no_filters(clients, location_schema, client_type): - if client_type == "standard": - client: InfrahubClient = getattr(clients, client_type) # type: ignore[annotation-unchecked] - node = InfrahubNode(client=client, schema=location_schema) - data = await node.generate_query_data() - else: - client: InfrahubClientSync = getattr(clients, client_type) # type: ignore[annotation-unchecked] - node = InfrahubNodeSync(client=client, schema=location_schema) - data = node.generate_query_data() - - assert data == { - "BuiltinLocation": { - "@filters": {}, - "count": None, - "edges": { - "node": { - "__typename": None, - "id": None, - "hfid": None, - "display_label": None, - "name": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "description": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "type": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "primary_tag": { - "properties": { - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - }, - "node": { - "id": None, - "hfid": None, - "display_label": None, - "__typename": None, - }, - }, - }, - }, - }, - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_query_data_node(clients, location_schema, client_type): - if client_type == "standard": - client: InfrahubClient = getattr(clients, client_type) # type: ignore[annotation-unchecked] - node = InfrahubNode(client=client, schema=location_schema) - data = await node.generate_query_data_node() - else: - client: InfrahubClientSync = getattr(clients, client_type) # type: ignore[annotation-unchecked] - node = InfrahubNodeSync(client=client, schema=location_schema) - data = node.generate_query_data_node() - - assert data == { - "name": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": {"__typename": None, "display_label": None, "id": None}, - "source": {"__typename": None, "display_label": None, "id": None}, - "value": None, - 
}, - "description": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": {"__typename": None, "display_label": None, "id": None}, - "source": {"__typename": None, "display_label": None, "id": None}, - "value": None, - }, - "type": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": {"__typename": None, "display_label": None, "id": None}, - "source": {"__typename": None, "display_label": None, "id": None}, - "value": None, - }, - "primary_tag": { - "properties": { - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - }, - "node": { - "id": None, - "hfid": None, - "display_label": None, - "__typename": None, - }, - }, - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_query_data_with_prefetch_relationships(clients, mock_schema_query_02, client_type): - if client_type == "standard": - client: InfrahubClient = getattr(clients, client_type) # type: ignore[annotation-unchecked] - location_schema: GenericSchema = await client.schema.get(kind="BuiltinLocation") # type: ignore[annotation-unchecked] - node = InfrahubNode(client=client, schema=location_schema) - data = await node.generate_query_data(prefetch_relationships=True) - else: - client: InfrahubClientSync = getattr(clients, client_type) # type: ignore[annotation-unchecked] - location_schema: GenericSchema = client.schema.get(kind="BuiltinLocation") # type: ignore[annotation-unchecked] - node = InfrahubNodeSync(client=client, schema=location_schema) - data = node.generate_query_data(prefetch_relationships=True) - - assert data == { - "BuiltinLocation": { - "@filters": {}, - "count": None, - "edges": { - "node": { - "__typename": None, - "id": None, - "hfid": None, - "display_label": None, - "name": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "description": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "type": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "primary_tag": { - "properties": { - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - }, - "node": { - "id": None, - "hfid": None, - "display_label": None, - "__typename": None, - "description": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "name": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - 
"is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - }, - }, - }, - }, - }, - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_query_data_node_with_prefetch_relationships(clients, mock_schema_query_02, client_type): - if client_type == "standard": - client: InfrahubClient = getattr(clients, client_type) # type: ignore[annotation-unchecked] - location_schema: GenericSchema = await client.schema.get(kind="BuiltinLocation") # type: ignore[annotation-unchecked] - node = InfrahubNode(client=client, schema=location_schema) - data = await node.generate_query_data_node(prefetch_relationships=True) - else: - client: InfrahubClientSync = getattr(clients, client_type) # type: ignore[annotation-unchecked] - location_schema: GenericSchema = client.schema.get(kind="BuiltinLocation") # type: ignore[annotation-unchecked] - node = InfrahubNodeSync(client=client, schema=location_schema) - data = node.generate_query_data_node(prefetch_relationships=True) - - assert data == { - "description": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": {"__typename": None, "display_label": None, "id": None}, - "source": {"__typename": None, "display_label": None, "id": None}, - "value": None, - }, - "name": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": {"__typename": None, "display_label": None, "id": None}, - "source": {"__typename": None, "display_label": None, "id": None}, - "value": None, - }, - "primary_tag": { - "node": { - "__typename": None, - "description": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": {"__typename": None, "display_label": None, "id": None}, - "source": {"__typename": None, "display_label": None, "id": None}, - "value": None, - }, - "display_label": None, - "id": None, - "hfid": None, - "name": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": {"__typename": None, "display_label": None, "id": None}, - "source": {"__typename": None, "display_label": None, "id": None}, - "value": None, - }, - }, - "properties": { - "is_protected": None, - "is_visible": None, - "owner": {"__typename": None, "display_label": None, "id": None}, - "source": {"__typename": None, "display_label": None, "id": None}, - }, - }, - "type": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": {"__typename": None, "display_label": None, "id": None}, - "source": {"__typename": None, "display_label": None, "id": None}, - "value": None, - }, - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_query_data_generic(clients, mock_schema_query_02, client_type): # pylint: disable=unused-argument - if client_type == "standard": - client: InfrahubClient = getattr(clients, client_type) # type: ignore[annotation-unchecked] - corenode_schema: GenericSchema = await client.schema.get(kind="CoreNode") # type: ignore[annotation-unchecked] - node = InfrahubNode(client=client, schema=corenode_schema) - data = await node.generate_query_data(fragment=False) - else: - client: InfrahubClientSync = getattr(clients, client_type) # type: ignore[annotation-unchecked] - corenode_schema: GenericSchema = client.schema.get(kind="CoreNode") # type: 
ignore[annotation-unchecked] - node = InfrahubNodeSync(client=client, schema=corenode_schema) - data = node.generate_query_data(fragment=False) - - assert data == { - "CoreNode": { - "@filters": {}, - "count": None, - "edges": { - "node": { - "__typename": None, - "id": None, - "hfid": None, - "display_label": None, - }, - }, - }, - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_query_data_generic_fragment(clients, mock_schema_query_02, client_type): # pylint: disable=unused-argument - if client_type == "standard": - client: InfrahubClient = getattr(clients, client_type) # type: ignore[annotation-unchecked] - corenode_schema: GenericSchema = await client.schema.get(kind="CoreNode") # type: ignore[annotation-unchecked] - node = InfrahubNode(client=client, schema=corenode_schema) - data = await node.generate_query_data(fragment=True) - else: - client: InfrahubClientSync = getattr(clients, client_type) # type: ignore[annotation-unchecked] - corenode_schema: GenericSchema = client.schema.get(kind="CoreNode") # type: ignore[annotation-unchecked] - node = InfrahubNodeSync(client=client, schema=corenode_schema) - data = node.generate_query_data(fragment=True) - - assert data == { - "CoreNode": { - "@filters": {}, - "count": None, - "edges": { - "node": { - "__typename": None, - "...on BuiltinLocation": { - "description": { - "@alias": "__alias__BuiltinLocation__description", - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "name": { - "@alias": "__alias__BuiltinLocation__name", - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "primary_tag": { - "@alias": "__alias__BuiltinLocation__primary_tag", - "node": { - "__typename": None, - "display_label": None, - "id": None, - "hfid": None, - }, - "properties": { - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - }, - }, - "type": { - "@alias": "__alias__BuiltinLocation__type", - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - }, - "...on BuiltinTag": { - "description": { - "@alias": "__alias__BuiltinTag__description", - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "name": { - "@alias": "__alias__BuiltinTag__name", - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - }, - "display_label": None, - "id": None, - "hfid": None, - }, - }, - }, - } - - 
-@pytest.mark.parametrize("client_type", client_types) -async def test_query_data_include(client, location_schema, client_type): - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema) - data = await node.generate_query_data(include=["tags"]) - else: - node = InfrahubNodeSync(client=client, schema=location_schema) - data = node.generate_query_data(include=["tags"]) - - assert data == { - "BuiltinLocation": { - "@filters": {}, - "count": None, - "edges": { - "node": { - "__typename": None, - "id": None, - "hfid": None, - "display_label": None, - "name": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "description": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "type": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "primary_tag": { - "properties": { - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - }, - "node": { - "id": None, - "hfid": None, - "display_label": None, - "__typename": None, - }, - }, - "tags": { - "count": None, - "edges": { - "properties": { - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - }, - "node": { - "id": None, - "display_label": None, - "__typename": None, - }, - }, - }, - }, - }, - }, - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_query_data_exclude(client, location_schema, client_type): - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema) - data = await node.generate_query_data(exclude=["description", "primary_tag"]) - else: - node = InfrahubNodeSync(client=client, schema=location_schema) - data = node.generate_query_data(exclude=["description", "primary_tag"]) - - assert data == { - "BuiltinLocation": { - "@filters": {}, - "count": None, - "edges": { - "node": { - "__typename": None, - "id": None, - "hfid": None, - "display_label": None, - "name": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - "type": { - "is_default": None, - "is_from_profile": None, - "is_protected": None, - "is_visible": None, - "owner": { - "__typename": None, - "display_label": None, - "id": None, - }, - "source": { - "__typename": None, - "display_label": None, - "id": None, - }, - "value": None, - }, - }, - }, - }, - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_create_input_data(client, location_schema, client_type): - data = 
{"name": {"value": "JFK1"}, "description": {"value": "JFK Airport"}, "type": {"value": "SITE"}} - - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=data) - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=data) - - assert node._generate_input_data()["data"] == { - "data": {"name": {"value": "JFK1"}, "description": {"value": "JFK Airport"}, "type": {"value": "SITE"}} - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_create_input_data__with_relationships_02(client, location_schema, client_type): - """Validate input data with variables that needs replacements""" - data = { - "name": {"value": "JFK1"}, - "description": {"value": "JFK\n Airport"}, - "type": {"value": "SITE"}, - "primary_tag": "pppppppp", - "tags": [{"id": "aaaaaa"}, {"id": "bbbb"}], - } - - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=data) - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=data) - - input_data = node._generate_input_data() - assert len(input_data["variables"].keys()) == 1 - key = list(input_data["variables"].keys())[0] - value = input_data["variables"][key] - - expected = { - "data": { - "name": {"value": "JFK1"}, - "description": {"value": f"${key}"}, - "type": {"value": "SITE"}, - "tags": [{"id": "aaaaaa"}, {"id": "bbbb"}], - "primary_tag": {"id": "pppppppp"}, - } - } - assert input_data["data"] == expected - assert value == "JFK\n Airport" - - -@pytest.mark.parametrize("client_type", client_types) -async def test_create_input_data__with_relationships_01(client, location_schema, client_type): - data = { - "name": {"value": "JFK1"}, - "description": {"value": "JFK Airport"}, - "type": {"value": "SITE"}, - "primary_tag": "pppppppp", - "tags": [{"id": "aaaaaa"}, {"id": "bbbb"}], - } - - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=data) - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=data) - - assert node._generate_input_data()["data"] == { - "data": { - "name": {"value": "JFK1"}, - "description": {"value": "JFK Airport"}, - "type": {"value": "SITE"}, - "tags": [{"id": "aaaaaa"}, {"id": "bbbb"}], - "primary_tag": {"id": "pppppppp"}, - } - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_create_input_data_with_relationships_02(clients, rfile_schema, client_type): - data = { - "name": {"value": "rfile01", "is_protected": True, "source": "ffffffff", "owner": "ffffffff"}, - "template_path": {"value": "mytemplate.j2"}, - "query": {"id": "qqqqqqqq", "source": "ffffffff", "owner": "ffffffff"}, - "repository": {"id": "rrrrrrrr", "source": "ffffffff", "owner": "ffffffff"}, - "tags": [{"id": "t1t1t1t1"}, "t2t2t2t2"], - } - - if client_type == "standard": - node = InfrahubNode(client=clients.standard, schema=rfile_schema, data=data) - else: - node = InfrahubNodeSync(client=clients.sync, schema=rfile_schema, data=data) - - assert node._generate_input_data()["data"] == { - "data": { - "name": { - "is_protected": True, - "owner": "ffffffff", - "source": "ffffffff", - "value": "rfile01", - }, - "query": { - "_relation__owner": "ffffffff", - "_relation__source": "ffffffff", - "id": "qqqqqqqq", - }, - "tags": [{"id": "t1t1t1t1"}, {"id": "t2t2t2t2"}], - "template_path": {"value": "mytemplate.j2"}, - "repository": { - "_relation__owner": "ffffffff", - "_relation__source": "ffffffff", - "id": "rrrrrrrr", - }, - } - } - - 
-@pytest.mark.parametrize("client_type", client_types) -async def test_create_input_data_with_relationships_03(clients, rfile_schema, client_type): - data = { - "name": {"value": "rfile01", "is_protected": True, "source": "ffffffff"}, - "template_path": {"value": "mytemplate.j2"}, - "query": {"id": "qqqqqqqq", "source": "ffffffff", "owner": "ffffffff", "is_protected": True}, - "repository": {"id": "rrrrrrrr", "source": "ffffffff", "owner": "ffffffff"}, - "tags": [{"id": "t1t1t1t1"}, "t2t2t2t2"], - } - - if client_type == "standard": - node = InfrahubNode(client=clients.standard, schema=rfile_schema, data=data) - else: - node = InfrahubNodeSync(client=clients.sync, schema=rfile_schema, data=data) - - assert node._generate_input_data()["data"] == { - "data": { - "name": {"is_protected": True, "source": "ffffffff", "value": "rfile01"}, - "query": { - "_relation__is_protected": True, - "_relation__owner": "ffffffff", - "_relation__source": "ffffffff", - "id": "qqqqqqqq", - }, - "tags": [{"id": "t1t1t1t1"}, {"id": "t2t2t2t2"}], - "template_path": {"value": "mytemplate.j2"}, - "repository": {"_relation__owner": "ffffffff", "_relation__source": "ffffffff", "id": "rrrrrrrr"}, - } - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_create_input_data_with_relationships_03_for_update_include_unmodified( - clients, rfile_schema, client_type -): - data = { - "name": {"value": "rfile01", "is_protected": True, "source": "ffffffff"}, - "template_path": {"value": "mytemplate.j2"}, - "query": {"id": "qqqqqqqq", "source": "ffffffff", "owner": "ffffffff", "is_protected": True}, - "repository": {"id": "rrrrrrrr", "source": "ffffffff", "owner": "ffffffff"}, - "tags": [{"id": "t1t1t1t1"}, "t2t2t2t2"], - } - - if client_type == "standard": - node = InfrahubNode(client=clients.standard, schema=rfile_schema, data=data) - else: - node = InfrahubNodeSync(client=clients.sync, schema=rfile_schema, data=data) - - node.template_path.value = "my-changed-template.j2" - assert node._generate_input_data(exclude_unmodified=False)["data"] == { - "data": { - "name": { - "is_protected": True, - "source": "ffffffff", - "value": "rfile01", - }, - "query": { - "id": "qqqqqqqq", - "_relation__is_protected": True, - "_relation__owner": "ffffffff", - "_relation__source": "ffffffff", - }, - "tags": [{"id": "t1t1t1t1"}, {"id": "t2t2t2t2"}], - "template_path": {"value": "my-changed-template.j2"}, - "repository": {"id": "rrrrrrrr", "_relation__owner": "ffffffff", "_relation__source": "ffffffff"}, - } - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_create_input_data_with_relationships_03_for_update_exclude_unmodified( - clients, rfile_schema, client_type -): - data = { - "name": {"value": "rfile01", "is_protected": True, "source": "ffffffff"}, - "template_path": {"value": "mytemplate.j2"}, - "query": {"id": "qqqqqqqq", "source": "ffffffff", "owner": "ffffffff", "is_protected": True}, - "repository": {"id": "rrrrrrrr", "source": "ffffffff", "owner": "ffffffff"}, - "tags": [{"id": "t1t1t1t1"}, "t2t2t2t2"], - } - - if client_type == "standard": - node = InfrahubNode(client=clients.standard, schema=rfile_schema, data=data) - else: - node = InfrahubNodeSync(client=clients.sync, schema=rfile_schema, data=data) - - node.template_path.value = "my-changed-template.j2" - assert node._generate_input_data(exclude_unmodified=True)["data"] == { - "data": { - "query": { - "id": "qqqqqqqq", - "_relation__is_protected": True, - "_relation__owner": "ffffffff", - "_relation__source": "ffffffff", - }, 
- "template_path": {"value": "my-changed-template.j2"}, - "repository": {"id": "rrrrrrrr", "_relation__owner": "ffffffff", "_relation__source": "ffffffff"}, - } - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_create_input_data_with_IPHost_attribute(client, ipaddress_schema, client_type): - data = {"address": {"value": ipaddress.ip_interface("1.1.1.1/24"), "is_protected": True}} - - if client_type == "standard": - ip_address = InfrahubNode(client=client, schema=ipaddress_schema, data=data) - else: - ip_address = InfrahubNodeSync(client=client, schema=ipaddress_schema, data=data) - - assert ip_address._generate_input_data()["data"] == { - "data": {"address": {"value": "1.1.1.1/24", "is_protected": True}} - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_create_input_data_with_IPNetwork_attribute(client, ipnetwork_schema, client_type): - data = {"network": {"value": ipaddress.ip_network("1.1.1.0/24"), "is_protected": True}} - - if client_type == "standard": - ip_network = InfrahubNode(client=client, schema=ipnetwork_schema, data=data) - else: - ip_network = InfrahubNodeSync(client=client, schema=ipnetwork_schema, data=data) - - assert ip_network._generate_input_data()["data"] == { - "data": {"network": {"value": "1.1.1.0/24", "is_protected": True}} - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_update_input_data__with_relationships_01( - client, - location_schema, - location_data01, - tag_schema, - tag_blue_data, - tag_green_data, - tag_red_data, - client_type, -): - if client_type == "standard": - location = InfrahubNode(client=client, schema=location_schema, data=location_data01) - tag_green = InfrahubNode(client=client, schema=tag_schema, data=tag_green_data) - tag_blue = InfrahubNode(client=client, schema=tag_schema, data=tag_blue_data) - tag_red = InfrahubNode(client=client, schema=tag_schema, data=tag_red_data) - else: - location = InfrahubNodeSync(client=client, schema=location_schema, data=location_data01) - tag_green = InfrahubNodeSync(client=client, schema=tag_schema, data=tag_green_data) - tag_blue = InfrahubNodeSync(client=client, schema=tag_schema, data=tag_blue_data) - tag_red = InfrahubNodeSync(client=client, schema=tag_schema, data=tag_red_data) - - location.primary_tag = tag_green_data - location.tags.extend([tag_green, tag_red]) - location.tags.remove(tag_blue) - - assert location._generate_input_data()["data"] == { - "data": { - "id": "llllllll-llll-llll-llll-llllllllllll", - "name": {"is_protected": True, "is_visible": True, "value": "DFW"}, - "primary_tag": {"id": "gggggggg-gggg-gggg-gggg-gggggggggggg"}, - "tags": [{"id": "gggggggg-gggg-gggg-gggg-gggggggggggg"}, {"id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr"}], - "type": {"is_protected": True, "is_visible": True, "value": "SITE"}, - }, - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_update_input_data_with_relationships_02(client, location_schema, location_data02, client_type): - if client_type == "standard": - location = InfrahubNode(client=client, schema=location_schema, data=location_data02) - else: - location = InfrahubNodeSync(client=client, schema=location_schema, data=location_data02) - - assert location._generate_input_data()["data"] == { - "data": { - "id": "llllllll-llll-llll-llll-llllllllllll", - "name": { - "is_protected": True, - "is_visible": True, - "source": "cccccccc-cccc-cccc-cccc-cccccccccccc", - "value": "dfw1", - }, - "primary_tag": { - "_relation__is_protected": True, - 
"_relation__is_visible": True, - "_relation__source": "cccccccc-cccc-cccc-cccc-cccccccccccc", - "id": "rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr", - }, - "tags": [ - { - "_relation__is_protected": True, - "_relation__is_visible": True, - "_relation__source": "cccccccc-cccc-cccc-cccc-cccccccccccc", - "id": "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", - }, - ], - "type": { - "is_protected": True, - "is_visible": True, - "source": "cccccccc-cccc-cccc-cccc-cccccccccccc", - "value": "SITE", - }, - }, - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_update_input_data_empty_relationship( - client, location_schema, location_data01, tag_schema, tag_blue_data, client_type -): - if client_type == "standard": - location = InfrahubNode(client=client, schema=location_schema, data=location_data01) - tag_blue = InfrahubNode(client=client, schema=tag_schema, data=tag_blue_data) - else: - location = InfrahubNodeSync(client=client, schema=location_schema, data=location_data01) - tag_blue = InfrahubNode(client=client, schema=tag_schema, data=tag_blue_data) - - location.tags.remove(tag_blue) - location.primary_tag = None - - assert location._generate_input_data()["data"] == { - "data": { - "id": "llllllll-llll-llll-llll-llllllllllll", - "name": {"is_protected": True, "is_visible": True, "value": "DFW"}, - # "primary_tag": None, - "tags": [], - "type": {"is_protected": True, "is_visible": True, "value": "SITE"}, - }, - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_get_relationship_from_store( - client, - location_schema, - location_data01, - tag_schema, - tag_red_data, - tag_blue_data, - client_type, -): - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=location_data01) - tag_red = InfrahubNode(client=client, schema=tag_schema, data=tag_red_data) - tag_blue = InfrahubNode(client=client, schema=tag_schema, data=tag_blue_data) - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=location_data01) - tag_red = InfrahubNodeSync(client=client, schema=tag_schema, data=tag_red_data) - tag_blue = InfrahubNodeSync(client=client, schema=tag_schema, data=tag_blue_data) - - client.store.set(key=tag_red.id, node=tag_red) - client.store.set(key=tag_blue.id, node=tag_blue) - - assert node.primary_tag.peer == tag_red - assert node.primary_tag.get() == tag_red - - assert node.tags[0].peer == tag_blue - assert [tag.peer for tag in node.tags] == [tag_blue] - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_get_relationship_not_in_store(client, location_schema, location_data01, client_type): - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=location_data01) - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=location_data01) - - with pytest.raises(NodeNotFoundError): - node.primary_tag.peer # pylint: disable=pointless-statement - - with pytest.raises(NodeNotFoundError): - node.tags[0].peer # pylint: disable=pointless-statement - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_fetch_relationship( - httpx_mock: HTTPXMock, - mock_schema_query_01, - clients, - location_schema, - location_data01, - tag_schema, - tag_red_data, - tag_blue_data, - client_type, -): # pylint: disable=unused-argument - response1 = { - "data": { - "BuiltinTag": { - "count": 1, - "edges": [ - tag_red_data, - ], - } - } - } - - httpx_mock.add_response( - method="POST", - json=response1, - 
match_headers={"X-Infrahub-Tracker": "query-builtintag-page1"}, - ) - - response2 = { - "data": { - "BuiltinTag": { - "count": 1, - "edges": [ - tag_blue_data, - ], - } - } - } - - httpx_mock.add_response( - method="POST", - json=response2, - match_headers={"X-Infrahub-Tracker": "query-builtintag-page1"}, - ) - - if client_type == "standard": - node = InfrahubNode(client=clients.standard, schema=location_schema, data=location_data01) - await node.primary_tag.fetch() # type: ignore[attr-defined] - await node.tags.fetch() # type: ignore[attr-defined] - else: - node = InfrahubNodeSync(client=clients.sync, schema=location_schema, data=location_data01) # type: ignore[assignment] - node.primary_tag.fetch() # type: ignore[attr-defined] - node.tags.fetch() # type: ignore[attr-defined] - - assert isinstance(node.primary_tag.peer, InfrahubNodeBase) # type: ignore[attr-defined] - assert isinstance(node.tags[0].peer, InfrahubNodeBase) # type: ignore[attr-defined] - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_IPHost_deserialization(client, ipaddress_schema, client_type): - data = { - "id": "aaaaaaaaaaaaaa", - "address": { - "value": "1.1.1.1/24", - "is_protected": True, - }, - } - if client_type == "standard": - ip_address = InfrahubNode(client=client, schema=ipaddress_schema, data=data) - else: - ip_address = InfrahubNodeSync(client=client, schema=ipaddress_schema, data=data) - - assert ip_address.address.value == ipaddress.ip_interface("1.1.1.1/24") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_IPNetwork_deserialization(client, ipnetwork_schema, client_type): - data = { - "id": "aaaaaaaaaaaaaa", - "network": { - "value": "1.1.1.0/24", - "is_protected": True, - }, - } - if client_type == "standard": - ip_network = InfrahubNode(client=client, schema=ipnetwork_schema, data=data) - else: - ip_network = InfrahubNodeSync(client=client, schema=ipnetwork_schema, data=data) - - assert ip_network.network.value == ipaddress.ip_network("1.1.1.0/24") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_node_extract(client, location_schema, location_data01, client_type): - if client_type == "standard": - node = InfrahubNode(client=client, schema=location_schema, data=location_data01) - else: - node = InfrahubNodeSync(client=client, schema=location_schema, data=location_data01) - - params = { - "identifier": "id", - "name": "name__value", - "description": "description__value", - } - - assert node.extract(params=params) == { - "description": None, - "identifier": "llllllll-llll-llll-llll-llllllllllll", - "name": "DFW", - } - - -@pytest.mark.parametrize("client_type", client_types) -async def test_read_only_attr( - client, - address_schema, - address_data, - client_type, -): - if client_type == "standard": - address = InfrahubNode(client=client, schema=address_schema, data=address_data) - else: - address = InfrahubNodeSync(client=client, schema=address_schema, data=address_data) - - assert address._generate_input_data()["data"] == { - "data": { - "id": "d5994b18-b25e-4261-9e63-17c2844a0b45", - "street_number": {"is_protected": False, "is_visible": True, "value": "1234"}, - "street_name": {"is_protected": False, "is_visible": True, "value": "Fake Street"}, - "postal_code": {"is_protected": False, "is_visible": True, "value": "123ABC"}, - }, - } - assert address.computed_address.value == "1234 Fake Street 123ABC" - - -@pytest.mark.parametrize("client_type", client_types) -async def test_relationships_excluded_input_data(client, 
location_schema, client_type):
-    data = {
-        "name": {"value": "JFK1"},
-        "description": {"value": "JFK Airport"},
-        "type": {"value": "SITE"},
-        "primary_tag": "pppppppp",
-        "tags": [{"id": "aaaaaa"}, {"id": "bbbb"}],
-    }
-    if client_type == "standard":
-        node = InfrahubNode(client=client, schema=location_schema, data=data)
-    else:
-        node = InfrahubNodeSync(client=client, schema=location_schema, data=data)
-
-    assert node.tags.has_update is False
-
-
-@pytest.mark.parametrize("client_type", client_types)
-async def test_create_input_data_with_resource_pool_relationship(
-    client, ipaddress_pool_schema, ipam_ipprefix_schema, simple_device_schema, ipam_ipprefix_data, client_type
-):
-    if client_type == "standard":
-        ip_prefix = InfrahubNode(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
-        ip_pool = InfrahubNode(
-            client=client,
-            schema=ipaddress_pool_schema,
-            data={
-                "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
-                "name": "Core loopbacks",
-                "default_address_type": "IpamIPAddress",
-                "default_prefix_length": 32,
-                "ip_namespace": "ip_namespace",
-                "resources": [ip_prefix],
-            },
-        )
-        device = InfrahubNode(
-            client=client, schema=simple_device_schema, data={"name": "device-01", "primary_address": ip_pool}
-        )
-    else:
-        ip_prefix = InfrahubNodeSync(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
-        ip_pool = InfrahubNodeSync(
-            client=client,
-            schema=ipaddress_pool_schema,
-            data={
-                "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
-                "name": "Core loopbacks",
-                "default_address_type": "IpamIPAddress",
-                "default_prefix_length": 32,
-                "ip_namespace": "ip_namespace",
-                "resources": [ip_prefix],
-            },
-        )
-        device = InfrahubNodeSync(
-            client=client, schema=simple_device_schema, data={"name": "device-01", "primary_address": ip_pool}
-        )
-
-    assert device._generate_input_data()["data"] == {
-        "data": {
-            "name": {"value": "device-01"},
-            "primary_address": {"from_pool": {"id": "pppppppp-pppp-pppp-pppp-pppppppppppp"}},
-        },
-    }
-
-
-@pytest.mark.parametrize("client_type", client_types)
-async def test_create_mutation_query_with_resource_pool_relationship(
-    client, ipaddress_pool_schema, ipam_ipprefix_schema, simple_device_schema, ipam_ipprefix_data, client_type
-):
-    if client_type == "standard":
-        ip_prefix = InfrahubNode(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
-        ip_pool = InfrahubNode(
-            client=client,
-            schema=ipaddress_pool_schema,
-            data={
-                "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
-                "name": "Core loopbacks",
-                "default_address_type": "IpamIPAddress",
-                "default_prefix_length": 32,
-                "ip_namespace": "ip_namespace",
-                "resources": [ip_prefix],
-            },
-        )
-        device = InfrahubNode(
-            client=client, schema=simple_device_schema, data={"name": "device-01", "primary_address": ip_pool}
-        )
-    else:
-        ip_prefix = InfrahubNodeSync(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data)
-        ip_pool = InfrahubNodeSync(
-            client=client,
-            schema=ipaddress_pool_schema,
-            data={
-                "id": "pppppppp-pppp-pppp-pppp-pppppppppppp",
-                "name": "Core loopbacks",
-                "default_address_type": "IpamIPAddress",
-                "default_prefix_length": 32,
-                "ip_namespace": "ip_namespace",
-                "resources": [ip_prefix],
-            },
-        )
-        device = InfrahubNodeSync(
-            client=client, schema=simple_device_schema, data={"name": "device-01", "primary_address": ip_pool}
-        )
-
-    assert device._generate_mutation_query() == {
-        "object": {"id": None, "primary_address": {"node": {"__typename": None, "display_label": None, "id": None}}},
-        "ok": None,
-    }
-
-
-@pytest.mark.parametrize("client_type", client_types) -async def test_get_pool_allocated_resources( - httpx_mock: HTTPXMock, - mock_schema_query_ipam: HTTPXMock, - clients, - ipaddress_pool_schema, - ipam_ipprefix_schema, - ipam_ipprefix_data, - client_type, -): - httpx_mock.add_response( - method="POST", - json={ - "data": { - "InfrahubResourcePoolAllocated": { - "count": 2, - "edges": [ - { - "node": { - "id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb", - "kind": "IpamIPAddress", - "branch": "main", - "identifier": "ip-1", - } - }, - { - "node": { - "id": "17d9bd8e-31ee-acf0-2786-179fb76f2f67", - "kind": "IpamIPAddress", - "branch": "main", - "identifier": "ip-2", - } - }, - ], - } - } - }, - match_headers={"X-Infrahub-Tracker": "get-allocated-resources-page1"}, - ) - httpx_mock.add_response( - method="POST", - json={ - "data": { - "IpamIPAddress": { - "count": 2, - "edges": [ - {"node": {"id": "17d9bd8d-8fc2-70b0-278a-179f425e25cb", "__typename": "IpamIPAddress"}}, - {"node": {"id": "17d9bd8e-31ee-acf0-2786-179fb76f2f67", "__typename": "IpamIPAddress"}}, - ], - } - } - }, - match_headers={"X-Infrahub-Tracker": "query-ipamipaddress-page1"}, - ) - - if client_type == "standard": - client: InfrahubClient = getattr(clients, client_type) # type: ignore[annotation-unchecked] - ip_prefix = InfrahubNode(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data) - ip_pool = InfrahubNode( - client=client, - schema=ipaddress_pool_schema, - data={ - "id": "pppppppp-pppp-pppp-pppp-pppppppppppp", - "name": "Core loopbacks", - "default_address_type": "IpamIPAddress", - "default_prefix_length": 32, - "ip_namespace": "ip_namespace", - "resources": [ip_prefix], - }, - ) - - resources = await ip_pool.get_pool_allocated_resources(resource=ip_prefix) - assert len(resources) == 2 - assert [resource.id for resource in resources] == [ - "17d9bd8d-8fc2-70b0-278a-179f425e25cb", - "17d9bd8e-31ee-acf0-2786-179fb76f2f67", - ] - else: - client: InfrahubClientSync = getattr(clients, client_type) # type: ignore[annotation-unchecked] - ip_prefix = InfrahubNodeSync(client=client, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data) - ip_pool = InfrahubNodeSync( - client=client, - schema=ipaddress_pool_schema, - data={ - "id": "pppppppp-pppp-pppp-pppp-pppppppppppp", - "name": "Core loopbacks", - "default_address_type": "IpamIPAddress", - "default_prefix_length": 32, - "ip_namespace": "ip_namespace", - "resources": [ip_prefix], - }, - ) - - resources = ip_pool.get_pool_allocated_resources(resource=ip_prefix) - assert len(resources) == 2 - assert [resource.id for resource in resources] == [ - "17d9bd8d-8fc2-70b0-278a-179f425e25cb", - "17d9bd8e-31ee-acf0-2786-179fb76f2f67", - ] - - -@pytest.mark.parametrize("client_type", client_types) -async def test_get_pool_resources_utilization( - httpx_mock: HTTPXMock, clients, ipaddress_pool_schema, ipam_ipprefix_schema, ipam_ipprefix_data, client_type -): - httpx_mock.add_response( - method="POST", - json={ - "data": { - "InfrahubResourcePoolUtilization": { - "count": 1, - "edges": [ - { - "node": { - "id": "17d9bd86-3471-a020-2782-179ff078e58f", - "utilization": 93.75, - "utilization_branches": 0, - "utilization_default_branch": 93.75, - } - } - ], - } - } - }, - match_headers={"X-Infrahub-Tracker": "get-pool-utilization"}, - ) - - if client_type == "standard": - ip_prefix = InfrahubNode(client=clients.standard, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data) - ip_pool = InfrahubNode( - client=clients.standard, - schema=ipaddress_pool_schema, - data={ - "id": 
"pppppppp-pppp-pppp-pppp-pppppppppppp", - "name": "Core loopbacks", - "default_address_type": "IpamIPAddress", - "default_prefix_length": 32, - "ip_namespace": "ip_namespace", - "resources": [ip_prefix], - }, - ) - - utilizations = await ip_pool.get_pool_resources_utilization() - assert len(utilizations) == 1 - assert utilizations[0]["utilization"] == 93.75 - else: - ip_prefix = InfrahubNodeSync(client=clients.sync, schema=ipam_ipprefix_schema, data=ipam_ipprefix_data) - ip_pool = InfrahubNodeSync( - client=clients.sync, - schema=ipaddress_pool_schema, - data={ - "id": "pppppppp-pppp-pppp-pppp-pppppppppppp", - "name": "Core loopbacks", - "default_address_type": "IpamIPAddress", - "default_prefix_length": 32, - "ip_namespace": "ip_namespace", - "resources": [ip_prefix], - }, - ) - - utilizations = ip_pool.get_pool_resources_utilization() - assert len(utilizations) == 1 - assert utilizations[0]["utilization"] == 93.75 diff --git a/python_sdk/tests/unit/sdk/test_object_store.py b/python_sdk/tests/unit/sdk/test_object_store.py deleted file mode 100644 index 8da5d46039..0000000000 --- a/python_sdk/tests/unit/sdk/test_object_store.py +++ /dev/null @@ -1,79 +0,0 @@ -import inspect - -import pytest -from pytest_httpx import HTTPXMock - -from infrahub_sdk.object_store import ObjectStore, ObjectStoreSync - -# pylint: disable=redefined-outer-name,unused-argument - -async_methods = [method for method in dir(ObjectStore) if not method.startswith("_")] -sync_methods = [method for method in dir(ObjectStoreSync) if not method.startswith("_")] - -client_types = ["standard", "sync"] - -FILE_CONTENT_01 = """ - any content - another content - """ - - -@pytest.fixture -async def mock_get_object_store_01(httpx_mock: HTTPXMock) -> HTTPXMock: - httpx_mock.add_response( - method="GET", - text=FILE_CONTENT_01, - match_headers={"X-Infrahub-Tracker": "object-store-get"}, - ) - return httpx_mock - - -@pytest.fixture -async def mock_upload_object_store_01(httpx_mock: HTTPXMock) -> HTTPXMock: - payload = {"identifier": "xxxxxxxxxx", "checksum": "yyyyyyyyyyyyyy"} - httpx_mock.add_response( - method="POST", - json=payload, - match_headers={"X-Infrahub-Tracker": "object-store-upload"}, - ) - return httpx_mock - - -async def test_method_sanity(): - """Validate that there is at least one public method and that both clients look the same.""" - assert async_methods - assert async_methods == sync_methods - - -@pytest.mark.parametrize("method", async_methods) -async def test_validate_method_signature(method): - async_method = getattr(ObjectStore, method) - sync_method = getattr(ObjectStoreSync, method) - async_sig = inspect.signature(async_method) - sync_sig = inspect.signature(sync_method) - assert async_sig.parameters == sync_sig.parameters - assert async_sig.return_annotation == sync_sig.return_annotation - - -@pytest.mark.parametrize("client_type", client_types) -async def test_object_store_get(client_type, clients, mock_get_object_store_01): - client = getattr(clients, client_type) - - if client_type == "standard": - content = await client.object_store.get(identifier="aaaaaaaaa", tracker="object-store-get") - else: - content = client.object_store.get(identifier="aaaaaaaaa", tracker="object-store-get") - - assert content == FILE_CONTENT_01 - - -@pytest.mark.parametrize("client_type", client_types) -async def test_object_store_upload(client_type, clients, mock_upload_object_store_01): - client = getattr(clients, client_type) - - if client_type == "standard": - response = await 
client.object_store.upload(content=FILE_CONTENT_01, tracker="object-store-upload") - else: - response = client.object_store.upload(content=FILE_CONTENT_01, tracker="object-store-upload") - - assert response == {"checksum": "yyyyyyyyyyyyyy", "identifier": "xxxxxxxxxx"} diff --git a/python_sdk/tests/unit/sdk/test_query_analyzer.py b/python_sdk/tests/unit/sdk/test_query_analyzer.py deleted file mode 100644 index 46498793fb..0000000000 --- a/python_sdk/tests/unit/sdk/test_query_analyzer.py +++ /dev/null @@ -1,175 +0,0 @@ -import pytest -from graphql import DocumentNode, OperationType -from graphql.error import GraphQLSyntaxError - -from infrahub_sdk.analyzer import GraphQLOperation, GraphQLQueryAnalyzer - - -async def test_analyzer_init_query_only(query_01, bad_query_01): - gqa = GraphQLQueryAnalyzer(query=query_01) - assert isinstance(gqa.document, DocumentNode) - - with pytest.raises(GraphQLSyntaxError): - gqa = GraphQLQueryAnalyzer(query=bad_query_01) - - -async def test_nbr_queries(query_01: str, query_03: str): - gqa = GraphQLQueryAnalyzer(query=query_01) - assert gqa.nbr_queries == 1 - - gqa = GraphQLQueryAnalyzer(query=query_03) - assert gqa.nbr_queries == 2 - - -async def test_query_types(query_01: str, query_03: str, query_introspection: str): - gqa = GraphQLQueryAnalyzer(query=query_01) - assert gqa.operations == [GraphQLOperation(name="TestPerson", operation_type=OperationType.QUERY)] - - gqa = GraphQLQueryAnalyzer(query=query_03) - assert len(gqa.operations) == 2 - assert GraphQLOperation(name="TestPerson", operation_type=OperationType.QUERY) in gqa.operations - assert GraphQLOperation(name="TestPersonCreate", operation_type=OperationType.MUTATION) in gqa.operations - - gqa = GraphQLQueryAnalyzer(query=query_introspection) - assert gqa.operations == [GraphQLOperation(name="__schema", operation_type=OperationType.QUERY)] - - -async def test_get_fields(query_01: str, query_03: str): - gqa = GraphQLQueryAnalyzer(query=query_01) - assert await gqa.get_fields() == { - "TestPerson": { - "edges": { - "node": { - "cars": {"edges": {"node": {"name": {"value": None}}}}, - "name": {"value": None}, - }, - }, - }, - } - - gqa = GraphQLQueryAnalyzer(query=query_03) - assert await gqa.get_fields() == { - "TestPerson": { - "edges": { - "node": { - "cars": {"edges": {"node": {"name": {"value": None}}}}, - "name": {"value": None}, - }, - }, - }, - "TestPersonCreate": {"object": {"id": None}, "ok": None}, - } - - -async def test_calculate_depth(query_01: str, query_02: str, query_03: str, query_04: str): - gqa = GraphQLQueryAnalyzer(query=query_01) - assert await gqa.calculate_depth() == 9 - - gqa = GraphQLQueryAnalyzer(query=query_02) - assert await gqa.calculate_depth() == 11 - - gqa = GraphQLQueryAnalyzer(query=query_03) - assert await gqa.calculate_depth() == 9 - - gqa = GraphQLQueryAnalyzer(query=query_04) - assert await gqa.calculate_depth() == 6 - - -async def test_calculate_height(query_01: str, query_02: str, query_03: str, query_04: str): - gqa = GraphQLQueryAnalyzer(query=query_01) - assert await gqa.calculate_height() == 10 - - gqa = GraphQLQueryAnalyzer(query=query_02) - assert await gqa.calculate_height() == 19 - - gqa = GraphQLQueryAnalyzer(query=query_03) - assert await gqa.calculate_height() == 14 - - gqa = GraphQLQueryAnalyzer(query=query_04) - assert await gqa.calculate_height() == 5 - - -async def test_get_variables(query_01: str, query_04: str, query_05: str, query_06: str): - gqa = GraphQLQueryAnalyzer(query=query_01) - assert gqa.variables == [] - - gqa = 
GraphQLQueryAnalyzer(query=query_04) - assert [var.model_dump() for var in gqa.variables] == [ - {"default_value": None, "name": "person", "required": True, "type": "String"} - ] - - gqa = GraphQLQueryAnalyzer(query=query_05) - assert [var.model_dump() for var in gqa.variables] == [ - {"default_value": None, "name": "myvar", "required": False, "type": "String"} - ] - - gqa = GraphQLQueryAnalyzer(query=query_06) - assert [var.model_dump() for var in gqa.variables] == [ - { - "default_value": None, - "name": "str1", - "required": False, - "type": "String", - }, - { - "default_value": "default2", - "name": "str2", - "required": False, - "type": "String", - }, - { - "default_value": None, - "name": "str3", - "required": True, - "type": "String", - }, - {"default_value": None, "name": "int1", "required": False, "type": "Int"}, - {"default_value": 12, "name": "int2", "required": False, "type": "Int"}, - {"default_value": None, "name": "int3", "required": True, "type": "Int"}, - { - "default_value": None, - "name": "bool1", - "required": False, - "type": "Boolean", - }, - { - "default_value": True, - "name": "bool2", - "required": False, - "type": "Boolean", - }, - { - "default_value": None, - "name": "bool3", - "required": True, - "type": "Boolean", - }, - ] - - -@pytest.mark.parametrize( - "var_type,var_required", - [("[ID]", False), ("[ID]!", True), ("[ID!]", False), ("[ID!]!", True)], -) -async def test_get_nested_variables(var_type, var_required): - query = ( - """ - query ($ids: %s){ - TestPerson(ids: $ids) { - edges { - node { - name { - value - } - } - } - } - } - """ - % var_type - ) - - gqa = GraphQLQueryAnalyzer(query=query) - assert [var.model_dump() for var in gqa.variables] == [ - {"default_value": None, "name": "ids", "required": var_required, "type": "ID"} - ] diff --git a/python_sdk/tests/unit/sdk/test_schema.py b/python_sdk/tests/unit/sdk/test_schema.py deleted file mode 100644 index 2a39fdb239..0000000000 --- a/python_sdk/tests/unit/sdk/test_schema.py +++ /dev/null @@ -1,314 +0,0 @@ -import inspect -from io import StringIO -from unittest import mock - -import pytest -from rich.console import Console - -from infrahub_sdk import Config, InfrahubClient, InfrahubClientSync, ValidationError -from infrahub_sdk.ctl.schema import display_schema_load_errors -from infrahub_sdk.exceptions import SchemaNotFoundError -from infrahub_sdk.schema import ( - InfrahubCheckDefinitionConfig, - InfrahubJinja2TransformConfig, - InfrahubPythonTransformConfig, - InfrahubRepositoryArtifactDefinitionConfig, - InfrahubRepositoryConfig, - InfrahubSchema, - InfrahubSchemaSync, - NodeSchema, -) - -async_schema_methods = [method for method in dir(InfrahubSchema) if not method.startswith("_")] -sync_schema_methods = [method for method in dir(InfrahubSchemaSync) if not method.startswith("_")] - -client_types = ["standard", "sync"] - - -async def test_method_sanity(): - """Validate that there is at least one public method and that both clients look the same.""" - assert async_schema_methods - assert async_schema_methods == sync_schema_methods - - -@pytest.mark.parametrize("method", async_schema_methods) -async def test_validate_method_signature(method): - async_method = getattr(InfrahubSchema, method) - sync_method = getattr(InfrahubSchemaSync, method) - async_sig = inspect.signature(async_method) - sync_sig = inspect.signature(sync_method) - assert async_sig.parameters == sync_sig.parameters - assert async_sig.return_annotation == sync_sig.return_annotation - - -@pytest.mark.parametrize("client_type", 
client_types) -async def test_fetch_schema(mock_schema_query_01, client_type): # pylint: disable=unused-argument - if client_type == "standard": - client = InfrahubClient(config=Config(address="http://mock", insert_tracker=True)) - nodes = await client.schema.fetch(branch="main") - else: - client = InfrahubClientSync(config=Config(address="http://mock", insert_tracker=True)) - nodes = client.schema.fetch(branch="main") - - assert len(nodes) == 4 - assert sorted(nodes.keys()) == [ - "BuiltinLocation", - "BuiltinTag", - "CoreGraphQLQuery", - "CoreRepository", - ] - assert isinstance(nodes["BuiltinTag"], NodeSchema) - - -@pytest.mark.parametrize("client_type", client_types) -async def test_schema_data_validation(rfile_schema, client_type): - if client_type == "standard": - client = InfrahubClient(config=Config(address="http://mock", insert_tracker=True)) - else: - client = InfrahubClientSync(config=Config(address="http://mock", insert_tracker=True)) - - client.schema.validate_data_against_schema( - schema=rfile_schema, - data={"name": "some-name", "description": "Some description"}, - ) - - with pytest.raises(ValidationError) as excinfo: - client.schema.validate_data_against_schema( - schema=rfile_schema, data={"name": "some-name", "invalid_field": "yes"} - ) - - assert "invalid_field is not a valid value for CoreTransformJinja2" == excinfo.value.message - - -@pytest.mark.parametrize("client_type", client_types) -async def test_add_dropdown_option(clients, client_type, mock_schema_query_01, mock_query_mutation_schema_dropdown_add): - if client_type == "standard": - await clients.standard.schema.add_dropdown_option("BuiltinTag", "status", "something") - else: - clients.sync.schema.add_dropdown_option("BuiltinTag", "status", "something") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_remove_dropdown_option( - clients, client_type, mock_schema_query_01, mock_query_mutation_schema_dropdown_remove -): - if client_type == "standard": - await clients.standard.schema.remove_dropdown_option("BuiltinTag", "status", "active") - else: - clients.sync.schema.remove_dropdown_option("BuiltinTag", "status", "active") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_add_enum_option(clients, client_type, mock_schema_query_01, mock_query_mutation_schema_enum_add): - if client_type == "standard": - await clients.standard.schema.add_enum_option("BuiltinTag", "mode", "hard") - else: - clients.sync.schema.add_enum_option("BuiltinTag", "mode", "hard") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_remove_enum_option(clients, client_type, mock_schema_query_01, mock_query_mutation_schema_enum_remove): - if client_type == "standard": - await clients.standard.schema.remove_enum_option("BuiltinTag", "mode", "easy") - else: - clients.sync.schema.remove_enum_option("BuiltinTag", "mode", "easy") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_add_dropdown_option_raises(clients, client_type, mock_schema_query_01): - if client_type == "standard": - with pytest.raises(SchemaNotFoundError): - await clients.standard.schema.add_dropdown_option("DoesNotExist", "atribute", "option") - with pytest.raises(ValueError): - await clients.standard.schema.add_dropdown_option("BuiltinTag", "attribute", "option") - else: - with pytest.raises(SchemaNotFoundError): - clients.sync.schema.add_dropdown_option("DoesNotExist", "atribute", "option") - with pytest.raises(ValueError): - clients.sync.schema.add_dropdown_option("BuiltinTag", 
"attribute", "option") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_add_enum_option_raises(clients, client_type, mock_schema_query_01): - if client_type == "standard": - with pytest.raises(SchemaNotFoundError): - await clients.standard.schema.add_enum_option("DoesNotExist", "atribute", "option") - with pytest.raises(ValueError): - await clients.standard.schema.add_enum_option("BuiltinTag", "attribute", "option") - else: - with pytest.raises(SchemaNotFoundError): - clients.sync.schema.add_enum_option("DoesNotExist", "atribute", "option") - with pytest.raises(ValueError): - clients.sync.schema.add_enum_option("BuiltinTag", "attribute", "option") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_remove_dropdown_option_raises(clients, client_type, mock_schema_query_01): - if client_type == "standard": - with pytest.raises(SchemaNotFoundError): - await clients.standard.schema.remove_dropdown_option("DoesNotExist", "atribute", "option") - with pytest.raises(ValueError): - await clients.standard.schema.remove_dropdown_option("BuiltinTag", "attribute", "option") - else: - with pytest.raises(SchemaNotFoundError): - clients.sync.schema.remove_dropdown_option("DoesNotExist", "atribute", "option") - with pytest.raises(ValueError): - clients.sync.schema.remove_dropdown_option("BuiltinTag", "attribute", "option") - - -@pytest.mark.parametrize("client_type", client_types) -async def test_remove_enum_option_raises(clients, client_type, mock_schema_query_01): - if client_type == "standard": - with pytest.raises(SchemaNotFoundError): - await clients.standard.schema.remove_enum_option("DoesNotExist", "atribute", "option") - with pytest.raises(ValueError): - await clients.standard.schema.remove_enum_option("BuiltinTag", "attribute", "option") - else: - with pytest.raises(SchemaNotFoundError): - clients.sync.schema.add_enum_option("DoesNotExist", "atribute", "option") - with pytest.raises(ValueError): - clients.sync.schema.add_enum_option("BuiltinTag", "attribute", "option") - - -async def test_infrahub_repository_config_getters(): - repo_config = InfrahubRepositoryConfig( - jinja2_transforms=[ - InfrahubJinja2TransformConfig(name="rfile01", query="query01", template_path="."), - InfrahubJinja2TransformConfig(name="rfile02", query="query01", template_path="."), - ], - artifact_definitions=[ - InfrahubRepositoryArtifactDefinitionConfig( - name="artifact01", - parameters={}, - content_type="JSON", - targets="group1", - transformation="transformation01", - ), - InfrahubRepositoryArtifactDefinitionConfig( - name="artifact02", - parameters={}, - content_type="JSON", - targets="group2", - transformation="transformation01", - ), - ], - check_definitions=[ - InfrahubCheckDefinitionConfig(name="check01", file_path=".", parameters={}, class_name="MyClass"), - InfrahubCheckDefinitionConfig(name="check02", file_path=".", parameters={}, class_name="MyClass"), - ], - python_transforms=[ - InfrahubPythonTransformConfig(name="transform01", file_path=".", class_name="MyClass"), - InfrahubPythonTransformConfig(name="transform02", file_path=".", class_name="MyClass"), - ], - ) - - assert repo_config.has_jinja2_transform(name="rfile01") is True - assert repo_config.has_jinja2_transform(name="rfile99") is False - assert isinstance(repo_config.get_jinja2_transform(name="rfile01"), InfrahubJinja2TransformConfig) - - assert repo_config.has_artifact_definition(name="artifact01") is True - assert repo_config.has_artifact_definition(name="artifact99") is False - assert isinstance( - 
repo_config.get_artifact_definition(name="artifact01"), InfrahubRepositoryArtifactDefinitionConfig - ) - - assert repo_config.has_check_definition(name="check01") is True - assert repo_config.has_check_definition(name="check99") is False - assert isinstance(repo_config.get_check_definition(name="check01"), InfrahubCheckDefinitionConfig) - - assert repo_config.has_python_transform(name="transform01") is True - assert repo_config.has_python_transform(name="transform99") is False - assert isinstance(repo_config.get_python_transform(name="transform01"), InfrahubPythonTransformConfig) - - -async def test_infrahub_repository_config_dups(): - with pytest.raises(ValueError) as exc: - InfrahubRepositoryConfig( - jinja2_transforms=[ - InfrahubJinja2TransformConfig(name="rfile01", query="query01", template_path="."), - InfrahubJinja2TransformConfig(name="rfile02", query="query01", template_path="."), - InfrahubJinja2TransformConfig(name="rfile02", query="query01", template_path="."), - ], - ) - - assert "Found multiples element with the same names: ['rfile02']" in str(exc.value) - - with pytest.raises(ValueError) as exc: - InfrahubRepositoryConfig( - check_definitions=[ - InfrahubCheckDefinitionConfig(name="check01", file_path=".", parameters={}, class_name="MyClass"), - InfrahubCheckDefinitionConfig(name="check01", file_path=".", parameters={}, class_name="MyClass"), - InfrahubCheckDefinitionConfig(name="check02", file_path=".", parameters={}, class_name="MyClass"), - InfrahubCheckDefinitionConfig(name="check02", file_path=".", parameters={}, class_name="MyClass"), - InfrahubCheckDefinitionConfig(name="check02", file_path=".", parameters={}, class_name="MyClass"), - InfrahubCheckDefinitionConfig(name="check03", file_path=".", parameters={}, class_name="MyClass"), - ], - ) - - assert "Found multiples element with the same names: ['check01', 'check02']" in str(exc.value) - - -@mock.patch( - "infrahub_sdk.ctl.schema.get_node", - return_value={ - "name": "Instance", - "namespace": "Cloud", - "attributes": [{"name": "name", "kind": "Text"}, {"name": "status", "kind": "Dropdown"}], - }, -) -async def test_display_schema_load_errors_details_dropdown(mock_get_node): - """Validate error message with details when loading schema.""" - error = { - "detail": [ - { - "type": "value_error", - "loc": ["body", "schemas", 0, "nodes", 0, "attributes", 1], - "msg": "Value error, The property 'choices' is required for kind=Dropdown", - "input": {"name": "status", "kind": "Dropdown"}, - "ctx": {"error": {}}, - "url": "https://errors.pydantic.dev/2.7/v/value_error", - }, - ] - } - - with mock.patch("infrahub_sdk.ctl.schema.console", Console(file=StringIO(), width=1000)) as console: - display_schema_load_errors(response=error, schemas_data=[]) - mock_get_node.assert_called_once() - output = console.file.getvalue() - expected_console = """Unable to load the schema: - Node: CloudInstance | Attribute: status ({'name': 'status', 'kind': 'Dropdown'}) | Value error, The property 'choices' is required for kind=Dropdown (value_error) -""" # noqa: E501 - assert output == expected_console - - -@mock.patch( - "infrahub_sdk.ctl.schema.get_node", - return_value={ - "name": "Instance", - "namespace": "OuT", - "attributes": [{"name": "name", "kind": "Text"}, {"name": "status", "kind": "Dropdown"}], - }, -) -async def test_display_schema_load_errors_details_namespace(mock_get_node): - """Validate error message with details when loading schema.""" - error = { - "detail": [ - { - "type": "string_pattern_mismatch", - "loc": ["body", 
"schemas", 0, "nodes", 0, "namespace"], - "msg": "String should match pattern '^[A-Z][a-z0-9]+$'", - "input": "OuT", - "ctx": {"pattern": "^[A-Z][a-z0-9]+$"}, - "url": "https://errors.pydantic.dev/2.7/v/string_pattern_mismatch", - }, - ] - } - - with mock.patch("infrahub_sdk.ctl.schema.console", Console(file=StringIO(), width=1000)) as console: - display_schema_load_errors(response=error, schemas_data=[]) - mock_get_node.assert_called_once() - output = console.file.getvalue() - expected_console = """Unable to load the schema: - Node: OuTInstance | namespace (OuT) | String should match pattern '^[A-Z]+$' (string_pattern_mismatch) -""" # noqa: E501 - assert output == expected_console diff --git a/python_sdk/tests/unit/sdk/test_schema_sorter.py b/python_sdk/tests/unit/sdk/test_schema_sorter.py deleted file mode 100644 index 136a193d93..0000000000 --- a/python_sdk/tests/unit/sdk/test_schema_sorter.py +++ /dev/null @@ -1,16 +0,0 @@ -from infrahub_sdk import InfrahubClient -from infrahub_sdk.transfer.schema_sorter import InfrahubSchemaTopologicalSorter - - -async def test_schema_sorter(client: InfrahubClient, mock_schema_query_01): - schemas = await client.schema.all() - topological_sorter = InfrahubSchemaTopologicalSorter() - - result = topological_sorter.get_sorted_node_schema(schemas=schemas.values()) - assert result == [{"BuiltinLocation", "BuiltinTag", "CoreGraphQLQuery", "CoreRepository"}] - - result = topological_sorter.get_sorted_node_schema(schemas=schemas.values(), required_relationships_only=False) - assert result == [{"BuiltinTag"}, {"BuiltinLocation", "CoreGraphQLQuery"}, {"CoreRepository"}] - - result = topological_sorter.get_sorted_node_schema(schemas=schemas.values(), include=["BuiltinLocation"]) - assert result == [{"BuiltinLocation"}] diff --git a/python_sdk/tests/unit/sdk/test_store.py b/python_sdk/tests/unit/sdk/test_store.py deleted file mode 100644 index ea298b39ef..0000000000 --- a/python_sdk/tests/unit/sdk/test_store.py +++ /dev/null @@ -1,43 +0,0 @@ -import pytest - -from infrahub_sdk import InfrahubNode, NodeStore - -client_types = ["standard", "sync"] - - -@pytest.mark.parametrize("client_type", client_types) -def test_node_store_set(client_type, clients, location_schema): - client = getattr(clients, client_type) - data = { - "name": {"value": "JFK1"}, - "description": {"value": "JFK Airport"}, - "type": {"value": "SITE"}, - } - node = InfrahubNode(client=client, schema=location_schema, data=data) - - store = NodeStore() - - store.set(key="mykey", node=node) - - assert store._store["BuiltinLocation"]["mykey"] - - -@pytest.mark.parametrize("client_type", client_types) -def test_node_store_get(client_type, clients, location_schema): - client = getattr(clients, client_type) - data = { - "id": "54f3108c-1f21-44c4-93cf-ec5737587b48", - "name": {"value": "JFK1"}, - "description": {"value": "JFK Airport"}, - "type": {"value": "SITE"}, - } - node = InfrahubNode(client=client, schema=location_schema, data=data) - - store = NodeStore() - - store.set(key="mykey", node=node) - assert store.get(kind="BuiltinLocation", key="mykey").id == node.id - assert store.get(key="mykey").id == node.id - - assert store.get(kind="BuiltinLocation", key="anotherkey", raise_when_missing=False) is None - assert store.get(key="anotherkey", raise_when_missing=False) is None diff --git a/python_sdk/tests/unit/sdk/test_timestamp.py b/python_sdk/tests/unit/sdk/test_timestamp.py deleted file mode 100644 index bcdf18a776..0000000000 --- a/python_sdk/tests/unit/sdk/test_timestamp.py +++ /dev/null @@ -1,57 
+0,0 @@ -import pendulum -import pytest - -from infrahub_sdk.timestamp import Timestamp, TimestampFormatError - - -def test_init_empty(): - t1 = Timestamp() - assert isinstance(t1, Timestamp) - assert t1.to_string() == t1.obj.to_iso8601_string() - - t2 = Timestamp(None) - assert isinstance(t2, Timestamp) - assert t2.to_string() == t2.obj.to_iso8601_string() - - -def test_init_timestamp(): - t1 = Timestamp() - t2 = Timestamp(t1) - assert t1.to_string() == t2.to_string() - assert isinstance(t2, Timestamp) - assert t2.to_string() == t2.obj.to_iso8601_string() - - -def test_parse_string(): - REF = "2022-01-01T10:00:00.000000Z" - - assert Timestamp._parse_string(REF) == pendulum.parse(REF) - assert Timestamp._parse_string("5m") - assert Timestamp._parse_string("10min") - assert Timestamp._parse_string("2h") - assert Timestamp._parse_string("10s") - - with pytest.raises(ValueError): - Timestamp._parse_string("notvalid") - - -def test_compare(): - time1 = "2022-01-01T11:00:00.000000Z" - time2 = "2022-02-01T11:00:00.000000Z" - - t11 = Timestamp(time1) - t12 = Timestamp(time1) - - t21 = Timestamp(time2) - - assert t11 < t21 - assert t21 > t12 - assert t11 <= t12 - assert t11 >= t12 - assert t11 == t12 - - -@pytest.mark.parametrize("invalid_str", ["blurple", "1122334455667788", "2023-45-99"]) -def test_invalid_raises_correct_error(invalid_str): - with pytest.raises(TimestampFormatError): - Timestamp(invalid_str) diff --git a/python_sdk/tests/unit/sdk/test_topological_sort.py b/python_sdk/tests/unit/sdk/test_topological_sort.py deleted file mode 100644 index 3e93fa3eaf..0000000000 --- a/python_sdk/tests/unit/sdk/test_topological_sort.py +++ /dev/null @@ -1,95 +0,0 @@ -import pytest - -from infrahub_sdk.topological_sort import DependencyCycleExistsError, topological_sort - - -def test_topological_sort_empty(): - assert topological_sort(dict()) == [] - - -def test_topological_sort_with_cycle_raises_error(): - dependencies = {0: [1, 2], 1: [2], 2: [0]} - - with pytest.raises(DependencyCycleExistsError) as exc: - topological_sort(dependencies) - - assert [0, 1, 2, 0] in exc.value.cycles or [0, 2, 0] in exc.value.cycles - - -def test_topological_sort_with_two_separate_cycles_raises_error(): - dependencies = {0: [1, 2], 1: [2], 2: [0], 4: [5, 6], 5: [1, 6], 6: [4]} - - with pytest.raises(DependencyCycleExistsError) as exc: - topological_sort(dependencies) - - assert [0, 1, 2, 0] in exc.value.cycles or [0, 2, 0] in exc.value.cycles - assert [4, 5, 6, 4] in exc.value.cycles or [4, 6, 4] in exc.value.cycles - - -def test_topological_sort(): - dependencies = {0: [1, 2], 1: [2]} - - ordered = topological_sort(dependencies) - - assert ordered == [{2}, {1}, {0}] - - -def test_topological_sort_2(): - dependencies = { - 0: [1, 2], - 1: [2], - 2: [3], - } - - ordered = topological_sort(dependencies) - - assert ordered == [{3}, {2}, {1}, {0}] - - -def test_topological_sort_disjoint(): - dependencies = { - "a": ["b", "c"], - "b": ["c"], - "c": ["d"], - "e": ["f", "g"], - "f": ["g"], - "g": ["h"], - } - - ordered = topological_sort(dependencies) - - assert ordered == [{"h", "d"}, {"g", "c"}, {"b", "f"}, {"a", "e"}] - - -def test_topological_sort_disjoint_2(): - dependencies = { - "a": ["b"], - "c": ["d"], - "e": ["f"], - } - - ordered = topological_sort(dependencies) - - assert ordered == [{"b", "d", "f"}, {"a", "c", "e"}] - - -def test_topological_sort_binary_tree(): - """ - a - b c - d e f g - hi j k - lm - """ - dependencies = { - "a": ["b", "c"], - "b": ["d", "e"], - "c": ["f", "g"], - "d": ["h", "i"], - "f": ["j", 
"k"], - "j": ["l", "m"], - } - - ordered = topological_sort(dependencies) - - assert ordered == [{"l", "m", "h", "i", "e", "g", "k"}, {"j", "d"}, {"b", "f"}, {"c"}, {"a"}] diff --git a/python_sdk/tests/unit/sdk/test_utils.py b/python_sdk/tests/unit/sdk/test_utils.py deleted file mode 100644 index 33d2414c5b..0000000000 --- a/python_sdk/tests/unit/sdk/test_utils.py +++ /dev/null @@ -1,206 +0,0 @@ -import tempfile -import uuid -from pathlib import Path - -import pytest -from graphql import parse - -from infrahub_sdk.node import InfrahubNode -from infrahub_sdk.utils import ( - base16decode, - base16encode, - base36decode, - base36encode, - compare_lists, - deep_merge_dict, - dict_hash, - duplicates, - extract_fields, - get_flat_value, - is_valid_url, - is_valid_uuid, - str_to_bool, - write_to_file, -) - - -def test_is_valid_uuid(): - assert is_valid_uuid(uuid.uuid4()) is True - assert is_valid_uuid(uuid.UUID("ba0aecd9-546a-4d77-9187-23e17a20633e")) is True - assert is_valid_uuid("ba0aecd9-546a-4d77-9187-23e17a20633e") is True - - assert is_valid_uuid("xxx-546a-4d77-9187-23e17a20633e") is False - assert is_valid_uuid(222) is False - assert is_valid_uuid(False) is False - assert is_valid_uuid("Not a valid UUID") is False - assert is_valid_uuid(uuid.UUID) is False - - -@pytest.mark.parametrize( - "input,result", - [ - (55, False), - ("https://", False), - ("my-server", False), - ("http://my-server", True), - ("http://my-server:8080", True), - ("http://192.168.1.10", True), - ("/test", True), - ("/", True), - ("http:/192.168.1.10", False), - ], -) -def test_is_valid_url(input, result): - assert is_valid_url(input) is result - - -def test_duplicates(): - assert duplicates([2, 4, 6, 8, 4, 6, 12]) == [4, 6] - assert duplicates(["first", "second", "first", "third", "first", "last"]) == ["first"] - assert not duplicates([2, 8, 4, 6, 12]) - assert duplicates([]) == [] - assert duplicates([None, None]) == [] - - -def test_compare_lists(): - list_a = ["black", "blue", "red"] - list_b = ["black", "green"] - list_c = ["purple", "yellow"] - - both, in1, in2 = compare_lists(list_a, list_b) - assert both == ["black"] - assert in1 == ["blue", "red"] - assert in2 == ["green"] - - both, in1, in2 = compare_lists(list_c, list_b) - assert both == [] - assert in1 == list_c - assert in2 == list_b - - both, in1, in2 = compare_lists(list_c, ["yellow"]) - assert both == ["yellow"] - assert in1 == ["purple"] - assert in2 == [] - - -def test_deep_merge_dict(): - a = {"keyA": 1} - b = {"keyB": {"sub1": 10}} - c = {"keyB": {"sub2": 20}} - assert deep_merge_dict(a, b) == {"keyA": 1, "keyB": {"sub1": 10}} - assert deep_merge_dict(c, b) == {"keyB": {"sub1": 10, "sub2": 20}} - - -def test_str_to_bool(): - assert str_to_bool(True) is True - assert str_to_bool(False) is False - - assert str_to_bool(1) is True - assert str_to_bool(0) is False - - assert str_to_bool("True") is True - assert str_to_bool("TRUE") is True - assert str_to_bool("Yes") is True - assert str_to_bool("yes") is True - assert str_to_bool("1") is True - assert str_to_bool("on") is True - assert str_to_bool("y") is True - - assert str_to_bool("No") is False - assert str_to_bool("False") is False - assert str_to_bool("f") is False - - with pytest.raises(ValueError): - str_to_bool("NotABool") - - with pytest.raises(TypeError): - str_to_bool(tuple("a", "b", "c")) - - -def test_base36(): - assert base36encode(1412823931503067241) == "AQF8AA0006EH" - assert base36decode("AQF8AA0006EH") == 1412823931503067241 - assert base36decode(base36encode(-9223372036721928027)) 
== -9223372036721928027 - assert base36decode(base36encode(1412823931503067241)) == 1412823931503067241 - - -def test_base16(): - assert base16encode(1412823931503067241) == "139b5be157694069" - assert base16decode("139b5be157694069") == 1412823931503067241 - assert base16decode(base16encode(-9223372036721928027)) == -9223372036721928027 - assert base16decode(base16encode(1412823931503067241)) == 1412823931503067241 - - -def test_get_flat_value(client, tag_schema, tag_green_data): - tag = InfrahubNode(client=client, schema=tag_schema, data=tag_green_data) - assert get_flat_value(obj=tag, key="name__value") == "green" - assert get_flat_value(obj=tag, key="name__source__display_label") == "CRM" - assert get_flat_value(obj=tag, key="name.source.display_label", separator=".") == "CRM" - - -def test_dict_hash(): - assert dict_hash({"a": 1, "b": 2}) == "608de49a4600dbb5b173492759792e4a" - assert dict_hash({"b": 2, "a": 1}) == "608de49a4600dbb5b173492759792e4a" - assert dict_hash({"b": 2, "a": {"c": 1, "d": 2}}) == "4d8f1a3d03e0b487983383d0ff984d13" - assert dict_hash({"b": 2, "a": {"d": 2, "c": 1}}) == "4d8f1a3d03e0b487983383d0ff984d13" - assert dict_hash({}) == "99914b932bd37a50b983c5e7c90ae93b" - - -async def test_extract_fields(query_01): - document = parse(query_01) - expected_response = { - "TestPerson": { - "edges": { - "node": { - "cars": {"edges": {"node": {"name": {"value": None}}}}, - "name": {"value": None}, - }, - }, - }, - } - assert await extract_fields(document.definitions[0].selection_set) == expected_response - - -async def test_extract_fields_fragment(query_02): - document = parse(query_02) - - expected_response = { - "TestPerson": { - "edges": { - "node": { - "cars": { - "edges": { - "node": { - "member_of_groups": { - "edges": {"node": {"id": None}}, - }, - "mpg": {"is_protected": None, "value": None}, - "name": {"value": None}, - "nbr_engine": {"value": None}, - }, - }, - }, - "name": {"value": None}, - }, - }, - }, - } - - assert await extract_fields(document.definitions[0].selection_set) == expected_response - - -def test_write_to_file(): - tmp_dir = tempfile.TemporaryDirectory() - directory = Path(tmp_dir.name) - - with pytest.raises(FileExistsError): - write_to_file(directory, "placeholder") - - assert write_to_file(directory / "file.txt", "placeholder") is True - assert write_to_file(directory / "file.txt", "placeholder") is True - assert write_to_file(directory / "file.txt", "") is True - assert write_to_file(directory / "file.txt", None) is True - assert write_to_file(directory / "file.txt", 1234) is True - assert write_to_file(directory / "file.txt", {"key": "value"}) is True - - tmp_dir.cleanup() diff --git a/python_sdk/tests/unit/sdk/test_uuidt.py b/python_sdk/tests/unit/sdk/test_uuidt.py deleted file mode 100644 index cca7740903..0000000000 --- a/python_sdk/tests/unit/sdk/test_uuidt.py +++ /dev/null @@ -1,27 +0,0 @@ -from uuid import UUID - -from infrahub_sdk.utils import is_valid_uuid -from infrahub_sdk.uuidt import UUIDT - - -def test_uuidt(): - uuid1 = str(UUIDT()) - uuid2 = str(UUIDT()) - uuid3 = str(UUIDT()) - - assert isinstance(UUIDT.new(), UUID) - - assert is_valid_uuid(uuid1) is True - assert is_valid_uuid(uuid2) is True - assert is_valid_uuid(uuid3) is True - - assert uuid1 != uuid2 - assert sorted([uuid3, uuid2, uuid1]) == [uuid1, uuid2, uuid3] - - -def test_uuidt_short(): - short1 = UUIDT().short() - short2 = UUIDT().short() - assert isinstance(short1, str) - assert len(short1) == 8 - assert short1 != short2 diff --git 
a/python_sdk/tests/unit/test_package.py b/python_sdk/tests/unit/test_package.py deleted file mode 100644 index 635f310d9c..0000000000 --- a/python_sdk/tests/unit/test_package.py +++ /dev/null @@ -1,24 +0,0 @@ -from pathlib import Path - -import pytest -import toml - - -@pytest.fixture -def pyproject_file() -> Path: - return Path("python_sdk/pyproject.toml") - - -def test_pyproject_all_extra_dependencies(pyproject_file): - pyproject = toml.loads(pyproject_file.read_text()) - - try: - extras = pyproject["tool"]["poetry"]["extras"] - except KeyError: - pytest.skip("extras not defined in pyproject.toml") - - all_extras = extras.pop("all", []) - - groups_extras = {dependency for extra in extras.values() for dependency in extra} - - assert set(all_extras) == groups_extras From 8bfe30ab77fbd761ccfeb9f956ddce9f63277bc5 Mon Sep 17 00:00:00 2001 From: Damien Garros Date: Wed, 18 Sep 2024 19:25:32 +0200 Subject: [PATCH 03/10] Add git submodule to opsmill/infrahub-sdk-python --- .gitmodules | 3 +++ python_sdk | 1 + 2 files changed, 4 insertions(+) create mode 100644 .gitmodules create mode 160000 python_sdk diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..7f031f247e --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "python_sdk"] + path = python_sdk + url = git@github.com:opsmill/infrahub-sdk-python.git diff --git a/python_sdk b/python_sdk new file mode 160000 index 0000000000..248fdd9b19 --- /dev/null +++ b/python_sdk @@ -0,0 +1 @@ +Subproject commit 248fdd9b193f457e80d986f99a2a25a5dd61075f From 27c44ea75468f8f928ea4ca14ac7be6c05b673d0 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Thu, 19 Sep 2024 09:32:27 +0100 Subject: [PATCH 04/10] update discord url (#4375) --- README.md | 2 +- docs/docs/faq/faq.mdx | 10 +++++----- frontend/app/src/screens/errors/error-fallback.tsx | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index d2dc840e30..7388295bc6 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,7 @@ If you'd like to learn more about Infrahub, please refer to the following resour ## Support and Community -If you need help, support for the open-source Infrahub project is provided on [![Join our Discord server](https://img.shields.io/badge/Discord-7289DA?logo=discord&logoColor=white)](https://discord.gg/jXMRp9hXSX) or via [filing an issue on GitHub](https://github.com/opsmill/infrahub/issues). +If you need help, support for the open-source Infrahub project is provided on [![Join our Discord server](https://img.shields.io/badge/Discord-7289DA?logo=discord&logoColor=white)](https://discord.gg/opsmill) or via [filing an issue on GitHub](https://github.com/opsmill/infrahub/issues). ## Contributing diff --git a/docs/docs/faq/faq.mdx b/docs/docs/faq/faq.mdx index f6ae81b23b..e022cfe1e3 100644 --- a/docs/docs/faq/faq.mdx +++ b/docs/docs/faq/faq.mdx @@ -88,20 +88,20 @@ Upcoming features and improvements include: ### How much data can Infrahub handle right now? -The current data handling capabilities of Infrahub are still being actively developed and tested. If you have specific requirements or want to assess Infrahub's performance and scalability in your environment, please reach out to the team at contact@opsmill.com or through the [Discord](https://discord.gg/jXMRp9hXSX) server. +The current data handling capabilities of Infrahub are still being actively developed and tested. 
If you have specific requirements or want to assess Infrahub's performance and scalability in your environment, please reach out to the team at contact@opsmill.com or through the [Discord](https://discord.gg/opsmill) server. ### Can I deploy Infrahub in production? Yes, Infrahub can be deployed in production but keep in mind we are still in beta so please ensure to have the right backup and safeguard in place. -If you are planning to deploy Infrahub in a critical environment we recommend reaching out to our customer success team via [Discord](https://discord.gg/jXMRp9hXSX) or contact@opsmill.com +If you are planning to deploy Infrahub in a critical environment we recommend reaching out to our customer success team via [Discord](https://discord.gg/opsmill) or contact@opsmill.com ### How can I get involved? We develop Infrahub for customers and with the community. There are a few different ways to get involved with Infrahub: - As you use Infrahub, please submit bugs and feature requests. -- Reach out to OpsMill on [Discord](https://discord.gg/jXMRp9hXSX) and set up a user feedback session to share your thoughts with us. +- Reach out to OpsMill on [Discord](https://discord.gg/opsmill) and set up a user feedback session to share your thoughts with us. - If you are a developer, we are open to pull requests. Please first discuss your intentions via [GitHub Discussions](https://github.com/opsmill/infrahub/discussions) and send a pull request our way to fix it. Please see our [development docs](https://docs.infrahub.app/development/) for a guide to getting started developing for Infrahub. @@ -112,7 +112,7 @@ We maintain a [list of issues that are appropriate to newcomers](https://github. If you need assistance with Infrahub, you can reach out to the community and the development team through the following channels: -- Discord: Join the Infrahub [Discord](https://discord.gg/jXMRp9hXSX) server to ask questions, get support, and engage with other users. +- Discord: Join the Infrahub [Discord](https://discord.gg/opsmill) server to ask questions, get support, and engage with other users. - GitHub Issues: Submit issues or questions on the Infrahub GitHub repository (https://github.com/opsmill/infrahub/issues). ### Will there be paid support or an Enterprise version of Infrahub? @@ -171,4 +171,4 @@ Almost! You’ll definitely feel like a tech genius managing your infrastructure ### Made it this far? We'd love to hear from you -If you've made it this far, please feel free to reach out on the [Discord](https://discord.gg/jXMRp9hXSX) server to share your thoughts or schedule a customer interview session. The Infrahub team is always eager to receive feedback and engage with the community. +If you've made it this far, please feel free to reach out on the [Discord](https://discord.gg/opsmill) server to share your thoughts or schedule a customer interview session. The Infrahub team is always eager to receive feedback and engage with the community. 
diff --git a/frontend/app/src/screens/errors/error-fallback.tsx b/frontend/app/src/screens/errors/error-fallback.tsx
index a7ae933cfb..ccef134ee2 100644
--- a/frontend/app/src/screens/errors/error-fallback.tsx
+++ b/frontend/app/src/screens/errors/error-fallback.tsx
@@ -86,7 +86,7 @@ function ErrorFallback({ error }: ErrorFallbackProps) {
         If this was unexpected, please reach out to us on{" "}
         Discord

From 2340c323d8b9af83437188c0d3e2b7a039706713 Mon Sep 17 00:00:00 2001
From: Patrick Ogenstad
Date: Thu, 19 Sep 2024 12:52:29 +0200
Subject: [PATCH 05/10] Remove incorrect consumer timeout

Fixes #4308
---
 .../services/adapters/message_bus/rabbitmq.py        |  2 +-
 .../services/adapters/message_bus/test_rabbitmq.py   |  2 +-
 changelog/4308.fixed.md                              | 11 +++++++++++
 docker-compose.yml                                   |  2 +-
 tasks/shared.py                                      |  2 +-
 5 files changed, 15 insertions(+), 4 deletions(-)
 create mode 100644 changelog/4308.fixed.md

diff --git a/backend/infrahub/services/adapters/message_bus/rabbitmq.py b/backend/infrahub/services/adapters/message_bus/rabbitmq.py
index f6398449a0..2ba4421792 100644
--- a/backend/infrahub/services/adapters/message_bus/rabbitmq.py
+++ b/backend/infrahub/services/adapters/message_bus/rabbitmq.py
@@ -142,7 +142,7 @@ async def _initialize_api_server(self) -> None:
         queue = await self.channel.declare_queue(
             f"{self.settings.namespace}.rpcs",
             durable=True,
-            arguments={"x-max-priority": 5, "x-consumer-timeout": self.DELIVER_TIMEOUT * 1000},
+            arguments={"x-max-priority": 5},
         )
 
         self.delayed_exchange = await self.channel.declare_exchange(
diff --git a/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py b/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py
index 3682ef6a9d..099934bd33 100644
--- a/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py
+++ b/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py
@@ -218,7 +218,7 @@ async def test_rabbitmq_initial_setup(rabbitmq_api: RabbitMQManager) -> None:
     assert (
         Queue(
             name="infrahub.rpcs",
-            arguments={"x-max-priority": 5, "x-consumer-timeout": 30000},
+            arguments={"x-max-priority": 5},
             durable=True,
             exclusive=False,
             queue_type="classic",
diff --git a/changelog/4308.fixed.md b/changelog/4308.fixed.md
new file mode 100644
index 0000000000..9d13bee857
--- /dev/null
+++ b/changelog/4308.fixed.md
@@ -0,0 +1,11 @@
+Fixed incorrect consumer timeout for RabbitMQ queue infrahub.rpcs
+
+If you are upgrading from a previous version of Infrahub and using the provided Docker Compose files, you don't have to take any additional action. However, if you are using your own setup for RabbitMQ, you will need to delete the queue manually.
+
+Swap in your own container name and RabbitMQ credentials if they differ in your setup:
+
+```bash
+docker exec -it infrahub-message-queue-1 rabbitmqadmin --username infrahub --password infrahub delete queue name=infrahub.rpcs
+```
+
+After this step, Infrahub and the Git agents need to be restarted; when doing so, the correct queue will be recreated.
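As a follow-up sanity check — a sketch that assumes the same container name and credentials as the changelog command above — you can list the queue's arguments once Infrahub is back up; the recreated `infrahub.rpcs` queue should carry only `x-max-priority`, with no `x-consumer-timeout`:

```bash
# List queues with their arguments; expect infrahub.rpcs to show
# only {"x-max-priority": 5} after Infrahub recreates it.
docker exec -it infrahub-message-queue-1 \
  rabbitmqadmin --username infrahub --password infrahub \
  list queues name arguments
```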
diff --git a/docker-compose.yml b/docker-compose.yml index c978cba8fc..7b0821bc24 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -103,7 +103,7 @@ x-infrahub-config: &infrahub_config services: message-queue: - image: ${MESSAGE_QUEUE_DOCKER_IMAGE:-rabbitmq:3.13.1-management} + image: ${MESSAGE_QUEUE_DOCKER_IMAGE:-rabbitmq:3.13.7-management} restart: unless-stopped environment: RABBITMQ_DEFAULT_USER: *broker_username diff --git a/tasks/shared.py b/tasks/shared.py index 50386581a1..81220d42f8 100644 --- a/tasks/shared.py +++ b/tasks/shared.py @@ -30,7 +30,7 @@ class DatabaseType(str, Enum): ) NEO4J_DOCKER_IMAGE = os.getenv("NEO4J_DOCKER_IMAGE", "neo4j:5.19.0-enterprise") MESSAGE_QUEUE_DOCKER_IMAGE = os.getenv( - "MESSAGE_QUEUE_DOCKER_IMAGE", "rabbitmq:3.13.1-management" if not INFRAHUB_USE_NATS else "nats:2.10.14-alpine" + "MESSAGE_QUEUE_DOCKER_IMAGE", "rabbitmq:3.13.7-management" if not INFRAHUB_USE_NATS else "nats:2.10.14-alpine" ) CACHE_DOCKER_IMAGE = os.getenv("CACHE_DOCKER_IMAGE", "redis:7.2.4" if not INFRAHUB_USE_NATS else "nats:2.10.14-alpine") From 068a08eef72a2a9944d57c7c2ba3973d84ec81a5 Mon Sep 17 00:00:00 2001 From: Damien Garros Date: Thu, 19 Sep 2024 13:52:17 +0200 Subject: [PATCH 06/10] Update doc, update ci --- .github/file-filters.yml | 31 --- .github/labeler.yml | 8 - .github/labels.yml | 4 - .github/python_sdk-release-drafter.yml | 24 -- .github/workflows/ci-docker-image.yml | 1 + .github/workflows/ci.yml | 250 ++++++------------ .github/workflows/labels.yml | 6 +- .github/workflows/poetry-check.yml | 2 + .github/workflows/publish-helm-chart.yml | 3 +- .github/workflows/publish-python-sdk.yml | 54 ---- .../update-compose-file-and-chart.yml | 1 + .github/workflows/version-upgrade.yml | 1 + docs/docs/development/backend.mdx | 5 +- docs/docs/development/frontend/readme.mdx | 3 +- docs/docs/guides/installation.mdx | 2 +- poetry.lock | 2 +- 16 files changed, 102 insertions(+), 295 deletions(-) delete mode 100644 .github/python_sdk-release-drafter.yml delete mode 100644 .github/workflows/publish-python-sdk.yml diff --git a/.github/file-filters.yml b/.github/file-filters.yml index a4372602fe..1a9eacdeee 100644 --- a/.github/file-filters.yml +++ b/.github/file-filters.yml @@ -13,24 +13,10 @@ development_files: &development_files backend_files: &backend_files - "backend/**" -sdk_files: &sdk_files - - "python_sdk/**" - -sync_files: &sync_files - - "sync/**" - infrahub_poetry_files: &infrahub_poetry_files - "pyproject.toml" - "poetry.lock" -python_sdk_poetry_files: &python_sdk_poetry_files - - "python_sdk/pyproject.toml" - - "python_sdk/poetry.lock" - -sync_poetry_files: &sync_poetry_files - - "sync/pyproject.toml" - - "sync/poetry.lock" - frontend_files: &frontend_files - "frontend/app/**" @@ -62,7 +48,6 @@ markdown_all: &markdown_all backend_all: - - *sdk_files - *backend_files - *ci_config - *development_files @@ -76,28 +61,12 @@ documentation_all: helm_all: - *helm_files -sdk_all: - - *sdk_files - - *backend_files - - *ci_config - - *development_files - - *python_sdk_poetry_files - -sync_all: - - *sync_files - - *sdk_files - - *backend_files - - *ci_config - - *development_files - - *sync_poetry_files - frontend_all: - *frontend_files - *ci_config - *development_files e2e_all: - - *sdk_files - *backend_files - *frontend_files - *demo_files diff --git a/.github/labeler.yml b/.github/labeler.yml index 3081eff625..f100708da3 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -11,14 +11,6 @@ - changed-files: - any-glob-to-any-file: ["frontend/**"] 
-"group/python-sdk": - - changed-files: - - any-glob-to-any-file: ["python_sdk/**"] - -"group/sync-engine": - - changed-files: - - any-glob-to-any-file: ["sync/**"] - "type/documentation": - changed-files: - any-glob-to-any-file: ["docs/**"] diff --git a/.github/labels.yml b/.github/labels.yml index 7a130bac3a..9dc5baa1d0 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -20,10 +20,6 @@ description: "Issue related to design or UX" color: "3380ff" -- name: "group/python-sdk" - description: "Issue related to the Python SDK" - color: "56e8e1" - - name: "group/sync-engine" description: "Issue related to the Synchronization engine" color: "05b259" diff --git a/.github/python_sdk-release-drafter.yml b/.github/python_sdk-release-drafter.yml deleted file mode 100644 index abfd6a6c14..0000000000 --- a/.github/python_sdk-release-drafter.yml +++ /dev/null @@ -1,24 +0,0 @@ ---- -categories: - - title: '🚀 Features' - labels: - - 'type/feature' - - title: '🐛 Bug Fixes' - labels: - - 'type/bug' - - title: '🧰 Maintenance' - labels: - - 'type/housekeeping' -change-template: '- $TITLE @$AUTHOR (#$NUMBER)' -exclude-labels: - - 'ci/skip-changelog' -include-labels: - - 'group/python-sdk' -include-pre-releases: true -change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks. -tag-prefix: python-sdk-v -tag-template: python-sdk-v -template: | - ## Changelog - - $CHANGES diff --git a/.github/workflows/ci-docker-image.yml b/.github/workflows/ci-docker-image.yml index 4a505be8a5..39cd234552 100644 --- a/.github/workflows/ci-docker-image.yml +++ b/.github/workflows/ci-docker-image.yml @@ -52,6 +52,7 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ inputs.ref }} + submodules: true - name: Set up QEMU uses: docker/setup-qemu-action@v3 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0de808c002..5db74683b4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -39,20 +39,18 @@ jobs: documentation: ${{ steps.changes.outputs.documentation_all }} frontend: ${{ steps.changes.outputs.frontend_all }} helm: ${{ steps.changes.outputs.helm_all }} - sdk: ${{ steps.changes.outputs.sdk_all }} - sync: ${{ steps.changes.outputs.sync_all }} e2e: ${{ steps.changes.outputs.e2e_all }} python: ${{ steps.changes.outputs.python_all }} javascript: ${{ steps.changes.outputs.javascript_all }} yaml: ${{ steps.changes.outputs.yaml_all }} infrahub_poetry_files: ${{ steps.changes.outputs.infrahub_poetry_files }} - python_sdk_poetry_files: ${{ steps.changes.outputs.python_sdk_poetry_files }} - sync_poetry_files: ${{ steps.changes.outputs.sync_poetry_files }} github_workflows: ${{ steps.changes.outputs.github_workflows }} e2e_tests: ${{ steps.changes.outputs.e2e_test_files }} steps: - name: "Check out repository code" uses: "actions/checkout@v4" + with: + submodules: true - name: Check for file changes uses: dorny/paths-filter@v3 id: changes @@ -69,6 +67,8 @@ jobs: steps: - name: "Check out repository code" uses: "actions/checkout@v4" + with: + submodules: true - name: "Install Helm" uses: azure/setup-helm@v4.2.0 - name: "Linting: helm lint" @@ -82,6 +82,8 @@ jobs: steps: - name: "Check out repository code" uses: "actions/checkout@v4" + with: + submodules: true - name: "Setup environment" run: "pip install yamllint==1.35.1" - name: "Linting: yamllint" @@ -95,6 +97,8 @@ jobs: steps: - name: "Check out repository code" uses: "actions/checkout@v4" + with: + submodules: true - name: Install NodeJS uses: actions/setup-node@v4 with: @@ -116,6 +120,8 @@ jobs: 
     steps:
       - name: "Check out repository code"
         uses: "actions/checkout@v4"
+        with:
+          submodules: true
       - name: "Setup environment"
         run: "pip install ruff==0.5.0"
       - name: "Linting: ruff check"
@@ -131,6 +137,8 @@
     steps:
       - name: "Check out repository code"
         uses: "actions/checkout@v4"
+        with:
+          submodules: true
       - name: "Linting: markdownlint"
         uses: DavidAnson/markdownlint-cli2-action@v16
         with:
@@ -138,7 +146,6 @@
           globs: |
             **/*.{md,mdx}
             !changelog/*.md
-            !python_sdk/changelog/*.md
 
   action-lint:
     if: needs.files-changed.outputs.github_workflows == 'true'
@@ -148,6 +155,8 @@
     steps:
       - name: "Check out repository code"
         uses: "actions/checkout@v4"
+        with:
+          submodules: true
       - name: Check workflow files
         run: |
           bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
@@ -167,160 +176,59 @@
     with:
       directory: "./"
 
-  python-sdk-poetry-check:
-    if: |
-      needs.files-changed.outputs.python_sdk_poetry_files == 'true' ||
-      github.ref_name == 'stable' ||
-      github.ref_name == 'develop'
-    needs:
-      - "files-changed"
-    uses: "./.github/workflows/poetry-check.yml"
-    with:
-      directory: "./python-sdk/"
-
-  sync-poetry-check:
-    if: |
-      needs.files-changed.outputs.sync_poetry_files == 'true' ||
-      github.ref_name == 'stable' ||
-      github.ref_name == 'develop'
-    needs:
-      - "files-changed"
-    uses: "./.github/workflows/poetry-check.yml"
-    with:
-      directory: "./sync/"
-
-  python-sdk-unit-tests:
-    strategy:
-      matrix:
-        python-version:
-          - "3.9"
-          - "3.10"
-          - "3.11"
-          - "3.12"
-    if: |
-      always() && !cancelled() &&
-      !contains(needs.*.result, 'failure') &&
-      !contains(needs.*.result, 'cancelled') &&
-      needs.files-changed.outputs.sdk == 'true'
-    needs: ["files-changed", "yaml-lint", "python-lint"]
-    runs-on: ubuntu-latest
-    timeout-minutes: 30
-    env:
-      INFRAHUB_DB_TYPE: memgraph
-    defaults:
-      run:
-        working-directory: python_sdk/
-    steps:
-      - name: "Check out repository code"
-        uses: "actions/checkout@v4"
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.python-version }}
-      - name: "Setup environment"
-        run: |
-          pipx install poetry
-          poetry config virtualenvs.prefer-active-python true
-          pip install invoke toml
-      - name: "Install Package"
-        run: "poetry install --all-extras"
-      - name: "Mypy Tests"
-        run: "poetry run mypy --show-error-codes infrahub_sdk/"
-      - name: "Pylint Tests"
-        run: "poetry run pylint infrahub_sdk/"
-      - name: "Unit Tests"
-        run: "poetry --directory python_sdk run coverage run --source=infrahub_sdk -m pytest python_sdk/tests/unit/"
-        working-directory: ./
-      - name: "Create coverage file"
-        run: "poetry --directory python_sdk run coverage xml"
-        working-directory: ./
-      - name: "Coveralls : Unit Tests"
-        uses: coverallsapp/github-action@v2
-        continue-on-error: true
-        env:
-          COVERALLS_SERVICE_NUMBER: ${{ github.sha }}
-        with:
-          flag-name: python-sdk-unit
-          parallel: true
-
-
-  python-sdk-integration-tests:
-    if: |
-      always() && !cancelled() &&
-      !contains(needs.*.result, 'failure') &&
-      !contains(needs.*.result, 'cancelled')
-    needs: ["python-sdk-unit-tests"]
-    runs-on:
-      group: huge-runners
-    timeout-minutes: 30
-    env:
-      INFRAHUB_DB_TYPE: memgraph
-    steps:
-      - name: "Check out repository code"
-        uses: "actions/checkout@v4"
-      - name: "Install Invoke"
-        run: "pip install toml invoke"
-
-      - name: "Set environment variables"
-        run: echo INFRAHUB_BUILD_NAME=infrahub-${{ runner.name }} >> $GITHUB_ENV
-      - name: "Set environment variables"
-        run: echo INFRAHUB_IMAGE_VER=local-${{ runner.name }}-${{ github.sha }} >> $GITHUB_ENV
-      - name: "Clear docker environment"
-        run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local
-
-      - name: "Build Test Image"
-        run: "invoke dev.build"
-      - name: "Pull External Docker Images"
-        run: "invoke dev.pull"
-      - name: "Integration Tests"
-        run: "invoke sdk.test-integration"
-      - name: "Coveralls : Integration Tests"
-        uses: coverallsapp/github-action@v2
-        continue-on-error: true
-        env:
-          COVERALLS_SERVICE_NUMBER: ${{ github.sha }}
-        with:
-          flag-name: python-sdk-integration
-          parallel: true
-
-  infrahub-sync-unit-tests:
-    strategy:
-      matrix:
-        python-version:
-          - "3.9"
-          - "3.10"
-          - "3.11"
-          - "3.12"
-    if: |
-      always() && !cancelled() &&
-      !contains(needs.*.result, 'failure') &&
-      !contains(needs.*.result, 'cancelled') &&
-      needs.files-changed.outputs.sync == 'true'
-    needs: ["files-changed", "yaml-lint", "python-lint"]
-    runs-on: ubuntu-latest
-    timeout-minutes: 30
-    defaults:
-      run:
-        working-directory: sync/
-    steps:
-      - name: "Check out repository code"
-        uses: "actions/checkout@v4"
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.python-version }}
-      - name: "Setup environment"
-        run: |
-          pipx install poetry
-          poetry config virtualenvs.prefer-active-python true
-          pip install invoke toml
-      - name: "Install Package"
-        run: "poetry install"
-      - name: "Pylint Tests"
-        run: "poetry run pylint infrahub_sync/"
-      # - name: "Mypy Tests"
-      #   run: "poetry run mypy --show-error-codes infrahub_sync/"
-
+  # TODO NEED TO REVISIT THIS ONE
+  # python-sdk-poetry-check:
+  #   if: |
+  #     needs.files-changed.outputs.python_sdk_poetry_files == 'true' ||
+  #     github.ref_name == 'stable' ||
+  #     github.ref_name == 'develop'
+  #   needs:
+  #     - "files-changed"
+  #   uses: "./.github/workflows/poetry-check.yml"
+  #   with:
+  #     directory: "./python_sdk/"
+
+  # TODO NEED TO REVISIT THIS ONE
+  # python-sdk-integration-tests:
+  #   if: |
+  #     always() && !cancelled() &&
+  #     !contains(needs.*.result, 'failure') &&
+  #     !contains(needs.*.result, 'cancelled')
+  #   needs: ["python-sdk-unit-tests"]
+  #   runs-on:
+  #     group: huge-runners
+  #   timeout-minutes: 30
+  #   env:
+  #     INFRAHUB_DB_TYPE: memgraph
+  #   steps:
+  #     - name: "Check out repository code"
+  #       uses: "actions/checkout@v4"
+  #       with:
+  #         submodules: true
+  #     - name: "Install Invoke"
+  #       run: "pip install toml invoke"
+
+  #     - name: "Set environment variables"
+  #       run: echo INFRAHUB_BUILD_NAME=infrahub-${{ runner.name }} >> $GITHUB_ENV
+  #     - name: "Set environment variables"
+  #       run: echo INFRAHUB_IMAGE_VER=local-${{ runner.name }}-${{ github.sha }} >> $GITHUB_ENV
+  #     - name: "Clear docker environment"
+  #       run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local
+
+  #     - name: "Build Test Image"
+  #       run: "invoke dev.build"
+  #     - name: "Pull External Docker Images"
+  #       run: "invoke dev.pull"
+  #     - name: "Integration Tests"
+  #       run: "invoke sdk.test-integration"
+  #     - name: "Coveralls : Integration Tests"
+  #       uses: coverallsapp/github-action@v2
+  #       continue-on-error: true
+  #       env:
+  #         COVERALLS_SERVICE_NUMBER: ${{ github.sha }}
+  #       with:
+  #         flag-name: python-sdk-integration
+  #         parallel: true
 
   backend-tests-unit:
     if: |
@@ -337,6 +245,8 @@
     steps:
       - name: "Check out repository code"
         uses: "actions/checkout@v4"
+        with:
+          submodules: true
       - name: "Setup Python environment"
         run: "pip install toml invoke"
       - name: "Set environment variables"
@@ -384,6 +294,8 @@
     steps:
       - name: "Check out repository code"
uses: "actions/checkout@v4" + with: + submodules: true - name: "Setup Python environment" run: "pip install toml invoke" - name: "Set environment variables" @@ -442,6 +354,8 @@ jobs: steps: - name: "Check out repository code" uses: "actions/checkout@v4" + with: + submodules: true - name: "Setup Python environment" run: "pip install toml invoke" - name: "Set environment variables" @@ -469,6 +383,8 @@ jobs: steps: - name: Check out repository code uses: actions/checkout@v4 + with: + submodules: true - name: Set up Python uses: actions/setup-python@v5 with: @@ -495,6 +411,8 @@ jobs: steps: - name: "Check out repository code" uses: "actions/checkout@v4" + with: + submodules: true - name: Install NodeJS uses: actions/setup-node@v4 with: @@ -547,6 +465,8 @@ jobs: steps: - name: "Check out repository code" uses: "actions/checkout@v4" + with: + submodules: true - name: Install NodeJS uses: actions/setup-node@v4 with: @@ -572,6 +492,8 @@ jobs: steps: - name: "Check out repository code" uses: "actions/checkout@v4" + with: + submodules: true - name: Set up Python uses: actions/setup-python@v5 with: @@ -595,6 +517,8 @@ jobs: steps: - name: "Check out repository code" uses: "actions/checkout@v4" + with: + submodules: true # The official GitHub Action for Vale doesn't work, installing manually instead: # https://github.com/errata-ai/vale-action/issues/103 - name: Download Vale @@ -644,7 +568,8 @@ jobs: steps: - name: Check out repository code uses: actions/checkout@v4 - + with: + submodules: true - name: Install NodeJS uses: actions/setup-node@v4 with: @@ -810,6 +735,8 @@ jobs: steps: - name: Check out repository code uses: actions/checkout@v4 + with: + submodules: true - name: Set up Python uses: actions/setup-python@v5 with: @@ -855,7 +782,6 @@ jobs: - backend-tests-integration - backend-tests-unit - frontend-tests - - python-sdk-integration-tests if: | always() && !cancelled() runs-on: ubuntu-latest @@ -888,5 +814,5 @@ jobs: env: COVERALLS_SERVICE_NUMBER: ${{ github.sha }} with: - carryforward: "backend-unit,backend-integration,frontend-unit,python-sdk-unit,python-sdk-integration" + carryforward: "backend-unit,backend-integration,frontend-unit" parallel-finished: true diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 8e8d76fb01..79be39703a 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -15,11 +15,9 @@ jobs: labeler: runs-on: ubuntu-latest steps: - - - name: Checkout + - name: Checkout uses: actions/checkout@v4 - - - name: Run Labeler + - name: Run Labeler uses: crazy-max/ghaction-github-labeler@v5 with: github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/poetry-check.yml b/.github/workflows/poetry-check.yml index 1d6d25eb35..e2ecdcaa28 100644 --- a/.github/workflows/poetry-check.yml +++ b/.github/workflows/poetry-check.yml @@ -14,6 +14,8 @@ jobs: steps: - name: "Check out repository code" uses: "actions/checkout@v4" + with: + submodules: true - name: "Set up Python" uses: "actions/setup-python@v5" with: diff --git a/.github/workflows/publish-helm-chart.yml b/.github/workflows/publish-helm-chart.yml index e9b21f49b7..eef1220871 100644 --- a/.github/workflows/publish-helm-chart.yml +++ b/.github/workflows/publish-helm-chart.yml @@ -35,7 +35,8 @@ jobs: steps: - name: Check out the repo uses: actions/checkout@v4 - + with: + submodules: true - name: Install Helm uses: azure/setup-helm@v4.2.0 diff --git a/.github/workflows/publish-python-sdk.yml b/.github/workflows/publish-python-sdk.yml deleted file mode 100644 index 
index 0cef5537fe..0000000000
--- a/.github/workflows/publish-python-sdk.yml
+++ /dev/null
@@ -1,54 +0,0 @@
----
-name: Publish Infrahub Python SDK
-
-on:  # yamllint disable rule:truthy
-  push:
-    tags:
-      - "python-sdk-v*"
-
-jobs:
-  publish_to_pypi:
-    name: "Publish Infrahub SDK to PyPI"
-    runs-on: "ubuntu-22.04"
-    steps:
-      - name: "Set up Python"
-        uses: "actions/setup-python@v5"
-        with:
-          python-version: "3.11"
-
-      - name: "Install Poetry"
-        uses: "snok/install-poetry@v1"
-        with:
-          virtualenvs-create: true
-          virtualenvs-in-project: true
-          installer-parallel: true
-
-      - name: "Check out repository code"
-        uses: "actions/checkout@v4"
-
-      - name: "Cache poetry venv"
-        uses: "actions/cache@v4"
-        id: "cached-poetry-dependencies"
-        with:
-          path: "./python_sdk/.venv"
-          key: "venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}"
-
-      - name: "Install Dependencies"
-        run: "poetry install"
-        working-directory: "./python_sdk"
-        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
-
-      - name: "Add PyPI secret"
-        run: "poetry config pypi-token.pypi ${{ secrets.PYPI_TOKEN }}"
-
-      - name: "Poetry build"
-        run: "poetry build"
-        working-directory: "./python_sdk"
-
-      - name: "show output"
-        run: "ls -la dist/"
-        working-directory: "./python_sdk"
-
-      - name: "Poetry push PyPI"
-        run: "poetry publish"
-        working-directory: "./python_sdk"
diff --git a/.github/workflows/update-compose-file-and-chart.yml b/.github/workflows/update-compose-file-and-chart.yml
index 6f2bebab78..6ed2df29b8 100644
--- a/.github/workflows/update-compose-file-and-chart.yml
+++ b/.github/workflows/update-compose-file-and-chart.yml
@@ -31,6 +31,7 @@ jobs:
         uses: "actions/checkout@v4"
         with:
           token: ${{ secrets.GH_INFRAHUB_BOT_TOKEN }}
+          submodules: true
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
diff --git a/.github/workflows/version-upgrade.yml b/.github/workflows/version-upgrade.yml
index ef63a7f8a8..8bbb19b194 100644
--- a/.github/workflows/version-upgrade.yml
+++ b/.github/workflows/version-upgrade.yml
@@ -47,6 +47,7 @@ jobs:
         uses: "actions/checkout@v4"
         with:
           ref: ${{ inputs.commit == '' && 'develop' || inputs.commit }}
+          submodules: true
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
diff --git a/docs/docs/development/backend.mdx b/docs/docs/development/backend.mdx
index 95cb122079..26cb5c1f20 100644
--- a/docs/docs/development/backend.mdx
+++ b/docs/docs/development/backend.mdx
@@ -11,12 +11,11 @@ In order start developing on Infrahub backend, it is recommended to have a decen
 * [Poetry](https://python-poetry.org/) to manage our Python virtual environment
 * [Docker](https://www.docker.com/) and its Compose extension to run dependencies such as the database, cache and queueing system
 
-To fetch Infrahub's code, we will use Git and we will use the `develop` branch.
+To fetch Infrahub's code, we will use Git to clone the `develop` branch, which is the default.
 
 ```bash
-git clone git@github.com:opsmill/infrahub.git
+git clone --recursive git@github.com:opsmill/infrahub.git
 cd infrahub
-git switch develop
 ```
 
 ## Basic settings
diff --git a/docs/docs/development/frontend/readme.mdx b/docs/docs/development/frontend/readme.mdx
index 2a1c46b860..013735d345 100644
--- a/docs/docs/development/frontend/readme.mdx
+++ b/docs/docs/development/frontend/readme.mdx
@@ -28,9 +28,8 @@ For testing, we rely on:
 To access Infrahub's codebase, use Git and switch to the `develop` branch to access the latest changes. All frontend code resides in `/frontend`.
 
 ```bash
-git clone git@github.com:opsmill/infrahub.git
+git clone --recursive git@github.com:opsmill/infrahub.git
 cd infrahub/frontend
-git switch develop
 ```
 
 Before contributing, we recommended starting with [Getting set up with frontend](getting-set-up).
diff --git a/docs/docs/guides/installation.mdx b/docs/docs/guides/installation.mdx
index 3de6d21f88..0a62eaaf0a 100644
--- a/docs/docs/guides/installation.mdx
+++ b/docs/docs/guides/installation.mdx
@@ -63,7 +63,7 @@ cd ~/source/infrahub/
 Next, clone the `stable` branch of the Infrahub GitHub repository into the current directory. (This branch always holds the current stable release)
 
 ```bash
-git clone -b stable --depth 1 https://github.com/opsmill/infrahub.git
+git clone --recursive -b stable --depth 1 https://github.com/opsmill/infrahub.git
 ```
 
 :::note
diff --git a/poetry.lock b/poetry.lock
index 8edd5f1a53..6745ea2459 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1762,7 +1762,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs
 
 [[package]]
 name = "infrahub-sdk"
-version = "0.13.0"
+version = "0.13.1-dev0"
 description = "Python Client to interact with Infrahub"
 optional = false
 python-versions = "^3.9"

From 5d9ed727b44660f573b47009e7c4a7bedbfe49a6 Mon Sep 17 00:00:00 2001
From: Damien Garros
Date: Thu, 19 Sep 2024 18:43:04 +0200
Subject: [PATCH 07/10] switch python sdk to develop branch

---
 python_sdk | 1 +
 1 file changed, 1 insertion(+)
 create mode 160000 python_sdk

diff --git a/python_sdk b/python_sdk
new file mode 160000
index 0000000000..653218adc7
--- /dev/null
+++ b/python_sdk
@@ -0,0 +1 @@
+Subproject commit 653218adc725bec1505b6939d34acb95ac879e37

From 2aa2f1a349ed20afd7f388b8c43958c90bf91b80 Mon Sep 17 00:00:00 2001
From: Damien Garros
Date: Thu, 19 Sep 2024 19:25:42 +0200
Subject: [PATCH 08/10] Remove sdk from python-lint

---
 .github/workflows/ci.yml | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index fc13fc5c52..501398ad85 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -128,10 +128,6 @@ jobs:
         run: "ruff check . --exclude python_sdk"
       - name: "Linting: ruff format"
         run: "ruff format --check --diff --exclude python_sdk ."
-      - name: "Linting: ruff check [SDK]"
-        run: "ruff check python_sdk --config python_sdk/pyproject.toml"
-      - name: "Linting: ruff format [SDK]"
-        run: "ruff format --check --diff --config python_sdk/pyproject.toml python_sdk"
 
   markdown-lint:
     if: needs.files-changed.outputs.documentation == 'true'

From fa0845a501bd197c50259f62d3b188210bc913a0 Mon Sep 17 00:00:00 2001
From: Damien Garros
Date: Thu, 19 Sep 2024 20:05:07 +0200
Subject: [PATCH 09/10] update python_sdk commit

---
 python_sdk | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python_sdk b/python_sdk
index 653218adc7..f5408000f7 160000
--- a/python_sdk
+++ b/python_sdk
@@ -1 +1 @@
-Subproject commit 653218adc725bec1505b6939d34acb95ac879e37
+Subproject commit f5408000f7344afa02b532ab5eba13084708a854

From 0f11add1245905d2d618d8e6682dcf2ee1c1b2f4 Mon Sep 17 00:00:00 2001
From: Damien Garros
Date: Thu, 19 Sep 2024 21:02:34 +0200
Subject: [PATCH 10/10] Update commit to latest develop

---
 python_sdk | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python_sdk b/python_sdk
index f5408000f7..7eb07fbc14 160000
--- a/python_sdk
+++ b/python_sdk
@@ -1 +1 @@
-Subproject commit f5408000f7344afa02b532ab5eba13084708a854
+Subproject commit 7eb07fbc14701140a645769308faee2b906bcf25
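A practical note for anyone picking up this series: `python_sdk` is now a git submodule pinned to an exact commit, which is why every `actions/checkout` step above gains `submodules: true` and the docs switch to `git clone --recursive`. For an existing working copy, a minimal sketch using standard git commands should bring the submodule in line (the submodule URL comes from `.gitmodules`, not from anything shown here):

```bash
# After pulling this series, initialize the new python_sdk submodule in place
git submodule update --init python_sdk

# On later updates: pull the superproject, then move the submodule to its pinned commit
git pull
git submodule update

# Confirm the checkout matches the gitlink recorded by the last patch (7eb07fbc14...)
git submodule status python_sdk
```

Pinning via a gitlink means the superproject always builds against a known SDK commit; bumping it is an explicit one-line change, as PATCH 09/10 and PATCH 10/10 illustrate.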