diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 30d41b7..e296b4c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -112,7 +112,7 @@ jobs:
           ./actionlint -color
         shell: bash
         env:
-          SHELLCHECK_OPTS: --exclude=SC2086 --exclude=SC2046 --exclude=SC2004
+          SHELLCHECK_OPTS: --exclude=SC2086 --exclude=SC2046 --exclude=SC2004 --exclude=SC2129

   unit-tests:
@@ -123,6 +123,7 @@ jobs:
           - "3.10"
           - "3.11"
           - "3.12"
+          - "3.13"
     if: |
       always() && !cancelled() &&
       !contains(needs.*.result, 'failure') &&
diff --git a/.github/workflows/publish-python-sdk.yml b/.github/workflows/publish-pypi.yml
similarity index 59%
rename from .github/workflows/publish-python-sdk.yml
rename to .github/workflows/publish-pypi.yml
index aa561c6..190b229 100644
--- a/.github/workflows/publish-python-sdk.yml
+++ b/.github/workflows/publish-pypi.yml
@@ -1,10 +1,32 @@
 ---
-name: Publish Infrahub Python SDK
+# yamllint disable rule:truthy
+name: Publish Infrahub SDK Package

-on:  # yamllint disable rule:truthy
-  push:
-    tags:
-      - "v*"
+on:
+  workflow_dispatch:
+    inputs:
+      runs-on:
+        description: "The OS to run the job on"
+        required: false
+        default: "ubuntu-22.04"
+        type: string
+      publish:
+        type: boolean
+        description: Whether to publish the package to PyPI
+        required: false
+        default: false
+  workflow_call:
+    inputs:
+      runs-on:
+        description: "The OS to run the job on"
+        required: false
+        default: "ubuntu-22.04"
+        type: string
+      publish:
+        type: boolean
+        description: Whether to publish the package to PyPI
+        required: false
+        default: false

 jobs:
   publish_to_pypi:
@@ -25,6 +47,8 @@ jobs:
       - name: "Check out repository code"
         uses: "actions/checkout@v4"
+        with:
+          submodules: true

       - name: "Cache poetry venv"
         uses: "actions/cache@v4"

@@ -47,4 +71,5 @@ jobs:
         run: "ls -la dist/"

       - name: "Poetry push PyPI"
+        if: ${{ inputs.publish }}
         run: "poetry publish"
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..8bab1a5
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,88 @@
+---
+# yamllint disable rule:truthy rule:line-length
+name: New Release
+
+on:
+  release:
+    types:
+      - published
+
+jobs:
+  check_release:
+    runs-on: ubuntu-22.04
+    outputs:
+      is_prerelease: ${{ steps.release.outputs.is_prerelease }}
+      is_devrelease: ${{ steps.release.outputs.is_devrelease }}
+      version: ${{ steps.release.outputs.version }}
+      major_minor_version: ${{ steps.release.outputs.major_minor_version }}
+      latest_tag: ${{ steps.release.outputs.latest_tag }}
+    steps:
+      - name: "Check out repository code"
+        uses: "actions/checkout@v4"
+        with:
+          submodules: true
+
+      - name: "Set up Python"
+        uses: "actions/setup-python@v5"
+        with:
+          python-version: "3.12"
+
+      - name: "Install Poetry"
+        uses: "snok/install-poetry@v1"
+        with:
+          virtualenvs-create: true
+          virtualenvs-in-project: true
+          installer-parallel: true
+
+      - name: "Setup Python environment"
+        run: |
+          poetry config virtualenvs.create true --local
+          poetry env use 3.12
+      - name: "Install dependencies"
+        run: "poetry install --no-interaction --no-ansi"
+
+      - name: "Check prerelease type"
+        id: release
+        run: |
+          echo is_prerelease=$(poetry run python -c "from packaging.version import Version; print(int(Version('$(poetry version -s)').is_prerelease))") >> "$GITHUB_OUTPUT"
+          echo is_devrelease=$(poetry run python -c "from packaging.version import Version; print(int(Version('$(poetry version -s)').is_devrelease))") >> "$GITHUB_OUTPUT"
+          echo "version=$(poetry version -s)" >> "$GITHUB_OUTPUT"
+          echo major_minor_version=$(poetry run python -c "from packaging.version import Version; print(f\"{Version('$(poetry version -s)').major}.{Version('$(poetry version -s)').minor}\")") >> "$GITHUB_OUTPUT"
+          echo latest_tag=$(curl -L \
+            -H "Accept: application/vnd.github+json" \
+            -H "Authorization: Bearer ${{ github.token }}" \
+            -H "X-GitHub-Api-Version: 2022-11-28" \
+            https://api.github.com/repos/${{ github.repository }}/releases/latest \
+            | jq -r '.tag_name') >> "$GITHUB_OUTPUT"
+
+      - name: Check tag version
+        if: github.event.release.tag_name != format('infrahub-v{0}', steps.release.outputs.version)
+        run: |
+          echo "Tag version does not match python project version"
+          exit 1
+
+      - name: Check prerelease and project version
+        if: github.event.release.prerelease == true && steps.release.outputs.is_prerelease == 0 && steps.release.outputs.is_devrelease == 0
+        run: |
+          echo "Cannot pre-release a non pre-release or non dev-release version (${{ steps.release.outputs.version }})"
+          exit 1
+
+      - name: Check release and project version
+        if: github.event.release.prerelease == false && (steps.release.outputs.is_prerelease == 1 || steps.release.outputs.is_devrelease == 1)
+        run: |
+          echo "Cannot release a pre-release or dev-release version (${{ steps.release.outputs.version }})"
+          exit 1
+
+  publish-pypi:
+    needs: check_release
+    uses: ./.github/workflows/publish-pypi.yml
+    secrets: inherit
+    with:
+      publish: true
+
+  update-submodule:
+    needs: check_release
+    uses: ./.github/workflows/update-submodule.yml
+    secrets: inherit
+    with:
+      version: ${{ github.ref_name }}
diff --git a/.github/workflows/update-submodule.yml b/.github/workflows/update-submodule.yml
new file mode 100644
index 0000000..c6971de
--- /dev/null
+++ b/.github/workflows/update-submodule.yml
@@ -0,0 +1,45 @@
+---
+# yamllint disable rule:truthy
+name: Trigger Submodule update
+
+on:
+  workflow_dispatch:
+    inputs:
+      runs-on:
+        description: "The OS to run the job on"
+        required: false
+        default: "ubuntu-22.04"
+        type: string
+      version:
+        type: string
+        required: false
+        description: The string to extract semver from.
+        default: ''
+  workflow_call:
+    inputs:
+      runs-on:
+        description: "The OS to run the job on"
+        required: false
+        default: "ubuntu-22.04"
+        type: string
+      version:
+        type: string
+        required: false
+        description: The string to extract semver from.
+        default: ''
+
+jobs:
+  trigger-submodule:
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Trigger submodule update
+        run: |
+          echo "${{ inputs.version }}"
+          curl -X POST \
+            -H "Authorization: token ${{ secrets.GH_UPDATE_PACKAGE_OTTO }}" \
+            -H "Accept: application/vnd.github.v3+json" \
+            https://api.github.com/repos/opsmill/infrahub/dispatches \
+            -d "{\"event_type\":\"trigger-submodule-update\", \"client_payload\": {\"version\": \"${{ inputs.version }}\"}}"
diff --git a/.yamllint.yml b/.yamllint.yml
index ab83454..aab018d 100644
--- a/.yamllint.yml
+++ b/.yamllint.yml
@@ -14,6 +14,6 @@ rules:
     # See https://github.com/prettier/prettier/pull/10926 or https://github.com/redhat-developer/vscode-yaml/issues/433
     min-spaces-from-content: 1
   line-length:
-    max: 120
+    max: 140
     allow-non-breakable-words: true
    allow-non-breakable-inline-mappings: false
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c08569b..c2f8fe0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,18 +11,23 @@ This project uses [*towncrier*](https://towncrier.readthedocs.io/) and the chang


-## [1.1.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.10.0) - 2024-11-28
+## [1.2.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.2.0) - 2024-12-19

 ### Added

-- Added InfrahubClient.schema.wait_until_converged() which allowes you to wait until the schema has converged across all Infrahub workers before proceeding with an operation. The InfrahubClient.schema.load() method has also been updated with a new parameter "wait_until_converged".
+- Added a batch feature, using threading, to the sync client ([#168](https://github.com/opsmill/infrahub-sdk-python/issues/168))
+- Added InfrahubClient.schema.in_sync method to indicate whether a specific branch is in sync across all worker types
+- Added Python 3.13 to the list of supported versions

 ### Fixed

-- CTL: `schema load` return a proper error message when authentication is missing or when the user doesn't have the permission to update the schema. ([#127](https://github.com/opsmill/infrahub-sdk-python/issues/127))
-- CTL: List available transforms and generators if no name is provided ([#140](https://github.com/opsmill/infrahub-sdk-python/issues/140))
+- Fixed an issue with `infrahubctl menu load` that would fail while loading the menu

-## [1.1.0rc0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.1.0rc0) - 2024-11-26
+## [1.1.0](https://github.com/opsmill/infrahub-sdk-python/tree/v1.1.0) - 2024-11-28
+
+### Added
+
+- Added InfrahubClient.schema.wait_until_converged() which allows you to wait until the schema has converged across all Infrahub workers before proceeding with an operation. The InfrahubClient.schema.load() method has also been updated with a new parameter "wait_until_converged".

 ### Fixed
diff --git a/infrahub_sdk/batch.py b/infrahub_sdk/batch.py
index fa21958..43304a1 100644
--- a/infrahub_sdk/batch.py
+++ b/infrahub_sdk/batch.py
@@ -1,9 +1,10 @@
 import asyncio
 from collections.abc import AsyncGenerator, Awaitable
+from concurrent.futures import ThreadPoolExecutor
 from dataclasses import dataclass
-from typing import Any, Callable, Optional
+from typing import Any, Callable, Generator, Optional

-from .node import InfrahubNode
+from .node import InfrahubNode, InfrahubNodeSync


 @dataclass
@@ -14,13 +15,32 @@ class BatchTask:
     node: Optional[Any] = None


+@dataclass
+class BatchTaskSync:
+    task: Callable[..., Any]
+    args: tuple[Any, ...]
+    kwargs: dict[str, Any]
+    node: Optional[InfrahubNodeSync] = None
+
+    def execute(self, return_exceptions: bool = False) -> tuple[Optional[InfrahubNodeSync], Any]:
+        """Executes the stored task."""
+        result = None
+        try:
+            result = self.task(*self.args, **self.kwargs)
+        except Exception as exc:  # pylint: disable=broad-exception-caught
+            if return_exceptions:
+                return self.node, exc
+            raise exc
+
+        return self.node, result
+
+
 async def execute_batch_task_in_pool(
     task: BatchTask, semaphore: asyncio.Semaphore, return_exceptions: bool = False
 ) -> tuple[Optional[InfrahubNode], Any]:
     async with semaphore:
         try:
             result = await task.task(*task.args, **task.kwargs)
-
         except Exception as exc:  # pylint: disable=broad-exception-caught
             if return_exceptions:
                 return (task.node, exc)
@@ -64,3 +84,26 @@ async def execute(self) -> AsyncGenerator:
             if isinstance(result, Exception) and not self.return_exceptions:
                 raise result
             yield node, result
+
+
+class InfrahubBatchSync:
+    def __init__(self, max_concurrent_execution: int = 5, return_exceptions: bool = False):
+        self._tasks: list[BatchTaskSync] = []
+        self.max_concurrent_execution = max_concurrent_execution
+        self.return_exceptions = return_exceptions
+
+    @property
+    def num_tasks(self) -> int:
+        return len(self._tasks)
+
+    def add(self, *args: Any, task: Callable[..., Any], node: Optional[Any] = None, **kwargs: Any) -> None:
+        self._tasks.append(BatchTaskSync(task=task, node=node, args=args, kwargs=kwargs))
+
+    def execute(self) -> Generator[tuple[Optional[InfrahubNodeSync], Any], None, None]:
+        with ThreadPoolExecutor(max_workers=self.max_concurrent_execution) as executor:
+            futures = [executor.submit(task.execute, return_exceptions=self.return_exceptions) for task in self._tasks]
+            for future in futures:
+                node, result = future.result()
+                if isinstance(result, Exception) and not self.return_exceptions:
+                    raise result
+                yield node, result
diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py
index cdf7c08..00bdd19 100644
--- a/infrahub_sdk/client.py
+++ b/infrahub_sdk/client.py
@@ -23,7 +23,7 @@
 import ujson
 from typing_extensions import Self

-from .batch import InfrahubBatch
+from .batch import InfrahubBatch, InfrahubBatchSync
 from .branch import (
     BranchData,
     InfrahubBranchManager,
@@ -1454,9 +1454,6 @@ def delete(self, kind: Union[str, type[SchemaTypeSync]], id: str, branch: Optional
         node = InfrahubNodeSync(client=self, schema=schema, branch=branch, data={"id": id})
         node.delete()

-    def create_batch(self, return_exceptions: bool = False) -> InfrahubBatch:
-        raise NotImplementedError("This method hasn't been implemented in the sync client yet.")
-
     def clone(self) -> InfrahubClientSync:
         """Return a cloned version of the client using the same configuration"""
         return InfrahubClientSync(config=self.config)
@@ -1955,6 +1952,16 @@ def get(

         return results[0]

+    def create_batch(self, return_exceptions: bool = False) -> InfrahubBatchSync:
+        """Create a batch to execute multiple queries concurrently.
+
+        Executing the batch will be performed using a thread pool, meaning it cannot guarantee the execution order. It is not recommended to use such a
+        batch to manipulate objects that depend on each other.
+ """ + return InfrahubBatchSync( + max_concurrent_execution=self.max_concurrent_execution, return_exceptions=return_exceptions + ) + def get_list_repositories( self, branches: Optional[dict[str, BranchData]] = None, kind: str = "CoreGenericRepository" ) -> dict[str, RepositoryData]: diff --git a/infrahub_sdk/ctl/repository.py b/infrahub_sdk/ctl/repository.py index 8854e53..6f69f9a 100644 --- a/infrahub_sdk/ctl/repository.py +++ b/infrahub_sdk/ctl/repository.py @@ -1,12 +1,14 @@ from pathlib import Path +from typing import Optional import typer import yaml from pydantic import ValidationError from rich.console import Console +from infrahub_sdk.ctl.client import initialize_client + from ..async_typer import AsyncTyper -from ..ctl.client import initialize_client from ..ctl.exceptions import FileNotValidError from ..ctl.utils import init_logging from ..graphql import Mutation @@ -65,7 +67,7 @@ async def add( name: str, location: str, description: str = "", - username: str = "", + username: Optional[str] = None, password: str = "", commit: str = "", read_only: bool = False, @@ -88,10 +90,9 @@ async def add( client = initialize_client() - if username: - credential = await client.create(kind="CorePasswordCredential", name=name, username=username, password=password) - await credential.save() - input_data["data"]["credential"] = {"id": credential.id} + credential = await client.create(kind="CorePasswordCredential", name=name, username=username, password=password) + await credential.save(allow_upsert=True) + input_data["data"]["credential"] = {"id": credential.id} query = Mutation( mutation="CoreReadOnlyRepositoryCreate" if read_only else "CoreRepositoryCreate", diff --git a/infrahub_sdk/ctl/schema.py b/infrahub_sdk/ctl/schema.py index 697db62..3e5d8ce 100644 --- a/infrahub_sdk/ctl/schema.py +++ b/infrahub_sdk/ctl/schema.py @@ -129,7 +129,7 @@ async def load( for schema_file in schemas_data: console.print(f"[green] schema '{schema_file.location}' loaded successfully") else: - console.print("[green] The schema in Infrahub was is already up to date, no changes were required") + console.print("[green] The schema in Infrahub was already up to date, no changes were required") console.print(f"[green] {len(schemas_data)} {schema_definition} processed in {loading_time:.3f} seconds.") diff --git a/infrahub_sdk/schema.py b/infrahub_sdk/schema.py index af7dd2f..f04444e 100644 --- a/infrahub_sdk/schema.py +++ b/infrahub_sdk/schema.py @@ -282,6 +282,12 @@ class RelationshipCardinality(str, Enum): MANY = "many" +class RelationshipDirection(str, Enum): + BIDIR = "bidirectional" + OUTBOUND = "outbound" + INBOUND = "inbound" + + class BranchSupportType(str, Enum): AWARE = "aware" AGNOSTIC = "agnostic" @@ -339,6 +345,7 @@ class RelationshipSchema(BaseModel): state: SchemaState = SchemaState.PRESENT name: str peer: str + direction: RelationshipDirection = RelationshipDirection.BIDIR kind: RelationshipKind = RelationshipKind.GENERIC label: Optional[str] = None description: Optional[str] = None @@ -413,6 +420,21 @@ def get_relationship_by_identifier(self, id: str, raise_on_error: bool = True) - raise ValueError(f"Unable to find the relationship {id}") + def get_matching_relationship( + self, id: str, direction: RelationshipDirection = RelationshipDirection.BIDIR + ) -> RelationshipSchema: + valid_direction = RelationshipDirection.BIDIR + if direction == RelationshipDirection.INBOUND: + valid_direction = RelationshipDirection.OUTBOUND + elif direction == RelationshipDirection.OUTBOUND: + valid_direction = 
RelationshipDirection.INBOUND + + for item in self.relationships: + if item.identifier == id and item.direction == valid_direction: + return item + + raise ValueError(f"Unable to find the relationship {id} / ({valid_direction.value})") + @property def attribute_names(self) -> list[str]: return [item.name for item in self.attributes] @@ -637,8 +659,7 @@ async def wait_until_converged(self, branch: Optional[str] = None) -> None: """Wait until the schema has converged on the selected branch or the timeout has been reached""" waited = 0 while True: - status = await self.client.execute_graphql(query=SCHEMA_HASH_SYNC_STATUS, branch_name=branch) - if status["InfrahubStatus"]["summary"]["schema_hash_synced"]: + if await self.in_sync(branch=branch): self.client.log.info(f"Schema successfully converged after {waited} seconds") return @@ -649,6 +670,11 @@ async def wait_until_converged(self, branch: Optional[str] = None) -> None: waited += 1 await asyncio.sleep(delay=1) + async def in_sync(self, branch: Optional[str] = None) -> bool: + """Indicate if the schema is in sync across all workers for the provided branch""" + response = await self.client.execute_graphql(query=SCHEMA_HASH_SYNC_STATUS, branch_name=branch) + return response["InfrahubStatus"]["summary"]["schema_hash_synced"] + async def check(self, schemas: list[dict], branch: Optional[str] = None) -> tuple[bool, Optional[dict]]: branch = branch or self.client.default_branch url = f"{self.client.address}/api/schema/check?branch={branch}" @@ -1041,8 +1067,7 @@ def wait_until_converged(self, branch: Optional[str] = None) -> None: """Wait until the schema has converged on the selected branch or the timeout has been reached""" waited = 0 while True: - status = self.client.execute_graphql(query=SCHEMA_HASH_SYNC_STATUS, branch_name=branch) - if status["InfrahubStatus"]["summary"]["schema_hash_synced"]: + if self.in_sync(branch=branch): self.client.log.info(f"Schema successfully converged after {waited} seconds") return @@ -1053,6 +1078,11 @@ def wait_until_converged(self, branch: Optional[str] = None) -> None: waited += 1 sleep(1) + def in_sync(self, branch: Optional[str] = None) -> bool: + """Indicate if the schema is in sync across all workers for the provided branch""" + response = self.client.execute_graphql(query=SCHEMA_HASH_SYNC_STATUS, branch_name=branch) + return response["InfrahubStatus"]["summary"]["schema_hash_synced"] + def check(self, schemas: list[dict], branch: Optional[str] = None) -> tuple[bool, Optional[dict]]: branch = branch or self.client.default_branch url = f"{self.client.address}/api/schema/check?branch={branch}" diff --git a/infrahub_sdk/spec/object.py b/infrahub_sdk/spec/object.py index b9efb20..24278f4 100644 --- a/infrahub_sdk/spec/object.py +++ b/infrahub_sdk/spec/object.py @@ -78,7 +78,7 @@ async def create_node( if rel_schema.identifier is None: raise ValueError("identifier must be defined") - peer_rel = peer_schema.get_relationship_by_identifier(id=rel_schema.identifier) + peer_rel = peer_schema.get_matching_relationship(id=rel_schema.identifier, direction=rel_schema.direction) rel_data = data[rel]["data"] context = {} diff --git a/poetry.lock b/poetry.lock index 6a639d5..3481ce5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1169,51 +1169,57 @@ tests = ["pytest"] [[package]] name = "pyarrow" -version = "14.0.2" +version = "18.1.0" description = "Python library for Apache Arrow" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = 
"pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"}, - {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"}, - {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"}, - {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"}, - {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"}, - {file = 
"pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"}, - {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"}, - {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"}, - {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"}, + {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c"}, + {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f443122c8e31f4c9199cb23dca29ab9427cef990f283f80fe15b8e124bcc49b"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a03da7f2758645d17b7b4f83c8bffeae5bbb7f974523fe901f36288d2eab71"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ba17845efe3aa358ec266cf9cc2800fa73038211fb27968bfa88acd09261a470"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3c35813c11a059056a22a3bef520461310f2f7eea5c8a11ef9de7062a23f8d56"}, + {file = "pyarrow-18.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9736ba3c85129d72aefa21b4f3bd715bc4190fe4426715abfff90481e7d00812"}, + {file = "pyarrow-18.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:eaeabf638408de2772ce3d7793b2668d4bb93807deed1725413b70e3156a7854"}, + {file = "pyarrow-18.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:3b2e2239339c538f3464308fd345113f886ad031ef8266c6f004d49769bb074c"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39a2e0ed32a0970e4e46c262753417a60c43a3246972cfc2d3eb85aedd01b21"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:e31e9417ba9c42627574bdbfeada7217ad8a4cbbe45b9d6bdd4b62abbca4c6f6"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:01c034b576ce0eef554f7c3d8c341714954be9b3f5d5bc7117006b85fcf302fe"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f266a2c0fc31995a06ebd30bcfdb7f615d7278035ec5b1cd71c48d56daaf30b0"}, + {file = "pyarrow-18.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:d4f13eee18433f99adefaeb7e01d83b59f73360c231d4782d9ddfaf1c3fbde0a"}, + {file = "pyarrow-18.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:9f3a76670b263dc41d0ae877f09124ab96ce10e4e48f3e3e4257273cee61ad0d"}, + {file = "pyarrow-18.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:da31fbca07c435be88a0c321402c4e31a2ba61593ec7473630769de8346b54ee"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:543ad8459bc438efc46d29a759e1079436290bd583141384c6f7a1068ed6f992"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0743e503c55be0fdb5c08e7d44853da27f19dc854531c0570f9f394ec9671d54"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d4b3d2a34780645bed6414e22dda55a92e0fcd1b8a637fba86800ad737057e33"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c52f81aa6f6575058d8e2c782bf79d4f9fdc89887f16825ec3a66607a5dd8e30"}, + {file = "pyarrow-18.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ad4892617e1a6c7a551cfc827e072a633eaff758fa09f21c4ee548c30bcaf99"}, + {file = "pyarrow-18.1.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:84e314d22231357d473eabec709d0ba285fa706a72377f9cc8e1cb3c8013813b"}, + {file = "pyarrow-18.1.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:f591704ac05dfd0477bb8f8e0bd4b5dc52c1cadf50503858dce3a15db6e46ff2"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acb7564204d3c40babf93a05624fc6a8ec1ab1def295c363afc40b0c9e66c191"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74de649d1d2ccb778f7c3afff6085bd5092aed4c23df9feeb45dd6b16f3811aa"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f96bd502cb11abb08efea6dab09c003305161cb6c9eafd432e35e76e7fa9b90c"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:36ac22d7782554754a3b50201b607d553a8d71b78cdf03b33c1125be4b52397c"}, + {file = "pyarrow-18.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:25dbacab8c5952df0ca6ca0af28f50d45bd31c1ff6fcf79e2d120b4a65ee7181"}, + {file = "pyarrow-18.1.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a276190309aba7bc9d5bd2933230458b3521a4317acfefe69a354f2fe59f2bc"}, + {file = "pyarrow-18.1.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ad514dbfcffe30124ce655d72771ae070f30bf850b48bc4d9d3b25993ee0e386"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aebc13a11ed3032d8dd6e7171eb6e86d40d67a5639d96c35142bd568b9299324"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6cf5c05f3cee251d80e98726b5c7cc9f21bab9e9783673bac58e6dfab57ecc8"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:11b676cd410cf162d3f6a70b43fb9e1e40affbc542a1e9ed3681895f2962d3d9"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = 
"sha256:b76130d835261b38f14fc41fdfb39ad8d672afb84c447126b84d5472244cfaba"}, + {file = "pyarrow-18.1.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:0b331e477e40f07238adc7ba7469c36b908f07c89b95dd4bd3a0ec84a3d1e21e"}, + {file = "pyarrow-18.1.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:2c4dd0c9010a25ba03e198fe743b1cc03cd33c08190afff371749c52ccbbaf76"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f97b31b4c4e21ff58c6f330235ff893cc81e23da081b1a4b1c982075e0ed4e9"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a4813cb8ecf1809871fd2d64a8eff740a1bd3691bbe55f01a3cf6c5ec869754"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:05a5636ec3eb5cc2a36c6edb534a38ef57b2ab127292a716d00eabb887835f1e"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:73eeed32e724ea3568bb06161cad5fa7751e45bc2228e33dcb10c614044165c7"}, + {file = "pyarrow-18.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a1880dd6772b685e803011a6b43a230c23b566859a6e0c9a276c1e0faf4f4052"}, + {file = "pyarrow-18.1.0.tar.gz", hash = "sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73"}, ] -[package.dependencies] -numpy = ">=1.16.6" +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] [[package]] name = "pydantic" @@ -2065,4 +2071,4 @@ tests = ["Jinja2", "pytest", "pyyaml", "rich"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "780e999965fa84c44b146fa9b5fe656b1c7919142dbf9e05b3996862ce6e256c" +content-hash = "bd7225275eeae26e41660131b9802cc569e73586b16029bced8c7633725b2c95" diff --git a/pyproject.toml b/pyproject.toml index 6c32f59..ef4bdb4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,11 +1,11 @@ [project] name = "infrahub-sdk" -version = "1.1.0" +version = "1.2.0-dev0" requires-python = ">=3.9" [tool.poetry] name = "infrahub-sdk" -version = "1.1.0" +version = "1.2.0" description = "Python Client to interact with Infrahub" authors = ["OpsMill "] readme = "README.md" @@ -21,6 +21,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] [tool.poetry.dependencies] @@ -43,7 +44,7 @@ numpy = [ { version = "^1.24.2", optional = true, python = ">=3.9,<3.12" }, { version = "^1.26.2", optional = true, python = ">=3.12" }, ] -pyarrow = { version = "^14", optional = true } +pyarrow = { version = ">=14", optional = true } rich = { version = "^13", optional = true } toml = { version = "^0.10", optional = true } typer = { version = "^0.12.3", optional = true } diff --git a/tests/unit/ctl/test_repository_app.py b/tests/unit/ctl/test_repository_app.py new file mode 100644 index 0000000..fd558dc --- /dev/null +++ b/tests/unit/ctl/test_repository_app.py @@ -0,0 +1,273 @@ +"""Integration tests for infrahubctl commands.""" + +from unittest import mock + +import pytest +from typer.testing import CliRunner + +from infrahub_sdk.client import InfrahubClient +from infrahub_sdk.ctl.cli_commands import app + +runner = CliRunner() + + +@pytest.fixture +def mock_client() -> mock.Mock: + """Fixture for a mocked InfrahubClient.""" + client = mock.create_autospec(InfrahubClient) + return client + + +# --------------------------------------------------------- +# infrahubctl repository command tests +# --------------------------------------------------------- 
+@mock.patch("infrahub_sdk.ctl.repository.initialize_client")
+class TestInfrahubctlRepository:
+    """Groups the 'infrahubctl repository' test cases."""
+
+    def test_repo_no_username(self, mock_init_client, mock_client) -> None:
+        """Ensure a repository can be added without a username: the credential is created with username=None rather than an empty string, which fails."""
+        mock_cred = mock.AsyncMock()
+        mock_cred.id = "1234"
+        mock_client.create.return_value = mock_cred
+
+        mock_init_client.return_value = mock_client
+        output = runner.invoke(
+            app,
+            [
+                "repository",
+                "add",
+                "Gitlab",
+                "https://gitlab.com/FragmentedPacket/nautobot-plugin-ansible-filters.git",
+                "--password",
+                "mySup3rSecureP@ssw0rd",
+            ],
+        )
+        assert output.exit_code == 0
+        mock_client.create.assert_called_once()
+        mock_client.create.assert_called_with(
+            name="Gitlab",
+            kind="CorePasswordCredential",
+            password="mySup3rSecureP@ssw0rd",
+            username=None,
+        )
+        mock_cred.save.assert_called_once()
+        mock_cred.save.assert_called_with(allow_upsert=True)
+        mock_client.execute_graphql.assert_called_once()
+        mock_client.execute_graphql.assert_called_with(
+            query="""
+mutation {
+    CoreRepositoryCreate(
+        data: {
+            name: {
+                value: "Gitlab"
+            }
+            location: {
+                value: "https://gitlab.com/FragmentedPacket/nautobot-plugin-ansible-filters.git"
+            }
+            description: {
+                value: ""
+            }
+            commit: {
+                value: ""
+            }
+            credential: {
+                id: "1234"
+            }
+        }
+    ){
+        ok
+    }
+}
+""",
+            branch_name="main",
+            tracker="mutation-repository-create",
+        )
+
+    def test_repo_username(self, mock_init_client, mock_client) -> None:
+        """Ensure a provided username is passed through to the credential."""
+        mock_cred = mock.AsyncMock()
+        mock_cred.id = "1234"
+        mock_client.create.return_value = mock_cred
+
+        mock_init_client.return_value = mock_client
+        output = runner.invoke(
+            app,
+            [
+                "repository",
+                "add",
+                "Gitlab",
+                "https://gitlab.com/FragmentedPacket/nautobot-plugin-ansible-filters.git",
+                "--password",
+                "mySup3rSecureP@ssw0rd",
+                "--username",
+                "opsmill",
+            ],
+        )
+        assert output.exit_code == 0
+        mock_client.create.assert_called_once()
+        mock_client.create.assert_called_with(
+            name="Gitlab",
+            kind="CorePasswordCredential",
+            password="mySup3rSecureP@ssw0rd",
+            username="opsmill",
+        )
+        mock_cred.save.assert_called_once()
+        mock_cred.save.assert_called_with(allow_upsert=True)
+        mock_client.execute_graphql.assert_called_once()
+        mock_client.execute_graphql.assert_called_with(
+            query="""
+mutation {
+    CoreRepositoryCreate(
+        data: {
+            name: {
+                value: "Gitlab"
+            }
+            location: {
+                value: "https://gitlab.com/FragmentedPacket/nautobot-plugin-ansible-filters.git"
+            }
+            description: {
+                value: ""
+            }
+            commit: {
+                value: ""
+            }
+            credential: {
+                id: "1234"
+            }
+        }
+    ){
+        ok
+    }
+}
+""",
+            branch_name="main",
+            tracker="mutation-repository-create",
+        )
+
+    def test_repo_readonly_true(self, mock_init_client, mock_client) -> None:
+        """Ensure the --read-only flag creates the repository through the CoreReadOnlyRepositoryCreate mutation."""
+        mock_cred = mock.AsyncMock()
+        mock_cred.id = "1234"
+        mock_client.create.return_value = mock_cred
+
+        mock_init_client.return_value = mock_client
+        output = runner.invoke(
+            app,
+            [
+                "repository",
+                "add",
+                "Gitlab",
+                "https://gitlab.com/FragmentedPacket/nautobot-plugin-ansible-filters.git",
+                "--password",
+                "mySup3rSecureP@ssw0rd",
+                "--read-only",
+            ],
+        )
+        assert output.exit_code == 0
+        mock_client.create.assert_called_once()
+        mock_client.create.assert_called_with(
+            name="Gitlab",
+            kind="CorePasswordCredential",
+            password="mySup3rSecureP@ssw0rd",
+            username=None,
+        )
+        mock_cred.save.assert_called_once()
+        mock_cred.save.assert_called_with(allow_upsert=True)
+        mock_client.execute_graphql.assert_called_once()
+        mock_client.execute_graphql.assert_called_with(
+            query="""
+mutation {
+    CoreReadOnlyRepositoryCreate(
+        data: {
+            name: {
+                value: "Gitlab"
+            }
+            location: {
+                value: "https://gitlab.com/FragmentedPacket/nautobot-plugin-ansible-filters.git"
+            }
+            description: {
+                value: ""
+            }
+            commit: {
+                value: ""
+            }
+            credential: {
+                id: "1234"
+            }
+        }
+    ){
+        ok
+    }
+}
+""",
+            branch_name="main",
+            tracker="mutation-repository-create",
+        )
+
+    def test_repo_description_commit_branch(self, mock_init_client, mock_client) -> None:
+        """Ensure description, commit, and branch options are passed through to the mutation."""
+        mock_cred = mock.AsyncMock()
+        mock_cred.id = "1234"
+        mock_client.create.return_value = mock_cred
+
+        mock_init_client.return_value = mock_client
+        output = runner.invoke(
+            app,
+            [
+                "repository",
+                "add",
+                "Gitlab",
+                "https://gitlab.com/FragmentedPacket/nautobot-plugin-ansible-filters.git",
+                "--password",
+                "mySup3rSecureP@ssw0rd",
+                "--username",
+                "opsmill",
+                "--description",
+                "This is a test description",
+                "--commit",
+                "myHashCommit",
+                "--branch",
+                "develop",
+            ],
+        )
+        assert output.exit_code == 0
+        mock_client.create.assert_called_once()
+        mock_client.create.assert_called_with(
+            name="Gitlab",
+            kind="CorePasswordCredential",
+            password="mySup3rSecureP@ssw0rd",
+            username="opsmill",
+        )
+        mock_cred.save.assert_called_once()
+        mock_cred.save.assert_called_with(allow_upsert=True)
+        mock_client.execute_graphql.assert_called_once()
+        mock_client.execute_graphql.assert_called_with(
+            query="""
+mutation {
+    CoreRepositoryCreate(
+        data: {
+            name: {
+                value: "Gitlab"
+            }
+            location: {
+                value: "https://gitlab.com/FragmentedPacket/nautobot-plugin-ansible-filters.git"
+            }
+            description: {
+                value: "This is a test description"
+            }
+            commit: {
+                value: "myHashCommit"
+            }
+            credential: {
+                id: "1234"
+            }
+        }
+    ){
+        ok
+    }
+}
+""",
+            branch_name="develop",
+            tracker="mutation-repository-create",
+        )
diff --git a/tests/unit/sdk/conftest.py b/tests/unit/sdk/conftest.py
index 7a8f568..c3473b2 100644
--- a/tests/unit/sdk/conftest.py
+++ b/tests/unit/sdk/conftest.py
@@ -55,27 +55,32 @@ async def echo_clients(clients: BothClients) -> AsyncGenerator[BothClients, None


 @pytest.fixture
-def replace_async_return_annotation():
+def return_annotation_map() -> dict[str, str]:
+    return {
+        "type[SchemaType]": "type[SchemaTypeSync]",
+        "SchemaType": "SchemaTypeSync",
+        "CoreNode": "CoreNodeSync",
+        "Optional[CoreNode]": "Optional[CoreNodeSync]",
+        "Union[str, type[SchemaType]]": "Union[str, type[SchemaTypeSync]]",
+        "Union[InfrahubNode, SchemaType]": "Union[InfrahubNodeSync, SchemaTypeSync]",
+        "Union[InfrahubNode, SchemaType, None]": "Union[InfrahubNodeSync, SchemaTypeSync, None]",
+        "Union[list[InfrahubNode], list[SchemaType]]": "Union[list[InfrahubNodeSync], list[SchemaTypeSync]]",
+        "InfrahubClient": "InfrahubClientSync",
+        "InfrahubNode": "InfrahubNodeSync",
+        "list[InfrahubNode]": "list[InfrahubNodeSync]",
+        "Optional[InfrahubNode]": "Optional[InfrahubNodeSync]",
+        "Optional[type[SchemaType]]": "Optional[type[SchemaTypeSync]]",
+        "Optional[Union[CoreNode, SchemaType]]": "Optional[Union[CoreNodeSync, SchemaTypeSync]]",
+        "InfrahubBatch": "InfrahubBatchSync",
+    }
+
+
+@pytest.fixture
+def replace_async_return_annotation(return_annotation_map: dict[str, str]):
     """Allows for comparison between sync and async return annotations."""

     def replace_annotation(annotation: str) -> str:
-        replacements = {
-            "type[SchemaType]": "type[SchemaTypeSync]",
-            "SchemaType": "SchemaTypeSync",
-            "CoreNode": "CoreNodeSync",
-            "Optional[CoreNode]": "Optional[CoreNodeSync]",
-            "Union[str, type[SchemaType]]": "Union[str, type[SchemaTypeSync]]",
-            "Union[InfrahubNode, SchemaType]": "Union[InfrahubNodeSync, SchemaTypeSync]",
-            "Union[InfrahubNode, SchemaType, None]": "Union[InfrahubNodeSync, SchemaTypeSync, None]",
-            "Union[list[InfrahubNode], list[SchemaType]]": "Union[list[InfrahubNodeSync], list[SchemaTypeSync]]",
-            "InfrahubClient": "InfrahubClientSync",
-            "InfrahubNode": "InfrahubNodeSync",
-            "list[InfrahubNode]": "list[InfrahubNodeSync]",
-            "Optional[InfrahubNode]": "Optional[InfrahubNodeSync]",
-            "Optional[type[SchemaType]]": "Optional[type[SchemaTypeSync]]",
-            "Optional[Union[CoreNode, SchemaType]]": "Optional[Union[CoreNodeSync, SchemaTypeSync]]",
-        }
-        return replacements.get(annotation) or annotation
+        return return_annotation_map.get(annotation) or annotation

     return replace_annotation

@@ -95,26 +100,11 @@ def replace_annotations(parameters: Mapping[str, Parameter]) -> tuple[str, str]:


 @pytest.fixture
-def replace_sync_return_annotation() -> str:
+def replace_sync_return_annotation(return_annotation_map: dict[str, str]) -> str:
     """Allows for comparison between sync and async return annotations."""

     def replace_annotation(annotation: str) -> str:
-        replacements = {
-            "type[SchemaTypeSync]": "type[SchemaType]",
-            "SchemaTypeSync": "SchemaType",
-            "CoreNodeSync": "CoreNode",
-            "Optional[CoreNodeSync]": "Optional[CoreNode]",
-            "Union[str, type[SchemaTypeSync]]": "Union[str, type[SchemaType]]",
-            "Union[InfrahubNodeSync, SchemaTypeSync]": "Union[InfrahubNode, SchemaType]",
-            "Union[InfrahubNodeSync, SchemaTypeSync, None]": "Union[InfrahubNode, SchemaType, None]",
-            "Union[list[InfrahubNodeSync], list[SchemaTypeSync]]": "Union[list[InfrahubNode], list[SchemaType]]",
-            "InfrahubClientSync": "InfrahubClient",
-            "InfrahubNodeSync": "InfrahubNode",
-            "list[InfrahubNodeSync]": "list[InfrahubNode]",
-            "Optional[InfrahubNodeSync]": "Optional[InfrahubNode]",
-            "Optional[type[SchemaTypeSync]]": "Optional[type[SchemaType]]",
-            "Optional[Union[CoreNodeSync, SchemaTypeSync]]": "Optional[Union[CoreNode, SchemaType]]",
-        }
+        replacements = {v: k for k, v in return_annotation_map.items()}
         return replacements.get(annotation) or annotation

     return replace_annotation
diff --git a/tests/unit/sdk/test_batch.py b/tests/unit/sdk/test_batch.py
index 83beaa5..8b49817 100644
--- a/tests/unit/sdk/test_batch.py
+++ b/tests/unit/sdk/test_batch.py
@@ -11,48 +11,117 @@
 from tests.unit.sdk.conftest import BothClients

+client_types = ["standard", "sync"]
+
+
+@pytest.mark.parametrize("client_type", client_types)
+async def test_batch_execution(clients: BothClients, client_type: str):
+    r: list[int] = []
+    tasks_number = 10
+
+    if client_type == "standard":
+
+        async def test_func() -> int:
+            return 1
+
+        batch = await clients.standard.create_batch()
+        for _ in range(tasks_number):
+            batch.add(task=test_func)
+
+        assert batch.num_tasks == tasks_number
+        async for _, result in batch.execute():
+            r.append(result)
+    else:
+
+        def test_func() -> int:
+            return 1
+
+        batch = clients.sync.create_batch()
+        for _ in range(tasks_number):
+            batch.add(task=test_func)
+
+        assert batch.num_tasks == tasks_number
+        for _, result in batch.execute():
+            r.append(result)
+
+    assert r == [1] * tasks_number
+
+
+@pytest.mark.parametrize("client_type", client_types)
 async def test_batch_return_exception(
-    httpx_mock: HTTPXMock, mock_query_mutation_location_create_failed, mock_schema_query_01, clients: BothClients
+    httpx_mock: HTTPXMock,
+    mock_query_mutation_location_create_failed,
+    mock_schema_query_01,
+    clients: BothClients,
+    client_type: str,
 ):  # pylint: disable=unused-argument
-    batch = await clients.standard.create_batch(return_exceptions=True)
-    locations = ["JFK1", "JFK1"]
-    results = []
-    for location_name in locations:
-        data = {"name": {"value": location_name, "is_protected": True}}
-        obj = await clients.standard.create(kind="BuiltinLocation", data=data)
-        batch.add(task=obj.save, node=obj)
-        results.append(obj)
-
-    result_iter = batch.execute()
-    # Assert first node success
-    node, result = await result_iter.__anext__()
-    assert node == results[0]
-    assert not isinstance(result, Exception)
-
-    # Assert second node failure
-    node, result = await result_iter.__anext__()
-    assert node == results[1]
-    assert isinstance(result, GraphQLError)
-    assert "An error occurred while executing the GraphQL Query" in str(result)
+    if client_type == "standard":
+        batch = await clients.standard.create_batch(return_exceptions=True)
+        locations = ["JFK1", "JFK1"]
+        results = []
+        for location_name in locations:
+            data = {"name": {"value": location_name, "is_protected": True}}
+            obj = await clients.standard.create(kind="BuiltinLocation", data=data)
+            batch.add(task=obj.save, node=obj)
+            results.append(obj)
+
+        result_iter = batch.execute()
+        # Assert first node success
+        node, result = await result_iter.__anext__()
+        assert node == results[0]
+        assert not isinstance(result, Exception)
+        # Assert second node failure
+        node, result = await result_iter.__anext__()
+        assert node == results[1]
+        assert isinstance(result, GraphQLError)
+        assert "An error occurred while executing the GraphQL Query" in str(result)
+    else:
+        batch = clients.sync.create_batch(return_exceptions=True)
+        locations = ["JFK1", "JFK1"]
+        results = []
+        for location_name in locations:
+            data = {"name": {"value": location_name, "is_protected": True}}
+            obj = clients.sync.create(kind="BuiltinLocation", data=data)
+            batch.add(task=obj.save, node=obj)
+        results = [r for _, r in batch.execute()]
+        # Must contain one exception, and it must be a GraphQLError
+        assert len(results) == 2
+        assert any(isinstance(r, Exception) for r in results)
+        assert any(isinstance(r, GraphQLError) for r in results)
+
+
+@pytest.mark.parametrize("client_type", client_types)
 async def test_batch_exception(
-    httpx_mock: HTTPXMock, mock_query_mutation_location_create_failed, mock_schema_query_01, clients: BothClients
+    httpx_mock: HTTPXMock,
+    mock_query_mutation_location_create_failed,
+    mock_schema_query_01,
+    clients: BothClients,
+    client_type: str,
 ):  # pylint: disable=unused-argument
-    batch = await clients.standard.create_batch(return_exceptions=False)
-    locations = ["JFK1", "JFK1"]
-    for location_name in locations:
-        data = {"name": {"value": location_name, "is_protected": True}}
-        obj = await clients.standard.create(kind="BuiltinLocation", data=data)
-        batch.add(task=obj.save, node=obj)
-
-    with pytest.raises(GraphQLError) as exc:
-        async for _, _ in batch.execute():
-            pass
-    assert "An error occurred while executing the GraphQL Query" in str(exc.value)
-
-
-async def test_batch_not_implemented_sync(clients: BothClients):
-    with pytest.raises(NotImplementedError):
-        clients.sync.create_batch()
+    if client_type == "standard":
+        batch = await clients.standard.create_batch(return_exceptions=False)
+        locations = ["JFK1", "JFK1"]
+        for location_name in locations:
+            data = {"name": {"value": location_name, "is_protected": True}}
+            obj = await clients.standard.create(kind="BuiltinLocation", data=data)
+            batch.add(task=obj.save, node=obj)
+
+        with pytest.raises(GraphQLError) as exc:
+            async for _, _ in batch.execute():
+                pass
+        assert "An error occurred while executing the GraphQL Query" in str(exc.value)
+    else:
+        batch = clients.sync.create_batch(return_exceptions=False)
+        locations = ["JFK1", "JFK1"]
+        for location_name in locations:
+            data = {"name": {"value": location_name, "is_protected": True}}
+            obj = clients.sync.create(kind="BuiltinLocation", data=data)
+            batch.add(task=obj.save, node=obj)
+
+        with pytest.raises(GraphQLError) as exc:
+            for _, _ in batch.execute():
+                pass
+        assert "An error occurred while executing the GraphQL Query" in str(exc.value)
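
Reviewer note: a minimal usage sketch of the synchronous batch API added above. The server address and node kind are placeholders; only `create_batch()`, `add()`, `num_tasks`, and `execute()` come from this diff (infrahub_sdk/batch.py and client.py).

```python
# Sketch: exercising InfrahubBatchSync through the sync client.
from infrahub_sdk import Config, InfrahubClientSync

# Placeholder address; point this at a real Infrahub instance.
client = InfrahubClientSync(config=Config(address="http://localhost:8000"))

batch = client.create_batch(return_exceptions=True)
for name in ["JFK1", "ORD1"]:  # placeholder location names
    obj = client.create(kind="BuiltinLocation", data={"name": {"value": name}})
    batch.add(task=obj.save, node=obj)  # node is yielded back with the result

# execute() submits every task to a ThreadPoolExecutor and yields
# (node, result) pairs; with return_exceptions=True a failure is yielded
# instead of raised, so one bad save does not abort the whole batch.
for node, result in batch.execute():
    if isinstance(result, Exception):
        print(f"{node.id if node else '?'} failed: {result}")
```

Because the tasks run in a thread pool, completion order is not guaranteed, which is why the docstring above warns against batching objects that depend on each other.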
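Likewise, the `in_sync()` helper factored out of `wait_until_converged()` in infrahub_sdk/schema.py can be called on its own; a sketch under the same assumptions (the branch name is a placeholder, `client` is the sync client from the previous example):

```python
# Sketch: one-shot schema convergence check with the new in_sync() helper.
if not client.schema.in_sync(branch="my-branch"):
    # wait_until_converged() polls once per second until every worker
    # reports the same schema hash, or the configured timeout is reached.
    client.schema.wait_until_converged(branch="my-branch")
```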