Skip to content

Commit

Permalink
IFC-500 Please ASYNC linting rules (#4970)
Browse files Browse the repository at this point in the history
  • Loading branch information
gmazoyer authored Nov 20, 2024
1 parent b8ab36c commit 3f6a022
Show file tree
Hide file tree
Showing 8 changed files with 111 additions and 128 deletions.
3 changes: 1 addition & 2 deletions backend/infrahub/cli/git_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,8 +150,7 @@ async def start(

log.info("Initialized Git Agent ...")

while not shutdown_event.is_set():
await asyncio.sleep(1)
await shutdown_event.wait()

log.info("Shutdown of Git agent requested")

Expand Down
3 changes: 2 additions & 1 deletion backend/infrahub/graphql/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,8 @@ async def _handle_http_request(
) -> JSONResponse:
if request.app.state.response_delay:
self.logger.info(f"Adding response delay of {request.app.state.response_delay} seconds")
time.sleep(request.app.state.response_delay)
# This is on purpose
time.sleep(request.app.state.response_delay) # noqa: ASYNC251

try:
operations = await _get_operation_from_request(request)
Expand Down
15 changes: 5 additions & 10 deletions backend/infrahub/lock.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

import asyncio
import time
import uuid
from asyncio import Lock as LocalLock
Expand Down Expand Up @@ -127,6 +128,7 @@ def __init__(
self.in_multi: bool = in_multi
self.lock_type: str = "multi" if self.in_multi else "individual"
self.acquire_time: Optional[int] = None
self.event = asyncio.Event()

if not self.connection or (self.use_local is None and name.startswith("local.")):
self.use_local = True
Expand Down Expand Up @@ -156,6 +158,7 @@ async def acquire(self) -> None:
else:
await self.local.acquire()
self.acquire_time = time.time_ns()
self.event.clear()

async def release(self) -> None:
duration_ns = time.time_ns() - self.acquire_time
Expand All @@ -164,6 +167,7 @@ async def release(self) -> None:
await self.remote.release()
else:
self.local.release()
self.event.set()

async def locked(self) -> bool:
if not self.use_local:
Expand Down Expand Up @@ -238,23 +242,14 @@ def initialization(self) -> LocalLock:
return self.get(name=GLOBAL_INIT_LOCK)

async def local_schema_wait(self) -> None:
    """Wait until the local schema lock has been released.

    The lock's ``asyncio.Event`` is cleared on acquire and set on release,
    so waiting on the event blocks exactly while the lock is held.

    NOTE(review): the Event starts unset and is only set by ``release()``,
    so waiting on a lock that was never acquired would block forever —
    return immediately when the lock is not currently held, matching the
    previous polling behavior.
    """
    lock = self.get(name=LOCAL_SCHEMA_LOCK)
    if not await lock.locked():
        return
    await lock.event.wait()

def global_schema_lock(self) -> InfrahubMultiLock:
    # Bundle the local and global schema locks so both are acquired together.
    return InfrahubMultiLock(_registry=self, locks=[LOCAL_SCHEMA_LOCK, GLOBAL_SCHEMA_LOCK])

def global_graph_lock(self) -> InfrahubMultiLock:
    # Graph mutations also take both schema locks, so schema and graph
    # changes cannot interleave.
    return InfrahubMultiLock(_registry=self, locks=[LOCAL_SCHEMA_LOCK, GLOBAL_GRAPH_LOCK, GLOBAL_SCHEMA_LOCK])

async def wait_until_available(self, name: str) -> None:
    """Wait until the lock identified by *name* is available.

    Polls the lock every 100 ms. This allows callers to block while an
    event is in progress, without serializing concurrent waiters against
    each other — multiple waiters may all proceed once the lock is free.
    """
    while await self.get(name=name).locked():
        await sleep(0.1)


def initialize_lock(local_only: bool = False, service: Optional[InfrahubServices] = None) -> None:
global registry # pylint: disable=global-statement
Expand Down
12 changes: 5 additions & 7 deletions backend/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,12 +138,12 @@ async def default_ipnamespace(db: InfrahubDatabase, register_core_models_schema)


@pytest.fixture
def local_storage_dir(tmp_path: Path) -> Path:
    """Create a per-test storage directory and configure local file storage.

    Points the global storage settings at a ``storage`` directory created
    under pytest's ``tmp_path`` and returns that directory.
    """
    storage_dir = tmp_path / "storage"
    storage_dir.mkdir()

    config.SETTINGS.storage.driver = config.StorageDriver.FileSystemStorage
    # Settings expect a string path, while callers receive a Path object.
    config.SETTINGS.storage.local.path_ = str(storage_dir)

    return storage_dir

Expand Down Expand Up @@ -719,9 +719,7 @@ class TestHelper:
@staticmethod
def schema_file(file_name: str) -> dict:
    """Return the contents of a schema fixture file as a dictionary.

    Reads ``<fixtures_dir>/schemas/<file_name>`` as UTF-8 text and parses
    it as JSON.
    """
    file_content = (TestHelper.get_fixtures_dir() / "schemas" / file_name).read_text(encoding="utf-8")

    return ujson.loads(file_content)

Expand Down
23 changes: 10 additions & 13 deletions backend/tests/unit/api/test_60_storage.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import hashlib
import os
from pathlib import Path

from fastapi.testclient import TestClient
Expand All @@ -12,16 +11,16 @@ async def test_file_upload(
db: InfrahubDatabase,
client: TestClient,
helper,
local_storage_dir: str,
local_storage_dir: Path,
admin_headers,
default_branch: Branch,
authentication_base,
):
fixture_dir = helper.get_fixtures_dir()

files_dir = os.path.join(fixture_dir, "schemas")
filenames = [item.name for item in os.scandir(files_dir) if item.is_file()]
file_path = Path(os.path.join(files_dir, filenames[0]))
files_dir = fixture_dir / "schemas"
filenames = [item.name for item in files_dir.iterdir() if item.is_file()]
file_path = files_dir / filenames[0]

file_content = file_path.read_bytes()
file_checksum = hashlib.md5(file_content, usedforsecurity=False).hexdigest()
Expand All @@ -34,8 +33,7 @@ async def test_file_upload(
assert data["checksum"] == file_checksum
assert data["identifier"]

file_in_storage = Path(os.path.join(local_storage_dir, data["identifier"]))

file_in_storage: Path = local_storage_dir / data["identifier"]
assert file_in_storage.exists()
assert file_in_storage.read_bytes() == file_content

Expand All @@ -44,16 +42,16 @@ async def test_content_upload(
db: InfrahubDatabase,
client: TestClient,
helper,
local_storage_dir: str,
local_storage_dir: Path,
admin_headers,
default_branch: Branch,
authentication_base,
):
fixture_dir = helper.get_fixtures_dir()
files_dir = os.path.join(fixture_dir, "schemas")
filenames = [item.name for item in os.scandir(files_dir) if item.is_file()]
files_dir = fixture_dir / "schemas"
filenames = [item.name for item in files_dir.iterdir() if item.is_file()]

file_content = Path(os.path.join(files_dir, filenames[0])).read_bytes()
file_content = (files_dir / filenames[0]).read_bytes()
file_checksum = hashlib.md5(file_content, usedforsecurity=False).hexdigest()

with client:
Expand All @@ -64,7 +62,6 @@ async def test_content_upload(
assert data["checksum"] == file_checksum
assert data["identifier"]

file_in_storage = Path(os.path.join(local_storage_dir, data["identifier"]))

file_in_storage: Path = local_storage_dir / data["identifier"]
assert file_in_storage.exists()
assert file_in_storage.read_bytes() == file_content
9 changes: 4 additions & 5 deletions backend/tests/unit/conftest.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import os
import shutil
from itertools import islice
from pathlib import Path
Expand Down Expand Up @@ -135,14 +134,14 @@ def s3_storage_bucket() -> str:


@pytest.fixture
def file1_in_storage(local_storage_dir: Path, helper) -> str:
    """Copy one schema fixture file into local storage and return its identifier.

    Picks the first file from the ``schemas`` fixtures directory, stores it
    under a freshly generated UUID identifier, and returns that identifier.
    """
    fixture_dir = helper.get_fixtures_dir()
    file1_identifier = str(UUIDT())

    files_dir = fixture_dir / "schemas"

    filenames = [item.name for item in files_dir.iterdir() if item.is_file()]
    shutil.copyfile(files_dir / filenames[0], local_storage_dir / file1_identifier)

    return file1_identifier

Expand Down
15 changes: 6 additions & 9 deletions backend/tests/unit/storage/test_local_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,10 @@
from infrahub.storage import InfrahubObjectStorage


async def test_init_local(helper, local_storage_dir: Path, file1_in_storage: str):
    """Local storage driver initializes and honors the configured path."""
    storage = await InfrahubObjectStorage.init(settings=config.SETTINGS.storage)
    assert isinstance(storage._storage, fastapi_storages.FileSystemStorage)
    # Settings store the path as a string; the fixture returns a Path.
    assert config.SETTINGS.storage.local.path_ == str(local_storage_dir)


async def test_init_s3(helper, s3_storage_bucket: str):
Expand All @@ -22,22 +22,19 @@ async def test_init_s3(helper, s3_storage_bucket: str):
assert config.SETTINGS.storage.s3.endpoint_url == "storage.googleapis.com"


async def test_retrieve_file(helper, local_storage_dir: Path, file1_in_storage: str):
    """A file previously placed in storage can be retrieved by identifier."""
    storage = await InfrahubObjectStorage.init(settings=config.SETTINGS.storage)
    file1 = storage.retrieve(identifier=file1_in_storage)
    assert file1


async def test_retrieve_file_does_not_exist(helper, local_storage_dir: Path):
    """Retrieving an unknown identifier raises NodeNotFoundError."""
    storage = await InfrahubObjectStorage.init(settings=config.SETTINGS.storage)
    with pytest.raises(NodeNotFoundError):
        storage.retrieve(identifier="doesnotexist")


async def test_store_file(
helper,
local_storage_dir: str,
):
async def test_store_file(helper, local_storage_dir: Path):
storage = await InfrahubObjectStorage.init(settings=config.SETTINGS.storage)

fixture_dir = helper.get_fixtures_dir()
Expand All @@ -48,6 +45,6 @@ async def test_store_file(
identifier = str(UUIDT())
storage.store(identifier=identifier, content=content_file1)

file1 = Path(os.path.join(local_storage_dir, identifier))
file1 = local_storage_dir / identifier
assert file1.exists()
assert file1.read_bytes() == content_file1
Loading

0 comments on commit 3f6a022

Please sign in to comment.