fix(backend): speedup load_schema_from_db by removing pagination
Signed-off-by: Fatih Acar <fatih@opsmill.com>
fatih-acar committed Jan 8, 2025
1 parent d77991a commit 4e4229c
Showing 1 changed file with 9 additions and 1 deletion.
backend/infrahub/core/schema/manager.py: 9 additions & 1 deletion
@@ -2,7 +2,9 @@
 
 from typing import TYPE_CHECKING, Any, Optional, Union
 
-from infrahub import lock
+from opentelemetry import trace
+
+from infrahub import config, lock
 from infrahub.core.manager import NodeManager
 from infrahub.core.models import (
     HashableModelDiff,
@@ -607,6 +609,7 @@ async def load_schema(
         self.set_schema_branch(name=branch.name, schema=branch_schema)
         return branch_schema
 
+    @trace.get_tracer(__name__).start_as_current_span("load_schema_from_db")
     async def load_schema_from_db(
         self,
         db: InfrahubDatabase,
@@ -661,6 +664,9 @@ async def load_schema_from_db(
             if removed_node in schema.node_names:
                 schema.delete(name=removed_node)
 
+        old_query_size_limit = config.SETTINGS.database.query_size_limit
+        config.SETTINGS.database.query_size_limit = 100000  # we gain ~15% performance by avoiding pagination in SchemaNodes during _enrich_node_dicts_with_relationships
+
         if not has_filters or filters["generics"]:
             generic_schema = self.get(name="SchemaGeneric", branch=branch)
             for schema_node in await self.query(
@@ -688,6 +694,8 @@ async def load_schema_from_db(
                     schema=await self.convert_node_schema_to_schema(schema_node=schema_node, db=db),
                 )
 
+        config.SETTINGS.database.query_size_limit = old_query_size_limit
+
         schema.process(validate_schema=validate_schema)
 
         return schema
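
For context on the change itself: the @trace.get_tracer(__name__).start_as_current_span("load_schema_from_db") decorator wraps the method in an OpenTelemetry span so its duration shows up in traces, and the query_size_limit bump lets the schema queries come back in one batch instead of being paginated. Below is a minimal, illustrative sketch of the same "raise the limit, run the queries, restore the limit" pattern, written as a context manager so the previous value is restored even if a query raises; the helper name temporary_query_size_limit is hypothetical and not part of the commit, which restores the value inline instead.

# Illustrative sketch only, not code from the commit.
from contextlib import contextmanager

from infrahub import config


@contextmanager
def temporary_query_size_limit(limit: int):  # hypothetical helper, not in the commit
    previous = config.SETTINGS.database.query_size_limit
    config.SETTINGS.database.query_size_limit = limit
    try:
        yield
    finally:
        # Restore the old limit even if the wrapped block raises.
        config.SETTINGS.database.query_size_limit = previous


# Rough usage inside load_schema_from_db:
# with temporary_query_size_limit(100000):
#     ...  # query SchemaGeneric/SchemaNode records without pagination overhead

One trade-off worth noting: the committed version restores the limit with a plain assignment after the queries, so an exception raised in between would leave the global limit at 100000; a try/finally (as in the sketch) would avoid that.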