Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions backend/infrahub/core/migrations/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from .schema.node_attribute_remove import NodeAttributeRemoveMigration
from .schema.node_kind_update import NodeKindUpdateMigration
from .schema.node_remove import NodeRemoveMigration
from .schema.node_uniqueness_constraints_update import NodeUniquenessConstraintsUpdateMigration
from .schema.placeholder_dummy import PlaceholderDummyMigration
from .shared import SchemaMigration

Expand All @@ -17,6 +18,7 @@
"node.name.update": NodeKindUpdateMigration,
"node.namespace.update": NodeKindUpdateMigration,
"node.relationship.remove": PlaceholderDummyMigration,
"node.uniqueness_constraints.update": NodeUniquenessConstraintsUpdateMigration,
"attribute.name.update": AttributeNameUpdateMigration,
"attribute.branch.update": None,
"attribute.kind.update": AttributeKindUpdateMigration,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -124,12 +124,18 @@ async def execute(
all_queries: list[type[AttributeMigrationQuery]] = []

# Check profile support changes
previous_profile_support_condition = self.previous_schema.check_if_attr_supports_profiles(
attribute_schema=self.previous_attribute_schema
)
new_profile_support_condition = self.new_schema.check_if_attr_supports_profiles(
attribute_schema=self.new_attribute_schema
)
if (
isinstance(self.new_schema, (NodeSchema, GenericSchema))
and self.new_schema.generate_profile
and self.previous_attribute_schema.support_profiles != self.new_attribute_schema.support_profiles
and previous_profile_support_condition != new_profile_support_condition
):
if self.new_attribute_schema.support_profiles:
if new_profile_support_condition:
all_queries.append(ProfilesAttributeAddMigrationQuery)
else:
all_queries.append(ProfilesAttributeRemoveMigrationQuery)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def _get_node_kinds(self, schema: MainSchemaTypes, new_attribute_schema: Attribu
schema_kinds = [schema.kind]
if not isinstance(schema, (NodeSchema, GenericSchema)):
return schema_kinds
if new_attribute_schema.support_profiles:
if schema.check_if_attr_supports_profiles(attribute_schema=new_attribute_schema):
schema_kinds.append(f"Profile{schema.kind}")
if isinstance(schema, GenericSchema) and schema.used_by:
schema_kinds.extend([f"Profile{kind}" for kind in schema.used_by])
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
from __future__ import annotations

from typing import TYPE_CHECKING, Sequence

from infrahub.core.constants import SchemaPathType
from infrahub.core.path import SchemaPath
from infrahub.core.schema.generic_schema import GenericSchema
from infrahub.core.schema.node_schema import NodeSchema

from ..query import MigrationBaseQuery # noqa: TC001
from ..shared import AttributeSchemaMigration, MigrationInput, MigrationResult, SchemaMigration
from .attribute_supports_generated_schema import (
ProfilesAttributeAddMigrationQuery,
ProfilesAttributeRemoveMigrationQuery,
)

if TYPE_CHECKING:
from infrahub.core.branch.models import Branch


class NodeUniquenessConstraintsUpdateMigration(SchemaMigration):
    """Migration executed when a node's ``uniqueness_constraints`` are updated.

    Adding an attribute to a uniqueness constraint removes its profile support,
    and removing it from the constraints restores profile support, so the
    corresponding profile attribute data in the database must be added or
    removed for each affected attribute.
    """

    name: str = "node.uniqueness_constraints.update"
    queries: Sequence[type[MigrationBaseQuery]] = []

    async def execute(
        self,
        migration_input: MigrationInput,
        branch: Branch,
        queries: Sequence[type[MigrationBaseQuery]] | None = None,  # noqa: ARG002
    ) -> MigrationResult:
        """Run one profile add/remove migration per attribute whose profile support changed.

        Returns a MigrationResult aggregating errors and the number of
        migrations executed; stops at the first attribute migration that fails.
        """
        result = MigrationResult()

        # Only node and generic schemas can carry profile data.
        if not isinstance(self.new_schema, (NodeSchema, GenericSchema)):
            return result

        # Without profile generation there are no profile edges to migrate.
        # This check is invariant across attributes, so do it once up front.
        if not self.new_schema.generate_profile:
            return result

        for attr in self.new_schema.attributes:
            # For attributes that are new in this schema version, fall back to
            # the new definition when evaluating against the previous schema.
            prev_attr = self.previous_schema.get_attribute_or_none(name=attr.name) or attr
            previous_supports_profiles = self.previous_schema.check_if_attr_supports_profiles(
                attribute_schema=prev_attr
            )
            new_supports_profiles = self.new_schema.check_if_attr_supports_profiles(attribute_schema=attr)

            if previous_supports_profiles == new_supports_profiles:
                continue

            # Delegate to the attribute-level migration with the query that
            # matches the direction of the change (gained vs lost support).
            attr_migration = AttributeSchemaMigration(
                name=f"node.uniqueness_constraints.update.{attr.name}",
                queries=[
                    ProfilesAttributeAddMigrationQuery
                    if new_supports_profiles
                    else ProfilesAttributeRemoveMigrationQuery
                ],
                new_node_schema=self.new_node_schema,
                previous_node_schema=self.previous_node_schema,
                schema_path=SchemaPath(
                    path_type=SchemaPathType.ATTRIBUTE,
                    schema_kind=self.new_schema.kind,
                    field_name=attr.name,
                ),
            )

            attr_result = await attr_migration.execute(migration_input=migration_input, branch=branch)
            result.errors.extend(attr_result.errors)
            result.nbr_migrations_executed += attr_result.nbr_migrations_executed
            # Stop at the first failure rather than piling further changes on top.
            if result.errors:
                break

        return result
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def __init__(self, db: InfrahubDatabase, branch: Branch | None = None) -> None:
def _get_required_attributes_names(self, schema: NodeSchema) -> set[str]:
attr_names: set[str] = set()
for attr_schema in schema.attributes:
if attr_schema.support_profiles and not attr_schema.optional:
if not attr_schema.optional and schema.check_if_attr_supports_profiles(attribute_schema=attr_schema):
attr_names.add(attr_schema.name)
return attr_names

Expand Down
1 change: 1 addition & 0 deletions backend/infrahub/core/schema/attribute_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ def is_deprecated(self) -> bool:

@property
def support_profiles(self) -> bool:
    """Whether this attribute can be managed through profiles.

    True only when the attribute is neither read-only nor unique.
    Prefer ``check_if_attr_supports_profiles`` on MainSchemaTypes, which
    additionally checks uniqueness constraints.
    """
    return all(flag is False for flag in (self.read_only, self.unique))

@property
Expand Down
15 changes: 15 additions & 0 deletions backend/infrahub/core/schema/basenode_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -687,6 +687,21 @@ def update(self, other: HashableModel) -> Self:

return self

def _check_attr_in_uniqueness_constraint(self, attr: str) -> bool:
"""Return True if ``attr`` appears in any uniqueness constraint path."""
if not self.uniqueness_constraints:
return False
for constraint_paths in self.uniqueness_constraints:
for constraint_path in constraint_paths:
if constraint_path.startswith(f"{attr}__") or constraint_path == attr:
return True
return False

def check_if_attr_supports_profiles(self, attribute_schema: AttributeSchema) -> bool:
    """Return True if the attribute supports profiles on this schema.

    The attribute must declare profile support itself and must not be part
    of any of this schema's uniqueness constraints.
    """
    if not attribute_schema.support_profiles:
        return False
    return not self._check_attr_in_uniqueness_constraint(attr=attribute_schema.name)


@dataclass
class SchemaUniquenessConstraintPath:
Expand Down
2 changes: 1 addition & 1 deletion backend/infrahub/core/schema/definitions/internal.py
Original file line number Diff line number Diff line change
Expand Up @@ -350,7 +350,7 @@ def to_dict(self) -> dict[str, Any]:
internal_kind=list[list[str]],
description="List of multi-element uniqueness constraints that can combine relationships and attributes",
optional=True,
extra={"update": UpdateSupport.VALIDATE_CONSTRAINT},
extra={"update": UpdateSupport.MIGRATION_REQUIRED},
),
SchemaAttribute(
name="documentation",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ class GeneratedBaseNodeSchema(HashableModel):
uniqueness_constraints: list[list[str]] | None = Field(
default=None,
description="List of multi-element uniqueness constraints that can combine relationships and attributes",
json_schema_extra={"update": "validate_constraint"},
json_schema_extra={"update": "migration_required"},
)
documentation: str | None = Field(
default=None,
Expand Down
2 changes: 1 addition & 1 deletion backend/infrahub/core/schema/schema_branch.py
Original file line number Diff line number Diff line change
Expand Up @@ -2480,7 +2480,7 @@ def generate_profile_from_node(self, node: NodeSchema) -> ProfileSchema:
)
Comment thread
cubic-dev-ai[bot] marked this conversation as resolved.

for node_attr in node.attributes:
if not node_attr.support_profiles:
Comment thread
ajtmccarty marked this conversation as resolved.
if not node.check_if_attr_supports_profiles(attribute_schema=node_attr):
continue
attr_schema_class = get_attribute_schema_class_for_kind(kind=node_attr.kind)
attr = attr_schema_class(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,47 +17,9 @@
from infrahub.core.timestamp import Timestamp
from infrahub.database import InfrahubDatabase
from tests.db_snapshot import DbSnapshotter
from tests.helpers.db_validation import assert_attribute_path_status
from tests.helpers.edge_timestamps import assert_edge_timestamps

# Cypher template: for each node carrying the given label, select the most
# recent HAS_ATTRIBUTE edge for $attr_name and the most recent HAS_VALUE edge
# beneath it on $branch_name, returning both edge statuses.
# NOTE(review): the label is %-interpolated because Cypher labels cannot be
# query parameters — callers must pass trusted labels only.
LATEST_ATTRIBUTE_PATH_STATUS_QUERY = """
MATCH (node:%(label)s)
CALL (node) {
MATCH (node)-[r1:HAS_ATTRIBUTE]->(attr:Attribute {name: $attr_name})
WHERE r1.branch = $branch_name
RETURN r1, attr
ORDER BY r1.branch_level DESC, r1.from DESC
LIMIT 1
}
CALL (attr) {
MATCH (attr)-[r2:HAS_VALUE]->(av)
WHERE r2.branch = $branch_name
RETURN r2
ORDER BY r2.branch_level DESC, r2.from DESC
LIMIT 1
}
RETURN node.uuid AS node_id, r1.status AS has_attr_status, r2.status AS has_val_status
"""


async def assert_attribute_path_status(
    db: InfrahubDatabase,
    node_label: str,
    attr_name: str,
    branch_name: str,
    expected_status: str,
) -> None:
    """Assert that the latest HAS_ATTRIBUTE->HAS_VALUE path has the expected status for all instances."""
    cypher = LATEST_ATTRIBUTE_PATH_STATUS_QUERY % {"label": node_label}
    records = await db.execute_query(query=cypher, params={"attr_name": attr_name, "branch_name": branch_name})
    assert len(records) > 0, f"No {node_label} nodes found with attribute {attr_name!r}"
    for record in records:
        has_attr = record["has_attr_status"]
        has_val = record["has_val_status"]
        assert has_attr == expected_status, (
            f"Node {record['node_id']}: HAS_ATTRIBUTE status is {has_attr!r}, expected {expected_status!r}"
        )
        assert has_val == expected_status, (
            f"Node {record['node_id']}: HAS_VALUE status is {has_val!r}, expected {expected_status!r}"
        )


@pytest.fixture
async def car_person_schema(
Expand Down
Loading
Loading