Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions changelog/265.fixed.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Allow the SDK tracking feature to continue after encountering delete errors caused by impacted nodes having already been removed by a cascade delete.
4 changes: 2 additions & 2 deletions infrahub_sdk/ctl/check.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ def run(
"""Locate and execute all checks under the defined path."""

log_level = "DEBUG" if debug else "INFO"
FORMAT = "%(message)s"
logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
format_str = "%(message)s"
logging.basicConfig(level=log_level, format=format_str, datefmt="[%X]", handlers=[RichHandler()])

repository_config = get_repository_config(find_repository_config_file())

Expand Down
4 changes: 2 additions & 2 deletions infrahub_sdk/ctl/cli_commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,8 +152,8 @@ async def run(
logging.getLogger("httpcore").setLevel(logging.ERROR)

log_level = "DEBUG" if debug else "INFO"
FORMAT = "%(message)s"
logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
format_str = "%(message)s"
logging.basicConfig(level=log_level, format=format_str, datefmt="[%X]", handlers=[RichHandler()])
log = logging.getLogger("infrahubctl")

variables_dict = parse_cli_vars(variables)
Expand Down
4 changes: 2 additions & 2 deletions infrahub_sdk/ctl/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,8 @@ def init_logging(debug: bool = False) -> None:
logging.getLogger("httpcore").setLevel(logging.ERROR)

log_level = "DEBUG" if debug else "INFO"
FORMAT = "%(message)s"
logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler(show_path=debug)])
format_str = "%(message)s"
logging.basicConfig(level=log_level, format=format_str, datefmt="[%X]", handlers=[RichHandler(show_path=debug)])
logging.getLogger("infrahubctl")


Expand Down
2 changes: 1 addition & 1 deletion infrahub_sdk/generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def __init__(
self.params = params or {}
self.generator_instance = generator_instance
self._client: InfrahubClient | None = None
self.logger = logger if logger else logging.getLogger("infrahub.tasks")
self.logger = logger or logging.getLogger("infrahub.tasks")
self.request_context = request_context
self.execute_in_proposed_change = execute_in_proposed_change
self.execute_after_merge = execute_after_merge
Expand Down
20 changes: 10 additions & 10 deletions infrahub_sdk/graphql/renderers.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,30 +134,30 @@ def render_query_block(data: dict, offset: int = 4, indentation: int = 4, conver
>>> render_query_block(data)
[' u: user(id: 123) {', ' name', ' }']
"""
FILTERS_KEY = "@filters"
ALIAS_KEY = "@alias"
KEYWORDS_TO_SKIP = [FILTERS_KEY, ALIAS_KEY]
filters_key = "@filters"
alias_key = "@alias"
keywords_to_skip = [filters_key, alias_key]

offset_str = " " * offset
lines = []
for key, value in data.items():
if key in KEYWORDS_TO_SKIP:
if key in keywords_to_skip:
continue
if value is None:
lines.append(f"{offset_str}{key}")
elif isinstance(value, dict) and len(value) == 1 and ALIAS_KEY in value and value[ALIAS_KEY]:
lines.append(f"{offset_str}{value[ALIAS_KEY]}: {key}")
elif isinstance(value, dict) and len(value) == 1 and alias_key in value and value[alias_key]:
lines.append(f"{offset_str}{value[alias_key]}: {key}")
elif isinstance(value, dict):
if value.get(ALIAS_KEY):
key_str = f"{value[ALIAS_KEY]}: {key}"
if value.get(alias_key):
key_str = f"{value[alias_key]}: {key}"
else:
key_str = key

if value.get(FILTERS_KEY):
if value.get(filters_key):
filters_str = ", ".join(
[
f"{key2}: {convert_to_graphql_as_string(value=value2, convert_enum=convert_enum)}"
for key2, value2 in value[FILTERS_KEY].items()
for key2, value2 in value[filters_key].items()
]
)
lines.append(f"{offset_str}{key_str}({filters_str}) " + "{")
Expand Down
10 changes: 8 additions & 2 deletions infrahub_sdk/query_groups.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from typing import TYPE_CHECKING, Any

from .constants import InfrahubClientMode
from .exceptions import NodeNotFoundError
from .exceptions import GraphQLError, NodeNotFoundError
from .utils import dict_hash

if TYPE_CHECKING:
Expand Down Expand Up @@ -109,7 +109,13 @@ async def delete_unused(self) -> None:
if self.previous_members and self.unused_member_ids:
for member in self.previous_members:
if member.id in self.unused_member_ids and member.typename:
await self.client.delete(kind=member.typename, id=member.id)
try:
await self.client.delete(kind=member.typename, id=member.id)
except GraphQLError as exc:
if not exc.message or "Unable to find the node" not in exc.message:
# If the node has already been deleted, skip the error, since it would have been
# removed by the cascade delete of another node
raise

async def add_related_nodes(self, ids: list[str], update_group_context: bool | None = None) -> None:
"""
Expand Down
5 changes: 1 addition & 4 deletions infrahub_sdk/schema/repository.py
Original file line number Diff line number Diff line change
Expand Up @@ -216,10 +216,7 @@ def unique_items(cls, v: list[Any]) -> list[Any]:
return v

def _has_resource(self, resource_id: str, resource_type: type[ResourceClass], resource_field: str = "name") -> bool:
for item in getattr(self, RESOURCE_MAP[resource_type]):
if getattr(item, resource_field) == resource_id:
return True
return False
return any(getattr(item, resource_field) == resource_id for item in getattr(self, RESOURCE_MAP[resource_type]))

def _get_resource(
self, resource_id: str, resource_type: type[ResourceClass], resource_field: str = "name"
Expand Down
4 changes: 2 additions & 2 deletions infrahub_sdk/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ def str_to_bool(value: str | bool | int) -> bool:
if not isinstance(value, str):
raise TypeError(f"{value} must be a string")

MAP = {
str_to_bool_map = {
"y": True,
"yes": True,
"t": True,
Expand All @@ -188,7 +188,7 @@ def str_to_bool(value: str | bool | int) -> bool:
"0": False,
}
try:
return MAP[value.lower()]
return str_to_bool_map[value.lower()]
except KeyError as exc:
raise ValueError(f"{value} can not be converted into a boolean") from exc

Expand Down
3 changes: 0 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -239,10 +239,8 @@ ignore = [
##################################################################################################
"B008", # Do not perform function call `typer.Option` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
"B904", # Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... from None` to distinguish them from errors in exception handling
"FURB110", # Replace ternary `if` expression with `or` operator
"INP001", # File declares a package, but is nested under an implicit namespace package.
"N802", # Function name should be lowercase
"N806", # Variable in function should be lowercase
"PERF203", # `try`-`except` within a loop incurs performance overhead
"PERF401", # Use a list comprehension to create a transformed list
"PLR0912", # Too many branches
Expand All @@ -257,7 +255,6 @@ ignore = [
"S311", # Standard pseudo-random generators are not suitable for cryptographic purposes
"S701", # By default, jinja2 sets `autoescape` to `False`. Consider using `autoescape=True`
"SIM108", # Use ternary operator `key_str = f"{value[ALIAS_KEY]}: {key}" if ALIAS_KEY in value and value[ALIAS_KEY] else key` instead of `if`-`else`-block
"SIM110", # Use `return any(getattr(item, resource_field) == resource_id for item in getattr(self, RESOURCE_MAP[resource_type]))` instead of `for` loop
"TC003", # Move standard library import `collections.abc.Iterable` into a type-checking block
"UP031", # Use format specifiers instead of percent format
]
Expand Down
6 changes: 3 additions & 3 deletions tests/unit/sdk/test_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ async def test_validate_method_signature(
replace_async_return_annotation: Callable[[str], str],
replace_sync_return_annotation: Callable[[str], str],
) -> None:
EXCLUDE_PARAMETERS = ["client"]
exclude_parameters = ["client"]
async_method = getattr(InfrahubNode, method)
sync_method = getattr(InfrahubNodeSync, method)
async_sig = inspect.signature(async_method)
Expand All @@ -115,8 +115,8 @@ async def test_validate_method_signature(
# Extract parameter names and exclude some, like client, from the comparison
async_params_name = async_sig.parameters.keys()
sync_params_name = sync_sig.parameters.keys()
async_params = {key: value for key, value in async_sig.parameters.items() if key not in EXCLUDE_PARAMETERS}
sync_params = {key: value for key, value in sync_sig.parameters.items() if key not in EXCLUDE_PARAMETERS}
async_params = {key: value for key, value in async_sig.parameters.items() if key not in exclude_parameters}
sync_params = {key: value for key, value in sync_sig.parameters.items() if key not in exclude_parameters}

assert async_params_name == sync_params_name
assert replace_sync_parameter_annotations(async_params) == replace_sync_parameter_annotations(sync_params)
Expand Down
4 changes: 2 additions & 2 deletions tests/unit/sdk/test_timestamp.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,9 @@ def test_init_timestamp() -> None:


def test_parse_string() -> None:
REF = "2022-01-01T10:00:00.000000Z"
ref = "2022-01-01T10:00:00.000000Z"

assert Timestamp._parse_string(REF).to_instant() == Instant.parse_iso(REF)
assert Timestamp._parse_string(ref).to_instant() == Instant.parse_iso(ref)
assert Timestamp._parse_string("5m")
assert Timestamp._parse_string("10min")
assert Timestamp._parse_string("2h")
Expand Down