Skip to content

chore(launchpad): change size analysis upload logic to create/update size metrics table #94656

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
97 changes: 55 additions & 42 deletions src/sentry/preprod/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,7 @@ def assemble_preprod_artifact_size_analysis(
"""
Creates a size analysis file for a preprod artifact from uploaded chunks.
"""
from sentry.preprod.models import PreprodArtifact
from sentry.preprod.models import PreprodArtifact, PreprodArtifactSizeMetrics

logger.info(
"Starting preprod artifact size analysis assembly",
Expand Down Expand Up @@ -187,49 +187,62 @@ def assemble_preprod_artifact_size_analysis(
if assemble_result is None:
return

with transaction.atomic(router.db_for_write(PreprodArtifact)):
# Update existing PreprodArtifact with size analysis file
try:
preprod_artifact = PreprodArtifact.objects.get(
project=project,
id=artifact_id,
)
except PreprodArtifact.DoesNotExist:
# Ideally this should never happen
logger.exception(
"PreprodArtifact not found during size analysis assembly",
extra={
"artifact_id": artifact_id,
"project_id": project_id,
"organization_id": org_id,
},
)
# Clean up the assembled file since we can't associate it with an artifact
try:
preprod_artifact = PreprodArtifact.objects.get(
project=project,
id=artifact_id,
)
preprod_artifact.analysis_file_id = assemble_result.bundle.id
preprod_artifact.state = PreprodArtifact.ArtifactState.PROCESSED
preprod_artifact.save(update_fields=["analysis_file_id", "state", "date_updated"])
# Close the temporary file handle first
if (
hasattr(assemble_result, "bundle_temp_file")
and assemble_result.bundle_temp_file
):
assemble_result.bundle_temp_file.close()
# Then delete the file object
assemble_result.bundle.delete()
except Exception:
pass # Ignore cleanup errors
raise Exception(f"PreprodArtifact with id {artifact_id} does not exist")

logger.info(
"Updated preprod artifact with size analysis file",
extra={
"preprod_artifact_id": preprod_artifact.id,
"analysis_file_id": assemble_result.bundle.id,
"project_id": project_id,
"organization_id": org_id,
},
)
except PreprodArtifact.DoesNotExist:
# Ideally this should never happen
logger.exception(
"PreprodArtifact not found during size analysis assembly",
extra={
"artifact_id": artifact_id,
"project_id": project_id,
"organization_id": org_id,
},
)
# Clean up the assembled file since we can't associate it with an artifact
try:
# Close the temporary file handle first
if (
hasattr(assemble_result, "bundle_temp_file")
and assemble_result.bundle_temp_file
):
assemble_result.bundle_temp_file.close()
# Then delete the file object
assemble_result.bundle.delete()
except Exception:
pass # Ignore cleanup errors
raise Exception(f"PreprodArtifact with id {artifact_id} does not exist")
# Update artifact state in its own transaction with proper database routing
with transaction.atomic(router.db_for_write(PreprodArtifact)):
preprod_artifact.state = PreprodArtifact.ArtifactState.PROCESSED
preprod_artifact.save(update_fields=["state", "date_updated"])

# Update size metrics in its own transaction
with transaction.atomic(router.db_for_write(PreprodArtifactSizeMetrics)):
size_metrics, created = PreprodArtifactSizeMetrics.objects.update_or_create(
preprod_artifact=preprod_artifact,
defaults={
"analysis_file_id": assemble_result.bundle.id,
"metrics_artifact_type": PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT, # TODO: parse this from the treemap json
"state": PreprodArtifactSizeMetrics.SizeAnalysisState.COMPLETED,
},
)

logger.info(
"Created or updated preprod artifact size metrics with analysis file",
extra={
"preprod_artifact_id": preprod_artifact.id,
"size_metrics_id": size_metrics.id,
"analysis_file_id": assemble_result.bundle.id,
"was_created": created,
"project_id": project_id,
"organization_id": org_id,
},
)

logger.info(
"Finished preprod artifact size analysis assembly",
Expand Down
57 changes: 48 additions & 9 deletions tests/sentry/preprod/test_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,11 @@

from sentry.models.files.file import File
from sentry.models.files.fileblob import FileBlob
from sentry.preprod.models import PreprodArtifact, PreprodBuildConfiguration
from sentry.preprod.models import (
PreprodArtifact,
PreprodArtifactSizeMetrics,
PreprodBuildConfiguration,
)
from sentry.preprod.tasks import assemble_preprod_artifact, assemble_preprod_artifact_size_analysis
from sentry.tasks.assemble import (
AssembleTask,
Expand Down Expand Up @@ -323,14 +327,48 @@ def test_assemble_preprod_artifact_size_analysis_success(self):
assert status == ChunkFileState.OK
assert details is None

# Verify size analysis file and artifact update
# Verify size analysis file and size metrics creation
size_files = File.objects.filter(type="preprod.size_analysis")
assert len(size_files) == 1
assert size_files[0].name.startswith("preprod-size-analysis-")

# Verify PreprodArtifactSizeMetrics record was created
size_metrics = PreprodArtifactSizeMetrics.objects.filter(
preprod_artifact=self.preprod_artifact
)
assert len(size_metrics) == 1
assert size_metrics[0].analysis_file_id == size_files[0].id
assert size_metrics[0].state == PreprodArtifactSizeMetrics.SizeAnalysisState.COMPLETED
assert (
size_metrics[0].metrics_artifact_type
== PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT
)

def test_assemble_preprod_artifact_size_analysis_update_existing(self):
    """Re-running size analysis assembly updates the existing metrics row in place."""
    # Seed a PENDING metrics record so the task must take the update path,
    # not the create path.
    pending_metrics = PreprodArtifactSizeMetrics.objects.create(
        preprod_artifact=self.preprod_artifact,
        state=PreprodArtifactSizeMetrics.SizeAnalysisState.PENDING,
    )

    status, details = self._run_task_and_verify_status(b"test size analysis update content")

    assert status == ChunkFileState.OK
    assert details is None

    # Exactly one size-analysis file should have been assembled.
    analysis_files = list(File.objects.filter(type="preprod.size_analysis"))
    assert len(analysis_files) == 1
    analysis_file = analysis_files[0]
    assert analysis_file.name.startswith("preprod-size-analysis-")

    # The pre-existing row was updated rather than a second row created.
    metrics_rows = list(
        PreprodArtifactSizeMetrics.objects.filter(preprod_artifact=self.preprod_artifact)
    )
    assert len(metrics_rows) == 1  # still a single record
    updated = metrics_rows[0]
    assert updated.id == pending_metrics.id  # same row as before the run
    assert updated.analysis_file_id == analysis_file.id
    assert updated.state == PreprodArtifactSizeMetrics.SizeAnalysisState.COMPLETED

def test_assemble_preprod_artifact_size_analysis_error_cases(self):
# Test nonexistent artifact
Expand Down Expand Up @@ -359,7 +397,8 @@ def test_assemble_preprod_artifact_size_analysis_error_cases(self):
status, details = self._run_task_and_verify_status(b"nonexistent project", project_id=99999)
assert status == ChunkFileState.ERROR

# Verify artifact was not updated for error cases
self.preprod_artifact.refresh_from_db()
assert self.preprod_artifact.analysis_file_id is None
assert self.preprod_artifact.state == PreprodArtifact.ArtifactState.UPLOADED
# Verify no size metrics were created for error cases
size_metrics = PreprodArtifactSizeMetrics.objects.filter(
preprod_artifact=self.preprod_artifact
)
assert len(size_metrics) == 0
Loading