Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -180,3 +180,7 @@ Thumbs.db

# Internal Autodesk directories
.adsk/

# Study Files folder - ignore everything except zip files
tests/api/integration_tests/study_files/*
!tests/api/integration_tests/study_files/*.zip
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ pathspec==0.12.1
polib==1.2.0
pre-commit==4.2.0
pydata-sphinx-theme==0.16.1
pygetwindow==0.0.9
pylint==3.3.4
pytest==8.3.4
sphinx==8.1.3
Expand Down
458 changes: 262 additions & 196 deletions tests/api/integration_tests/README.md

Large diffs are not rendered by default.

98 changes: 85 additions & 13 deletions tests/api/integration_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,16 +8,32 @@
import json
from pathlib import Path
import pytest
import tempfile
import zipfile
from moldflow import Synergy, Project, ItemType
from tests.api.integration_tests.constants import (
FileSet,
ModelType,
STUDY_FILES_DIR,
DATA_DIR,
DataFile,
MID_DOE_MODEL_FILE,
MID_DOE_MODEL_NAME,
DEFAULT_WINDOW_SIZE_X,
DEFAULT_WINDOW_SIZE_Y,
DEFAULT_WINDOW_POSITION_X,
DEFAULT_WINDOW_POSITION_Y,
)


def unzip_study_files():
    """Extract every ``*.zip`` archive found in the study-files directory.

    Archives are expanded in place into ``STUDY_FILES_DIR`` so the study
    files referenced by the integration tests exist on disk before the
    file map is built.
    """
    archives = STUDY_FILES_DIR.glob("*.zip")
    for archive in archives:
        with zipfile.ZipFile(archive) as handle:
            handle.extractall(STUDY_FILES_DIR)


def generate_file_map(
study_files_dir: str = STUDY_FILES_DIR,
) -> dict[FileSet, dict[ModelType, str]]:
Expand All @@ -28,12 +44,17 @@ def generate_file_map(
file_map = {}
for file_set in FileSet:
set_dir = Path(study_files_dir) / file_set.value
if file_set == FileSet.SINGLE:
file_map[file_set.name] = {MID_DOE_MODEL_NAME: str(set_dir / MID_DOE_MODEL_FILE)}
continue

file_map[file_set.name] = {
model_type: str(set_dir / f"{model_type.value}.sdy") for model_type in ModelType
}
return file_map


# Import-time setup: extract the zipped study files, then build the
# FileSet -> {model: path} map that pytest_generate_tests reads when
# parametrizing test classes (FILE_SETS must exist before collection).
unzip_study_files()
FILE_SETS = generate_file_map()


Expand Down Expand Up @@ -62,6 +83,15 @@ def pytest_generate_tests(metafunc):
)

file_set = marker.args[0]
if file_set == FileSet.SINGLE:
metafunc.parametrize(
"study_file",
[(MID_DOE_MODEL_NAME, FILE_SETS[file_set.name][MID_DOE_MODEL_NAME])],
ids=[file_set.value],
scope="class",
)
return

file_set_name = file_set.name
params = list(FILE_SETS[file_set_name].items())
ids = [f"{file_set}-{model_type.value}" for model_type, _ in params]
Expand All @@ -74,9 +104,17 @@ def synergy_fixture():
"""
Fixture to create a real Synergy instance for integration testing.
"""
synergy_instance = Synergy()
synergy_instance = Synergy(logging=False)
synergy_instance.silence(True)
synergy_instance.set_application_window_pos(
DEFAULT_WINDOW_POSITION_X,
DEFAULT_WINDOW_POSITION_Y,
DEFAULT_WINDOW_SIZE_X,
DEFAULT_WINDOW_SIZE_Y,
)
yield synergy_instance
synergy_instance.quit(False)
if synergy_instance.synergy is not None:
synergy_instance.quit(False)


@pytest.fixture(scope="class", name="project")
Expand Down Expand Up @@ -115,7 +153,10 @@ def opened_study_fixture(project: Project, study_file):
Opens a study file inside an already-open project.
"""
model_type, _ = study_file
study = project.open_item_by_name(model_type.value, ItemType.STUDY)
if model_type == MID_DOE_MODEL_NAME:
study = project.open_item_by_name(model_type, ItemType.STUDY)
else:
study = project.open_item_by_name(model_type.value, ItemType.STUDY)
return study


Expand All @@ -133,20 +174,36 @@ def expected_data_fixture(request):
"""
Load the expected data JSON file once per test class.

Expects the test class to define a class attribute:
`json_file_name = DataFile.MESH_SUMMARY` (for example)
Automatically derives the JSON filename from pytest markers.
Looks for markers like @pytest.mark.mesh_summary or @pytest.mark.synergy
and converts them to filenames like "mesh_summary_data.json" or "synergy_data.json".

Falls back to class attribute `json_file_name` if no suitable marker is found.
"""
json_file_name = getattr(request.cls, "json_file_name", None)
if not json_file_name:
pytest.skip("Test class missing `json_file_name` attribute.")
# Try to derive filename from pytest markers first
json_file_name = None
marker_list = getattr(request.cls, "pytestmark", [])

json_file = json_file_name.value if isinstance(json_file_name, DataFile) else json_file_name
json_path = Path(DATA_DIR) / json_file
# Look for data-related markers (excluding common ones like 'integration', 'file_set')
excluded_markers = {'integration', 'file_set', 'parametrize'}
for marker in marker_list:
if marker.name not in excluded_markers:
# Convert marker name to filename: mesh_summary -> mesh_summary_data.json
json_file_name = f"{marker.name}_data.json"
break

json_path = Path(DATA_DIR) / json_file_name
if not json_path.exists():
pytest.skip(f"Expected data file not found: {json_path}")

with open(json_path, "r", encoding="utf-8") as f:
return json.load(f)
try:
with open(json_path, "r", encoding="utf-8") as f:
return json.load(f)
except json.JSONDecodeError:
pytest.skip(
f"Expected data file is not valid JSON: {json_path}. "
"Please run the data generation script to create/update the file."
)


@pytest.fixture(name="expected_values")
Expand All @@ -161,3 +218,18 @@ def expected_values_fixture(expected_data, study_file):
if not model_data:
pytest.skip(f"No expected values found for model type: {model_type.value}")
return model_data


@pytest.fixture(name="expected_values_general")
def expected_values_general_fixture(expected_data):
    """
    Pass-through fixture exposing the complete expected-data mapping
    (no per-model filtering) under the ``expected_values_general`` name.
    """
    general_values = expected_data
    return general_values


@pytest.fixture(scope="class")
def temp_dir():
    """Yield the path of a scratch directory, removed after the test class ends."""
    scratch = tempfile.TemporaryDirectory()
    try:
        yield scratch.name
    finally:
        # Equivalent to exiting the context manager: delete the tree.
        scratch.cleanup()
55 changes: 45 additions & 10 deletions tests/api/integration_tests/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,34 +5,69 @@
Constants for integration tests.
"""

from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from enum import Enum

INTEGRATION_TESTS_DIR = Path(__file__).parent
STUDY_FILES_DIR = INTEGRATION_TESTS_DIR / "study_files"
DATA_DIR = INTEGRATION_TESTS_DIR / "data"
METADATA_FILE_NAME = "metadata.json"
METADATA_FILE = Path(DATA_DIR) / METADATA_FILE_NAME
TEST_PROJECT_NAME = "test_project"
MID_DOE_MODEL_FILE = "mid_doe_model.sdy"
MID_DOE_MODEL_NAME = "mid_doe_model"

DEFAULT_WINDOW_SIZE_X = 2560
DEFAULT_WINDOW_SIZE_Y = 1440
DEFAULT_WINDOW_POSITION_X = 0
DEFAULT_WINDOW_POSITION_Y = 0

class DataFile(Enum):
"""
DataFile enum defines the different types of data files.
"""
SYNERGY_VERSION = "2026"
SYNERGY_WINDOW_TITLE = f"Autodesk Moldflow Insight {SYNERGY_VERSION}"

METADATA_DATE_FORMAT = "%Y-%m-%d"
METADATA_TIME_FORMAT = "%H:%M:%S"


TEMP_FILE_PREFIX = "temp_"
GENERATE_DATA_FUNCTION_PREFIX = "generate_"
GENERATE_DATA_FUNCTION_SUFFIX = "_data"
DATA_FILE_SUFFIX = "_data"
DATA_FILE_EXTENSION = ".json"


@dataclass
class Metadata:
    """Provenance record attached to a generated expected-data file.

    NOTE(review): a stray diff-residue line (``MESH_SUMMARY = ...``) was
    embedded in this class body in the pasted source; it belonged to the
    removed ``DataFile`` enum and has been dropped here.
    """

    # Date the data file was generated.
    date: datetime
    # Wall-clock time the data file was generated.
    time: datetime
    # Build identifier of the application used for generation — presumably
    # already truncated to major.minor by the caller; confirm upstream.
    build_number: str
    # Application version string used for generation.
    version: str

    def to_dict(self) -> dict:
        """Serialize to a JSON-ready dict using the module's date/time formats."""
        return {
            "date": self.date.strftime(METADATA_DATE_FORMAT),
            "time": self.time.strftime(METADATA_TIME_FORMAT),
            "build_number": self.build_number,
            "version": self.version,
        }


class FileSet(Enum):
    """
    FileSet enum defines the different categories of study files.

    SINGLE: Single Analyzed File for short tests
    # RAW: Unmeshed Unanalyzed Files
    MESHED: Meshed Unanalyzed Files
    # ANALYZED: Meshed Analyzed Files
    """

    # Values are the sub-directory names under STUDY_FILES_DIR.
    SINGLE = "single_study"
    # RAW = "raw_studies"
    MESHED = "meshed_studies"
    # ANALYZED = "analyzed_studies"


class ModelType(Enum):
Expand Down
63 changes: 41 additions & 22 deletions tests/api/integration_tests/data/data_generation/generate_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,25 @@
"""

import docopt
import logging
import sys
from datetime import datetime
from moldflow import Synergy
from tests.api.integration_tests.data.data_generation.generate_data_helper import generate_json
from tests.api.integration_tests.constants import FileSet, DataFile


@generate_json(json_file_name=DataFile.MESH_SUMMARY, file_set=FileSet.MESHED)
def generate_mesh_summary(synergy: Synergy = None):
from tests.api.integration_tests.data.data_generation.generate_data_helper import (
generate_json,
clean_up_temp_files,
get_generate_data_functions,
get_available_markers,
fetch_data_on_markers,
)
from tests.api.integration_tests.data.data_generation.generate_data_logger import (
generate_data_logger,
)
from tests.api.integration_tests.constants import FileSet
from tests.api.integration_tests.conftest import unzip_study_files


@generate_json(file_set=FileSet.MESHED)
def generate_mesh_summary_data(synergy: Synergy = None):
"""
Extract mesh summary data from a study.
Returns a dict with relevant properties.
Expand Down Expand Up @@ -57,34 +67,43 @@ def generate_mesh_summary(synergy: Synergy = None):
}


GENERATE_FUNCTIONS = {"mesh_summary": generate_mesh_summary}
@generate_json(file_set=None)
def generate_synergy_data(synergy: Synergy = None):
    """
    Collect version information from a Synergy instance.

    Returns a dict containing the Synergy version string and the build
    number truncated to its leading major.minor components.
    """
    major_minor = ".".join(synergy.build_number.split(".")[:2])
    return {
        "version": synergy.version,
        "build_number": major_minor,
    }


def main():
"""Main entry point for this script"""
args = docopt.docopt(__doc__)
DATE_TIME = datetime.now()

try:
markers = args.get('<markers>') or []
if len(markers) > 0:

for marker in markers:
generate_function = GENERATE_FUNCTIONS.get(marker)
unzip_study_files()
generate_functions = get_generate_data_functions(globals())

if generate_function:
generate_function()
else:
logging.error('FAILURE: No generate function found for marker: %s', marker)
return 1
for marker in markers:
if marker not in generate_functions.keys():
generate_data_logger.error(f'Invalid marker: {marker}')
generate_data_logger.error(get_available_markers(generate_functions))
return 0

if len(markers) > 0:
fetch_data_on_markers(markers, generate_functions, DATE_TIME)
else:
logging.info('Generating all data')

for generate_function in GENERATE_FUNCTIONS.values():
generate_function()
fetch_data_on_markers(generate_functions.keys(), generate_functions, DATE_TIME)

except Exception as err:
logging.error('FAILURE: %s', err, exc_info=True)
generate_data_logger.error(f'FAILURE: {err}')
clean_up_temp_files()
return 1

return 0
Expand Down
Loading