# Issue #542 - folder structure rearrange - submission generation #574
Changes from all commits
First file: the submission generation script.

```diff
@@ -93,18 +93,18 @@ def generate_submission(env, state, inp, submission_division):
     if system_meta_default['framework'] == '':
         system_meta_default['framework'] = "pytorch"
 
-    system_meta = {}
+    system_meta_tmp = {}
     if 'CM_MLPERF_SUBMISSION_SYSTEM_TYPE' in env:
-        system_meta['system_type'] = env['CM_MLPERF_SUBMISSION_SYSTEM_TYPE']
+        system_meta_tmp['system_type'] = env['CM_MLPERF_SUBMISSION_SYSTEM_TYPE']
 
     if submission_division != "":
-        system_meta['division'] = submission_division
+        system_meta_tmp['division'] = submission_division
         division = submission_division
     else:
         division = system_meta_default['division']
 
     if 'CM_MLPERF_SUBMISSION_CATEGORY' in env:
-        system_meta['system_type'] = env['CM_MLPERF_SUBMISSION_CATEGORY'].replace("-", ",")
+        system_meta_tmp['system_type'] = env['CM_MLPERF_SUBMISSION_CATEGORY'].replace("-", ",")
 
     duplicate= (env.get('CM_MLPERF_DUPLICATE_SCENARIO_RESULTS', 'no') in ["yes", "True"])
@@ -121,23 +121,23 @@ def generate_submission(env, state, inp, submission_division):
     # Check submitter
     if env.get('CM_MLPERF_SUBMITTER'):
         submitter = env['CM_MLPERF_SUBMITTER']
-        system_meta['submitter'] = submitter
+        system_meta_tmp['submitter'] = submitter
     else:
         submitter = system_meta_default['submitter']
         env['CM_MLPERF_SUBMITTER'] = submitter
 
     print('* MLPerf inference submitter: {}'.format(submitter))
 
     if 'Collective' not in system_meta_default.get('sw_notes'):
-        system_meta['sw_notes'] = "Automated by MLCommons CM v{}. ".format(cmind.__version__) + system_meta_default['sw_notes']
+        system_meta_tmp['sw_notes'] = "Automated by MLCommons CM v{}. ".format(cmind.__version__) + system_meta_default['sw_notes']
 
     if env.get('CM_MLPERF_SUT_SW_NOTES_EXTRA','') != '':
-        sw_notes = f"{system_meta['sw_notes']} {env['CM_MLPERF_SUT_SW_NOTES_EXTRA']}"
-        system_meta['sw_notes'] = sw_notes
+        sw_notes = f"{system_meta_tmp['sw_notes']} {env['CM_MLPERF_SUT_SW_NOTES_EXTRA']}"
+        system_meta_tmp['sw_notes'] = sw_notes
 
     if env.get('CM_MLPERF_SUT_HW_NOTES_EXTRA','') != '':
-        hw_notes = f"{system_meta['hw_notes']} {env['CM_MLPERF_SUT_HW_NOTES_EXTRA']}"
-        system_meta['hw_notes'] = hw_notes
+        hw_notes = f"{system_meta_tmp['hw_notes']} {env['CM_MLPERF_SUT_HW_NOTES_EXTRA']}"
+        system_meta_tmp['hw_notes'] = hw_notes
 
     path_submission=os.path.join(path_submission_division, submitter)
     if not os.path.isdir(path_submission):
@@ -149,6 +149,8 @@ def generate_submission(env, state, inp, submission_division):
     code_path = os.path.join(path_submission, "code")
 
     for res in results:
+        system_meta = {}
+        system_meta.update(system_meta_tmp)
         result_path = os.path.join(results_dir, res)
         # variable to check whether the sut_meta.json is present in the root folder
         saved_system_meta_file_path = os.path.join(result_path, 'system_meta.json')
@@ -295,6 +297,9 @@ def generate_submission(env, state, inp, submission_division):
             if not all([os.path.exists(os.path.join(result_scenario_path, "performance", "run_1", f)) for f in files_to_check]):
                 continue
 
+            if not os.path.isdir(measurement_scenario_path):
+                os.makedirs(measurement_scenario_path)
+
             for mode in modes:
                 result_mode_path = os.path.join(result_scenario_path, mode)
                 submission_mode_path = os.path.join(submission_scenario_path, mode)
@@ -307,9 +312,6 @@ def generate_submission(env, state, inp, submission_division):
                 if os.path.exists(submission_results_path):
                     shutil.rmtree(submission_results_path)
 
-                if not os.path.isdir(submission_measurement_path):
-                    os.makedirs(submission_measurement_path)
-
                 if mode=='performance':
 
                     if os.path.exists(os.path.join(result_mode_path, "power")):
@@ -342,41 +344,60 @@ def generate_submission(env, state, inp, submission_division):
                     submission_results_path=os.path.join(submission_mode_path, 'run_1')
 
-                if not os.path.exists(saved_system_meta_file_path):
-                    saved_system_meta_file_path = os.path.join(result_mode_path, "system_meta.json")
-                if os.path.exists(saved_system_meta_file_path):
-                    with open(saved_system_meta_file_path, "r") as f:
-                        saved_system_meta = json.load(f)
-                        for key in list(saved_system_meta):
-                            if saved_system_meta[key]==None or str(saved_system_meta[key]).strip() == '':
-                                del(saved_system_meta[key])
-                        system_meta = {**saved_system_meta, **system_meta} #override the saved meta with the user inputs
+                if os.path.exists(os.path.join(result_mode_path, "system_meta.json")):
+                    saved_system_meta_file_path = os.path.join(result_mode_path, "system_meta.json")
                 else:
-                    print("WARNING: system_meta.json was not found in the performance run directory inside the results folder. CM is automatically creating one using the system defaults. Please modify them as required.")
-                system_meta = {**system_meta_default, **system_meta} #add any missing fields from the defaults, if system_meta.json is not detected, default one will be written
+                    print("WARNING: system_meta.json was not found in the SUT root or mode directory inside the results folder. CM is automatically creating one using the system defaults. Please modify them as required.")
+                if os.path.exists(saved_system_meta_file_path):
+                    with open(saved_system_meta_file_path, "r") as f:
+                        saved_system_meta = json.load(f)
+                        for key in list(saved_system_meta):
+                            if saved_system_meta[key]==None or str(saved_system_meta[key]).strip() == '':
+                                del(saved_system_meta[key])
+                        if saved_system_meta["division"] != "" and submission_division == "":
+                            system_meta["division"] = saved_system_meta["division"]
+                        system_meta = {**saved_system_meta, **system_meta} #override the saved meta with the user inputs
+                system_meta = {**system_meta_default, **system_meta} #add any missing fields from the defaults, if system_meta.json is not detected, default one will be written
+                print(system_meta)
+                # check if framework version is there in system_meta, if not try to fill it from sut_info
+                if system_meta['framework'] == "":
+                    system_meta['framework'] = sut_info.get('framework', '') + sut_info.get('framework_version', '')
+                    if system_meta['framework'] == "":
+                        print("WARNING: framework field could not be filled from system_meta.json or sut_info.json. This will trigger error in submission checker")
 
                 if not os.path.isdir(submission_results_path):
                     os.makedirs(submission_results_path)
 
                 #if division == "closed" and not os.path.isdir(submission_compliance_path):
                 #    os.makedirs(submission_compliance_path)
 
                 mlperf_inference_conf_path = os.path.join(result_mode_path, "mlperf.conf")
                 if os.path.exists(mlperf_inference_conf_path):
                     shutil.copy(mlperf_inference_conf_path, os.path.join(submission_measurement_path, 'mlperf.conf'))
-                user_conf_path = os.path.join(result_mode_path, "user.conf")
+                user_conf_path = os.path.join(result_scenario_path, "user.conf")
                 if os.path.exists(user_conf_path):
-                    shutil.copy(user_conf_path, os.path.join(submission_measurement_path, 'user.conf'))
-                measurements_json_path = os.path.join(result_mode_path, "measurements.json")
-                # get model precision
-                model_precision = "fp32"
+                    shutil.copy(user_conf_path, os.path.join(measurement_scenario_path, 'user.conf'))
+                else:
+                    user_conf_path = os.path.join(result_mode_path, "user.conf")
+                    if os.path.exists(user_conf_path):
+                        shutil.copy(user_conf_path, os.path.join(submission_measurement_path, 'user.conf'))
+                    else:
+                        if mode.lower() == "performance":
+                            return {"return":1, "error":f"user.conf missing in both paths: {user_conf_path} and {os.path.join(result_scenario_path, 'user.conf')}"}
+
+                measurements_json_path = os.path.join(result_scenario_path, "measurements.json")
+                target_measurement_json_path = measurement_scenario_path
+                if not os.path.exists(measurements_json_path):
+                    measurements_json_path = os.path.join(result_mode_path, "measurements.json")
+                    target_measurement_json_path = submission_measurement_path
+
                 if os.path.exists(measurements_json_path):
                     with open(measurements_json_path, "r") as f:
                         measurements_json = json.load(f)
                         model_precision = measurements_json.get("weight_data_types", "fp32")
-                if os.path.exists(measurements_json_path):
                     # This line can be removed once the PR in the inference repo is merged.
-                    shutil.copy(measurements_json_path, os.path.join(submission_measurement_path, sub_res+'.json'))
-                    shutil.copy(measurements_json_path, os.path.join(submission_measurement_path, 'model-info.json'))
+                    shutil.copy(measurements_json_path, os.path.join(target_measurement_json_path, sub_res+'.json'))
+                    shutil.copy(measurements_json_path, os.path.join(target_measurement_json_path, 'model-info.json'))
+                else:
+                    if mode.lower() == "performance":
+                        return {"return":1, "error":f"measurements.json missing in both paths: {measurements_json_path} and {os.path.join(result_scenario_path, 'user.conf')}"}
 
                 files = []
                 readme = False
```
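For readers following the meta-merging change above: the precedence comes from Python's `**` unpacking, where later keys win. A minimal sketch of that merge order (the field values here are made up for illustration):

```python
# Later **unpacking wins: defaults < saved system_meta.json < user inputs.
system_meta_default = {"division": "open", "submitter": "MLCommons", "framework": ""}
saved_system_meta = {"division": "closed", "framework": "onnxruntime"}
system_meta = {"submitter": "MyOrg"}  # user-supplied fields collected earlier

# Override the saved meta with the user inputs...
system_meta = {**saved_system_meta, **system_meta}
# ...then fill any still-missing fields from the defaults.
system_meta = {**system_meta_default, **system_meta}

print(system_meta)
# {'division': 'closed', 'submitter': 'MyOrg', 'framework': 'onnxruntime'}
```

This is also why the per-result reset (`system_meta = {}` followed by `system_meta.update(system_meta_tmp)`) matters: each result starts from the shared user-input template instead of inheriting keys merged in for a previous result.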
**Contributor** commented on the `# This line can be removed once the PR in the inference repo is merged.` line:

> PR is merged. So, we can remove this line.
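The relocated `user.conf` and `measurements.json` lookups follow the same pattern: try the scenario directory first (the new layout), fall back to the per-mode directory (the old layout), and treat a miss as fatal only for performance runs. A sketch of that lookup order, using a hypothetical `find_result_file` helper (the actual script inlines this logic):

```python
import os

def find_result_file(result_scenario_path, result_mode_path, filename):
    # Hypothetical helper: search the scenario dir (new layout) first,
    # then the mode dir (old layout); return None if neither has the file.
    for base in (result_scenario_path, result_mode_path):
        candidate = os.path.join(base, filename)
        if os.path.exists(candidate):
            return candidate
    return None

# Usage mirroring the diff: a missing file only aborts performance mode.
# user_conf = find_result_file(scenario_dir, mode_dir, "user.conf")
# if user_conf is None and mode.lower() == "performance":
#     return {"return": 1, "error": "user.conf missing in both paths"}
```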
Second file: the variation config (YAML).

```diff
@@ -358,9 +358,10 @@ variations:
     add_deps_recursive:
       submission-checker:
         tags: _short-run
 
     default: 'true'
     env:
       CM_MLPERF_SUBMISSION_DIVISION: open
+      CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR: off
       CM_MLPERF_SUBMISSION_GENERATION_STYLE: short
     group: submission-generation-style
```

**Contributor** commented on the added `CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR` line:

> There's already an `env` in this variation.

**Contributor (Author)** replied:

> Missed due to the space in between... fixed :)
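For context on the YAML change: keys under a variation's `env:` are exported into the `env` dictionary that the Python customize code receives, so the new flag can gate the preprocessor step. A rough sketch of how such a flag is typically consumed (the key name comes from the diff; the consuming function is illustrative, not the repo's actual code):

```python
def submission_preprocessor_enabled(env):
    # CM env values arrive as strings; treat common falsy spellings as "off".
    value = str(env.get('CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR', 'on')).lower()
    return value not in ("off", "no", "false", "0")

print(submission_preprocessor_enabled({'CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR': 'off'}))  # False
```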
**Contributor:** needed here?

**Contributor (Author):** Hi @arjunsuresh, I have placed it above the for loop, as `submission_measurement_path` and `measurement_scenario_path` are the same.

**Contributor:** okay 👍