Merged
4 changes: 2 additions & 2 deletions .github/workflows/test-nvidia-mlperf-implementation.yml
@@ -2,7 +2,7 @@ name: MLPerf Inference Nvidia implementations

on:
schedule:
- cron: "31 2 * * *" #to be adjusted
- cron: "29 20 * * *" #to be adjusted

jobs:
build_nvidia:
@@ -21,5 +21,5 @@ jobs:
source gh_action/bin/activate
export CM_REPOS=$HOME/GH_CM
pip install --upgrade cm4mlops
- cm run script --tags=run-mlperf,inference,_all-scenarios,_submission,_full,_r4.1-dev --execution_mode=valid --gpu_name=rtx_4090 --model=${{ matrix.model }} --submitter="MLCommons" --hw_name=RTX4090x2 --implementation=nvidia --backend=tensorrt --category=datacenter,edge --division=closed --docker_dt=yes --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --adr.compiler.tags=gcc --device=cuda --use_dataset_from_host=yes --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --clean --docker --quiet
+ cm run script --tags=run-mlperf,inference,_all-scenarios,_submission,_full,_r4.1-dev --preprocess_submission=yes --execution_mode=valid --gpu_name=rtx_4090 --pull_changes=yes --model=${{ matrix.model }} --submitter="MLCommons" --hw_name=RTX4090x2 --implementation=nvidia --backend=tensorrt --category=datacenter,edge --division=closed --docker_dt=yes --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --adr.compiler.tags=gcc --device=cuda --use_dataset_from_host=yes --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --clean --docker --quiet
cm run script --tags=push,github,mlperf,inference,submission --repo_url=https://github.com/gateoverflow/mlperf_inference_unofficial_submissions_v5.0 --repo_branch=main --commit_message="Results from GH action on NVIDIA_RTX4090x2" --quiet --submission_dir=$HOME/gh_action_submissions --hw_name=RTX4090x2
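For reference, the five cron fields are minute, hour, day-of-month, month and day-of-week, so the new schedule "29 20 * * *" triggers the job once a day at 20:29 UTC (GitHub Actions evaluates cron in UTC). The two flags added to the run command, --pull_changes=yes and --preprocess_submission=yes, presumably pull the latest changes of the repositories used by the run and invoke the submission preprocessor before results are pushed; that reading is inferred from the flag names on this command line rather than from separate documentation.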
2 changes: 1 addition & 1 deletion .github/workflows/test-scc24-sdxl.yaml
@@ -2,7 +2,7 @@ name: MLPerf inference SDXL (SCC)

on:
schedule:
- cron: "5 2 * * *"
- cron: "20 14 * * *"

jobs:
build_reference:
4 changes: 4 additions & 0 deletions script/app-mlperf-inference-mlcommons-python/customize.py
@@ -290,6 +290,10 @@ def get_run_cmd_reference(os_info, env, scenario_extra_options, mode_extra_optio

elif "stable-diffusion-xl" in env['CM_MODEL']:
env['RUN_DIR'] = os.path.join(env['CM_MLPERF_INFERENCE_SOURCE'], "text_to_image")
+ if env.get('+PYTHONPATH', '') == '':
+     env['+PYTHONPATH'] = []
+ env['+PYTHONPATH'].append(os.path.join(env['CM_MLPERF_INFERENCE_SOURCE'], "text_to_image", "tools", "fid"))

backend = env['CM_MLPERF_BACKEND']
device = env['CM_MLPERF_DEVICE'] if env['CM_MLPERF_DEVICE'] not in [ "gpu", "rocm" ] else "cuda"
max_batchsize = env.get('CM_MLPERF_LOADGEN_MAX_BATCHSIZE', '1')
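For context, CM treats env keys that start with "+" as list-valued entries that are later merged into PATH-like variables, so the hunk above first makes sure the key holds a list and then appends the tools/fid directory. A minimal standalone sketch of that pattern, assuming the entries are ultimately joined with os.pathsep onto PYTHONPATH (the checkout path below is hypothetical):

```python
import os

def append_python_path(env, path):
    # Mirror the guard used in customize.py: create the list only if the key is
    # missing or still an empty string, then append the new entry.
    if env.get('+PYTHONPATH', '') == '':
        env['+PYTHONPATH'] = []
    env['+PYTHONPATH'].append(path)
    return env

# Hypothetical source checkout path, for illustration only.
env = {'CM_MLPERF_INFERENCE_SOURCE': '/opt/mlperf/inference'}
append_python_path(env, os.path.join(env['CM_MLPERF_INFERENCE_SOURCE'],
                                     "text_to_image", "tools", "fid"))

# Assumption: CM ultimately joins these entries with os.pathsep and prepends them to PYTHONPATH.
print(os.pathsep.join(env['+PYTHONPATH']))
```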
2 changes: 1 addition & 1 deletion script/process-mlperf-accuracy/customize.py
@@ -92,7 +92,7 @@ def preprocess(i):


elif dataset == "coco2014":
- env['+PYTHONPATH'] = [ os.path.join(env['CM_MLPERF_INFERENCE_SOURCE'], "text_to_image", "tools") ]
+ env['+PYTHONPATH'] = [ os.path.join(env['CM_MLPERF_INFERENCE_SOURCE'], "text_to_image", "tools") , os.path.join(env['CM_MLPERF_INFERENCE_SOURCE'], "text_to_image", "tools", "fid") ]
extra_options = ""

if env.get('CM_SDXL_STATISTICS_FILE_PATH', '') != '':
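With this change the coco2014 (SDXL) accuracy path puts both text_to_image/tools and text_to_image/tools/fid on PYTHONPATH, so the FID helpers can be imported as top-level modules alongside the existing COCO tooling. A small hedged check of that assumption; the module name fid_score is a guess based on the directory layout, not something confirmed by this diff:

```python
import importlib.util
import os
import sys

# Hypothetical checkout path; in a real run this comes from CM_MLPERF_INFERENCE_SOURCE.
inference_src = os.environ.get('CM_MLPERF_INFERENCE_SOURCE', '/opt/mlperf/inference')

# Put both tool directories on the import path, as the hunk above does via '+PYTHONPATH'.
for parts in (("text_to_image", "tools"), ("text_to_image", "tools", "fid")):
    sys.path.insert(0, os.path.join(inference_src, *parts))

# Prints True only if a module named 'fid_score' (assumed name) is importable from those dirs.
print(importlib.util.find_spec("fid_score") is not None)
```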
1 change: 1 addition & 0 deletions script/run-mlperf-inference-app/customize.py
@@ -203,6 +203,7 @@ def preprocess(i):
inp = {}
if str(docker_dt).lower() in ["yes", "true", "1"]:
env['CM_DOCKER_REUSE_EXISTING_CONTAINER'] = 'no' # turning it off for the first run and after that we turn it on
+ env['CM_DOCKER_DETACHED_MODE'] = 'yes'

if env.get('CM_DOCKER_IMAGE_NAME', '') != '':
docker_extra_input['docker_image_name'] = env['CM_DOCKER_IMAGE_NAME']
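The surrounding code treats CM inputs as strings, so boolean options are compared against "yes"/"true"/"1" before the docker env vars are set. A minimal standalone sketch of the detached-mode handling shown in this hunk; the is_true helper name is made up for illustration:

```python
def is_true(value):
    # CM-style flags arrive as strings such as 'yes', 'true' or '1'.
    return str(value).lower() in ("yes", "true", "1")

env = {}
docker_dt = "yes"  # e.g. passed as --docker_dt=yes on the cm run script command line

if is_true(docker_dt):
    # First run should not reuse an existing container, and the container runs detached.
    env['CM_DOCKER_REUSE_EXISTING_CONTAINER'] = 'no'
    env['CM_DOCKER_DETACHED_MODE'] = 'yes'

print(env)
```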
3 changes: 2 additions & 1 deletion tests/script/test_docker.py
@@ -12,7 +12,8 @@
'image_name':'cm-script-app-image-classification-onnx-py',
'env': {
'CM_DOCKER_RUN_SCRIPT_TAGS': 'app,image-classification,onnx,python',
- 'CM_MLOPS_REPO': 'ctuning@mlcommons-ck',
+ 'CM_MLOPS_REPO': 'mlcommons@cm4mlops',
+ 'CM_MLOPS_REPO_BRANCH': 'mlperf-inference',
'CM_DOCKER_IMAGE_BASE': 'ubuntu:22.04'
},
'quiet': 'yes'
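For context, a dict like the one above is typically passed to cmind's access() API, which returns a dict whose 'return' field signals success or failure; this PR only swaps the repo entries to mlcommons@cm4mlops on the mlperf-inference branch. A hedged sketch of such a call; the 'docker' action is an assumption about what the test exercises, since the actual call sits below the fold:

```python
import cmind

# Assumption: the test feeds an input dict like the one above to cmind.access();
# only the repo-related env entries changed in this PR.
r = cmind.access({
    'action': 'docker',            # assumed action of the 'script' automation
    'automation': 'script',
    'tags': 'app,image-classification,onnx,python',
    'image_name': 'cm-script-app-image-classification-onnx-py',
    'env': {
        'CM_DOCKER_RUN_SCRIPT_TAGS': 'app,image-classification,onnx,python',
        'CM_MLOPS_REPO': 'mlcommons@cm4mlops',
        'CM_MLOPS_REPO_BRANCH': 'mlperf-inference',
        'CM_DOCKER_IMAGE_BASE': 'ubuntu:22.04'
    },
    'quiet': 'yes'
})

# cmind convention: non-zero 'return' signals an error with details in 'error'.
if r['return'] > 0:
    raise Exception(r['error'])
```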