From dde159653c08a20cac7278525f35c557d4ec72f4 Mon Sep 17 00:00:00 2001
From: Arjun Suresh
Date: Tue, 22 Oct 2024 02:26:44 +0530
Subject: [PATCH 1/2] Do not pass mlperf_conf for the reference implementation
 if version >=4.1.1

---
 .../customize.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/script/app-mlperf-inference-mlcommons-python/customize.py b/script/app-mlperf-inference-mlcommons-python/customize.py
index 0376293484..76b5352f6a 100644
--- a/script/app-mlperf-inference-mlcommons-python/customize.py
+++ b/script/app-mlperf-inference-mlcommons-python/customize.py
@@ -70,10 +70,15 @@ def preprocess(i):
 
     x="" if os_info['platform'] == 'windows' else "'"
 
-    if "llama2-70b" in env['CM_MODEL'] or "mixtral-8x7b" in env["CM_MODEL"]:
-        env['CM_MLPERF_LOADGEN_EXTRA_OPTIONS'] += " --mlperf-conf " + x+ env['CM_MLPERF_CONF'] + x
+
+    inference_src_version = env.get('CM_MLPERF_INFERENCE_SOURCE_VERSION', '')
+    if inference_src_version and inference_src_version >= (4,1,1):
+        pass # mlperf_conf is automatically loaded by the loadgen
     else:
-        env['CM_MLPERF_LOADGEN_EXTRA_OPTIONS'] += " --mlperf_conf "+ x + env['CM_MLPERF_CONF'] + x
+        if "llama2-70b" in env['CM_MODEL'] or "mixtral-8x7b" in env["CM_MODEL"]:
+            env['CM_MLPERF_LOADGEN_EXTRA_OPTIONS'] += " --mlperf-conf " + x+ env['CM_MLPERF_CONF'] + x
+        else:
+            env['CM_MLPERF_LOADGEN_EXTRA_OPTIONS'] += " --mlperf_conf "+ x + env['CM_MLPERF_CONF'] + x
 
     if env.get('CM_NETWORK_LOADGEN', '') != "lon" and env.get('CM_MLPERF_INFERENCE_API_SERVER','')=='' and "llama2-70b" not in env['CM_MODEL']:
         env['MODEL_DIR'] = env.get('CM_ML_MODEL_PATH')

From 6d0f66371334be66f448e87e4958031c9b68f018 Mon Sep 17 00:00:00 2001
From: Arjun Suresh
Date: Tue, 22 Oct 2024 02:33:34 +0530
Subject: [PATCH 2/2] Fix syntax

---
 script/get-mlperf-inference-src/customize.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/script/get-mlperf-inference-src/customize.py b/script/get-mlperf-inference-src/customize.py
index de2b83b9cc..e8e94ae0a2 100644
--- a/script/get-mlperf-inference-src/customize.py
+++ b/script/get-mlperf-inference-src/customize.py
@@ -102,7 +102,7 @@ def postprocess(i):
             env['+PYTHONPATH'].append(os.path.join(env['CM_MLPERF_INFERENCE_CLASSIFICATION_AND_DETECTION_PATH'], 'python'))
 
     if os.path.exists(os.path.join(inference_root, "loadgen", "VERSION")):
-        with open(os.path.join(inference_root, "loadgen", "VERSION") as f:
+        with open(os.path.join(inference_root, "loadgen", "VERSION")) as f:
             version_info = f.read()
         env['CM_MLPERF_INFERENCE_SOURCE_VERSION'] = version_info
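
Taken together, the two patches gate the --mlperf-conf loadgen option on the inference source version: from loadgen 4.1.1 onwards the configuration is loaded automatically by the loadgen, so the flag only needs to be appended for older checkouts. Below is a minimal, self-contained sketch of that gate, assuming the VERSION file holds a dotted numeric string such as "4.1.1"; the helper names (parse_version, needs_mlperf_conf_flag) are illustrative and are not part of the CM scripts.

```python
# Illustrative sketch (not part of the CM scripts): evaluating the version gate
# from PATCH 1/2 against the loadgen VERSION file read in PATCH 2/2.
# Assumes the VERSION file contains a dotted numeric string such as "4.1.1".
import os


def parse_version(version_string):
    """Turn a string like '4.1.1' into (4, 1, 1) so versions compare numerically."""
    return tuple(int(part) for part in version_string.strip().split("."))


def needs_mlperf_conf_flag(inference_root):
    """Return True when --mlperf-conf must still be passed explicitly (loadgen older than 4.1.1)."""
    version_file = os.path.join(inference_root, "loadgen", "VERSION")
    if not os.path.exists(version_file):
        return True  # unknown version: keep the old behaviour and pass the flag
    with open(version_file) as f:
        version_info = f.read()
    try:
        return parse_version(version_info) < (4, 1, 1)
    except ValueError:
        return True  # non-numeric version string: fall back to passing the flag
```

Converting the string to an integer tuple is what makes the (4, 1, 1) comparison meaningful: comparing the raw version string directly against a tuple raises a TypeError in Python 3, and comparing version strings lexically would order "4.10.0" before "4.9.0".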