Merge pull request #180 from mlcommons/dev
20240816: regular sync of mlperf-inference & dev with main
ctuning-admin authored Aug 19, 2024
2 parents 736289a + 8d96640 commit dd0c358
Showing 64 changed files with 2,204 additions and 431 deletions.
7 changes: 2 additions & 5 deletions .github/workflows/test-mlperf-inference-abtf-poc.yml
@@ -37,7 +37,7 @@ jobs:
cm pull repo mlcommons@cm4abtf --branch=poc
- name: Test MLPerf Inference ABTF POC using ${{ matrix.backend }} on docker
run: |
cm run script --tags=run-abtf,inference,_poc-demo --adr.compiler.tags=gcc --quiet -v
cm run script --tags=run-abtf,inference,_poc-demo --test_query_count=5 --adr.compiler.tags=gcc --quiet -v
build2:
runs-on: ${{ matrix.os }}
@@ -48,9 +48,6 @@ jobs:
python-version: [ "3.8", "3.12" ]
backend: [ "pytorch" ]
implementation: [ "python" ]
exclude:
- os: ubuntu-24.04
python-version: "3.8"

steps:
- uses: actions/checkout@v3
@@ -92,4 +89,4 @@ jobs:
cm pull repo mlcommons@cm4abtf --branch=poc
- name: Test MLPerf Inference ABTF POC using ${{ matrix.backend }} on ${{ matrix.os }}
run: |
cm run script --tags=run-abtf,inference,_poc-demo --quiet --env.CM_MLPERF_LOADGEN_BUILD_FROM_SRC=off -v
cm run script --tags=run-abtf,inference,_poc-demo --quiet --env.CM_MLPERF_LOADGEN_BUILD_FROM_SRC=off -v
@@ -38,7 +38,6 @@ jobs:
run: |
python3 -m pip install cmind
cm pull repo --url=${{ github.event.pull_request.head.repo.html_url }} --checkout=${{ github.event.pull_request.head.ref }}
cm run script --quiet --tags=get,sys-utils-cm
- name: Test MLPerf Inference Bert (DeepSparse, TF, ONNX, PyTorch)
run: |
cm run script --tags=run,mlperf,inference,generate-run-cmds,_submission,_short --submitter="cTuning" --model=bert-99 --backend=${{ matrix.backend }} --device=cpu --scenario=Offline --test_query_count=5 --precision=${{ matrix.precision }} --target_qps=1 -v --quiet
1 change: 0 additions & 1 deletion .github/workflows/test-mlperf-inference-gptj.yml
@@ -32,7 +32,6 @@ jobs:
run: |
python3 -m pip install cmind
cm pull repo --url=${{ github.event.pull_request.head.repo.html_url }} --checkout=${{ github.event.pull_request.head.ref }}
cm run script --quiet --tags=get,sys-utils-cm
- name: Test MLPerf Inference GPTJ
run: |
cm run script --tags=run,mlperf,inference,generate-run-cmds,_submission,_short --submitter="cTuning" --model=gptj --backend=${{ matrix.backend }} --device=cpu --scenario=Offline --test_query_count=1 --precision=${{ matrix.precision }} --target_qps=1 --quiet
1 change: 0 additions & 1 deletion .github/workflows/test-mlperf-inference-resnet50.yml
@@ -42,7 +42,6 @@ jobs:
run: |
python3 -m pip install cmind
cm pull repo --url=${{ github.event.pull_request.head.repo.html_url }} --checkout=${{ github.event.pull_request.head.ref }}
cm run script --quiet --tags=get,sys-utils-cm
- name: Test MLPerf Inference ResNet50
run: |
cm run script --tags=run,mlperf,inference,generate-run-cmds,_submission,_short --submitter="cTuning" --hw_name=default --model=resnet50 --implementation=${{ matrix.implementation }} --backend=${{ matrix.backend }} --device=cpu --scenario=Offline --test_query_count=500 --target_qps=1 -v --quiet
1 change: 0 additions & 1 deletion .github/workflows/test-mlperf-inference-retinanet.yml
@@ -35,7 +35,6 @@ jobs:
run: |
python3 -m pip install cmind
cm pull repo --url=${{ github.event.pull_request.head.repo.html_url }} --checkout=${{ github.event.pull_request.head.ref }}
cm run script --quiet --tags=get,sys-utils-cm
- name: Test MLPerf Inference Retinanet using ${{ matrix.backend }}
run: |
cm run script --tags=run,mlperf,inference,generate-run-cmds,_submission,_short --submitter="cTuning" --hw_name=default --model=retinanet --implementation=${{ matrix.implementation }} --backend=${{ matrix.backend }} --device=cpu --scenario=Offline --test_query_count=5 --adr.compiler.tags=gcc --quiet -v --target_qps=1
35 changes: 24 additions & 11 deletions automation/script/module.py
@@ -1329,6 +1329,8 @@ def _run(self, i):
if "add_deps_recursive" in versions_meta:
self._merge_dicts_with_tags(add_deps_recursive, versions_meta['add_deps_recursive'])

env['CM_TMP_CURRENT_SCRIPT_PATH'] = path

# Run chain of docker dependencies if current run cmd is from inside a docker container
docker_deps = []
if i.get('docker_run_deps'):
@@ -1446,6 +1448,7 @@ def _run(self, i):
if pip_version_string != '':
logging.debug(recursion_spaces+' # potential PIP version string (if needed): '+pip_version_string)


# Check if pre-process and detect
if 'preprocess' in dir(customize_code) and not fake_run:

@@ -2915,6 +2918,10 @@ def _run_deps(self, deps, clean_env_keys_deps, env, state, const, const_state, a
if from_cache and not d.get("dynamic", None):
continue

if d.get('env'):
r = update_env_with_values(d['env'], False, env) #to update env local to a dependency
if r['return']>0: return r

update_tags_from_env_with_prefix = d.get("update_tags_from_env_with_prefix", {})
for t in update_tags_from_env_with_prefix:
for key in update_tags_from_env_with_prefix[t]:
@@ -3034,9 +3041,6 @@ def _run_deps(self, deps, clean_env_keys_deps, env, state, const, const_state, a

utils.merge_dicts({'dict1':ii, 'dict2':d, 'append_lists':True, 'append_unique':True})

r = update_env_with_values(ii['env']) #to update env local to a dependency
if r['return']>0: return r

r = self.cmind.access(ii)
if r['return']>0: return r

@@ -4274,9 +4278,13 @@ def find_cached_script(i):
dependent_cached_path = cached_script.meta.get('dependent_cached_path', '')
if dependent_cached_path:
if not os.path.exists(dependent_cached_path):
#Need to rm this cache entry
skip_cached_script = True
continue
#TODO Need to restrict the below check to within container env
i['tmp_dep_cached_path'] = dependent_cached_path
r = utils.call_internal_module(self_obj, __file__, 'module_misc', 'get_container_path_script', i)
if not os.path.exists(r['value_env']):
#Need to rm this cache entry
skip_cached_script = True
continue

if not skip_cached_script:
cached_script_version = cached_script.meta.get('version', '')
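A minimal sketch of the cache-validation rule introduced in this hunk: an entry is treated as stale only when its dependent path is missing both on the host and at the container-mapped location. The helper to_container_path is a hypothetical stand-in for the get_container_path_script call, and the example paths are made up.

import os

def is_cache_entry_stale(dependent_cached_path, to_container_path):
    # Keep the cache entry if the dependent path exists on the host or at the
    # container-mapped location; otherwise mark it stale so it gets skipped.
    # (The diff leaves a TODO to restrict the second check to container runs.)
    if not dependent_cached_path:
        return False
    if os.path.exists(dependent_cached_path):
        return False
    return not os.path.exists(to_container_path(dependent_cached_path))

# Illustrative use with a made-up host-to-container path mapping:
stale = is_cache_entry_stale('/home/user/CM/repos/local/cache/abc123',
                             lambda p: p.replace('/home/user', '/home/cmuser'))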
@@ -4351,7 +4359,7 @@ def any_enable_or_skip_script(meta, env):
return False

############################################################################################################
def update_env_with_values(env, fail_on_not_found=False):
def update_env_with_values(env, fail_on_not_found=False, extra_env={}):
"""
Update any env key used as part of values in meta
"""
@@ -4382,14 +4390,19 @@ def update_env_with_values(env, fail_on_not_found=False):
continue

for tmp_value in tmp_values:
if tmp_value not in env and fail_on_not_found:
if tmp_value not in env and tmp_value not in extra_env and fail_on_not_found:
return {'return':1, 'error':'variable {} is not in env'.format(tmp_value)}
found_env = {}
if tmp_value in env:
found_env = env
elif tmp_value in extra_env:
found_env = extra_env
if found_env:
if type(value) == str:
value = value.replace("<<<"+tmp_value+">>>", str(env[tmp_value]))
value = value.replace("<<<"+tmp_value+">>>", str(found_env[tmp_value]))
elif type(value) == list:
for i,val in enumerate(value):
value[i] = value[i].replace("<<<"+tmp_value+">>>", str(env[tmp_value]))
value[i] = value[i].replace("<<<"+tmp_value+">>>", str(found_env[tmp_value]))

env[key] = value
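A simplified re-implementation, for illustration only, of the <<<KEY>>> placeholder substitution shown above, including the new extra_env fallback; the function name, sample keys, and values are made up and are not the repository code.

import re

def resolve_placeholders(env, extra_env=None, fail_on_not_found=False):
    # Substitute <<<KEY>>> markers in string values, looking KEY up first in
    # env and then in extra_env, mirroring the fallback order in the diff.
    extra_env = extra_env or {}
    pattern = re.compile(r'<<<(.*?)>>>')
    for key, value in env.items():
        names = pattern.findall(value) if isinstance(value, str) else []
        for name in names:
            if name in env:
                value = value.replace('<<<' + name + '>>>', str(env[name]))
            elif name in extra_env:
                value = value.replace('<<<' + name + '>>>', str(extra_env[name]))
            elif fail_on_not_found:
                return {'return': 1, 'error': 'variable {} is not in env'.format(name)}
        env[key] = value
    return {'return': 0}

# Example: CM_RUN_DIR is resolved from the per-dependency extra_env.
env = {'CM_RUN_CMD': 'run.sh --out=<<<CM_RUN_DIR>>>'}
resolve_placeholders(env, extra_env={'CM_RUN_DIR': '/tmp/out'})
print(env['CM_RUN_CMD'])  # run.sh --out=/tmp/out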

@@ -4618,7 +4631,7 @@ def prepare_and_run_script_with_postprocessing(i, postprocess="postprocess"):
repo_to_report = 'https://github.com/'+script_repo_alias.replace('@','/')+'/issues'

if repo_to_report == '':
repo_to_report = 'https://github.com/mlcommons/ck/issues'
repo_to_report = 'https://github.com/mlcommons/cm4mlops/issues'

note = '''
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
9 changes: 8 additions & 1 deletion automation/script/module_misc.py
@@ -1422,6 +1422,7 @@ def dockerfile(i):

variations = meta.get('variations', {})
docker_settings = meta.get('docker', {})
docker_settings['dockerfile_env'] = dockerfile_env
state['docker'] = docker_settings
add_deps_recursive = i.get('add_deps_recursive', {})

@@ -1430,6 +1431,7 @@
return r

docker_settings = state['docker']
dockerfile_env = docker_settings['dockerfile_env']

if not docker_settings.get('run', True) and not i.get('docker_run_override', False):
print("docker.run set to False in _cm.json")
@@ -1598,6 +1600,11 @@ def get_host_path(value):

return value

def get_container_path_script(i):
tmp_dep_cached_path = i['tmp_dep_cached_path']
value_mnt,value_env = get_container_path(tmp_dep_cached_path)
return {'return': 0, 'value_mnt': value_mnt, 'value_env': value_env}

def get_container_path(value):
path_split = value.split(os.sep)
if len(path_split) == 1:
@@ -1739,7 +1746,7 @@ def docker(i):

image_repo = i.get('docker_image_repo','')
if image_repo == '':
image_repo = 'cknowledge'
image_repo = 'local'

# Host system needs to have docker
r = self_module.cmind.access({'action':'run',