---
inference-optimization-inc-ipex-quantization-notebook-${PYTHON_VERSION:-3.9}-cpu:
  cmd: papermill --log-output jupyter/inc-ipex-quantization/quantize_with_inc.ipynb result.ipynb -k pytorch-cpu --cwd jupyter/inc-ipex-quantization
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
  notebook: True
inference-optimization-inc-ipex-quantization-notebook-${PYTHON_VERSION:-3.9}-gpu:
  cmd: papermill --log-output jupyter/inc-ipex-quantization/quantize_with_inc.ipynb result.ipynb -k pytorch-gpu --cwd jupyter/inc-ipex-quantization
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
  notebook: True
  device: ["/dev/dri"]

inference-optimization-inc-itex-notebook-${PYTHON_VERSION:-3.9}-cpu:
  cmd: papermill --log-output jupyter/inc-itex/inc_sample_tensorflow.ipynb result.ipynb -k tensorflow-cpu --cwd jupyter/inc-itex
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
  notebook: True

# Status: Commented due to out of resources error
# inference-optimization-inc-itex-notebook-${PYTHON_VERSION:-3.9}-gpu:
#   cmd: papermill --log-output jupyter/inc-itex/inc_sample_tensorflow.ipynb result.ipynb -k tensorflow-gpu --cwd jupyter/inc-itex
-#   img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+#   img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
#   notebook: True

inference-optimization-inc-tensorflow-${PYTHON_VERSION:-3.9}-cpu:
  cmd: conda run -n tensorflow-cpu sample-tests/neural_compressor/tensorflow/run.sh cpu
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
inference-optimization-inc-tensorflow-${PYTHON_VERSION:-3.9}-gpu:
  cmd: conda run -n tensorflow-gpu sample-tests/neural_compressor/tensorflow/run.sh gpu
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
  device: ["/dev/dri"]

inference-optimization-inc-torch-${PYTHON_VERSION:-3.9}-cpu:
  cmd: conda run -n pytorch-cpu sample-tests/neural_compressor/torch/run.sh cpu
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}

inference-optimization-ipex-${PYTHON_VERSION:-3.9}-cpu:
  cmd: conda run -n pytorch-cpu python -W ignore sample-tests/intel_extension_for_pytorch/test_ipex.py --device cpu --ipex
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
inference-optimization-ipex-${PYTHON_VERSION:-3.9}-gpu:
  cmd: conda run -n pytorch-gpu python -W ignore sample-tests/intel_extension_for_pytorch/test_ipex.py --device xpu --ipex
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
  device: ["/dev/dri"]

inference-optimization-itex-${PYTHON_VERSION:-3.9}-cpu:
  cmd: conda run -n tensorflow-cpu python -W ignore sample-tests/intel_extension_for_tensorflow/test_itex.py
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
inference-optimization-itex-${PYTHON_VERSION:-3.9}-gpu:
  cmd: conda run -n tensorflow-gpu python -W ignore sample-tests/intel_extension_for_tensorflow/test_itex.py
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
  device: ["/dev/dri"]

inference-optimization-itex-inference-notebook-${PYTHON_VERSION:-3.9}-cpu:
  cmd: papermill --log-output jupyter/itex-inference/tutorial_optimize_TensorFlow_pretrained_model.ipynb result.ipynb -k tensorflow-cpu --cwd jupyter/itex-inference
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
  notebook: True

# Need update from TensorFlow v1 to V2
@@ -75,24 +75,24 @@ inference-optimization-itex-inference-notebook-${PYTHON_VERSION:-3.9}-cpu:

inference-optimization-onnx-${PYTHON_VERSION:-3.9}-cpu:
  cmd: conda run -n tensorflow-cpu sample-tests/onnx/run.sh
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
inference-optimization-onnx-${PYTHON_VERSION:-3.9}-gpu:
  cmd: conda run -n tensorflow-gpu sample-tests/onnx/run.sh
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
  device: ["/dev/dri"]

inference-optimization-tensorflow-dataset-librarian-${PYTHON_VERSION:-3.9}-cpu:
  cmd: conda run -n tensorflow-cpu bash -c 'yes | python -m dataset_librarian.dataset -n msmarco --download -d ~/msmarco'
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
inference-optimization-tensorflow-dataset-librarian-${PYTHON_VERSION:-3.9}-gpu:
  cmd: conda run -n tensorflow-gpu bash -c 'yes | python -m dataset_librarian.dataset -n msmarco --download -d ~/msmarco'
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
  device: ["/dev/dri"]

inference-optimization-torch-dataset-librarian-${PYTHON_VERSION:-3.9}-cpu:
  cmd: conda run -n pytorch-cpu bash -c 'yes | python -m dataset_librarian.dataset -n msmarco --download -d ~/msmarco'
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
inference-optimization-torch-dataset-librarian-${PYTHON_VERSION:-3.9}-gpu:
  cmd: conda run -n pytorch-gpu bash -c 'yes | python -m dataset_librarian.dataset -n msmarco --download -d ~/msmarco'
-  img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
+  img: amr-registry.caas.intel.com/aiops/aikit-products-dev:b-${GITHUB_RUN_NUMBER:-0}-inference-optimization-${RELEASE:-2024.2.0}-py${PYTHON_VERSION:-3.9}
  device: ["/dev/dri"]
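Each entry above pairs an img tag (the container the runner pulls) with a cmd executed inside it; notebook: True marks papermill notebook runs and device: ["/dev/dri"] passes the GPU render node through for the -gpu variants. As a rough, unofficial sketch only, the commands below show how the first CPU notebook entry could be reproduced by hand with plain docker run; the expanded tag uses the defaults of the variables above, and the assumption that the jupyter/ and sample-tests/ trees sit under the image's default working directory comes from the paths in cmd, not from this file.

```bash
# Illustrative hand-run of the inc-ipex-quantization CPU notebook test (not the runner's actual invocation).
# Assumptions: defaults GITHUB_RUN_NUMBER=0, RELEASE=2024.2.0, PYTHON_VERSION=3.9; repo content under the image workdir.
IMG="amr-registry.caas.intel.com/aiops/aikit-products-dev:b-0-inference-optimization-2024.2.0-py3.9"
docker run --rm "${IMG}" \
  papermill --log-output \
    jupyter/inc-ipex-quantization/quantize_with_inc.ipynb result.ipynb \
    -k pytorch-cpu --cwd jupyter/inc-ipex-quantization

# GPU entries additionally map the render device, mirroring device: ["/dev/dri"]:
#   docker run --rm --device /dev/dri "${IMG}" conda run -n pytorch-gpu ...
```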