Commit ae42b54

fix PR review comments
Signed-off-by: Srikanth Ramakrishna <srikanth.ramakrishna@intel.com>
Parent: 212f01b

5 files changed, +18 -17 lines

pytorch/Dockerfile (+7 -5)

@@ -184,16 +184,16 @@ RUN apt-get update && \
 
 RUN rm -rf /etc/apt/sources.list.d/intel-gpu-jammy.list /etc/apt/sources.list.d/oneAPI.list
 
-FROM ipex-xpu-base AS ipex-xpu-base-req
+ENV LD_LIBRARY_PATH=/opt/intel/oneapi/redist/lib:$LD_LIBRARY_PATH
+
+FROM ipex-xpu-base AS ipex-xpu-base-wheels
 
 WORKDIR /
 COPY xpu-requirements.txt .
 
 RUN python -m pip install --no-cache-dir -r xpu-requirements.txt && \
     rm -rf xpu-requirements.txt
 
-ENV LD_LIBRARY_PATH=/opt/intel/oneapi/redist/lib:$LD_LIBRARY_PATH
-
 FROM ipex-xpu-base AS ipex-xpu-jupyter
 
 WORKDIR /jupyter
@@ -258,13 +258,15 @@ COPY serving/torchserve-requirements.txt .
 COPY requirements.txt .
 
 RUN python -m pip install --no-cache-dir -r requirements.txt && \
-    python -m pip install --no-cache-dir -r torchserve-requirements.txt
+    python -m pip install --no-cache-dir -r torchserve-requirements.txt && \
+    rm -rf requirements.txt torchserve-requirements.txt
 
 FROM torchserve-base AS compile-xpu
 
 COPY serving/torchserve-xpu-requirements.txt .
 
-RUN python -m pip install --no-cache-dir -r torchserve-xpu-requirements.txt
+RUN python -m pip install --no-cache-dir -r torchserve-xpu-requirements.txt && \
+    rm -rf torchserve-xpu-requirements.txt
 
 FROM torchserve-base AS torchserve-cpu
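The net effect of the first hunk is that LD_LIBRARY_PATH now lives in the shared ipex-xpu-base stage, so both the renamed ipex-xpu-base-wheels stage and ipex-xpu-jupyter inherit it. A minimal sanity check might look like the sketch below; the tag name is hypothetical, and the build will likely also need the build args that docker-compose.yaml normally supplies (omitted here):

```bash
# Build only up to the renamed stage (hypothetical local tag).
docker build --target ipex-xpu-base-wheels -t ipex-xpu-wheels:check pytorch/

# LD_LIBRARY_PATH should already include /opt/intel/oneapi/redist/lib,
# because ENV is now set in the shared ipex-xpu-base stage.
docker run --rm ipex-xpu-wheels:check printenv LD_LIBRARY_PATH
```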

pytorch/docker-compose.yaml (+2 -1)

@@ -122,7 +122,7 @@ services:
         org.opencontainers.base.name: "intel/python:3.10-core"
         org.opencontainers.image.title: "Intel® Extension for PyTorch XPU Base Image"
         org.opencontainers.image.version: ${IPEX_VERSION:-2.1.40}-xpu-${PACKAGE_OPTION:-pip}-base
-      target: ipex-xpu-base-req
+      target: ipex-xpu-base-wheels
     command: >
       python -c "import torch;print(torch.device('xpu'));import
       intel_extension_for_pytorch as
@@ -190,6 +190,7 @@ services:
       labels:
         dependency.apt.numactl: true
         dependency.apt.openjdk-17-jdk: true
+        dependency.apt.xpu-smi: ${XPU_SMI_VERSION:-1.2.38}
         dependency.idp: false
         dependency.python.pip: serving/torchserve-xpu-requirements.txt
       docs: serving
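One way to confirm that the renamed build target and the new xpu-smi dependency label resolve as intended is to render the merged configuration. This is only a sketch: the service names and any environment variables the compose file expects are defined elsewhere in docker-compose.yaml and are not part of this diff.

```bash
# Render the effective compose file; XPU_SMI_VERSION falls back to 1.2.38 per the new label.
cd pytorch
docker compose config | grep -E "ipex-xpu-base-wheels|dependency.apt.xpu-smi"
```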

pytorch/serving/README.md (+4 -5)

@@ -12,7 +12,7 @@ The [Torchserve Model Archiver](https://github.com/pytorch/serve/blob/master/mod
 
 Follow the instructions found in the link above depending on whether you are intending to archive a model or a workflow. Use the provided container rather than installing the archiver with the example command below:
 
-#### For CPU
+#### Create a Model Archive for CPU device
 
 ```bash
 curl -O https://download.pytorch.org/models/squeezenet1_1-b8a52dc0.pth
@@ -29,7 +29,7 @@ docker run --rm -it \
   --export-path /home/model-server
 ```
 
-### For XPU
+### Create a Model Archive for XPU device
 
 Use a squeezenet model [optimized](./model-store/ipex_squeezenet.py) for XPU using Intel® Extension for PyTorch*.
 
@@ -52,7 +52,7 @@ docker run --rm -it \
 
 Test Torchserve with the new archived model. The example below is for the squeezenet model.
 
-#### For CPU
+#### Run Torchserve for CPU device
 
 ```bash
 # Assuming that the above pre-archived model is in the current working directory
@@ -63,8 +63,7 @@ docker run -d --rm --name server \
   intel/intel-optimized-pytorch:2.4.0-serving-cpu
 ```
 
-#### For XPU
-
+#### Run Torchserve for XPU device
 ```bash
 # Assuming that the above pre-archived model is in the current working directory
 docker run -d --rm --name server \
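Whichever of the renamed "Run Torchserve" sections is followed, TorchServe's standard health endpoint is a quick way to verify the server started. The sketch below assumes the inference port 8080 is published by the docker run command above; it is not part of this commit:

```bash
# TorchServe health check against the inference address.
curl http://localhost:8080/ping
# A healthy server returns: {"status": "Healthy"}
```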

pytorch/serving/config-xpu.properties (-1)

@@ -1,4 +1,3 @@
-
 inference_address=http://0.0.0.0:8080
 management_address=http://0.0.0.0:8081
 metrics_address=http://0.0.0.0:8082
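The change only removes a stray leading blank line; the three listener addresses are untouched. For reference, each address serves a different TorchServe API surface, which can be probed once the container publishes ports 8081 and 8082 on the host (hypothetical host ports; a sketch, not part of this commit):

```bash
curl http://localhost:8081/models    # management API: list registered models
curl http://localhost:8082/metrics   # metrics API: Prometheus-format metrics
```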

pytorch/serving/tests.yaml (+5 -5)

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-ipex-serving-model-archive:
+ipex-serving-cpu-model-archive:
   img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-${BASE_IMAGE_NAME:-ubuntu}-${BASE_IMAGE_TAG:-22.04}-py${PYTHON_VERSION:-3.10}-torchserve-cpu
   cmd: /home/model-server/model-archive/mar-test.sh cpu
   entrypoint: /bin/bash
@@ -35,7 +35,7 @@ ipex-serving-xpu-model-archive:
       dst: /home/model-server/model-store
   user: root
   workdir: /home/model-server/model-archive
-ipex-serving-workflow-archive:
+ipex-serving-cpu-workflow-archive:
   img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-${BASE_IMAGE_NAME:-ubuntu}-${BASE_IMAGE_TAG:-22.04}-py${PYTHON_VERSION:-3.10}-torchserve-cpu
   cmd: /home/model-server/model-archive/war-test.sh
   entrypoint: /bin/bash
@@ -46,7 +46,7 @@ ipex-serving-workflow-archive:
       dst: /home/model-server/wf-store
   user: root
   workdir: /home/model-server/model-archive
-ipex-serving-rest-workflow:
+ipex-serving-cpu-rest-workflow:
   img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-${BASE_IMAGE_NAME:-ubuntu}-${BASE_IMAGE_TAG:-22.04}-py${PYTHON_VERSION:-3.10}-torchserve-cpu
   cmd: bash /home/model-server/wf-store/rest-test.sh
   serving: True
@@ -72,7 +72,7 @@ ipex-serving-xpu-rest-workflow:
     ENABLE_TORCH_PROFILER: 'true'
   shm_size: 1g
   workdir: /home/model-server/wf-store
-ipex-serving-rest-inference:
+ipex-serving-cpu-rest-inference:
   img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-${BASE_IMAGE_NAME:-ubuntu}-${BASE_IMAGE_TAG:-22.04}-py${PYTHON_VERSION:-3.10}-torchserve-cpu
   cmd: bash /home/model-server/model-store/rest-test.sh
   serving: True
@@ -85,7 +85,7 @@ ipex-serving-rest-inference:
     ENABLE_TORCH_PROFILER: 'true'
   shm_size: 1g
   workdir: /home/model-server/model-store
-ipex-serving-grpc-inference:
+ipex-serving-cpu-grpc-inference:
   img: ${REGISTRY}/${REPO}:b-${GITHUB_RUN_NUMBER:-0}-${BASE_IMAGE_NAME:-ubuntu}-${BASE_IMAGE_TAG:-22.04}-py${PYTHON_VERSION:-3.10}-torchserve-cpu
   cmd: bash /home/model-server/model-store/grpc-test.sh
   serving: True
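Only the CPU-side entries gained a -cpu- infix (the -xpu- entries already carried a device marker), so a simple follow-up check is to grep for the old names and make sure nothing else still refers to them. A sketch, run from the repository root; not part of this commit:

```bash
# Any hit is a leftover reference to a pre-rename test name.
grep -rn \
  -e "ipex-serving-model-archive" \
  -e "ipex-serving-workflow-archive" \
  -e "ipex-serving-rest-workflow" \
  -e "ipex-serving-rest-inference" \
  -e "ipex-serving-grpc-inference" \
  --include="*.yaml" --include="*.yml" --include="*.md" .
```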
