Skip to content

Commit b7c890f

Browse files
authored
Update default HUGGINGFACE_HUB_CACHE env variable in TEI (#152)
1 parent 3b88895 commit b7c890f

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

huggingface/pytorch/tei/docker/1.7.0/cpu/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ RUN --mount=type=secret,id=actions_cache_url,env=ACTIONS_CACHE_URL \
 74  74
 75  75   FROM debian:bookworm-slim AS base
 76  76
 77      -ENV HUGGINGFACE_HUB_CACHE=/data \
     77  +ENV HUGGINGFACE_HUB_CACHE=/opt/ml/model \
 78  78       PORT=80 \
 79  79       HF_HUB_USER_AGENT_ORIGIN=aws:sagemaker:cpu:inference:tei \
 80  80       MKL_ENABLE_INSTRUCTIONS=AVX512_E4 \

huggingface/pytorch/tei/docker/1.7.0/gpu/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,7 @@ FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 AS base
 95  95
 96  96   ARG DEFAULT_USE_FLASH_ATTENTION=True
 97  97
 98      -ENV HUGGINGFACE_HUB_CACHE=/data \
     98  +ENV HUGGINGFACE_HUB_CACHE=/opt/ml/model \
 99  99       PORT=80 \
100 100       USE_FLASH_ATTENTION=$DEFAULT_USE_FLASH_ATTENTION \
101 101       HF_HUB_USER_AGENT_ORIGIN=aws:sagemaker:gpu-cuda:inference:tei

0 commit comments

Comments
 (0)