Spaces:
Running
Running
File size: 5,618 Bytes
6f1c297 2b7aae2 6f1c297 2b7aae2 e8cbdd9 6f1c297 2b7aae2 6f1c297 2b7aae2 6f1c297 2b7aae2 a523941 6f1c297 a523941 6f1c297 a523941 6f1c297 a523941 6f1c297 a523941 6f1c297 a523941 6f1c297 a523941 6f1c297 a523941 6f1c297 a523941 6f1c297 44f8844 6f1c297 a523941 6f1c297 a523941 6f1c297 a523941 6f1c297 a523941 6f1c297 a523941 6f1c297 2b7aae2 3b2dc51 95c4896 3b2dc51 ad438b8 6f1c297 a523941 6f1c297 2b7aae2 6f1c297 a523941 6f1c297 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 |
###############################################################################
# STARRY HuggingFace Space — Full Deployment with ML Predictors (CPU mode)
# nginx (reverse proxy) + omr-service (Fastify) + cluster-server (NestJS)
# + 7 Python ML predictors via supervisord (layout, gauge, mask, semantic,
# loc, ocr, brackets)
#
# This Dockerfile expects pre-built artifacts copied by export-hf.sh:
# dist/ — frontend build
# backend/cluster-server/dist/ — compiled NestJS
# backend/omr/dist/ — rollup bundle
# backend/omr-service/src/ — TypeScript source (runs via tsx)
# backend/python-services/ — Python ML predictors
# backend/libs/ — shared TS libs
###############################################################################
FROM node:20-slim
# Build-time only: suppress apt interactive prompts. ARG (not ENV) keeps
# DEBIAN_FRONTEND out of the final image's runtime environment, where it
# could change apt behavior for anyone debugging inside the container.
ARG DEBIAN_FRONTEND=noninteractive
# --- System deps ---
# Single layer: apt-get update + install + list cleanup. Packages (sorted):
# curl (healthchecks/downloads), g++/make/pkg-config (native addon builds),
# libfontconfig1 (font rendering), libzmq3-dev (pyzmq), nginx (reverse
# proxy), postgresql + client (embedded DB), python3/pip (ML predictors),
# supervisor (predictor process manager), tini (PID-1 init).
RUN apt-get update && apt-get install -y --no-install-recommends \
      curl \
      g++ \
      libfontconfig1 \
      libzmq3-dev \
      make \
      nginx \
      pkg-config \
      postgresql \
      postgresql-client \
      python3 \
      python3-pip \
      supervisor \
      tini \
    && rm -rf /var/lib/apt/lists/*
# Install tsx globally — omr-service runs its TypeScript sources directly
# through tsx at runtime. Clean the npm cache in the same layer so the
# download cache does not persist into the image (DL3016/image-size).
RUN npm install -g tsx && npm cache clean --force
# --- Python ML deps (CPU-only PyTorch + TensorFlow) ---
# Install torch/torchvision from the PyTorch CPU wheel index so the
# multi-gigabyte CUDA builds are never pulled. --break-system-packages is
# needed on Debian bookworm's externally-managed system Python.
# NOTE(review): --index-url applies to every requirement on this command,
# so numpy is also resolved from the PyTorch index — confirm it hosts a
# numpy build satisfying ">=1.26.0,<2.0.0".
RUN pip install --no-cache-dir --break-system-packages \
"numpy>=1.26.0,<2.0.0" \
torch torchvision \
--index-url https://download.pytorch.org/whl/cpu
# TensorFlow (CPU) stack plus predictor runtime deps. All version specifiers
# are quoted: an unquoted `Pillow>=8.0.0` is parsed by the shell as the word
# `Pillow` plus an output redirection to a file named `=8.0.0`, silently
# dropping the constraint and installing an unpinned version (the original
# already quoted "opencv-python-headless<4.11" for the same reason — `<`
# would be an input redirection and fail the build outright).
RUN pip install --no-cache-dir --break-system-packages \
    "numpy>=1.26.0,<2.0.0" \
    tensorflow-cpu \
    tf_keras \
    "opencv-python-headless<4.11" \
    "Pillow>=8.0.0" \
    "PyYAML>=5.4.0" \
    "pyzmq>=22.0.0" \
    "msgpack>=1.0.0"
# Runtime environment for the Python predictors: route TF through the
# tf_keras (Keras 2) compatibility shim, and keep Python output unbuffered
# so predictor logs stream to supervisord in real time.
ENV TF_USE_LEGACY_KERAS=1 \
    PYTHONUNBUFFERED=1
# --- node user already has UID 1000 in node:20-slim ---
# Ensure the home directory exists and is owned by node: PGDATA, logs, and
# runtime state are all placed under /home/node below, and every service
# runs as UID 1000.
RUN mkdir -p /home/node && chown node:node /home/node
# --- Configure PostgreSQL to run as user (UID 1000) ---
ENV PGDATA=/home/node/pgdata
# Create the data directory (mode 0700 — initdb refuses anything looser)
# and the unix-socket directory, both owned by the unprivileged node user.
RUN install -d -m 700 -o node -g node "$PGDATA" \
    && install -d -o node -g node /home/node/run/postgresql
# Initialize PostgreSQL as user
# initdb must run as the same non-root user that will later run postgres.
USER node
# Appended config moves the unix socket into a node-writable directory and
# binds only to loopback. pg_hba.conf is deliberately overwritten (">" on
# the first echo, ">>" on the second) to trust all local and loopback
# connections — the database is never reachable from outside the container.
# NOTE(review): the postgresql 15 binary path matches the Debian bookworm
# package that node:20-slim's apt provides — re-check on base-image bumps.
RUN /usr/lib/postgresql/15/bin/initdb -D $PGDATA \
&& echo "unix_socket_directories = '/home/node/run/postgresql'" >> $PGDATA/postgresql.conf \
&& echo "listen_addresses = '127.0.0.1'" >> $PGDATA/postgresql.conf \
&& echo "port = 5432" >> $PGDATA/postgresql.conf \
&& echo "local all all trust" > $PGDATA/pg_hba.conf \
&& echo "host all all 127.0.0.1/32 trust" >> $PGDATA/pg_hba.conf
# Back to root for the remaining system-level setup steps.
USER root
# --- Configure nginx ---
# nginx runs as the node user, so it needs write access to its log and
# scratch directories, /run (pid file), and /etc/nginx (nginx.conf is
# copied there later with node ownership).
# NOTE(review): chowning all of /run and /etc/nginx is broad — confirm no
# other component in the image relies on root ownership of those paths.
RUN mkdir -p /var/log/nginx /var/lib/nginx/body /var/lib/nginx/proxy /var/lib/nginx/fastcgi \
&& chown -R node:node /var/log/nginx /var/lib/nginx /run /etc/nginx
# --- Set up app directory ---
# HOME must point at /home/node so tools run as the node user (npm, pip,
# postgres) resolve their caches and dotfiles to a writable location.
ENV HOME=/home/node
WORKDIR /home/node/app
# --- cluster-server: install production deps ---
# Manifests are copied before source so this layer is cached until they
# change. The trailing `*` on package-lock.json makes the lockfile optional.
COPY --chown=node backend/cluster-server/package.json backend/cluster-server/package-lock.json* ./backend/cluster-server/
# Prefer reproducible `npm ci`; fall back to `npm install` when the lockfile
# is absent or out of sync. 2>/dev/null suppresses the expected ci failure
# noise — NOTE(review): it also hides unrelated ci errors; confirm that is
# acceptable.
RUN cd backend/cluster-server && npm ci --omit=dev --legacy-peer-deps 2>/dev/null || npm install --omit=dev --legacy-peer-deps
# --- omr bundle (pre-built) ---
# Only the manifest and the rollup output are needed at runtime; the
# bundle's external dependencies are installed at the app root further below.
COPY --chown=node backend/omr/package.json ./backend/omr/
COPY --chown=node backend/omr/dist/ ./backend/omr/dist/
# --- omr-service: install production deps ---
COPY --chown=node backend/omr-service/package.json backend/omr-service/package-lock.json* ./backend/omr-service/
# ONNXRUNTIME_NODE_INSTALL=skip avoids downloading native onnxruntime
# binaries at install time; the nested onnxruntime-common copy is removed so
# a single shared version resolves. The original trailing `; true` forced
# the layer to succeed even when `npm install` itself failed, baking broken
# dependencies into the image — now only the best-effort `rm` may fail.
RUN cd backend/omr-service \
    && ONNXRUNTIME_NODE_INSTALL=skip npm install --omit=dev \
    && { rm -rf node_modules/onnxruntime-node/node_modules/onnxruntime-common 2>/dev/null || true; }
# --- Root-level deps (for omr bundle externals) ---
# The first install provides required externals for the omr bundle and must
# succeed; skia-canvas and gl are optional native accelerations whose builds
# can fail on a slim image, so only that second install is allowed to fail.
# (The original trailing `; true` also masked failures of the required
# install, letting a broken layer pass the build.)
RUN npm init -y > /dev/null 2>&1 \
    && npm install --no-package-lock \
       js-sha1 math-erf lodash spark-md5 portfinder python-shell \
       msgpackr "yargs@^17" \
    && { npm install --no-package-lock skia-canvas gl 2>/dev/null || true; }
# --- Copy pre-built frontend ---
COPY --chown=node dist/ ./dist/
# --- Copy cluster-server dist ---
COPY --chown=node backend/cluster-server/dist/ ./backend/cluster-server/dist/
# NOTE(review): presumably the server resolves migrations at dist/migrations
# while the compiled output nests them under dist/src/migrations — this
# symlink bridges the two; confirm against cluster-server's TypeORM config.
RUN ln -sf /home/node/app/backend/cluster-server/dist/src/migrations /home/node/app/backend/cluster-server/dist/migrations
# --- Copy omr-service source ---
# omr-service ships as TypeScript source and is executed via tsx (installed
# globally above), so no build step is needed here.
COPY --chown=node backend/omr-service/src/ ./backend/omr-service/src/
COPY --chown=node backend/omr-service/tsconfig.json ./backend/omr-service/
# --- Gauge renderer and shared files ---
COPY --chown=node backend/libs/gauge-renderer.ts ./backend/libs/
COPY --chown=node backend/omr/src/gauge-server.ts ./backend/omr/src/
COPY --chown=node src/pages/playground/scripts/shaders.ts ./src/pages/playground/scripts/
# NOTE(review): presumably code under backend/ resolves node_modules from
# backend/node_modules — this symlink points it at omr-service's install;
# verify which backend modules rely on it.
RUN ln -sf /home/node/app/backend/omr-service/node_modules /home/node/app/backend/node_modules
# --- Root tsconfig ---
# Needed so tsx resolves path aliases for the shared TS sources copied above.
COPY --chown=node tsconfig.json ./
# --- Python ML services ---
# The 7 predictors (layout, gauge, mask, semantic, loc, ocr, brackets);
# launched and supervised via supervisord at runtime.
COPY --chown=node backend/python-services/ ./backend/python-services/
# --- Supervisord config ---
COPY --chown=node supervisord.conf ./supervisord.conf
# --- Example score seed data + upload images ---
# Compressed seeds; NOTE(review): presumably extracted/loaded by
# docker-entrypoint.sh on first boot — confirm in the entrypoint script.
COPY --chown=node seed-data.sql.gz ./seed-data.sql.gz
COPY --chown=node seed-uploads.tar.gz ./seed-uploads.tar.gz
# --- Config files ---
# BuildKit --chmod marks the entrypoint executable at copy time, replacing
# the follow-up `RUN chmod +x` that duplicated the file into an extra layer.
COPY --chown=node --chmod=755 docker-entrypoint.sh ./docker-entrypoint.sh
COPY --chown=node nginx.conf /etc/nginx/nginx.conf
# Directories
# Writable locations for upload scratch space, supervisord logs, runtime
# pid/socket files, and downloaded model weights.
RUN mkdir -p /tmp/starry-uploads /home/node/log/supervisor /home/node/run /home/node/app/models \
&& chown -R node:node /tmp/starry-uploads /home/node/log /home/node/run /home/node/app/models
# Drop privileges: everything from here on (build and runtime) is UID 1000.
USER node
# HF Spaces routes external traffic to 7860 (served by nginx in-container).
EXPOSE 7860
# tini as PID 1 reaps zombies and forwards signals to the entrypoint; the
# entrypoint script (default CMD, overridable) starts all services.
ENTRYPOINT ["tini", "--"]
CMD ["./docker-entrypoint.sh"]
|