# syntax=docker/dockerfile:1
# Source: starry / Dockerfile (k-l-lambda) — export from starry-refactor 2026-02-21, commit 3b2dc51
###############################################################################
# STARRY HuggingFace Space — Full Deployment with ML Predictors (CPU mode)
# nginx (reverse proxy) + omr-service (Fastify) + cluster-server (NestJS)
# + 7 Python ML predictors via supervisord (layout, gauge, mask, semantic,
# loc, ocr, brackets)
#
# This Dockerfile expects pre-built artifacts copied by export-hf.sh:
# dist/ — frontend build
# backend/cluster-server/dist/ — compiled NestJS
# backend/omr/dist/ — rollup bundle
# backend/omr-service/src/ — TypeScript source (runs via tsx)
# backend/python-services/ — Python ML predictors
# backend/libs/ — shared TS libs
###############################################################################
# Base image: Debian-slim Node 20; ships the `node` user at UID/GID 1000.
FROM node:20-slim
# Build-time only — silences debconf prompts during apt-get. Using ARG (not
# ENV) keeps DEBIAN_FRONTEND out of the final image's runtime environment.
ARG DEBIAN_FRONTEND=noninteractive
# --- System deps ---
# `update` and `install` share one layer (avoids the stale-apt-cache bug) and
# the package lists are removed in the same layer so they never reach the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    g++ \
    libfontconfig1 \
    libzmq3-dev \
    make \
    nginx \
    pkg-config \
    postgresql \
    postgresql-client \
    python3 \
    python3-pip \
    supervisor \
    tini \
    && rm -rf /var/lib/apt/lists/*
# Install tsx globally — omr-service runs its TypeScript sources directly.
# Clean the npm cache in the same layer so it does not bloat the image.
RUN npm install -g tsx && npm cache clean --force
# --- Python ML deps (CPU-only PyTorch + TensorFlow) ---
# Torch comes from the dedicated CPU wheel index so no CUDA libs are pulled in.
RUN pip install --no-cache-dir --break-system-packages \
    "numpy>=1.26.0,<2.0.0" \
    torch torchvision \
    --index-url https://download.pytorch.org/whl/cpu
# Version specifiers MUST be quoted: an unquoted `Pillow>=8.0.0` is parsed by
# the shell as the argument `Pillow` plus an output redirection to a file
# named `=8.0.0`, so pip would silently install an unconstrained version.
RUN pip install --no-cache-dir --break-system-packages \
    "numpy>=1.26.0,<2.0.0" \
    tensorflow-cpu \
    tf_keras \
    "opencv-python-headless<4.11" \
    "Pillow>=8.0.0" \
    "PyYAML>=5.4.0" \
    "pyzmq>=22.0.0" \
    "msgpack>=1.0.0"
# TF_USE_LEGACY_KERAS=1 routes TensorFlow to the Keras-2 API shipped by
# tf_keras; PYTHONUNBUFFERED keeps predictor logs streaming to supervisord.
ENV TF_USE_LEGACY_KERAS=1 \
    PYTHONUNBUFFERED=1
# --- node user already has UID 1000 in node:20-slim ---
# Ensure home directory exists and is owned by the node user (tools run as
# node later will write caches/configs under it).
RUN mkdir -p /home/node && chown node:node /home/node
# --- Configure PostgreSQL to run as user (UID 1000) ---
# PGDATA lives under the node user's home so the server runs unprivileged.
ENV PGDATA=/home/node/pgdata
# run/postgresql holds the unix socket; initdb requires the data directory
# to be owned by the server user and to be mode 700.
RUN mkdir -p $PGDATA /home/node/run/postgresql \
&& chown -R node:node $PGDATA /home/node/run/postgresql \
&& chmod 700 $PGDATA
# Initialize PostgreSQL as user
# NOTE(review): binary path assumes Debian's `postgresql` meta-package pulls
# in major version 15 on this base image — re-check if the base is bumped.
# `trust` auth is acceptable only because the server listens solely on
# 127.0.0.1 and the container-local unix socket.
USER node
RUN /usr/lib/postgresql/15/bin/initdb -D $PGDATA \
&& echo "unix_socket_directories = '/home/node/run/postgresql'" >> $PGDATA/postgresql.conf \
&& echo "listen_addresses = '127.0.0.1'" >> $PGDATA/postgresql.conf \
&& echo "port = 5432" >> $PGDATA/postgresql.conf \
&& echo "local all all trust" > $PGDATA/pg_hba.conf \
&& echo "host all all 127.0.0.1/32 trust" >> $PGDATA/pg_hba.conf
# Back to root for the remaining system-level setup steps.
USER root
# --- Configure nginx ---
# nginx runs later as the unprivileged node user, so it must own its log,
# body/proxy/fastcgi temp dirs, its config tree, and /run (pid file).
RUN mkdir -p /var/log/nginx /var/lib/nginx/body /var/lib/nginx/proxy /var/lib/nginx/fastcgi \
&& chown -R node:node /var/log/nginx /var/lib/nginx /run /etc/nginx
# --- Set up app directory ---
# HOME must point at a writable dir for tools (npm/pip caches) run as node.
ENV HOME=/home/node
# WORKDIR creates the directory if missing; later relative paths resolve here.
WORKDIR /home/node/app
# --- cluster-server: install production deps ---
# The lockfile is copied with a glob so the build also works when it is
# absent; prefer reproducible `npm ci`, fall back to `npm install` otherwise.
COPY --chown=node backend/cluster-server/package.json backend/cluster-server/package-lock.json* ./backend/cluster-server/
# Group the fallback after a successful `cd` — with a bare `cd x && npm ci
# || npm install`, a failed `cd` would run the fallback in the wrong directory.
RUN cd backend/cluster-server \
    && (npm ci --omit=dev --legacy-peer-deps 2>/dev/null || npm install --omit=dev --legacy-peer-deps)
# --- omr bundle (pre-built) ---
COPY --chown=node backend/omr/package.json ./backend/omr/
COPY --chown=node backend/omr/dist/ ./backend/omr/dist/
# --- omr-service: install production deps ---
COPY --chown=node backend/omr-service/package.json backend/omr-service/package-lock.json* ./backend/omr-service/
# The install must fail the build when it fails (the old `… ; true` tail let
# a broken install exit 0); only the removal of the nested duplicate
# onnxruntime-common copy is best-effort.
RUN cd backend/omr-service \
    && ONNXRUNTIME_NODE_INSTALL=skip npm install --omit=dev \
    && { rm -rf node_modules/onnxruntime-node/node_modules/onnxruntime-common || true; }
# --- Root-level deps (for omr bundle externals) ---
# The required externals must install successfully; skia-canvas and gl are
# optional native addons (prebuilds may be unavailable on this platform), so
# only that second install is allowed to fail.
RUN npm init -y > /dev/null 2>&1 \
    && npm install --no-package-lock \
        js-sha1 math-erf lodash spark-md5 portfinder python-shell \
        msgpackr "yargs@^17" \
    && { npm install --no-package-lock skia-canvas gl 2>/dev/null || true; }
# --- Copy pre-built frontend (static assets served by nginx) ---
COPY --chown=node dist/ ./dist/
# --- Copy cluster-server dist ---
COPY --chown=node backend/cluster-server/dist/ ./backend/cluster-server/dist/
# Expose the compiled migrations at the path the runtime expects
# (dist/migrations) without duplicating the files.
RUN ln -sf /home/node/app/backend/cluster-server/dist/src/migrations /home/node/app/backend/cluster-server/dist/migrations
# --- Copy omr-service source (run directly via tsx, no build step) ---
COPY --chown=node backend/omr-service/src/ ./backend/omr-service/src/
COPY --chown=node backend/omr-service/tsconfig.json ./backend/omr-service/
# --- Gauge renderer and shared files ---
COPY --chown=node backend/libs/gauge-renderer.ts ./backend/libs/
COPY --chown=node backend/omr/src/gauge-server.ts ./backend/omr/src/
COPY --chown=node src/pages/playground/scripts/shaders.ts ./src/pages/playground/scripts/
# Let backend/* code resolve packages from omr-service's node_modules
# without a second install.
RUN ln -sf /home/node/app/backend/omr-service/node_modules /home/node/app/backend/node_modules
# --- Root tsconfig (needed by tsx to resolve path aliases) ---
COPY --chown=node tsconfig.json ./
# --- Python ML services (the 7 predictors run under supervisord) ---
COPY --chown=node backend/python-services/ ./backend/python-services/
# --- Supervisord config ---
COPY --chown=node supervisord.conf ./supervisord.conf
# --- Example score seed data + upload images (extracted by the entrypoint) ---
COPY --chown=node seed-data.sql.gz ./seed-data.sql.gz
COPY --chown=node seed-uploads.tar.gz ./seed-uploads.tar.gz
# --- Config files ---
COPY --chown=node docker-entrypoint.sh ./docker-entrypoint.sh
COPY --chown=node nginx.conf /etc/nginx/nginx.conf
# COPY does not preserve a missing exec bit reliably across hosts; set it here.
RUN chmod +x docker-entrypoint.sh
# Directories
# /tmp/starry-uploads: uploaded score images; models/ is filled at runtime;
# log/run dirs serve supervisord and the managed services.
RUN mkdir -p /tmp/starry-uploads /home/node/log/supervisor /home/node/run /home/node/app/models \
&& chown -R node:node /tmp/starry-uploads /home/node/log /home/node/run /home/node/app/models
# Run everything as the unprivileged node user from here on.
USER node
# HF Spaces contract: the app serves HTTP on 7860. EXPOSE is documentation
# only — it does not publish the port.
EXPOSE 7860
# Probe nginx on the public port so orchestrators detect a wedged container;
# generous start period because PostgreSQL init/seed and the ML predictors
# take a while to come up.
# NOTE(review): assumes `/` answers 2xx/3xx once nginx is up — point this at
# a dedicated health endpoint if one exists.
HEALTHCHECK --interval=30s --timeout=5s --start-period=180s --retries=5 \
    CMD curl -fsS http://127.0.0.1:7860/ || exit 1
# tini as PID 1 reaps zombies and forwards signals to the entrypoint script,
# which supervises the multi-process stack.
ENTRYPOINT ["tini", "--"]
CMD ["./docker-entrypoint.sh"]