File size: 2,279 Bytes
2b7aae2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
#!/bin/bash
# Start all STARRY ML prediction services in a single container.
# Usage: docker run ... starry-ml:latest bash /app/scripts/start-all.sh

# Fail fast: -e abort on error, -u error on unset vars, pipefail so a
# pipeline reports the first failing stage (was plain `set -e`).
set -euo pipefail

echo "=== Starting all STARRY prediction services ==="

# Select legacy Keras for TensorFlow — the TF predictors below appear to
# rely on the tf.keras 2.x code paths (NOTE(review): confirm against models).
export TF_USE_LEGACY_KERAS=1
export PYTHONPATH=/app/deep-starry:/app/starry-ocr
# Overridable from the environment; defaults keep standalone runs working.
export LOG_DIR=${LOG_DIR:-/tmp/starry-logs}
export CUDA_VISIBLE_DEVICES=${CUDA_VISIBLE_DEVICES:-0}
mkdir -p "$LOG_DIR"

# PIDs of every backgrounded service; consumed by the shutdown trap below.
PIDS=()

# --- PyTorch services (GPU) - start sequentially to avoid CUDA init race ---

#######################################
# Launch one deep-starry stream predictor on the GPU in the background.
# Globals:   PIDS (appended)
# Arguments: $1 - short service name (used for the log label)
#            $2 - TCP port to listen on
#            $3 - model directory name under /models/starry-dist
#            $4 - predictor mode passed to -m
# Outputs:   one "[name]  Starting on port N..." status line to stdout
# Notes:     sleeps 3s after launch so the next service does not
#            initialize CUDA concurrently with this one.
#######################################
start_gpu_service() {
    local name=$1 port=$2 model=$3 mode=$4
    # %-12s reproduces the original column-aligned labels exactly.
    printf '%-12sStarting on port %s...\n' "[${name}]" "$port"
    python /app/deep-starry/streamPredictor.py \
        "/models/starry-dist/${model}" \
        -p "$port" -dv cuda -m "$mode" &
    PIDS+=($!)
    sleep 3
}

start_gpu_service layout   12022 20221125-scorelayout-1121-residue-u-d4-w64-d4-w64 layout
start_gpu_service mask     12024 20210918-scorewidgets.mask-unet-5-32              mask
start_gpu_service semantic 12025 202302-semanticCluster                            semanticCluster
start_gpu_service gauge    12023 scoregauge-unet-d6-w32-0611                       gauge

echo "[loc]       Starting on port 12026..."
# Run in an explicit subshell and exec python so $! is the predictor's own
# PID. The previous `cd ... && python ... &` backgrounded the entire
# AND-list, so $! was a wrapper subshell and the shutdown trap's kill would
# not reliably reach the python process. The parent's cwd was never changed
# by that form either, which made the trailing `cd /app` a no-op — dropped.
(
    cd /app/starry-ocr && exec python locPredictor.py \
        -w /models/ocr-dist/DB_gc_loc/v6/model_epoch_88_minibatch_15300 \
        -p 12026 -dv cuda
) &
PIDS+=($!)
sleep 2

# --- TensorFlow services (CPU) ---

#######################################
# Launch one starry-ocr TensorFlow predictor, pinned to the CPU by hiding
# all CUDA devices from the process (CUDA_VISIBLE_DEVICES=-1).
# Globals:   PIDS (appended)
# Arguments: $1 - predictor script name under /app/starry-ocr
#            $2 - config file name under /models/ocr-dist
#            $3 - TCP port to listen on
#######################################
run_cpu_predictor() {
    local script=$1 config=$2 port=$3
    CUDA_VISIBLE_DEVICES=-1 python "/app/starry-ocr/${script}" \
        "/models/ocr-dist/${config}" \
        -p "$port" &
    PIDS+=($!)
}

echo "[ocr]       Starting on port 12027 (CPU)..."
run_cpu_predictor ocrPredictor.py ocr.yaml 12027
sleep 1

echo "[brackets]  Starting on port 12028 (CPU)..."
run_cpu_predictor bracketsPredictor.py brackets.yaml 12028

#######################################
# Shutdown handler: forward the signal to every child, then reap them all.
# Globals:   PIDS (read)
#######################################
shutdown_services() {
    echo "Stopping all services..."
    kill "${PIDS[@]}" 2>/dev/null
    wait
}

printf '\n=== All %s services started (PIDs: %s) ===\n' "${#PIDS[@]}" "${PIDS[*]}"
printf '    Ports: 12022(layout) 12023(gauge) 12024(mask) 12025(semantic) 12026(loc) 12027(ocr) 12028(brackets)\n\n'

# Handle shutdown
trap shutdown_services SIGTERM SIGINT

# Block until every child exits.
wait