p2002814 committed
Commit 205132a · 1 Parent(s): dd1c3d0

Fix: use /app/cache for Hugging Face models to avoid permission errors in Docker

Files changed (1)
  1. utils/mlp.py +1 -7
utils/mlp.py CHANGED
@@ -1,9 +1,3 @@
-import os
-
-# For Huggingface
-os.environ["TRANSFORMERS_CACHE"] = "/app/cache"
-os.makedirs("/app/cache", exist_ok=True)
-
 import torch
 import joblib
 from sentence_transformers import SentenceTransformer
@@ -39,5 +33,5 @@ def load_model_and_metadata(model_path: str, model_type: str = "pytorch"):
     best_threshold = checkpoint.get('best_threshold', 0.5)
     label_encoder = checkpoint['label_encoder']
 
-    embedding_model = SentenceTransformer('all-mpnet-base-v2')
+    embedding_model = SentenceTransformer('all-mpnet-base-v2', cache_folder='/app/cache')
     return model, embedding_model, best_threshold, label_encoder
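
For reference, a minimal usage sketch of the updated loader, assuming a checkpoint that exposes the keys read in the diff ('best_threshold', 'label_encoder'); the checkpoint path below is hypothetical. Passing cache_folder='/app/cache' makes sentence-transformers download and reuse the all-mpnet-base-v2 weights under /app/cache instead of the default ~/.cache, which is typically not writable for a non-root user in a Docker container. It also avoids relying on TRANSFORMERS_CACHE being set before the imports, which is what the removed os.environ line was doing.

    from utils.mlp import load_model_and_metadata

    # Hypothetical checkpoint path; model_type defaults to "pytorch" per the
    # function signature shown in the hunk header above.
    model, embedding_model, best_threshold, label_encoder = load_model_and_metadata(
        "checkpoints/mlp_model.pt",  # hypothetical path
        model_type="pytorch",
    )

    # all-mpnet-base-v2 weights are fetched into /app/cache on first use and
    # reused on subsequent container runs that mount or retain that directory.
    embeddings = embedding_model.encode(["example input text"])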