yuvraj-singh-9886 committed on
Commit
c41577c
·
1 Parent(s): 3b70c60

Fix HF authentication with secure environment variable handling

Browse files

- Use environment variables for HF token (HF_TOKEN or HUGGINGFACE_HUB_TOKEN)
- Add proper error handling for missing tokens
- Remove duplicate UI elements
- No hardcoded tokens for security compliance

Files changed (1) hide show
  1. app.py +29 -13
app.py CHANGED
@@ -4,6 +4,7 @@ import torch
4
  import torch.nn.functional as F
5
  import os
6
  import sys
 
7
 
8
  from config import ModelArgs, get_args
9
  from model import DeepSeekV3, initialize_tokenizer
@@ -27,13 +28,37 @@ def initialize_app():
27
  # Initialize model args
28
  model_args = ModelArgs()
29
 
30
- # Initialize tokenizer (no HF token needed for basic operation)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  if tk is None:
32
- tk = Tokenizer(hf_token=None)
33
- tk = tk.ready_tokenizer()
 
 
 
 
 
34
 
35
  # Initialize the global tokenizer in model.py
36
- initialize_tokenizer(hf_token=None)
37
 
38
  def load_model(model_path, device, model_args):
39
  """Load model from checkpoint"""
@@ -138,15 +163,6 @@ def create_interface():
138
  label="Top-k"
139
  )
140
 
141
- with gr.Row():
142
- top_k_slider = gr.Slider(
143
- minimum=1,
144
- maximum=100,
145
- value=50,
146
- step=1,
147
- label="Top-k"
148
- )
149
-
150
  generate_btn = gr.Button("🎯 Generate Text", variant="primary", size="lg")
151
 
152
  with gr.Column(scale=3):
 
4
  import torch.nn.functional as F
5
  import os
6
  import sys
7
+ from huggingface_hub import login
8
 
9
  from config import ModelArgs, get_args
10
  from model import DeepSeekV3, initialize_tokenizer
 
28
  # Initialize model args
29
  model_args = ModelArgs()
30
 
31
+ # Get HF token from environment variables (set in HF Spaces secrets)
32
+ hf_token = os.getenv('HF_TOKEN') or os.getenv('HUGGINGFACE_HUB_TOKEN')
33
+
34
+ if not hf_token:
35
+ print("Warning: No HF_TOKEN found in environment variables.")
36
+ print("Please set HF_TOKEN in your Hugging Face Spaces secrets.")
37
+ print("Go to Settings -> Repository secrets -> New secret")
38
+ print("Name: HF_TOKEN, Value: your_huggingface_token")
39
+ # For now, we'll try to continue without authentication
40
+ hf_token = None
41
+
42
+ # Login to Hugging Face Hub for gated model access
43
+ if hf_token:
44
+ try:
45
+ login(token=hf_token, add_to_git_credential=False)
46
+ print("Successfully logged in to Hugging Face Hub")
47
+ except Exception as e:
48
+ print(f"Warning: Could not login to HF Hub: {e}")
49
+
50
+ # Initialize tokenizer with HF token for gated model access
51
  if tk is None:
52
+ try:
53
+ tk = Tokenizer(hf_token=hf_token)
54
+ tk = tk.ready_tokenizer()
55
+ print("Tokenizer initialized successfully")
56
+ except Exception as e:
57
+ print(f"Error initializing tokenizer: {e}")
58
+ raise e
59
 
60
  # Initialize the global tokenizer in model.py
61
+ initialize_tokenizer(hf_token=hf_token)
62
 
63
  def load_model(model_path, device, model_args):
64
  """Load model from checkpoint"""
 
163
  label="Top-k"
164
  )
165
 
 
 
 
 
 
 
 
 
 
166
  generate_btn = gr.Button("🎯 Generate Text", variant="primary", size="lg")
167
 
168
  with gr.Column(scale=3):