daviondk committed on
Commit eb612a0 · 1 Parent(s): 05c8b77

fix cache again

Files changed (1)
  1. app.py +1 -0
app.py CHANGED
@@ -154,6 +154,7 @@ def bot_response(history, model_selection, max_tokens, temperature, top_k, top_p
     tokenizer = AutoTokenizer.from_pretrained(MODELS[model_name]["base_model"])
     model = AutoModelForCausalLM.from_pretrained(**load_kwargs).to("cuda")
     reward_model = RewardModel(model_name=MODELS[model_name]["reward_repo_id"])
+    CACHE["model_name"] = model_name
     CACHE["tokenizer"] = tokenizer
     CACHE["model"] = model
     CACHE["reward_model"] = reward_model
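Context for the fix: storing model_name in CACHE gives bot_response something to compare the requested model against, so switching models in the UI can trigger a reload instead of silently reusing stale cached weights. The sketch below is a minimal illustration of that pattern, not the actual app.py; the get_models helper is hypothetical, and MODELS, load_kwargs, and RewardModel are assumed to be defined elsewhere in the app as the visible diff suggests.

# Minimal sketch (assumed structure, not the real app.py).
# MODELS, load_kwargs, and RewardModel are taken from the diff context
# and are assumed to be defined elsewhere in the app.
from transformers import AutoTokenizer, AutoModelForCausalLM

CACHE = {}

def get_models(model_name, load_kwargs):
    # Reload only when the requested model differs from the cached one.
    # Without CACHE["model_name"] there is nothing to compare against,
    # so the cache could keep serving a previously loaded model.
    if CACHE.get("model_name") != model_name:
        tokenizer = AutoTokenizer.from_pretrained(MODELS[model_name]["base_model"])
        model = AutoModelForCausalLM.from_pretrained(**load_kwargs).to("cuda")
        reward_model = RewardModel(model_name=MODELS[model_name]["reward_repo_id"])

        CACHE["model_name"] = model_name
        CACHE["tokenizer"] = tokenizer
        CACHE["model"] = model
        CACHE["reward_model"] = reward_model

    return CACHE["tokenizer"], CACHE["model"], CACHE["reward_model"]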