```python
from transformers import AutoTokenizer, AutoModelForCausalLM
from peft import PeftModel

BASE_MODEL = "deepseek-ai/deepseek-coder-1.3b-base"
LORA_REPO = "your-username/deepseek-lora-monthly"

# Load the tokenizer and base model, then attach the LoRA adapter on top.
tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL, trust_remote_code=True)
base = AutoModelForCausalLM.from_pretrained(BASE_MODEL, trust_remote_code=True)
model = PeftModel.from_pretrained(base, LORA_REPO)
model.eval()

def generate_response(prompt: str) -> str:
    # Tokenize the prompt, generate up to 200 new tokens, and decode the result.
    inputs = tokenizer(prompt, return_tensors="pt")
    out = model.generate(**inputs, max_new_tokens=200)
    return tokenizer.decode(out[0], skip_special_tokens=True)
```
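As a quick sanity check, the helper can be called directly. The prompt below is purely illustrative and not part of the original app:

```python
# Illustrative smoke test (assumes the model and adapter loaded above).
print(generate_response("Write a Python function that reverses a string."))
```

If inference latency matters, the adapter weights can also be folded into the base model with PEFT's `merge_and_unload()`, which removes the LoRA indirection at generation time.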