```python
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the AuthorMist model and tokenizer
model_name = "authormist/authormist-originality"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Function to humanize input text
def humanize_text(text):
    prompt = (
        "Please paraphrase the following text to make it human-like:\n\n"
        f"{text}\n\nParaphrased text:"
    )
    inputs = tokenizer(prompt, return_tensors="pt")
    # do_sample=True is required for temperature/top_p to actually affect generation
    outputs = model.generate(
        inputs.input_ids,
        attention_mask=inputs.attention_mask,
        max_new_tokens=512,
        do_sample=True,
        temperature=0.7,
        top_p=0.9,
    )
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # Extract only the paraphrased part
    if "Paraphrased text:" in response:
        return response.split("Paraphrased text:")[1].strip()
    return response.strip()

# Launch the Gradio UI
gr.Interface(
    fn=humanize_text,
    inputs=gr.Textbox(lines=10, placeholder="Paste AI-written text here..."),
    outputs="text",
    title="AuthorMist AI Humanizer",
    description="Turns AI-generated text into human-like writing to reduce detection by tools like GPTZero.",
).launch()
```
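For a quick local check without launching the UI, `humanize_text` can be called directly once the model has loaded. A minimal sketch; the sample input below is illustrative only and not part of the Space:

```python
# Smoke test (assumes model, tokenizer, and humanize_text above are already
# defined in the same Python session).
sample = (
    "Artificial intelligence systems are increasingly used to generate text "
    "for a wide range of applications."
)
print(humanize_text(sample))  # prints the paraphrased, more human-like version
```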