from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
import gradio as gr

# Load the prompt-generation model and its tokenizer from the Hub
tokenizer = AutoTokenizer.from_pretrained("ashercn97/awesome-prompts-merged")
model = AutoModelForCausalLM.from_pretrained("ashercn97/awesome-prompts-merged")

# The model is loaded as a causal LM, so use the "text-generation" task
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
def generate(prompt):
    # Wrap the persona in the Alpaca-style instruction template the model expects
    form = """Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.\n\n
### Instruction:\n
{}\n
### Response:
""".format(prompt)
    # return_full_text=False returns only the completion, not the echoed template
    results = pipe(form, max_length=150, return_full_text=False)
    return results[0]["generated_text"]
input_component = gr.Textbox(label="Input a persona, e.g. photographer", value="photographer")
output_component = gr.Textbox(label="Prompt")
examples = [["photographer"], ["developer"]]
description = "This app generates ChatGPT prompts; it is based on a BART model trained on [this dataset](https://huggingface.co/datasets/fka/awesome-chatgpt-prompts). Simply enter a persona that you want the prompt to be generated for."
gr.Interface(generate, inputs=input_component, outputs=output_component, examples=examples, title="ChatGPT Prompt Generator", description=description).launch()
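
For a quick sanity check outside the web UI, the same generate function can be called directly once the code above has loaded the model; this is only an illustrative sketch, and "photographer" is just an example persona matching the UI default.

# Minimal local test: call the generator directly instead of launching the interface.
print(generate("photographer"))  # prints a ChatGPT-style prompt for the example persona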