# ailab/MagicPrompt-Stable-Diffusion/app.py
# (37 lines, 1.2 KiB, Python — listing metadata from the original page, kept as a comment)
import gradio as gr
from transformers import pipeline, set_seed
import random, re
# Load the MagicPrompt model (a GPT-2 checkpoint, presumably fine-tuned to
# expand short ideas into Stable Diffusion prompts — name suggests so) once at
# import time; the pipeline is reused by every generate_prompt() call.
gpt2_pipe = pipeline('text-generation', model='Gustavosta/MagicPrompt-Stable-Diffusion', tokenizer='gpt2')
def generate_prompt(text):
    """Generate up to four Stable Diffusion prompt continuations of *text*.

    Runs the module-level ``gpt2_pipe`` text-generation pipeline with a fresh
    random seed, filters out degenerate completions, and returns the survivors
    joined by blank lines. Returns ``None`` when no completion survives
    (preserves the original implicit-None behavior for the Gradio output).
    """
    # Re-seed on every call so identical inputs still produce varied prompts.
    seed = random.randint(100, 1000000)
    set_seed(seed)
    # Target length scales with the input so the model extends the prompt
    # rather than merely echoing it.
    response = gpt2_pipe(
        text,
        max_length=(len(text) + random.randint(60, 90)),
        num_return_sequences=4,
    )
    response_list = []
    for x in response:
        resp = x['generated_text'].strip()
        # Keep only completions that genuinely extend the input and do not
        # end on a dangling separator.
        # BUG FIX: the original tuple contained an empty string ("").
        # Every string ends with "", so endswith(...) was always True and
        # `... is False` was always False — NO completion ever passed the
        # filter and the function always returned None. The "" was almost
        # certainly a text-mangled em dash; restored as "—".
        if (resp != text
                and len(resp) > (len(text) + 4)
                and not resp.endswith((":", "-", "—"))):
            response_list.append(resp + '\n')
    response_end = "\n".join(response_list)
    # Drop dotted tokens (stray URLs / file names) and any angle brackets.
    response_end = re.sub(r'[^ ]+\.[^ ]+', '', response_end)
    response_end = response_end.replace("<", "").replace(">", "")
    if response_end != "":
        return response_end
# Gradio UI: a single text box in, the generated prompt text out.
demo = gr.Interface(fn=generate_prompt,
inputs='text',
outputs='text',
examples=[["A new hours out of fiend"], ["Third compendium of prague"]],
title = "生成prompt"  # Chinese: "generate prompt"; shown as the page title
)
# Script entry point: enable request queuing, then serve the app.
if __name__ == "__main__":
    # Up to 3 generation requests may run concurrently before queuing.
    demo.queue(concurrency_count=3)
    # Bind to all interfaces so the app is reachable from outside a container.
    demo.launch(server_name="0.0.0.0", server_port=7028)