soiz committed on
Commit 0b7ce46 · verified · 1 Parent(s): c9ba591

Update app.py

Files changed (1)
  1. app.py +26 -33
app.py CHANGED
@@ -1,56 +1,49 @@
 from transformers import pipeline, set_seed
-import gradio as grad, random, re
 
 
 gpt2_pipe = pipeline('text-generation', model='Gustavosta/MagicPrompt-Stable-Diffusion', tokenizer='gpt2')
 with open("ideas.txt", "r") as f:
-    line = f.readlines()
 
-
-def generate(starting_text):
     for count in range(4):
         seed = random.randint(100, 1000000)
         set_seed(seed)
 
         if starting_text == "":
-            starting_text: str = line[random.randrange(0, len(line))].replace("\n", "").lower().capitalize()
-            starting_text: str = re.sub(r"[,:\-–.!;?_]", '', starting_text)
-            print(starting_text)
-
         response = gpt2_pipe(starting_text, max_length=random.randint(60, 90), num_return_sequences=4)
         response_list = []
         for x in response:
             resp = x['generated_text'].strip()
-            if resp != starting_text and len(resp) > (len(starting_text) + 4) and resp.endswith((":", "-", "—")) is False:
-                response_list.append(resp+'\n')
-
         response_end = "\n".join(response_list)
-        response_end = re.sub('[^ ]+\.[^ ]+','', response_end)
         response_end = response_end.replace("<", "").replace(">", "")
 
-        if response_end != "":
             return response_end
         if count == 4:
             return response_end
 
 
-txt = grad.Textbox(lines=1, label="Initial Text", placeholder="English Text here")
-out = grad.Textbox(lines=4, label="Generated Prompts")
-
-examples = []
-for x in range(8):
-    examples.append(line[random.randrange(0, len(line))].replace("\n", "").lower().capitalize())
-
-title = "Stable Diffusion Prompt Generator"
-description = 'This is a demo of the model series: "MagicPrompt", in this case, aimed at: Stable Diffusion. To use it, simply submit your text or click on one of the examples.<b><br><br>To learn more about the model, go to the link: https://huggingface.co/Gustavosta/MagicPrompt-Stable-Diffusion<br>'
-article = "<div><center><img src='https://visitor-badge.glitch.me/badge?page_id=_Stable_Diffusion' alt='visitor badge'></center></div>"
-
-grad.Interface(fn=generate,
-               inputs=txt,
-               outputs=out,
-               examples=examples,
-               title=title,
-               description=description,
-               article=article,
-               allow_flagging='never',
-               cache_examples=False).queue(concurrency_count=1, api_open=False).launch(show_api=False, show_error=True)
 
 from transformers import pipeline, set_seed
+from flask import Flask, request, jsonify
+import random, re
 
+app = Flask(__name__)
 
+# Initialize the GPT-2 pipeline
 gpt2_pipe = pipeline('text-generation', model='Gustavosta/MagicPrompt-Stable-Diffusion', tokenizer='gpt2')
 with open("ideas.txt", "r") as f:
+    lines = f.readlines()
 
+def generate_prompt(starting_text):
     for count in range(4):
         seed = random.randint(100, 1000000)
         set_seed(seed)
 
+        # Choose a random line from the file if the input text is empty
         if starting_text == "":
+            starting_text = lines[random.randrange(0, len(lines))].strip().lower().capitalize()
+            starting_text = re.sub(r"[,:\-–.!;?_]", '', starting_text)
+
+        # Generate text
         response = gpt2_pipe(starting_text, max_length=random.randint(60, 90), num_return_sequences=4)
         response_list = []
         for x in response:
             resp = x['generated_text'].strip()
+            if resp != starting_text and len(resp) > (len(starting_text) + 4) and not resp.endswith((":", "-", "—")):
+                response_list.append(resp + '\n')
+
+        # Clean the generated text
         response_end = "\n".join(response_list)
+        response_end = re.sub(r'[^ ]+\.[^ ]+', '', response_end)  # Removes strings like 'abc.xyz'
         response_end = response_end.replace("<", "").replace(">", "")
 
+        if response_end:
             return response_end
         if count == 4:
             return response_end
 
+# Define the API endpoint
+@app.route('/', methods=['GET'])
+def generate_api():
+    starting_text = request.args.get('text', default="", type=str)
+    result = generate_prompt(starting_text)
+    return jsonify({"generated_text": result})
 
+if __name__ == '__main__':
+    # Run the Flask app on port 7860
+    app.run(host='0.0.0.0', port=7860)
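
With this change the Space serves a plain HTTP API instead of a Gradio UI: a GET request to / takes the seed text in a `text` query parameter and returns JSON with a `generated_text` field. A minimal client sketch, assuming the app is reachable locally on port 7860 (the base URL and the `BASE_URL` name are illustrative, not part of the commit):

import requests

# Assumed base URL: the app listens on 0.0.0.0:7860 when run locally;
# a deployed Space would expose its own URL instead.
BASE_URL = "http://localhost:7860/"

# Request prompt expansions for a short starting text; generation can take a while.
resp = requests.get(BASE_URL, params={"text": "A castle in the clouds"}, timeout=120)
resp.raise_for_status()

# The endpoint returns {"generated_text": "..."} with newline-separated prompts.
print(resp.json()["generated_text"])

An empty `text` parameter makes the app fall back to a random line from ideas.txt, mirroring the behaviour of the removed Gradio demo.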