debisoft committed
Commit 5e807c3 · 1 Parent(s): 9669d30
Files changed (1)
  1. app.py +0 -7
app.py CHANGED
@@ -1,4 +1,3 @@
-import openai
 import os
 import re
 from datetime import datetime
@@ -18,12 +17,6 @@ model_dir = "model"
 model, tokenizer = load_model_tokenizer_for_generate(model_dir)
 
 def get_completion(prompt, model="dolly-v0-70m"):
-    messages = [{"role": "user", "content": prompt}]
-    response = openai.ChatCompletion.create(
-        model=model,
-        messages=messages,
-        temperature=0,  # this is the degree of randomness of the model's output
-    )
     # Examples from https://www.databricks.com/blog/2023/03/24/hello-dolly-democratizing-magic-chatgpt-open-models.html
     instructions = [prompt]
     # set some additional pipeline args
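Note: after this commit, get_completion no longer calls the OpenAI API and relies only on the locally loaded Dolly model. The rest of the function is truncated in the hunk above, so the following is a minimal, hypothetical sketch of how the local generation path might continue, using a standard transformers text-generation pipeline built from the module-level model and tokenizer. The generation arguments and return handling are assumptions for illustration, not part of the commit.

# Hypothetical continuation of app.py (not part of the diff above).
# Assumes the module-level model/tokenizer returned by
# load_model_tokenizer_for_generate(model_dir); generation settings are guesses.
from transformers import pipeline

generator = pipeline(
    "text-generation",
    model=model,          # loaded earlier in app.py
    tokenizer=tokenizer,  # loaded earlier in app.py
)

def get_completion(prompt, model="dolly-v0-70m"):
    instructions = [prompt]
    # Generate one completion per instruction; max_new_tokens/do_sample are assumed values.
    results = [
        generator(instruction, max_new_tokens=128, do_sample=False)[0]["generated_text"]
        for instruction in instructions
    ]
    return results[0]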