MVPilgrim committed on
Commit
c0a9876
·
1 Parent(s): c282183
Files changed (1) hide show
  1. app.py +6 -6
app.py CHANGED
@@ -449,12 +449,12 @@ try:
449
 
450
  #modelOutput = llm(
451
  modelOutput = llm.create_chat_completion(
452
- prompt,
453
- max_tokens=max_tokens,
454
- temperature=temperature,
455
- top_p=top_p,
456
- echo=echoVal,
457
- stop=stop,
458
  )
459
  result = modelOutput["choices"][0]["text"].strip()
460
  logger.info(f"### llmResult: {result}")
 
449
 
450
  #modelOutput = llm(
451
  modelOutput = llm.create_chat_completion(
452
+ prompt
453
+ #max_tokens=max_tokens,
454
+ #temperature=temperature,
455
+ #top_p=top_p,
456
+ #echo=echoVal,
457
+ #stop=stop,
458
  )
459
  result = modelOutput["choices"][0]["text"].strip()
460
  logger.info(f"### llmResult: {result}")