MVPilgrim committed · Commit c0a9876 · 1 Parent(s): c282183
debug
app.py CHANGED
@@ -449,12 +449,12 @@ try:
 
     #modelOutput = llm(
     modelOutput = llm.create_chat_completion(
-        prompt
-        max_tokens=max_tokens,
-        temperature=temperature,
-        top_p=top_p,
-        echo=echoVal,
-        stop=stop,
+        prompt
+        #max_tokens=max_tokens,
+        #temperature=temperature,
+        #top_p=top_p,
+        #echo=echoVal,
+        #stop=stop,
     )
     result = modelOutput["choices"][0]["text"].strip()
     logger.info(f"### llmResult: {result}")
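For context, the call being debugged targets llama-cpp-python's chat-completion API. Below is a minimal sketch of how such a call is commonly written; it assumes `llm` is a `llama_cpp.Llama` instance and reuses the parameter names from the diff (`prompt`, `max_tokens`, `temperature`, `top_p`, `stop`), with placeholder values. Note that `create_chat_completion` usually takes a `messages` list rather than a bare prompt string, and the generated text is read from `choices[0]["message"]["content"]` rather than `choices[0]["text"]` (which is the shape returned by the plain-completion API).

# Sketch only: assumes a local GGUF model; the model path and sampling values
# are placeholders, not taken from the Space's actual configuration.
from llama_cpp import Llama

llm = Llama(model_path="model.gguf")  # hypothetical path for illustration

prompt = "Summarize what this Space does."
modelOutput = llm.create_chat_completion(
    # The chat API expects a list of role/content messages, not a raw prompt string.
    messages=[{"role": "user", "content": prompt}],
    max_tokens=256,
    temperature=0.7,
    top_p=0.95,
    stop=["</s>"],
)
# For chat completions the text lives under "message" -> "content",
# not under "text" as it does for plain completions.
result = modelOutput["choices"][0]["message"]["content"].strip()
print(result)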