Fix bug in the function parameters
app.py CHANGED
@@ -5,8 +5,6 @@ import re
 
 # from vllm import LLM, SamplingParams
 import pandas as pd
-from collections import Counter
-from datasets import load_dataset, Dataset, concatenate_datasets
 from dataclasses import dataclass
 from concurrent.futures import ThreadPoolExecutor, TimeoutError
 import os
@@ -20,8 +18,6 @@ import subprocess
 import tempfile
 from contextlib import contextmanager
 from typing import Tuple
-from transformers import PreTrainedTokenizer, set_seed
-import torch
 from tqdm import tqdm
 import time
 from sympy import N, simplify
@@ -31,7 +27,7 @@ from pathlib import Path
 from openai import OpenAI
 
 client = OpenAI(
-    base_url=
+    base_url=os.environ.get("SERVER_URL"),
     api_key=os.environ.get("HF_TOKEN"),
 )
 
@@ -543,7 +539,7 @@ config = Config(
 print(f"=== Running submission with config ===\n\n{config}")
 
 
-def generate(message):
+def generate(message, temperature):
     chat_completion = client.chat.completions.create(
         model="tgi",
         messages=message,
@@ -718,6 +714,7 @@ with gr.Blocks() as demo:
     out = gr.Markdown()
 
     btn = gr.Button("Run")
+
     btn.click(fn=solve_problem, inputs=[inp, temperature], outputs=out)
 
 
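For context, here is a minimal sketch of how the updated client and `generate` helper fit together after this commit. Only the lines visible in the diff come from the Space's app.py; forwarding `temperature` into the request, the `max_tokens` value, and the return handling are assumptions added for illustration.

```python
import os
from openai import OpenAI

# Client for the Space's TGI endpoint; both values come from environment
# variables, matching the diff above.
client = OpenAI(
    base_url=os.environ.get("SERVER_URL"),
    api_key=os.environ.get("HF_TOKEN"),
)


def generate(message, temperature):
    """Send a chat request to the TGI server and return the reply text."""
    # temperature is now an explicit parameter of generate; passing it through
    # to the request, plus max_tokens and the return value, is assumed here.
    chat_completion = client.chat.completions.create(
        model="tgi",
        messages=message,
        temperature=temperature,
        max_tokens=1024,
    )
    return chat_completion.choices[0].message.content
```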
|
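Likewise, a hedged sketch of the Gradio wiring around the `btn.click` call in the last hunk. Only the `out`, `btn`, and `btn.click` lines appear in the diff; the `inp` Textbox, the `temperature` Slider, and the body of `solve_problem` are assumed, with `solve_problem` delegating to the `generate` sketch above.

```python
import gradio as gr


def solve_problem(problem, temperature):
    # Placeholder for the Space's actual solver: build the chat messages and
    # delegate to generate(message, temperature) from the previous sketch.
    message = [{"role": "user", "content": problem}]
    return generate(message, temperature)


with gr.Blocks() as demo:
    inp = gr.Textbox(label="Problem")                                  # assumed
    temperature = gr.Slider(0.0, 1.0, value=0.7, label="Temperature")  # assumed
    out = gr.Markdown()

    btn = gr.Button("Run")

    # The line shown in the diff: both the problem text and the temperature
    # value are passed through to solve_problem.
    btn.click(fn=solve_problem, inputs=[inp, temperature], outputs=out)

demo.launch()
```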