aiqtech commited on
Commit
9d0c25b
·
verified ·
1 Parent(s): c0a7a64

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -20
app.py CHANGED
@@ -36,8 +36,6 @@ def get_prompt(act):
36
  matching_prompt = prompts_df[prompts_df['act'] == act]['prompt'].values
37
  return matching_prompt[0] if len(matching_prompt) > 0 else None
38
 
39
-
40
-
41
  def respond(
42
  message,
43
  history: list[tuple[str, str]],
@@ -51,20 +49,12 @@ def respond(
51
  반드시 한글로 답변할것.
52
  """
53
 
54
- messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
55
-
56
- for val in history:
57
- if val[0]:
58
- messages.append({"role": "user", "content": val[0]})
59
- if val[1]:
60
- messages.append({"role": "assistant", "content": val[1]})
61
-
62
 - # 사용자 입력에 따른 프롬프트 선택
63
- prompt = get_prompt(message)
64
- if prompt:
65
- message = prompt
66
-
67
- messages.append({"role": "user", "content": message})
68
 
69
  API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-70B-Instruct"
70
  headers = {"Authorization": f"Bearer {os.getenv('HF_TOKEN')}"}
@@ -75,12 +65,12 @@ def respond(
75
 
76
  try:
77
  payload = {
78
- "inputs": messages,
79
  "parameters": {
80
  "max_new_tokens": max_tokens,
81
  "temperature": temperature,
82
  "top_p": top_p,
83
 - "stream": False # 스트리밍을 비활성화
84
  },
85
  }
86
  raw_response = query(payload)
@@ -101,8 +91,6 @@ def respond(
101
 
102
  yield response
103
 
104
 - # 나머지 코드는 그대로 유지
105
-
106
  demo = gr.ChatInterface(
107
  respond,
108
  title="AI Auto Paper",
 
36
  matching_prompt = prompts_df[prompts_df['act'] == act]['prompt'].values
37
  return matching_prompt[0] if len(matching_prompt) > 0 else None
38
 
 
 
39
  def respond(
40
  message,
41
  history: list[tuple[str, str]],
 
49
  반드시 한글로 답변할것.
50
  """
51
 
52
+ full_prompt = f"{system_prefix} {system_message}\n\n"
53
+
54
+ for user, assistant in history:
55
+ full_prompt += f"Human: {user}\nAI: {assistant}\n"
56
+
57
+ full_prompt += f"Human: {message}\nAI:"
 
 
 
 
 
 
 
 
58
 
59
  API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-70B-Instruct"
60
  headers = {"Authorization": f"Bearer {os.getenv('HF_TOKEN')}"}
 
65
 
66
  try:
67
  payload = {
68
+ "inputs": full_prompt,
69
  "parameters": {
70
  "max_new_tokens": max_tokens,
71
  "temperature": temperature,
72
  "top_p": top_p,
73
+ "return_full_text": False
74
  },
75
  }
76
  raw_response = query(payload)
 
91
 
92
  yield response
93
 
 
 
94
  demo = gr.ChatInterface(
95
  respond,
96
  title="AI Auto Paper",