CISCai committed (verified)
Commit 76d5089 · Parent: b649d4d

allow enable_thinking, thinking, reasoning_effort, etc. settings

Files changed (3):
  1. README.md +1 -1
  2. app.py +9 -5
  3. requirements.txt +3 -3
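
For context, app.py reads these options from a single dict-like settings object; after this change the keys below are no longer discarded but forwarded to the chat template. A minimal sketch of such a settings object (hypothetical values; only the key names come from the new allowed_chat_kwargs tuple in app.py plus the existing tools/documents handling):

settings = {
    "add_generation_prompt": True,   # previously the only template flag passed through
    "enable_thinking": False,        # newly forwarded to the template
    "reasoning_effort": "high",      # newly forwarded to the template
    "tools": [],                     # handled separately, as before
    "documents": [],                 # handled separately, as before
}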
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 💬📝
   colorFrom: purple
   colorTo: indigo
   sdk: gradio
- sdk_version: 5.38.0
+ sdk_version: 5.42.0
   python_version: 3.11
   app_file: app.py
   pinned: false
app.py CHANGED
@@ -1369,7 +1369,11 @@ You can freely edit and test GGUF chat template(s) (and are encouraged to do so)

      tools = settings.get("tools")
      documents = settings.get("documents")
-     add_generation_prompt = settings.get("add_generation_prompt")
+     allowed_chat_kwargs = ("add_generation_prompt", "enable_thinking", "thinking", "reasoning_effort", "builtin_tools", "model_identity", "controls")
+     chat_template_kwargs = {}
+     for allow in allowed_chat_kwargs:
+         if allow in settings:
+             chat_template_kwargs[allow] = settings.get(allow)

      cleanup_settings = []
      for k in settings.keys():
@@ -1387,7 +1391,7 @@ You can freely edit and test GGUF chat template(s) (and are encouraged to do so)
      chat_rag_output = None
      inverse_output = None
      try:
-         chat_output = tokenizer.apply_chat_template(messages, tools = tools, documents = documents, chat_template = template, add_generation_prompt = add_generation_prompt, tokenize = False)
+         chat_output = tokenizer.apply_chat_template(messages, tools = tools, documents = documents, chat_template = template, tokenize = False, **chat_template_kwargs)
      except Exception as e:
          gr.Warning(
              message = str(e),
@@ -1395,7 +1399,7 @@ You can freely edit and test GGUF chat template(s) (and are encouraged to do so)
              title = "Chat Template Error",
          )
      try:
-         chat_tool_use_output = tokenizer.apply_chat_template(messages, tools = tools or [], chat_template = template_tool_use, add_generation_prompt = add_generation_prompt, tokenize = False) if template_tool_use else None
+         chat_tool_use_output = tokenizer.apply_chat_template(messages, tools = tools or [], chat_template = template_tool_use, tokenize = False, **chat_template_kwargs) if template_tool_use else None
      except Exception as e:
          gr.Warning(
              message = str(e),
@@ -1403,7 +1407,7 @@ You can freely edit and test GGUF chat template(s) (and are encouraged to do so)
              title = "Tool Use Template Error",
          )
      try:
-         chat_rag_output = tokenizer.apply_chat_template(messages, documents = documents or [], chat_template = template_rag, add_generation_prompt = add_generation_prompt, tokenize = False) if template_rag else None
+         chat_rag_output = tokenizer.apply_chat_template(messages, documents = documents or [], chat_template = template_rag, tokenize = False, **chat_template_kwargs) if template_rag else None
      except Exception as e:
          gr.Warning(
              message = str(e),
@@ -1411,7 +1415,7 @@ You can freely edit and test GGUF chat template(s) (and are encouraged to do so)
              title = "RAG Template Error",
          )
      try:
-         inverse_output = tokenizer.apply_inverse_template(messages, inverse_template = template_inverse) if template_inverse else None
+         inverse_output = tokenizer.apply_inverse_template(messages, inverse_template = template_inverse, **chat_template_kwargs) if template_inverse else None
      except Exception as e:
          gr.Warning(
              message = str(e),
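
A minimal, self-contained sketch of the pattern these app.py hunks introduce, using transformers' AutoTokenizer as a stand-in for the Space's own tokenizer object (an assumption; the model id and values are illustrative). Extra keyword arguments to apply_chat_template are forwarded to the Jinja chat template, which is how flags such as enable_thinking reach templates that read them (e.g. Qwen3's):

from transformers import AutoTokenizer

# Mirrors allowed_chat_kwargs from the diff above.
ALLOWED_CHAT_KWARGS = (
    "add_generation_prompt", "enable_thinking", "thinking",
    "reasoning_effort", "builtin_tools", "model_identity", "controls",
)

def render_chat(settings: dict, messages: list) -> str:
    # Forward only the allowlisted settings; everything else stays out of the call.
    chat_template_kwargs = {k: settings[k] for k in ALLOWED_CHAT_KWARGS if k in settings}
    tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen3-0.6B")  # illustrative model choice
    return tokenizer.apply_chat_template(
        messages,
        tools = settings.get("tools"),
        documents = settings.get("documents"),
        tokenize = False,
        **chat_template_kwargs,
    )

messages = [{"role": "user", "content": "Hello!"}]
print(render_chat({"add_generation_prompt": True, "enable_thinking": False}, messages))

The allowlist keeps unrelated editor settings (and anything a user typo'd) from being passed to the template, while still letting model-specific flags through without hard-coding each one at every call site.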
requirements.txt CHANGED
@@ -1,5 +1,5 @@
- gradio[oauth]==5.38.0
- huggingface_hub==0.33.4
+ gradio[oauth]==5.42.0
+ huggingface_hub==0.34.4
  # gradio_huggingfacehub_search==0.0.8
- transformers==4.53.2
+ transformers==4.55.0
  https://huggingface.co/spaces/CISCai/chat-template-editor/resolve/main/gradio_huggingfacehub_search-0.0.8-py3-none-any.whl