# SemanticSearchPOC/semsearchDbgUI.py
import weaviate
from weaviate.connect import ConnectionParams
from weaviate.classes.init import AdditionalConfig, Timeout
from sentence_transformers import SentenceTransformer
from langchain_community.document_loaders import BSHTMLLoader
from pathlib import Path
from lxml import html
import logging
from semantic_text_splitter import HuggingFaceTextSplitter
from tokenizers import Tokenizer
import json
import os
import re
import llama_cpp
from llama_cpp import Llama
import ipywidgets as widgets
from IPython.display import display, clear_output
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
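
# runLLM() below calls a module-level `llm` handle that this file never creates.
# A minimal sketch, assuming a local GGUF model file; the path and context size
# are placeholders, not part of the original script.
MODEL_PATH = os.getenv("LLM_MODEL_PATH", "./models/llama-2-7b-chat.Q4_K_M.gguf")
llm = Llama(model_path=MODEL_PATH, n_ctx=2048)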
######################################################################
# MAINLINE
#
logger.info("#### MAINLINE ENTERED.")
#####################################################################
# Create UI widgets.
output_widget = widgets.Output()
with output_widget:
    print("### Create widgets entered.")
systemTextArea = widgets.Textarea(
    value='',
    placeholder='Enter System Prompt.',
    description='Sys Prompt: ',
    disabled=False,
    layout=widgets.Layout(width='300px', height='80px')
)
userTextArea = widgets.Textarea(
    value='',
    placeholder='Enter User Prompt.',
    description='User Prompt: ',
    disabled=False,
    layout=widgets.Layout(width='435px', height='110px')
)
ragPromptTextArea = widgets.Textarea(
    value='',
    placeholder='App generated prompt with RAG information.',
    description='RAG Prompt: ',
    disabled=False,
    layout=widgets.Layout(width='580px', height='180px')
)
responseTextArea = widgets.Textarea(
    value='',
    placeholder='LLM generated response.',
    description='LLM Resp: ',
    disabled=False,
    layout=widgets.Layout(width='780px', height='200px')
)
selectRag = widgets.Checkbox(
    value=False,
    description='Use RAG',
    disabled=False
)
submitButton = widgets.Button(
    description='Run Model.',
    disabled=False,
    button_style='',  # 'success', 'info', 'warning', 'danger' or ''
    tooltip='Click',
    icon='check'  # (FontAwesome names without the `fa-` prefix)
)
# Display UI
logger.debug("### Before displaying UI: ")
display(systemTextArea)
display(userTextArea)
display(ragPromptTextArea)
display(responseTextArea)
display(selectRag)
display(submitButton)
def runLLM(prompt):
    # Sampling parameters for the llama.cpp completion call.
    max_tokens = 1000
    temperature = 0.3
    top_p = 0.1
    echo = True
    stop = ["Q", "\n"]
    modelOutput = llm(
        prompt,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        echo=echo,
        stop=stop,
    )
    result = modelOutput["choices"][0]["text"].strip()
    return result
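
# setPrompt() below calls getRagData(), which is not defined anywhere in this
# file. A minimal sketch, assuming a local Weaviate v4 instance holding a
# "Document" collection with a "content" text property, and query embeddings
# from sentence-transformers; the collection name, property name, model name,
# and ports are assumptions, not taken from the original script.
embedder = SentenceTransformer("all-MiniLM-L6-v2")

def getRagData(query):
    client = weaviate.WeaviateClient(
        connection_params=ConnectionParams.from_params(
            http_host="localhost", http_port=8080, http_secure=False,
            grpc_host="localhost", grpc_port=50051, grpc_secure=False,
        ),
        additional_config=AdditionalConfig(timeout=Timeout(init=10)),
    )
    client.connect()
    try:
        # Embed the query and pull the closest chunks from Weaviate.
        vector = embedder.encode(query).tolist()
        documents = client.collections.get("Document")
        response = documents.query.near_vector(near_vector=vector, limit=3)
        return "\n".join(str(obj.properties.get("content", "")) for obj in response.objects)
    finally:
        client.close()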
def setPrompt(pprompt, ragFlag):
    logger.info("\n### setPrompt() entered. ragFlag: %s", ragFlag)
    if ragFlag:
        ragPrompt = getRagData(pprompt)
        # Surface the retrieved context in the RAG Prompt widget.
        ragPromptTextArea.value = ragPrompt
        userPrompt = "Using this information: " + ragPrompt \
            + " process the following statement or question and produce a response: " \
            + pprompt
    else:
        userPrompt = pprompt
    #prompt = f""" <s> [INST] <<SYS>> {systemTextArea.value} </SYS>> Q: {userPrompt} A: [/INST]"""
    return userPrompt
def on_submitButton_clicked(b):
    with output_widget:
        clear_output(wait=True)
        ragPromptTextArea.value = ""
        responseTextArea.value = ""
        logger.debug(f"### selectRag: {selectRag.value}")
        prompt = setPrompt(userTextArea.value, selectRag.value)
        logger.debug("### prompt: " + prompt)
        # Run the model and surface its answer in the LLM Resp widget.
        responseTextArea.value = runLLM(prompt)
logger.info("\n### Before calling submitButton.on_click().")
submitButton.on_click(on_submitButton_clicked)
display(output_widget)
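
# NOTE: This script is intended to run inside a Jupyter notebook cell; the
# ipywidgets display() calls above render the text areas, checkbox, and
# button, and clicking "Run Model." drives on_submitButton_clicked().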