MVPilgrim committed
Commit 2281b34 · Parent(s): ee5fc0e
Files changed (1):
  app.py (+10 -33)
app.py CHANGED
@@ -493,15 +493,16 @@ try:
         echoVal = True
         stop = ["Q", "\n"]

-        #modelOutput = llm(
-        modelOutput = llm.create_chat_completion(
-            prompt
-            #max_tokens=max_tokens,
-            #temperature=temperature,
-            #top_p=top_p,
-            #echo=echoVal,
-            #stop=stop,
-        )
+        modelOutput = ""
+        with st.spinner('Translating...'):
+            modelOutput = llm.create_chat_completion(
+                prompt
+                #max_tokens=max_tokens,
+                #temperature=temperature,
+                #top_p=top_p,
+                #echo=echoVal,
+                #stop=stop,
+            )
         result = modelOutput["choices"][0]["message"]["content"]
         #result = str(modelOutput)
         logger.debug(f"### llmResult: {result}")
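Note on the new call above: in llama-cpp-python, Llama.create_chat_completion() takes a list of chat messages as its first argument rather than a raw prompt string, so the prompt passed here positionally must already be a list of role/content dicts (consistent with wrklist[1]["content"] later in this diff). st.spinner() is a Streamlit context manager that displays its message while the wrapped block runs. A minimal sketch of the assumed call shape, with a placeholder model path and message text:

    from llama_cpp import Llama

    llm = Llama(model_path="model.gguf")   # placeholder path, not from this repo
    prompt = [                             # a messages list, not a bare string
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
    ]
    modelOutput = llm.create_chat_completion(prompt)   # messages passed positionally
    result = modelOutput["choices"][0]["message"]["content"]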
@@ -538,24 +539,6 @@ try:
         return fullPrompt


-    #################################################
-    # Format text for easier reading in text areas. #
-    #################################################
-    def prettyPrint(text):
-        try:
-            logger.info("### prettyPrint entered.")
-            logger.info(f"### text: {text}")
-            text = text.replace("\\n", "\n")
-            outstr = io.StringIO()
-            pprint.pprint(object=text,stream=outstr,indent=1,width=60)
-            prettyText = outstr.getvalue()
-            logger.info("### prettyPrint exited.")
-            return prettyText
-        except Exception as e:
-            logger.error(f"### prettyPrint() e: {e}")
-            return None
-
-
     #####################################
     # Run the LLM with the user prompt. #
     #####################################
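On the removed prettyPrint() helper: pprint.pprint() applied to a plain str emits the string's repr (quoted, and for long strings split into adjacent literals wrapped in parentheses) rather than word-wrapped prose, which may be why the helper and its call sites were dropped in this commit. A small stdlib comparison, with illustrative sample text:

    import io
    import pprint
    import textwrap

    text = "A long model response that should wrap cleanly in a text area."
    outstr = io.StringIO()
    pprint.pprint(object=text, stream=outstr, indent=1, width=60)
    print(outstr.getvalue())              # quoted repr, split across lines in parentheses
    print(textwrap.fill(text, width=60))  # plain wrapped text, no quoting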
@@ -565,16 +548,11 @@ try:
         st.session_state.sysTAtext = st.session_state.sysTA
         logger.debug(f"sysTAtext: {st.session_state.sysTAtext}")

-        #st.session_state.userpTAtext = st.session_state.userpTA
         wrklist = setPrompt(st.session_state.userpTA,st.selectRag)
-        #wrkList = [ wrkList ]
-        #st.session_state.userpTA = prettyPrint(wrkList)
         st.session_state.userpTA = wrklist[1]["content"]
         logger.debug(f"userpTAtext: {st.session_state.userpTA}")

-        #st.session_state.rspTAtext = runLLM(st.session_state.userpTAtext)
         rsp = runLLM(wrklist)
-        #st.session_state.rspTA = prettyPrint(rsp)
         st.session_state.rspTA = rsp
         logger.debug(f"rspTAtext: {st.session_state.rspTA}")

@@ -587,7 +565,6 @@ try:
     def on_getAllRagDataButton_Clicked():
         logger = st.session_state.logger
         logger.info("### on_getAllRagButton_Clicked entered.")
-        #st.session_state.ragpTA = prettyPrint(getAllRagData())
         st.session_state.ragpTA = getAllRagData();
         logger.info("### on_getAllRagButton_Clicked exited.")
