Update pages/type_text.py
pages/type_text.py (+1, -4)
@@ -147,8 +147,6 @@ def load_model():
 
 model = load_model()
 
-
-
 # Semantic search, Compute cosine similarity between INTdesc_embedding and SBS descriptions
 INTdesc_embedding = model.encode(INTdesc_input)
 SBScorpus_embeddings = model.encode(SBScorpus)
@@ -212,10 +210,9 @@ if INTdesc_input and st.button(":blue[Map to SBS codes]", key="run_st_model"): #
         {"role": "user", "content": prompt},
     ]
 
-    st.warning("It may take several minutes for Reasoning Model to analyze above 5 options and output results below") # optional warning
     st.write("") # optional Add some space before the Reasoning Model output
     st.subheader("Reasoning Model Output:") # Added a subheader for clarity
-    with st.spinner("Running Reasoning Model..."): # Added spinner for Reasoning model
+    with st.spinner("Running Reasoning Model... may take several minutes, grab a coffee"): # Added spinner for Reasoning model
         outputs = pipe(
             messages,
             max_new_tokens=256,
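For context, the semantic-search block touched in the first hunk encodes the free-text intervention description and the SBS corpus with a sentence-transformers model and ranks candidates by cosine similarity. A minimal, self-contained sketch of that step follows; the model id, the example strings, and the top-k of 5 are assumptions (the app loads its own model via load_model(), and only the names INTdesc_input, SBScorpus, INTdesc_embedding and SBScorpus_embeddings come from the diff).

from sentence_transformers import SentenceTransformer, util

# Hypothetical stand-ins for values the app builds elsewhere.
model = SentenceTransformer("all-MiniLM-L6-v2")  # assumed model; the app uses load_model()
INTdesc_input = "Repair of inguinal hernia"      # example intervention description
SBScorpus = ["Inguinal hernia repair", "Appendicectomy", "Knee arthroscopy"]

# Encode the input description and the SBS descriptions, then rank by cosine similarity.
INTdesc_embedding = model.encode(INTdesc_input)
SBScorpus_embeddings = model.encode(SBScorpus)
cos_scores = util.cos_sim(INTdesc_embedding, SBScorpus_embeddings)[0]
top_results = cos_scores.topk(k=min(5, len(SBScorpus)))  # top-5 assumed from the removed warning text

for score, idx in zip(top_results.values, top_results.indices):
    print(f"{SBScorpus[int(idx)]} (cosine score: {float(score):.4f})")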
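The second hunk folds the removed st.warning notice into the spinner message and leaves the generation call unchanged. A rough sketch of how that spinner wraps the transformers pipeline call is below; the pipeline model id, the prompt text, and the way the reply is read back are assumptions, while the spinner text, the messages structure and max_new_tokens=256 mirror the diff.

import streamlit as st
from transformers import pipeline

pipe = pipeline("text-generation", model="Qwen/Qwen2.5-0.5B-Instruct")  # assumed model id, not from the diff

prompt = "Which of the 5 SBS candidate descriptions best matches the intervention description?"  # placeholder prompt
messages = [
    {"role": "user", "content": prompt},
]

st.write("")  # optional: add some space before the Reasoning Model output
st.subheader("Reasoning Model Output:")
with st.spinner("Running Reasoning Model... may take several minutes, grab a coffee"):
    outputs = pipe(
        messages,
        max_new_tokens=256,
    )
# Recent text-generation pipelines return the chat history with the assistant
# reply appended; the indexing below assumes that output shape.
st.write(outputs[0]["generated_text"][-1]["content"])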