Commit 5c7ec8d (parent: 57a59dc) — "trial"
Files changed: app.py (+3 −1), llm_inference.py (+16 −0)
app.py
CHANGED
|
@@ -106,11 +106,13 @@ def create_interface():
|
|
| 106 |
# Generate Text with LLM
|
| 107 |
def generate_text_with_llm(output_prompt, long_talk, compress, compression_level, custom_base_prompt, provider, api_key, model_selected):
|
| 108 |
global selected_prompt_type
|
|
|
|
| 109 |
result = llm_node.generate(
|
| 110 |
input_text=output_prompt,
|
| 111 |
long_talk=long_talk,
|
| 112 |
compress=compress,
|
| 113 |
compression_level=compression_level,
|
|
|
|
| 114 |
prompt_type=selected_prompt_type,
|
| 115 |
custom_base_prompt=custom_base_prompt,
|
| 116 |
provider=provider,
|
|
@@ -131,4 +133,4 @@ def create_interface():
|
|
| 131 |
|
| 132 |
if __name__ == "__main__":
|
| 133 |
demo = create_interface()
|
| 134 |
-
demo.launch()
|
|
|
|
| 106 |
# Generate Text with LLM
|
| 107 |
def generate_text_with_llm(output_prompt, long_talk, compress, compression_level, custom_base_prompt, provider, api_key, model_selected):
|
| 108 |
global selected_prompt_type
|
| 109 |
+
poster = False # Set a default value or modify as needed
|
| 110 |
result = llm_node.generate(
|
| 111 |
input_text=output_prompt,
|
| 112 |
long_talk=long_talk,
|
| 113 |
compress=compress,
|
| 114 |
compression_level=compression_level,
|
| 115 |
+
poster=poster, # Added the missing 'poster' argument
|
| 116 |
prompt_type=selected_prompt_type,
|
| 117 |
custom_base_prompt=custom_base_prompt,
|
| 118 |
provider=provider,
|
|
|
|
# Script entry point: build the Gradio interface and serve it.
# share=True asks Gradio for a publicly accessible tunnel URL in addition
# to the local server. NOTE(review): the module-level name `demo` is the
# conventional handle Gradio's reload mode looks for — keep the name.
if __name__ == "__main__":
    demo = create_interface()
    demo.launch(share=True)
|
llm_inference.py
CHANGED
|
@@ -18,6 +18,22 @@ class LLMInferenceNode:
|
|
| 18 |
base_url="https://api.sambanova.ai/v1",
|
| 19 |
)
|
| 20 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 21 |
def generate(
|
| 22 |
self,
|
| 23 |
input_text,
|
|
|
|
| 18 |
base_url="https://api.sambanova.ai/v1",
|
| 19 |
)
|
| 20 |
|
| 21 |
def generate_prompt(self, dynamic_seed, prompt_type, custom_input):
    """Build a text prompt from a seed, a prompt type, and optional custom input.

    Args:
        dynamic_seed: Value used to seed the global ``random`` module.
            Reserved for future randomized generation — no random draw
            happens in this method yet.
        prompt_type: Category name interpolated (lower-cased) into the
            fallback prompt when no custom input is given.
        custom_input: If non-empty after stripping, it is returned
            verbatim (whitespace preserved) instead of the fallback.

    Returns:
        str: The prompt text.
    """
    import random  # local import keeps the dependency out of module scope

    # NOTE(review): this seeds the *global* RNG as a side effect even though
    # nothing here draws a random value — confirm the seed is still needed
    # by downstream code before removing it.
    random.seed(dynamic_seed)

    # Fixed: removed leftover debug print() that wrote every generated
    # prompt to stdout in library code.
    if custom_input and custom_input.strip():
        # Caller-supplied prompt wins; returned unmodified (not stripped).
        return custom_input
    return f"Generate a {prompt_type.lower()} prompt."
|
| 36 |
+
|
| 37 |
def generate(
|
| 38 |
self,
|
| 39 |
input_text,
|