Update app.py
app.py CHANGED
@@ -32,69 +32,8 @@ current_model = None # Store the currently loaded model
 repo = None # Store the Hugging Face Repository object
 model_descriptions = {} # Store model descriptions
 
-# --- Constants ---
-PREFIX = """Date: {date_time_str}
-Purpose: {purpose}
-Agent Name: {agent_name}
-"""
-
-LOG_PROMPT = """Prompt:
-{content}
-"""
-
-LOG_RESPONSE = """Response:
-{resp}
-"""
-
 # --- Functions ---
-def format_prompt(message: str, history: List[Tuple[str, str]], max_history_turns: int = 5) -> str:
-    prompt = ""
-    for user_prompt, bot_response in history[-max_history_turns:]:
-        prompt += f"Human: {user_prompt}\nAssistant: {bot_response}\n"
-    prompt += f"Human: {message}\nAssistant:"
-    return prompt
-
-def generate_response(
-    prompt: str,
-    history: List[Tuple[str, str]],
-    agent_name: str = "Generic Agent",
-    sys_prompt: str = "",
-    temperature: float = TEMPERATURE,
-    max_new_tokens: int = MAX_TOKENS,
-    top_p: float = TOP_P,
-    repetition_penalty: float = REPETITION_PENALTY,
-) -> str:
-    global current_model
-    if current_model is None:
-        return "Error: Please load a model first."
-
-    date_time_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-    full_prompt = PREFIX.format(
-        date_time_str=date_time_str,
-        purpose=sys_prompt,
-        agent_name=agent_name
-    ) + format_prompt(prompt, history)
-
-    if VERBOSE:
-        logging.info(LOG_PROMPT.format(content=full_prompt))
-
-    response = current_model(
-        full_prompt,
-        max_new_tokens=max_new_tokens,
-        temperature=temperature,
-        top_p=top_p,
-        repetition_penalty=repetition_penalty,
-        do_sample=True
-    )[0]['generated_text']
-
-    assistant_response = response.split("Assistant:")[-1].strip()
-
-    if VERBOSE:
-        logging.info(LOG_RESPONSE.format(resp=assistant_response))
-
-    return assistant_response
-
-def load_hf_model(model_name: str):
+def load_model(model_name: str):
     """Loads a language model and fetches its description."""
     global current_model, model_descriptions
     try:
@@ -114,7 +53,21 @@ def load_hf_model(model_name: str):
     except Exception as e:
         return f"Error loading model: {str(e)}"
 
-def execute_command(command: str, project_path: str = None) -> str:
+def model_selection():
+    st.title("Model Selection")
+    st.write("Select a model to use for code generation:")
+    models = ["distilbert", "t5", "codellama-7b", "geminai-1.5b"]
+    selected_model = st.selectbox("Select a model:", models)
+    if selected_model:
+        model = load_model(selected_model)
+        if model:
+            st.write(f"Model {selected_model} imported successfully!")
+            return model
+        else:
+            st.write(f"Error importing model {selected_model}.")
+            return None
+
+def run_command(command: str, project_path: str = None) -> str:
     """Executes a shell command and returns the output."""
     try:
         if project_path:
@@ -128,7 +81,7 @@ def execute_command(command: str, project_path: str = None) -> str:
     except Exception as e:
         return f"Error executing command: {str(e)}"
 
-def create_hf_project(project_name: str, project_path: str = DEFAULT_PROJECT_PATH):
+def create_project(project_name: str, project_path: str = DEFAULT_PROJECT_PATH):
     """Creates a new Hugging Face project."""
     global repo
     try:
@@ -150,7 +103,7 @@ def create_hf_project(project_name: str, project_path: str = DEFAULT_PROJECT_PATH):
     except Exception as e:
         return f"Error creating Hugging Face project: {str(e)}"
 
-def list_project_files(project_path: str = DEFAULT_PROJECT_PATH) -> str:
+def list_files(project_path: str = DEFAULT_PROJECT_PATH) -> str:
     """Lists files in the project directory."""
     try:
         files = os.listdir(project_path)
@@ -160,7 +113,7 @@ def list_project_files(project_path: str = DEFAULT_PROJECT_PATH) -> str:
     except Exception as e:
         return f"Error listing project files: {str(e)}"
 
-def read_file_content(file_path: str, project_path: str = DEFAULT_PROJECT_PATH) -> str:
+def read_file(file_path: str, project_path: str = DEFAULT_PROJECT_PATH) -> str:
     """Reads and returns the content of a file in the project."""
     try:
         full_path = os.path.join(project_path, file_path)
@@ -170,7 +123,7 @@ def read_file_content(file_path: str, project_path: str = DEFAULT_PROJECT_PATH) -> str:
     except Exception as e:
         return f"Error reading file: {str(e)}"
 
-def write_to_file(file_path: str, content: str, project_path: str = DEFAULT_PROJECT_PATH) -> str:
+def write_file(file_path: str, content: str, project_path: str = DEFAULT_PROJECT_PATH) -> str:
     """Writes content to a file in the project."""
    try:
         full_path = os.path.join(project_path, file_path)
@@ -180,7 +133,7 @@ def write_to_file(file_path: str, content: str, project_path: str = DEFAULT_PROJECT_PATH) -> str:
     except Exception as e:
         return f"Error writing to file: {str(e)}"
 
-def preview_project(project_path: str = DEFAULT_PROJECT_PATH):
+def preview(project_path: str = DEFAULT_PROJECT_PATH):
     """Provides a preview of the project, if applicable."""
     # Assuming a simple HTML preview for now
     try:
@@ -248,7 +201,7 @@ def main():
         # --- Event handler to load the selected model ---
         def load_selected_model(model_name):
             global current_model
-            load_output = load_hf_model(model_name)
+            load_output = load_model(model_name)
             if current_model:
                 return f"Model '{model_name}' loaded successfully!"
             else:
@@ -293,11 +246,11 @@ def main():
         run_command_button = gr.Button("Run Command")
         preview_button = gr.Button("Preview Project")
 
-        create_project_button.click(create_hf_project, inputs=[project_name], outputs=project_output)
-        read_button.click(read_file_content, inputs=file_path, outputs=file_content)
-        write_button.click(write_to_file, inputs=[file_path, file_content], outputs=project_output)
-        run_command_button.click(execute_command, inputs=command_input, outputs=command_output)
-        preview_button.click(preview_project, outputs=project_output)
+        create_project_button.click(create_project, inputs=[project_name], outputs=project_output)
+        read_button.click(read_file, inputs=file_path, outputs=file_content)
+        write_button.click(write_file, inputs=[file_path, file_content], outputs=project_output)
+        run_command_button.click(run_command, inputs=command_input, outputs=command_output)
+        preview_button.click(preview, outputs=project_output)
 
     demo.launch()
 
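The new model_selection() helper added above drives model choice through Streamlit widgets (st.title, st.selectbox, st.write), while the rest of the app is wired with Gradio components. A minimal, self-contained way to exercise it on its own is sketched below; the streamlit import and the real body of load_model sit outside the hunks shown, so both are treated as assumptions and load_model is stubbed. Run it with "streamlit run sketch.py".

# sketch.py -- standalone check of the model_selection() flow
# Assumption: app.py imports streamlit as st at the top (not visible in the hunks above).
import streamlit as st

def load_model(model_name: str):
    # Stub standing in for the committed load_model; its body is elided by the diff.
    return f"<loaded {model_name}>"

def model_selection():
    st.title("Model Selection")
    st.write("Select a model to use for code generation:")
    models = ["distilbert", "t5", "codellama-7b", "geminai-1.5b"]
    selected_model = st.selectbox("Select a model:", models)
    if selected_model:
        model = load_model(selected_model)
        if model:
            st.write(f"Model {selected_model} imported successfully!")
            return model
        st.write(f"Error importing model {selected_model}.")
        return None

model_selection()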
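The re-wired button handlers follow Gradio's click(fn, inputs=..., outputs=...) pattern, but the component definitions (file_path, file_content, project_output and friends) and the DEFAULT_PROJECT_PATH constant fall outside the hunks shown. A minimal sketch of the same wiring, with those pieces filled in as assumptions and read_file stubbed, looks like this:

# Minimal sketch of the .click wiring pattern used in main(); the component names and
# DEFAULT_PROJECT_PATH value are assumptions, since their definitions are not in the hunks.
import os
import gradio as gr

DEFAULT_PROJECT_PATH = "./hf_project"  # assumed value

def read_file(file_path: str, project_path: str = DEFAULT_PROJECT_PATH) -> str:
    """Stand-in for the committed read_file helper (its body is elided by the diff)."""
    try:
        with open(os.path.join(project_path, file_path), "r") as f:
            return f.read()
    except Exception as e:
        return f"Error reading file: {str(e)}"

with gr.Blocks() as demo:
    file_path = gr.Textbox(label="File Path")
    file_content = gr.Textbox(label="File Content", lines=10)
    read_button = gr.Button("Read File")
    # Same pattern as the new handlers: button.click(fn, inputs=..., outputs=...)
    read_button.click(read_file, inputs=file_path, outputs=file_content)

demo.launch()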