# AstroSage Space (app.py): llama.cpp-backed Gradio chatbot for astronomy Q&A.
# NOTE(review): the original paste carried Hugging Face Spaces page residue
# ("Spaces:" / "Runtime error" status banner) here, not code.
import spaces
import gradio as gr
from llama_cpp import Llama
from huggingface_hub import hf_hub_download
import random
# Fetch the quantized AstroSage-8B weights from the Hugging Face Hub.
# hf_hub_download caches the file locally, so only the first run downloads.
model_path = hf_hub_download(
    repo_id="AstroMLab/AstroSage-8B-GGUF",
    filename="AstroSage-8B-Q8_0.gguf"
)

# Load the model with llama.cpp. Module-level load means this happens once
# at startup, before the Gradio app is built.
llm = Llama(
    model_path=model_path,
    n_ctx=2048,              # context window (tokens) shared by prompt + reply
    chat_format="llama-3",   # apply the Llama-3 chat template to messages
    n_gpu_layers=-1,  # ensure all layers are on GPU
)
# Placeholder responses for when context is empty.
# One of these is picked at random by initial_greeting() when the page loads.
GREETING_MESSAGES = [
    "Greetings! I am AstroSage, your guide to the cosmos. What would you like to explore today?",
    "Welcome to our cosmic journey! I am AstroSage. How may I assist you in understanding the universe?",
    "AstroSage here. Ready to explore the mysteries of space and time. How may I be of assistance?",
    "The universe awaits! I'm AstroSage. What astronomical wonders shall we discuss?",
]
def user(user_message, history):
    """Append the submitted message to the chat history.

    Returns a pair for the (msg, chatbot) outputs: an empty string that
    clears the textbox, and a new history list (the input list is not
    mutated) extended with the user's turn in Gradio "messages" format.
    """
    turns = list(history) if history else []
    turns.append({"role": "user", "content": user_message})
    return "", turns
def bot(history):
    """Generate the assistant's reply, yielding the history as it streams.

    Gradio consumes each yielded history list to update the chatbot display;
    the final entry is the in-progress assistant message.
    """
    # Bug fix: the original replaced a falsy history with [] and then
    # indexed history[-1], guaranteeing an IndexError when the event fired
    # with no messages. Bail out gracefully instead.
    if not history:
        yield []
        return

    # Prepare the messages for the model, starting with the system prompt.
    messages = [
        {
            "role": "system",
            "content": "You are AstroSage, an intelligent AI assistant specializing in astronomy, astrophysics, and cosmology. Provide accurate, scientific information while making complex concepts accessible. You're enthusiastic about space exploration and maintain a sense of wonder about the cosmos."
        }
    ]
    # Replay prior turns; the last history entry is the user message that
    # user() just appended, added separately below.
    for message in history[:-1]:
        messages.append({"role": message["role"], "content": message["content"]})
    messages.append({"role": "user", "content": history[-1]["content"]})

    # Placeholder assistant entry that grows as tokens stream in.
    history.append({"role": "assistant", "content": ""})

    # Stream the response from llama.cpp (OpenAI-style delta chunks).
    response = llm.create_chat_completion(
        messages=messages,
        max_tokens=512,
        temperature=0.7,
        top_p=0.95,
        stream=True,
    )
    for chunk in response:
        if chunk and "content" in chunk["choices"][0]["delta"]:
            history[-1]["content"] += chunk["choices"][0]["delta"]["content"]
            yield history
def initial_greeting():
    """Return properly formatted initial greeting.

    Picks one canned greeting at random and wraps it as a single-message
    history in Gradio "messages" format, used to seed the chatbot on load.
    """
    greeting = random.choice(GREETING_MESSAGES)
    return [dict(role="assistant", content=greeting)]
# Custom CSS for a space theme (dark backgrounds, rounded main container,
# capped page width). Passed to gr.Blocks(css=...) below.
custom_css = """
#component-0 {
    background-color: #1a1a2e;
    border-radius: 15px;
    padding: 20px;
}
.dark {
    background-color: #0f0f1a;
}
.contain {
    max-width: 1200px !important;
}
"""
| # Create the Gradio interface | |
| with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="indigo", neutral_hue="slate")) as demo: | |
| gr.Markdown( | |
| """ | |
| # π AstroSage: Your Cosmic AI Companion | |
| Welcome to AstroSage, an advanced AI assistant specializing in astronomy, astrophysics, and cosmology. | |
| Powered by the AstroSage-Llama-3.1-8B model, I'm here to help you explore the wonders of the universe! | |
| ### What Can I Help You With? | |
| - πͺ Explanations of astronomical phenomena | |
| - π Space exploration and missions | |
| - β Stars, galaxies, and cosmology | |
| - π Planetary science and exoplanets | |
| - π Astrophysics concepts and theories | |
| - π Astronomical instruments and observations | |
| Just type your question below and let's embark on a cosmic journey together! | |
| """ | |
| ) | |
| chatbot = gr.Chatbot( | |
| label="Chat with AstroSage", | |
| bubble_full_width=False, | |
| show_label=True, | |
| height=450, | |
| type="messages" | |
| ) | |
| with gr.Row(): | |
| msg = gr.Textbox( | |
| label="Type your message here", | |
| placeholder="Ask me anything about space and astronomy...", | |
| scale=9 | |
| ) | |
| clear = gr.Button("Clear Chat", scale=1) | |
| # Example questions for quick start | |
| gr.Examples( | |
| examples=[ | |
| "What is a black hole and how does it form?", | |
| "Can you explain the life cycle of a star?", | |
| "What are exoplanets and how do we detect them?", | |
| "Tell me about the James Webb Space Telescope.", | |
| "What is dark matter and why is it important?" | |
| ], | |
| inputs=msg, | |
| label="Example Questions" | |
| ) | |
| # Set up the message chain with streaming | |
| msg.submit( | |
| user, | |
| [msg, chatbot], | |
| [msg, chatbot], | |
| queue=False | |
| ).then( | |
| bot, | |
| chatbot, | |
| chatbot | |
| ) | |
| # Clear button functionality | |
| clear.click(lambda: None, None, chatbot, queue=False) | |
| # Initial greeting | |
| demo.load(initial_greeting, None, chatbot, queue=False) | |
# Launch the app only when run as a script (not when imported, e.g. by the
# Spaces runtime or tests).
if __name__ == "__main__":
    demo.launch()