Spaces:
Sleeping
Sleeping
File size: 7,185 Bytes
e42f508 52e4886 e42f508 52e4886 637fb50 e42f508 d0caafa 52e4886 e42f508 52e4886 e42f508 52e4886 e42f508 52e4886 e42f508 52e4886 e42f508 52e4886 e42f508 52e4886 a785f2a 4611bf9 e42f508 52e4886 e42f508 52e4886 e42f508 52e4886 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 |
import streamlit as st
from streamlit_chat import message
from streamlit_extras.colored_header import colored_header
from streamlit_extras.add_vertical_space import add_vertical_space
import requests
from gradio_client import Client
# Streamlit requires set_page_config to be the first st.* call in the script.
st.set_page_config(page_title="HugChat - An LLM-powered Streamlit app")
# Hugging Face Inference API credentials — token is read from Streamlit's secrets store.
API_TOKEN = st.secrets['HF_TOKEN']
# Hosted Mixtral-8x7B-Instruct endpoint used for answer generation.
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1"
# Bearer-auth header sent with every inference request.
headers = {"Authorization": f"Bearer {str(API_TOKEN)}"}
def get_text():
    """Render the chat input box and return the text the user typed (may be '')."""
    return st.text_input("You: ", "", key="input")
def query(payload):
    """POST *payload* to the HF Inference API and return the decoded JSON body."""
    resp = requests.post(API_URL, headers=headers, json=payload)
    return resp.json()
def translate(text, source="English", target="Moroccan Arabic"):
    """Translate *text* from *source* to *target* via the hosted SeamlessM4T-v2 Space.

    Args:
        text: Input text to translate.
        source: Source language name, as spelled in the Space's dropdown.
        target: Target language name, as spelled in the Space's dropdown.

    Returns:
        The prediction returned by the Space's "/t2tt" (text-to-text) endpoint.
    """
    # NOTE(review): this pins a specific replica URL; it will break whenever the
    # Space is redeployed — consider targeting the Space root URL instead.
    client = Client("https://facebook-seamless-m4t-v2-large.hf.space/--replicas/2bmbx/")
    result = client.predict(
        text,    # str in 'Input text' Textbox component
        source,  # 'Source language' Dropdown value
        target,  # 'Target language' Dropdown value
        api_name="/t2tt",
    )
    return result
# Function to generate a response from the chatbot
def generate_response(user_input):
    """Answer a Moroccan-Arabic question with agriculture advice, in Moroccan Arabic.

    Pipeline: translate the question to English, build a Mixtral [INST] prompt
    around hard-coded farm context, query the HF Inference API, then translate
    the generated answer back (translate() defaults: English -> Moroccan Arabic).

    Args:
        user_input: The user's question (expected in Moroccan Arabic).

    Returns:
        A display string prefixed with "Bot: " — either the translated answer
        or an API error description.
    """
    user_input_translated = str(translate(user_input, "Moroccan Arabic", "English"))
    # Hard-coded demo context injected into the prompt.
    # TODO: source these from a user profile / live weather data.
    name = 'Abbas'
    date = 'December'
    location = 'Benguerir, Morocco'
    soil_type = 'red soil'
    humidity = '40%'
    weather = 'Sunny'
    temp = '19C'
    agriculture = 'olives'
    # Mixtral-Instruct chat template: <s> [INST] ... [/INST]</s>
    instruction = f'''
<s> [INST] You are an agriculture expert, and my name is {name} Given the following informations, prevailing weather conditions, specific land type, chosen type of agriculture, and soil composition of a designated area, answer the question below
Location: {location},
Current Month : {date}
land type: {soil_type}
humidity: {humidity}
weather: {weather}
temperature: {temp}
agriculture: {agriculture} Question: {user_input_translated}[/INST]</s>
'''
    output = query({
        "inputs": instruction,
        "parameters": {"max_new_tokens": 250, "temperature": 1, "return_full_text": False},
    })
    # On success the Inference API returns a list of generations; on failure
    # (model loading, rate limit, bad token) it returns a dict like
    # {"error": "..."} — indexing that with output[0] would crash.
    if not isinstance(output, list) or not output:
        return f"Bot: API error: {output.get('error', output) if isinstance(output, dict) else output}"
    return f"Bot: {translate(output[0]['generated_text'])}"
def main():
    """Drive the Streamlit chat UI: sidebar, session-state history, input, replies."""
    # ---- sidebar ----
    with st.sidebar:
        st.title('🤗💬 HugChat App')
        st.markdown('''
## About
This app is an LLM-powered chatbot built using:
- [Streamlit](https://streamlit.io/)
- [HugChat](https://github.com/Soulter/hugging-chat-api)
- [OpenAssistant/oasst-sft-6-llama-30b-xor](https://huggingface.co/OpenAssistant/oasst-sft-6-llama-30b-xor) LLM model
💡 Note: No API key required!
''')
        add_vertical_space(5)
        st.write('Made with ❤️ by [Data Professor](https://youtube.com/dataprofessor)')

    # ---- conversation history, kept across reruns in session state ----
    # 'generated' holds AI replies, 'past' the user's questions; they grow in lockstep.
    if 'generated' not in st.session_state:
        st.session_state['generated'] = ["I'm HugChat, How may I help you?"]
    if 'past' not in st.session_state:
        st.session_state['past'] = ['Hi!']

    # ---- page layout: input box above, transcript below ----
    input_container = st.container()
    if st.button("Clear Chat"):
        st.session_state['past'] = []
        st.session_state['generated'] = []
    colored_header(label='', description='', color_name='blue-30')
    response_container = st.container()

    with input_container:
        user_input = get_text()

    with response_container:
        if user_input:
            # Generate first, append after — both lists stay the same length
            # even if generation raises mid-way.
            bot_reply = generate_response(user_input)
            st.session_state.past.append(user_input)
            st.session_state.generated.append(bot_reply)
        if st.session_state['generated']:
            history = zip(st.session_state['past'], st.session_state['generated'])
            for idx, (question, answer) in enumerate(history):
                message(question, is_user=True, key=f'{idx}_user')
                message(answer, key=str(idx))
# Script entry point.
if __name__ == "__main__":
    main()