import os
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from pymongo import MongoClient
# Read the authentication tokens from environment variables
hugging_face_token = os.getenv("HUGGING_FACE_TOKEN")
replicate_token = os.getenv("REPLICATE_TOKEN")
groq_token = os.getenv("GROQ_TOKEN")
atlas_token = os.getenv("ATLAS_TOKEN")
open_ruter_token = os.getenv("OPEN_RUTER_TOKEN")
# Atlas (MongoDB) configuration
class AtlasClient:
    def __init__(self, dbname):
        self.mongodb_client = MongoClient(atlas_token)
        self.database = self.mongodb_client[dbname]

    # A quick way to test whether we can connect to the Atlas instance
    def ping(self):
        self.mongodb_client.admin.command("ping")

    # Insert a single document into the given collection
    def add(self, item, collection_name):
        collection = self.database[collection_name]
        collection.insert_one(item)
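
# Example usage (a sketch only; the database and collection names below are
# illustrative, and ATLAS_TOKEN must hold a valid MongoDB connection string):
#   atlas = AtlasClient("chatbot_db")
#   atlas.ping()
#   atlas.add({"user": "test", "message": "hello"}, "interactions")
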
# Google Sheets configuration
def init_google_sheets_client():
    scope = ["https://spreadsheets.google.com/feeds", "https://www.googleapis.com/auth/drive"]
    creds = ServiceAccountCredentials.from_json_keyfile_name('tokyo-portal-326513-90aee094bab9.json', scope)
    return gspread.authorize(creds)
# Google Sheets name
google_sheets_name = "Chatbot Test"
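
# Example usage (a sketch only; assumes the service-account JSON key file above
# exists and the "Chatbot Test" spreadsheet has been shared with that account):
#   sheets_client = init_google_sheets_client()
#   worksheet = sheets_client.open(google_sheets_name).sheet1
#   worksheet.append_row(["user", "model", "message"])
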
# Available models (Hugging Face tokenizer/model IDs)
huggingface_tokenizer = {
    "Meta-Llama-3-8B-Instruct": "meta-llama/Meta-Llama-3-8B-Instruct",
    "Llama-2-7B-Chat": "meta-llama/Llama-2-7b-chat-hf",
    "mistralai/mistral-7b-instruct-v0.2": "mistralai/Mistral-7B-Instruct-v0.2",
    "Meta-Llama-3-70B-Instruct": "meta-llama/Meta-Llama-3-70B-Instruct",
}
# Available models for Replicate
replicate_model = {
    "Meta-Llama-3-8B-Instruct": "meta/meta-llama-3-8b-instruct",
    "Llama-2-7B-Chat": "meta/llama-2-7b-chat",
    "mistralai/mistral-7b-instruct-v0.2": "mistralai/mistral-7b-instruct-v0.2",
    "Meta-Llama-3-70B-Instruct": "meta/meta-llama-3-70b-instruct",
}
# Available models for Groq
groq_model = {
    "llama3-8b-8192": "llama3-8b-8192",
    "llama-guard-3-8b": "llama-guard-3-8b",
    "gemma-7b-it": "gemma-7b-it",
    "llama3-70b-8192": "llama3-70b-8192",
}
# Custom fine-tuned model
custom_model = {
    "rodrisouza/Llama-3-8B-Finetuning-Stories": "rodrisouza/Llama-3-8B-Finetuning-Stories"
}
# Models accessed through the OpenAI-compatible OpenRouter endpoint (free tier)
openai_model = {
    "meta-llama/llama-3.1-70b-instruct:free": "meta-llama/llama-3.1-70b-instruct:free",
    "meta-llama/llama-3.1-8b-instruct:free": "meta-llama/llama-3.1-8b-instruct:free",
    "mistralai/mistral-7b-instruct:free": "mistralai/mistral-7b-instruct:free",
    "google/gemma-2-9b-it:free": "google/gemma-2-9b-it:free",
}
# Default model (first entry of replicate_model)
default_model_name = list(replicate_model.items())[0][0]
# Define available user names
user_names = ["Laura Musto", "Brian Carpenter", "Germán Capdehourat", "Isabel Amigo", "Aiala Rosá", "Luis Chiruzzo", "Ignacio Sastre", "Santiago Góngora", "Ignacio Remersaro", "Rodrigo Souza"]
MAX_INTERACTIONS = 5
QUESTION_PROMPT = "Please ask a simple question about the story to encourage interaction."