|
import gradio as gr |
|
import os |
|
import logging |
|
import sys |
|
# Route all log records to stdout so hosting platforms capture them.
# NOTE: logging.basicConfig() already attaches a stdout StreamHandler to the
# root logger.  The original code then called addHandler() with a second
# stdout handler, which made every log line appear twice — one configuration
# call is sufficient.
logging.basicConfig(stream=sys.stdout, level=logging.INFO)

# SECURITY: a live Google API key was previously hard-coded here.  A key
# committed to source control must be treated as leaked and rotated.  Read it
# from the environment instead; downstream code still sees the same
# GOOGLE_API_KEY module constant and environment variable.
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY", "")
if not GOOGLE_API_KEY:
    logging.warning(
        "GOOGLE_API_KEY is not set; Gemini embedding calls will fail."
    )
os.environ["GOOGLE_API_KEY"] = GOOGLE_API_KEY
|
|
|
from g4f import Provider, models |
|
from langchain.llms.base import LLM |
|
|
|
from llama_index.llms.langchain import LangChainLLM |
|
from langchain_g4f import G4FLLM |
|
|
|
from llama_index.core import ( |
|
ServiceContext, |
|
SimpleDirectoryReader, |
|
StorageContext, |
|
VectorStoreIndex, |
|
set_global_service_context, |
|
) |
|
|
|
from llama_index.embeddings.gemini import GeminiEmbedding |
|
import g4f

# Enable g4f's verbose debug output (provider selection, retries) on stdout.
g4f.debug.logging = True

from llama_index.core import Settings
|
|
|
|
|
|
|
# Chat LLM: a free GPT-3.5-turbo-16k backend provided by g4f, wrapped for
# LangChain and then adapted to the llama-index LLM interface.
# BUG FIX: the original line read ``llm= LLM = G4FLLM(...)``, a chained
# assignment that rebound the imported LangChain ``LLM`` base class to the
# instance — almost certainly a typo for the type annotation ``llm: LLM``.
llm: LLM = G4FLLM(
    model=models.gpt_35_turbo_16k,
)

# Adapt the LangChain LLM so llama-index components can drive it.
llm = LangChainLLM(llm=llm)
|
|
|
# Gemini embedding backend used for both indexing and querying.
model_name = "models/embedding-001"

embed_model = GeminiEmbedding(
    model_name=model_name,
    api_key=GOOGLE_API_KEY,
    title="this is a document",
)

# Register the embedder globally so every llama-index component picks it up.
Settings.embed_model = embed_model
|
|
|
# Ingest every PDF found in ./data as llama-index Document objects.
reader = SimpleDirectoryReader(input_dir="data", required_exts=[".pdf"])
documents = reader.load_data()
|
|
|
|
|
|
|
|
|
# Legacy-style global configuration: bundle the LLM, the embedder and the
# chunk size into a ServiceContext and install it globally so downstream
# llama-index components share the same settings.
service_context = ServiceContext.from_defaults(
    llm=llm,
    embed_model=embed_model,
    chunk_size=545,
)
set_global_service_context(service_context)
print("node passer11")
|
|
|
# Split the loaded documents into chunks ("nodes") using the node parser
# configured on the service context.
node_parser = service_context.node_parser
nodes = node_parser.get_nodes_from_documents(documents)
print("node passer")
|
|
|
|
|
# Keep the parsed nodes in an in-memory document store so the index can
# reference them by id.
storage_context = StorageContext.from_defaults()
storage_context.docstore.add_documents(nodes)
print("node passer")
|
|
|
# Build the vector index directly from the already-parsed ``nodes``.
# FIX: the original ``VectorStoreIndex.from_documents(documents, ...)`` ran
# the node parser a second time, duplicating the parsing work done above and
# ignoring the nodes that were just added to the docstore.  The LLM is
# supplied by the globally installed service context, so it does not need to
# be passed here.
index = VectorStoreIndex(
    nodes,
    storage_context=storage_context,
)
print("node passer")

# Default query engine: retrieve the top-k nodes and synthesize an answer.
query_engine = index.as_query_engine()
|
|
|
|
|
|
|
|
|
def greet(name):
    """Run *name* as a query against the PDF index and return the answer text.

    FIX: Gradio's ``"text"`` output component expects a string, but
    ``query_engine.query`` returns a llama-index ``Response`` object —
    convert it explicitly rather than relying on the framework to
    stringify it.
    """
    response = query_engine.query(name)
    print(response)
    return str(response)
|
|
|
# Expose greet() through a minimal text-in / text-out web UI.
iface = gr.Interface(
    fn=greet,
    inputs="text",
    outputs="text",
)
iface.launch()