import sqlite3

import gradio as gr
import pandas as pd
from langchain.callbacks.base import BaseCallbackManager
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.document_loaders import DataFrameLoader
from langchain.embeddings import GPT4AllEmbeddings
from langchain.llms import GPT4All
from langchain.vectorstores import FAISS


def loadModels():
    """Load the local GPT4All model and its embedding model."""
    callback_manager = BaseCallbackManager([StreamingStdOutCallbackHandler()])
    llm = GPT4All(
        model="orca-mini-3b.ggmlv3.q4_0.bin",
        temp=0.1,
        streaming=True,
    )  # callback_manager=callback_manager, verbose=True, repeat_last_n=0
    embeddings = GPT4AllEmbeddings()
    return llm, embeddings


llm, emb = loadModels()

# Load the "copies" table and keep only the rows whose copy_start flag is "T".
con = sqlite3.connect("motor.sqlite")
copies_df = pd.read_sql_query("SELECT * FROM copies", con)
copiesT = copies_df[copies_df.copy_start == "T"]
copiesT = copiesT[["copy_message", "id", "name"]]
data = copiesT

# Build one set of documents from the copy text and another from the copy names.
B = DataFrameLoader(data, page_content_column="copy_message")
B2 = DataFrameLoader(data, page_content_column="name")
documents = B.load()
documents2 = B2.load()

# Reuse the FAISS indexes saved on disk when they exist; otherwise build and persist them.
try:
    db = FAISS.load_local(folder_path="Copies", embeddings=emb)
except Exception:
    db = FAISS.from_documents(documents, emb)
    db.save_local(folder_path="Copies")

try:
    db2 = FAISS.load_local(folder_path="names", embeddings=emb)
except Exception:
    db2 = FAISS.from_documents(documents2, emb)
    db2.save_local(folder_path="names")


def FinderDb(query, dbs):
    """Return the top-3 matches for query as joined text, plus their scores and ids."""
    Sal = dbs.similarity_search_with_score(query, 3)
    page_content = []
    ids = []
    d = []  # similarity scores
    for doc, score in Sal:
        page_content.append(doc.page_content)
        ids.append(doc.metadata["id"])
        d.append(score)
    espacio = """
    ########################
    """
    page_content = espacio.join(page_content)
    return page_content, d, ids


def QARequest(Pregunta):
    """Search both indexes; only the name-index (db2) results feed the visible outputs."""
    query = Pregunta
    page_content, d, ids = FinderDb(query, db)
    page_content2, d2, ids2 = FinderDb(query, db2)
    return page_content2, d2, ids2  # Respuesta, metrica, id outputs are commented out below


with gr.Blocks() as demo:
    Pregunta = gr.Textbox(label="Pregunta")
    # Respuesta = gr.Textbox(label="Respuesta")
    # id = gr.Textbox(label="id")
    # metrica = gr.Textbox(label="metrica")
    Respuesta2 = gr.Textbox(label="Respuesta2")
    id2 = gr.Textbox(label="id2")
    metrica2 = gr.Textbox(label="metrica2")
    Enviar_btn = gr.Button("Responder")
    Enviar_btn.click(
        fn=QARequest,
        inputs=Pregunta,
        outputs=[Respuesta2, metrica2, id2],  # Respuesta, metrica, id,
        api_name="Respuestas",
    )

demo.launch()