Dockerize full
parent f37e295ad9
commit 415e798021

README.md | 25 changed lines
@@ -1,12 +1,17 @@
-pip install llama-index
-pip install llama-index-llms-groq
-pip install llama-index-embeddings-huggingface
-pip install llama-parse
-pip install chromadb
-pip install llama-index-vector-stores-chroma
-pip install llama-index-embeddings-huggingface
-pip install python-fasthtml
-pip install grok
-pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
+The system runs inside a Docker container.
+
+To build the image:
+
+docker build -t supertutor-app .
+
+To run it:
+
+docker run -it -p 7884:7884 --rm supertutor-app
+
+If you run into permission problems with the Docker socket:
+
+sudo chmod 666 /var/run/docker.sock
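Once the container is up, a quick smoke test from the host confirms the app answers on the published port. This is a minimal sketch, assuming the home page is served at / (as suggested by main.py's home() handler) and that the requests package is available on the host:

    # smoke_test.py - check that the container answers on the published port
    import requests

    resp = requests.get("http://localhost:7884/", timeout=10)
    print(resp.status_code)   # expect 200 once the app has started
    print(resp.text[:200])    # beginning of the rendered home page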
main.py | 96 changed lines
@@ -12,7 +12,7 @@ from llama_index.core import Settings
 #from llama_index.embeddings.huggingface_api import (
 #    HuggingFaceInferenceAPIEmbedding,
 #)
-
+import shutil
 import chromadb
 import os
 import threading
@@ -22,6 +22,9 @@ import json
 #from llama_index.llms.ollama import Ollama
 from llama_index.core.base.response.schema import Response
 from groq import Groq
+from fastapi import File as FileFast
+from fastapi import UploadFile as UploadFileFast
+from fastapi import Form as FormFast
 #import shutil
 pwd = os.getcwd()
 def extractConfig(nameModel="SystemData",relPath=os.path.join(pwd,"conf/experiment_config.json"),dataOut="keyantrophics"):
@@ -78,9 +81,9 @@ def checkInfoSources(user:str):
     subdir = [Option(file.name,value="static/"+user+"/"+file.name) for file in files if (file.is_dir() and file.name!="chroma_db") ]

     userdata=user
-    print("Cambio",userdata)
+
     return Form(
-        H3("Grupos de archivos",cls="col-xs-3"),
+        H3("Grupo de archivos",cls="col-xs-3"),
         Select(
             *subdir,name="data",cls="col-xs-3"),
         Input(id="name-db", name="collection", placeholder="Enter a collection name",cls="col-xs-4"),
@@ -163,7 +166,7 @@ def loadCollection(data:str):
 def queryPrompt(question:str):
     #index=load_create_db(collection="my_collection")

-    query_engine = index.as_query_engine(similarity_top_k=15,vector_store_query_mode="default",response_mode="tree_summarize")
+    query_engine = index.as_query_engine(similarity_top_k=5,vector_store_query_mode="default",response_mode="tree_summarize")
     summary_prompt = (
         "Por favor, genera un resumen completo y detallado del material dado. "
         "Incluye los principales temas, argumentos y conclusiones. "
@@ -191,7 +194,7 @@ def queryPrompt(question:str):
     response2 = query_engine.query(tematic_prompt)
     response3 = query_engine.query(issues_prompt)
     response4 = query_engine.query(Question_prompt)
-    Output="<H1>Summary</H1>"+str(response)+"<H1>Tematic</H1>"+str(response2)+"<H1>Issues</H1>"+str(response3)+"<H1>Questions</H1>"+str(response4)
+    Output="<H1>Resumen</H1>"+str(response)+"<H1>Tematica</H1>"+str(response2)+"<H1>Problemas</H1>"+str(response3)+"<H1>Preguntas</H1>"+str(response4)

     return Output

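For context on the similarity_top_k change above: this query engine sits on top of a Chroma-backed LlamaIndex index, whose actual wiring lives in the module's load_create_db helper (commented out in the hunk). The following is only a rough sketch of how such an engine is typically assembled; the storage path, collection name, and embedding model are placeholder assumptions, not taken from this diff:

    import chromadb
    from llama_index.core import Settings, VectorStoreIndex
    from llama_index.embeddings.huggingface import HuggingFaceEmbedding
    from llama_index.vector_stores.chroma import ChromaVectorStore

    # placeholder embedding model; the project configures its own via Settings
    # Settings.llm must also point at a model (the repo installs llama-index-llms-groq)
    Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")

    db = chromadb.PersistentClient(path="chroma_db")            # placeholder path
    collection = db.get_or_create_collection("my_collection")   # name from the commented-out call
    index = VectorStoreIndex.from_vector_store(
        ChromaVectorStore(chroma_collection=collection)
    )

    # same parameters as the new line in the hunk: fewer retrieved chunks,
    # default vector-store querying, tree_summarize to condense the answer
    query_engine = index.as_query_engine(
        similarity_top_k=5,
        vector_store_query_mode="default",
        response_mode="tree_summarize",
    )
    print(query_engine.query("¿De qué trata el material?"))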
@@ -239,10 +242,6 @@ Verificate if previous context is related to the previous response, if not, say
 The format of output is a json with keys 'coherencia', 'exactitud', 'relacion_con_el_contexto' and 'comentario' .
 'coherencia', 'exactitud', 'relacion_con_el_contexto' are numeric variables with max value is 10"""%(response,ContextNodes,message)
-
-
-
-    print(chat_engine.__dict__)

     chat_completion = client.chat.completions.create(
         #
         # Required parameters
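The hunk above trims debug output around the Groq call that grades the chat answer against its retrieved context. For orientation, a minimal sketch of issuing that request and reading the JSON scores back; the model name and the eval_prompt variable are placeholders, not taken from this diff:

    import json
    from groq import Groq

    client = Groq()  # assumes GROQ_API_KEY is set in the environment

    eval_prompt = "..."  # the formatted evaluation prompt built in the code above

    chat_completion = client.chat.completions.create(
        model="llama3-70b-8192",  # placeholder model name
        messages=[{"role": "user", "content": eval_prompt}],
    )
    scores = json.loads(chat_completion.choices[0].message.content)
    # the prompt asks for numeric fields capped at 10 plus a free-text 'comentario'
    print(scores["coherencia"], scores["exactitud"], scores["relacion_con_el_contexto"])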
@@ -302,7 +301,7 @@ def SummarySources():
     return Form("Este es muy caro para documentos grandes y tarda mucho",
         Select(
             *subdir,name="data"),
-        Input( name="query", placeholder="Enter a query"),
+        Input( name="query", placeholder="Dar una pregunta"),
         Button("Submit",type="submit"), hx_post="/SummaryMake",hx_swap="innerHTML",hx_target="#summaryR" )

 @app.post("/SummaryMake")
@@ -374,18 +373,18 @@ def home():
     Div(Div(id="options",hx_target="this",hx_swap="outerHTML",hx_get="/listmodelactives",hx_trigger="click from:#buttonMenuuser delay:3s"),cls="col-xs-12"),
     Div(Div(id="Infomodel"),cls="col-xs-12"),
     #Div("Resumen",Div(id="summary",hx_target="this",hx_swap="outerHTML",hx_get="/SummarySources",hx_trigger="click from:#buttonMenuuser"),Div(id="summaryR")),
-    Div(
+    Div(H3("Chat para preguntarle al material de estudios "),
         Div(
             Form(
-                Input(id="question", name="message", placeholder="Enter a message"),
+                Input(id="question", name="message", placeholder="Dar una pregunta"),
                 Button("Submit",type="submit"), hx_post="/chatData",hx_swap="afterend",hx_target="#questionR" ),
             Div(id="questionR")
         ,id="questions"),
     cls="col-xs-6"),
-    Div(
+    Div(H3("Este genera informacion general del material, pero es intensivo en unso del api. No USAR."),
         Div(
             Form(
-                Input(id="query", name="question", placeholder="Enter a query"),
+                Input(id="query", name="question", placeholder="Dar una pregunta"),
                 Button("Submit",type="submit"), hx_post="/queryprompt",hx_swap="innerHTML",hx_target="#queryR" ),
             Div(id="queryR"),
         id="query"),
@@ -394,14 +393,71 @@ def home():
     ))
     return page

-# @app.post("/upload")
-# def upload(data: UploadFile = File(...),user : str = Form(...), dir: str = Form(...)):
-# filename="static/"+user+dir+data.filename
-# with open(f"{filename}", "wb") as buffer:
-# shutil.copyfileobj(data.file, buffer)
-
-# app.mount("/static", StaticFiles(directory="static"), name="static")
+@app.get("/fileup")
+def fileup():
+    return Div(
+        P('Upload data Super tutor'),
+        Div(H2("Subir Archivos"),Form(
+            Input(type='file', name='file',cls="col-xs-3"),
+            Input( name='dir',placeholder="Enter a directory",cls="col-xs-2"),
+            Input( name='user',placeholder="Enter a user",cls="col-xs-2"),
+            Button('Upload', type='submit',cls="col-xs-4"),
+            hx_post="/upload",
+            hx_target="#info",
+            hx_swap="innerHTML",
+            enctype="multipart/form-data",
+            ),cls="col-xs-12"),
+        Div(H2("Crear Usuario"),Form(
+            Input( name='user',placeholder="Enter a user",cls="col-xs-7"),
+            Button('Upload', type='submit',cls="col-xs-5"),
+            hx_post="/createuser",
+            hx_target="#info",
+            hx_swap="innerHTML",
+            enctype="multipart/form-data",
+            ),cls="col-xs-12"),
+        Div(H2("Crear grupo de Archivos"),Form(
+            Input( name='dir',placeholder="Enter a directory",cls="col-xs-4"),
+            Input( name='user',placeholder="Enter a user",cls="col-xs-3"),
+            Button('Upload', type='submit',cls="col-xs-5"),
+            hx_post="/creategroup",
+            hx_target="#info",
+            hx_swap="innerHTML",
+            enctype="multipart/form-data",
+            ),cls="col-xs-12"),
+        Div(id="info")
+    )
+
+@app.post("/upload")
+def upload(file: UploadFile,dir : str = Form(...),user : str = Form(...)):
+    filenameB="static/"+user+"/"+dir+"/"+file.filename
+    pathB="static/"+user+"/"+dir
+    if not os.path.exists(pathB):
+        os.makedirs(pathB)
+    if not os.path.exists(filenameB):
+        with open(f"{filenameB}", "wb") as buffer:
+            shutil.copyfileobj(file.file, buffer)
+    return P("Se ha subido %s"%(filenameB))
+
+@app.post("/createuser")
+def createuser(user : str = Form(...)):
+    pathB="static/"+user
+    if not os.path.exists(pathB):
+        os.makedirs(pathB)
+    return P("Se ha creado %s"%(pathB))
+
+@app.post("/creategroup")
+def createuser(user : str = Form(...),dir : str = Form(...)):
+    pathB="static/"+user+"/"+dir
+    if not os.path.exists(pathB):
+        os.makedirs(pathB)
+    return P("Se ha creado %s"%(pathB))
+
+app.mount("/static", StaticFiles(directory="static"), name="static")
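A quick way to exercise the new /upload route from outside the container; this is a hypothetical local test (user and group names are placeholders, host and port taken from the README's docker run line):

    import requests

    with open("apuntes.pdf", "rb") as f:
        r = requests.post(
            "http://localhost:7884/upload",
            files={"file": f},                          # matches the UploadFile parameter
            data={"dir": "biologia", "user": "maria"},  # placeholder group and user
        )
    print(r.status_code, r.text)  # handler answers "Se ha subido static/<user>/<dir>/<file>"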
Dockerfile | 21 changed lines (new file)
@@ -0,0 +1,21 @@
+# Use a Python base image
+FROM python:3.11.10-bookworm
+
+# Set the working directory inside the container
+WORKDIR /app
+
+# Copy requirements.txt and the other files it needs
+COPY requirements.txt ./
+
+# Install the Python dependencies
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy the application code into the container
+COPY . .
+
+# Expose the port Uvicorn will use
+EXPOSE 7884
+
+# Start the server
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7884", "--reload"]
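requirements.txt itself is not part of this diff. Judging from the pip install lines removed from the README, it presumably lists something along these lines (package set assumed, unpinned; fastapi and uvicorn added because main.py and the CMD line rely on them, and the README's "pip install grok" is most likely meant to be groq, which main.py imports):

    llama-index
    llama-index-llms-groq
    llama-index-embeddings-huggingface
    llama-parse
    chromadb
    llama-index-vector-stores-chroma
    python-fasthtml
    groq
    fastapi
    uvicorn
    torch
    torchvision
    torchaudio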