Dockerized the app

This commit is contained in:
Mario Gil 2024-10-08 15:42:08 -05:00
parent c4b727ab19
commit f37e295ad9
3 changed files with 175 additions and 8 deletions

21
dockerfile Normal file
View File

@ -0,0 +1,21 @@
# Use a pinned Python base image (Debian bookworm variant)
FROM python:3.11.10-bookworm
# Set the working directory inside the container
WORKDIR /app
# Copy only requirements.txt first so the dependency layer is cached
# independently of application-code changes
COPY requirements.txt ./
# Install Python dependencies; --no-cache-dir keeps the image smaller
RUN pip install --no-cache-dir -r requirements.txt
# Copy the rest of the application code into the container
COPY . .
# Expose the port Uvicorn will listen on
EXPOSE 7884
# Start the app. NOTE: --reload was removed — it is a development-only
# file-watching flag and should not run inside a deployed container.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7884"]

16
main.py
View File

@ -9,9 +9,9 @@ from llama_index.core import SummaryIndex
from llama_index.llms.groq import Groq as GroqLLamaIndex
from chromadb import PersistentClient
from llama_index.core import Settings
from llama_index.embeddings.huggingface_api import (
HuggingFaceInferenceAPIEmbedding,
)
#from llama_index.embeddings.huggingface_api import (
# HuggingFaceInferenceAPIEmbedding,
#)
import chromadb
import os
@ -19,10 +19,10 @@ import threading
import time
from llama_index.core.memory import ChatMemoryBuffer
import json
from llama_index.llms.ollama import Ollama
#from llama_index.llms.ollama import Ollama
from llama_index.core.base.response.schema import Response
from groq import Groq
import shutil
#import shutil
pwd = os.getcwd()
def extractConfig(nameModel="SystemData",relPath=os.path.join(pwd,"conf/experiment_config.json"),dataOut="keyantrophics"):
configPath=os.path.join(os.getcwd(),relPath)
@ -50,8 +50,8 @@ Settings.llm = llm_localLlamma
gridlink = Link(rel="stylesheet", href="https://cdnjs.cloudflare.com/ajax/libs/flexboxgrid/6.3.1/flexboxgrid.min.css", type="text/css")
app= FastHTML(hdrs=(picolink, gridlink))
colorpico=Link(rel="stylesheet", href="https://cdn.jsdelivr.net/npm/@picocss/pico@2/css/pico.colors.min.css")
app= FastHTML(hdrs=(picolink, gridlink,colorpico))
@ -293,7 +293,7 @@ The format of output is a json with keys 'coherencia', 'exactitud', 'relacion_co
)
return P(message),P(response),P(chat_completion.choices[0].message.content)
return H6(message),P(response,cls="pico-color-pink-500"),P(chat_completion.choices[0].message.content,cls="pico-color-pink-500")
@app.get("/SummarySources")
def SummarySources():

146
requirements.txt Normal file
View File

@ -0,0 +1,146 @@
aiohappyeyeballs==2.4.3
aiohttp==3.10.9
aiosignal==1.3.1
annotated-types==0.7.0
anyio==4.6.0
asgiref==3.8.1
attrs==24.2.0
backoff==2.2.1
bcrypt==4.2.0
beautifulsoup4==4.12.3
build==1.2.2.post1
cachetools==5.5.0
certifi==2024.8.30
charset-normalizer==3.3.2
chroma-hnswlib==0.7.6
chromadb==0.5.7
click==8.1.7
coloredlogs==15.0.1
dataclasses-json==0.6.7
Deprecated==1.2.14
dirtyjson==1.0.8
distro==1.9.0
durationpy==0.9
fastapi==0.115.0
fastcore==1.7.11
fastlite==0.0.11
filelock==3.16.1
flatbuffers==24.3.25
frozenlist==1.4.1
fsspec==2024.9.0
google-auth==2.35.0
googleapis-common-protos==1.65.0
greenlet==3.1.1
groq==0.11.0
grpcio==1.66.2
h11==0.14.0
httpcore==1.0.6
httptools==0.6.1
httpx==0.27.2
huggingface-hub==0.25.1
humanfriendly==10.0
idna==3.10
importlib_metadata==8.4.0
importlib_resources==6.4.5
itsdangerous==2.2.0
jiter==0.6.1
joblib==1.4.2
kubernetes==31.0.0
llama-cloud==0.1.2
llama-index==0.11.13
llama-index-agent-openai==0.3.4
llama-index-cli==0.3.1
llama-index-core==0.11.16
llama-index-embeddings-huggingface==0.3.1
llama-index-embeddings-openai==0.2.5
llama-index-indices-managed-llama-cloud==0.4.0
llama-index-legacy==0.9.48.post3
llama-index-llms-openai==0.2.12
llama-index-llms-groq==0.2.0
llama-index-multi-modal-llms-openai==0.2.2
llama-index-program-openai==0.2.0
llama-index-question-gen-openai==0.2.0
llama-index-readers-file==0.2.2
llama-index-readers-llama-parse==0.3.0
llama-index-vector-stores-chroma==0.2.0
llama-parse==0.5.7
markdown-it-py==3.0.0
marshmallow==3.22.0
mdurl==0.1.2
mmh3==5.0.1
monotonic==1.6
mpmath==1.3.0
multidict==6.1.0
mypy-extensions==1.0.0
nest-asyncio==1.6.0
networkx==3.3
nltk==3.9.1
numpy==1.26.4
oauthlib==3.2.2
onnxruntime==1.19.2
openai==1.51.1
opentelemetry-api==1.27.0
opentelemetry-exporter-otlp-proto-common==1.27.0
opentelemetry-exporter-otlp-proto-grpc==1.27.0
opentelemetry-instrumentation==0.48b0
opentelemetry-instrumentation-asgi==0.48b0
opentelemetry-instrumentation-fastapi==0.48b0
opentelemetry-proto==1.27.0
opentelemetry-sdk==1.27.0
opentelemetry-semantic-conventions==0.48b0
opentelemetry-util-http==0.48b0
orjson==3.10.7
overrides==7.7.0
packaging==24.1
pandas==2.2.3
pillow==10.4.0
posthog==3.7.0
propcache==0.2.0
protobuf==4.25.5
pyasn1==0.6.1
pyasn1_modules==0.4.1
pydantic==2.9.2
pydantic_core==2.23.4
Pygments==2.18.0
pypdf==4.3.1
PyPika==0.48.9
pyproject_hooks==1.2.0
pysqlite3-binary==0.5.3.post1
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
python-fasthtml==0.6.7
python-multipart==0.0.12
pytz==2024.2
PyYAML==6.0.2
regex==2024.9.11
requests==2.32.3
requests-oauthlib==2.0.0
rich==13.9.2
rsa==4.9
shellingham==1.5.4
six==1.16.0
sniffio==1.3.1
soupsieve==2.6
SQLAlchemy==2.0.35
sqlite-minutils==3.37.0.post3
starlette==0.38.6
striprtf==0.0.26
sympy==1.13.3
tenacity==8.5.0
tiktoken==0.8.0
tokenizers==0.20.0
tqdm==4.66.5
typer==0.12.5
typing-inspect==0.9.0
typing_extensions==4.12.2
tzdata==2024.2
urllib3==2.2.3
uvicorn==0.31.0
uvloop==0.20.0
watchfiles==0.24.0
websocket-client==1.8.0
websockets==13.1
wrapt==1.16.0
yarl==1.14.0
zipp==3.20.2