Simplify Dockerfile and app.py configuration
- Remove commented-out HuggingFace and environment-related code
- Add user creation in Dockerfile for improved security
- Update base image to standard Python 3.9
- Modify file copying to use chown for better permissions
- Adjust CMD to use port 7860 consistently
- Dockerfile +9 -8
- app.py +13 -13
Dockerfile
CHANGED
@@ -1,15 +1,16 @@
-FROM python:3.9
+FROM python:3.9
 
+RUN useradd -m -u 1000 user
 WORKDIR /app
 
-COPY requirements.txt .
-RUN pip install --no-cache-dir -r requirements.txt
+COPY --chown=user ./requirements.txt requirements.txt
+RUN pip install --no-cache-dir --upgrade -r requirements.txt
 
-COPY .
+COPY --chown=user . /app
 
-EXPOSE 8000
+# EXPOSE 8000
 
-RUN --mount=type=secret,id=HUGGINGFACE_TOKEN,mode=0444,required=true \
-
+# RUN --mount=type=secret,id=HUGGINGFACE_TOKEN,mode=0444,required=true \
+#     test -f /run/secrets/HUGGINGFACE_TOKEN && echo "Secret exists!"
 
-CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "
+CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
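Note on the security change: the new Dockerfile creates the `user` account and copies files with `--chown=user`, but it never issues a `USER` instruction, so uvicorn still runs as root. A minimal sketch of the commonly used non-root variant, assuming nothing in the build needs root after the pip install (the `USER user` step is an addition for illustration, not part of this commit):

FROM python:3.9

RUN useradd -m -u 1000 user

WORKDIR /app

COPY --chown=user ./requirements.txt requirements.txt
# Installing as root into the system site-packages keeps the packages
# importable after switching to the unprivileged user below.
RUN pip install --no-cache-dir --upgrade -r requirements.txt

COPY --chown=user . /app

# Not in this commit: run the server as the unprivileged user.
USER user

CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]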
app.py
CHANGED
@@ -1,17 +1,17 @@
 from fastapi import FastAPI, HTTPException
-from pydantic import BaseModel
-from huggingface_hub import InferenceClient
+# from pydantic import BaseModel
+# from huggingface_hub import InferenceClient
 
-from langchain_core.messages import HumanMessage, AIMessage
-from langgraph.checkpoint.memory import MemorySaver
-from langgraph.graph import START, MessagesState, StateGraph
+# from langchain_core.messages import HumanMessage, AIMessage
+# from langgraph.checkpoint.memory import MemorySaver
+# from langgraph.graph import START, MessagesState, StateGraph
 
-import os
-from dotenv import load_dotenv
-load_dotenv()
+# import os
+# from dotenv import load_dotenv
+# load_dotenv()
 
-HUGGINGFACE_TOKEN = os.environ.get("HUGGINGFACE_TOKEN", os.getenv("HUGGINGFACE_TOKEN"))
-print(HUGGINGFACE_TOKEN)
+# HUGGINGFACE_TOKEN = os.environ.get("HUGGINGFACE_TOKEN", os.getenv("HUGGINGFACE_TOKEN"))
+# print(HUGGINGFACE_TOKEN)
 
 # Initialize the HuggingFace model
 # model = InferenceClient(
@@ -109,6 +109,6 @@ async def api_home():
 # except Exception as e:
 #     raise HTTPException(status_code=500, detail=f"Error al generar texto: {str(e)}")
 
-if __name__ == "__main__":
-
-
+# if __name__ == "__main__":
+#     import uvicorn
+#     uvicorn.run(app, host="0.0.0.0", port=7860)
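With the imports, token handling, and __main__ block commented out, the active code left in app.py is essentially a bare FastAPI application served by the Dockerfile's CMD on port 7860. A minimal sketch of that remaining shape, assuming only the `app` object and the `api_home` route named in the hunk header stay active (the route path and response body here are illustrative, not taken from the commit):

from fastapi import FastAPI, HTTPException  # HTTPException is imported in the real file

app = FastAPI()

@app.get("/")
async def api_home():
    # Illustrative body; the real handler is outside the diff context shown above.
    return {"message": "Space is running"}

Started locally the same way the container does: uvicorn app:app --host 0.0.0.0 --port 7860.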