Spaces:
Build error
Build error
Add environment variable loading for HuggingFace token
Browse files
- Load HuggingFace token from .env file
- Pass token to InferenceClient initialization
- Update generate endpoint route to remove '/api' prefix
- Print token for debugging purposes
app.py
CHANGED
|
@@ -6,9 +6,17 @@ from langchain_core.messages import HumanMessage, AIMessage
|
|
| 6 |
from langgraph.checkpoint.memory import MemorySaver
|
| 7 |
from langgraph.graph import START, MessagesState, StateGraph
|
| 8 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
# Initialize the HuggingFace model
|
| 10 |
model = InferenceClient(
|
| 11 |
model="HuggingFaceTB/SmolLM2-1.7B-Instruct",
|
|
|
|
| 12 |
)
|
| 13 |
|
| 14 |
# Define the function that calls the model
|
|
@@ -68,7 +76,7 @@ async def api_home():
|
|
| 68 |
return {"detail": "Welcome to FastAPI, Langchain, Docker tutorial"}
|
| 69 |
|
| 70 |
# Generate endpoint
|
| 71 |
-
@app.post("/api/generate")
|
| 72 |
async def generate(request: QueryRequest):
|
| 73 |
"""
|
| 74 |
Endpoint to generate text using the language model
|
|
|
|
| 6 |
from langgraph.checkpoint.memory import MemorySaver
|
| 7 |
from langgraph.graph import START, MessagesState, StateGraph
|
| 8 |
|
| 9 |
+
import os
|
| 10 |
+
from dotenv import load_dotenv
|
| 11 |
+
load_dotenv()
|
| 12 |
+
|
| 13 |
+
HUGGINGFACE_TOKEN = os.environ.get("HUGGINGFACE_TOKEN", os.getenv("HUGGINGFACE_TOKEN"))
|
| 14 |
+
print(HUGGINGFACE_TOKEN)
|
| 15 |
+
|
| 16 |
# Initialize the HuggingFace model
|
| 17 |
model = InferenceClient(
|
| 18 |
model="HuggingFaceTB/SmolLM2-1.7B-Instruct",
|
| 19 |
+
api_key=os.getenv("HUGGINGFACE_TOKEN")
|
| 20 |
)
|
| 21 |
|
| 22 |
# Define the function that calls the model
|
|
|
|
| 76 |
return {"detail": "Welcome to FastAPI, Langchain, Docker tutorial"}
|
| 77 |
|
| 78 |
# Generate endpoint
|
| 79 |
+
@app.post("/generate")
|
| 80 |
async def generate(request: QueryRequest):
|
| 81 |
"""
|
| 82 |
Endpoint to generate text using the language model
|