import gradio as gr
import requests
import os

HF_API_KEY = os.getenv("TOKEN")
MODEL_NAME = "deepseek-ai/DeepSeek-R1-Distill-Llama-8B"

def chat(prompt):
    # Query the Hugging Face Inference API for the selected model
    response = requests.post(
        f"https://api-inference.huggingface.co/models/{MODEL_NAME}",
        headers={"Authorization": f"Bearer {HF_API_KEY}"},
        json={"inputs": prompt}
    )

    # Parse the JSON body once and print it for debugging, so the actual
    # structure of the response can be inspected in the logs
    data = response.json()
    print(data)

    # Expected structure: a list with one dict containing "generated_text"
    try:
        return data[0]["generated_text"]
    except (KeyError, IndexError, TypeError) as e:
        # The API returned something else (e.g. an error while the model loads)
        print(f"Error: {e}")
        return f"Error processing response: {data}"

# Set up Gradio interface
iface = gr.Interface(fn=chat, inputs="text", outputs="text", title="DeepSeek-R1-Distill-Llama-8B Chatbot (API)")
iface.launch()