# deepsearch / app.py
import os
import gradio as gr
from huggingface_hub import InferenceClient
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
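# HF_TOKEN (a Hugging Face access token) is expected to be defined in the
# environment or in the .env file loaded above; the client below reads it
# via os.getenv("HF_TOKEN").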
# Initialize Hugging Face client
client = InferenceClient(
    provider="novita",
    api_key=os.getenv("HF_TOKEN"),
)
def chat(message, history):
    """
    Process chat messages using Hugging Face's Inference Provider
    """
    try:
        # Format the conversation history
        messages = []
        for human, assistant in history:
            messages.append({"role": "user", "content": human})
            messages.append({"role": "assistant", "content": assistant})
        messages.append({"role": "user", "content": message})

        # Get response from the model
        completion = client.chat.completions.create(
            model="deepseek-ai/DeepSeek-V3-0324",
            messages=messages,
            temperature=0.7,
            max_tokens=1000,
        )
        return completion.choices[0].message.content
    except Exception as e:
        return f"Error: {str(e)}"
# Create Gradio interface
with gr.Blocks(title="DeepSearch - AI Search Assistant") as demo:
    gr.Markdown("# DeepSearch")
    gr.Markdown("Ask anything and get AI-powered responses using state-of-the-art language models.")

    chatbot = gr.ChatInterface(
        fn=chat,
        examples=[
            "What is the capital of France?",
            "Explain quantum computing in simple terms",
            "Write a short poem about artificial intelligence",
        ],
        title="DeepSearch Chat",
        description="Ask me anything!",
        theme=gr.themes.Soft(),
        retry_btn=None,
        undo_btn=None,
        clear_btn="Clear",
    )
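# Note: retry_btn, undo_btn, and clear_btn are ChatInterface arguments from
# Gradio 4.x; Gradio 5 removed them, so they may need to be dropped when
# upgrading.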
if __name__ == "__main__":
    demo.launch(share=True)