|
import os |
|
import json |
|
import subprocess |
|
import gradio as gr |
|
from threading import Thread |
|
from huggingface_hub import hf_hub_download |
|
from llama_cpp import Llama |
|
from datetime import datetime |
|
|
|
|
|
# Hugging Face repository and quantized GGUF weight file for the Urdu model.
MODEL_ID = "large-traversaal/Alif-1.0-8B-Instruct"
MODEL_FILE = "model-Q8_0.gguf"

# Download the GGUF file (or reuse the local HF cache); returns a local path.
model_path_file = hf_hub_download(MODEL_ID, filename=MODEL_FILE)
|
|
|
|
|
# Load the GGUF model through the llama-cpp-python bindings.
llama = Llama(
    model_path=model_path_file,
    n_gpu_layers=40,  # layers offloaded to GPU — requires a GPU-enabled llama.cpp build
    n_threads=8,      # CPU threads used for any non-offloaded work
    n_batch=512,      # prompt-processing batch size
    n_ctx=4096,       # context window; keep in sync with CONTEXT_LENGTH used by the UI
    verbose=True      # print llama.cpp load/runtime diagnostics to stdout
)
|
|
|
# --- UI configuration constants ---
CHAT_TEMPLATE = "Alif Chat"   # chat template name (appears unused in this file — TODO confirm)
CONTEXT_LENGTH = 4096         # upper bound for the "Max new tokens" slider; matches Llama n_ctx
COLOR = "blue"                # primary hue for the Gradio Soft theme
EMOJI = "💬"                  # emoji prefix for the app title
DESCRIPTION = "Urdu AI Chatbot powered by Llama.cpp"
|
|
|
|
|
def generate_response(message, history, system_prompt, temperature, max_new_tokens, top_k, repetition_penalty, top_p):
    """Stream a model response for one chat turn (Gradio ChatInterface callback).

    Args:
        message: Latest user message.
        history: Prior chat turns supplied by Gradio. Currently ignored —
            each turn is answered independently of the conversation.
        system_prompt: Instruction text from the UI textbox; when empty,
            a default Urdu-chatbot instruction is used.
        temperature: Sampling temperature from the UI slider.
        max_new_tokens: Cap on the number of generated tokens.
        top_k: Top-K sampling cutoff.
        repetition_penalty: Penalty applied to repeated tokens.
        top_p: Nucleus (top-P) sampling probability mass.

    Yields:
        str: The accumulated response text after each streamed chunk.
    """
    # Use the UI-provided system prompt when given; otherwise fall back to the
    # original hard-coded instruction (the textbox defaults to "", so default
    # behavior is unchanged).
    instruction = (
        system_prompt.strip()
        if system_prompt and system_prompt.strip()
        else "You are an Urdu Chatbot. Write an appropriate response for the given instruction:"
    )
    chat_prompt = f"{instruction} {message} Response:"

    # BUG FIX: temperature/top_p/top_k/repetition_penalty from the UI sliders
    # were previously ignored — forward them so the controls take effect.
    # NOTE(review): stop on "\n" limits output to a single line — confirm this
    # truncation is intentional.
    response = llama(
        chat_prompt,
        max_tokens=max_new_tokens,
        temperature=temperature,
        top_p=top_p,
        top_k=int(top_k),                 # llama.cpp expects an integer top_k
        repeat_penalty=repetition_penalty,
        stop=["Q:", "\n"],
        echo=False,
        stream=True,
    )

    # Accumulate streamed chunks and re-yield the growing text so Gradio
    # renders a live "typing" effect.
    text = ""
    for chunk in response:
        content = chunk["choices"][0]["text"]
        if content:
            text += content
            yield text
|
|
|
|
|
# --- Gradio UI: assemble the chat app and launch it ---
with gr.Blocks() as demo:
    # Deferred-render chatbot widget so ChatInterface can place it itself.
    chatbot = gr.Chatbot(label="Urdu Chatbot", likeable=True, render=False)

    # Controls shown in the collapsed "Parameters" accordion; each one is
    # forwarded to generate_response as an additional positional input.
    parameter_controls = [
        gr.Textbox("", label="System prompt", render=False),
        gr.Slider(0, 1, 0.6, label="Temperature", render=False),
        gr.Slider(128, CONTEXT_LENGTH, 1024, label="Max new tokens", render=False),
        gr.Slider(1, 80, 40, step=1, label="Top K sampling", render=False),
        gr.Slider(0, 2, 1.1, label="Repetition penalty", render=False),
        gr.Slider(0, 1, 0.95, label="Top P sampling", render=False),
    ]

    # Clickable Urdu example prompts.
    example_prompts = [
        ["شہر کراچی کے بارے میں بتاؤ"],
        ["قابل تجدید توانائی کیا ہے؟"],
        ["پاکستان کی تاریخ کے بارے میں بتائیں۔"],
    ]

    chat = gr.ChatInterface(
        generate_response,
        chatbot=chatbot,
        title=f"{EMOJI} Alif-1.0 Chatbot",
        description=DESCRIPTION,
        examples=example_prompts,
        additional_inputs_accordion=gr.Accordion(label="⚙️ Parameters", open=False, render=False),
        additional_inputs=parameter_controls,
        theme=gr.themes.Soft(primary_hue=COLOR),
    )

# Queue requests (at most 20 pending) and expose a public share link.
demo.queue(max_size=20).launch(share=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|