import os
from typing import Dict, Generator, List, Optional, Tuple

import gradio as gr
import openai

# Endpoint and key for the OpenAI-compatible server that hosts the model.
# Both are read from the environment so no secret is hardcoded in the source.
API_URL = os.getenv('API_URL', 'https://huggingface.co')
API_KEY = os.getenv('API_KEY')

CUSTOM_JS = os.getenv('CUSTOM_JS', None)

oai_client = openai.OpenAI(api_key=API_KEY, base_url=API_URL)

# Gradio chat history is a list of (user, assistant) pairs; the OpenAI API
# expects a flat list of {'role': ..., 'content': ...} dicts.
History = List[Tuple[str, str]]
Messages = List[Dict[str, str]]


def clear_session() -> Tuple[str, History]:
    # Reset both the input textbox and the chat history.
    return '', []

def history_to_messages(history: History) -> Messages:
    # Convert Gradio (user, assistant) pairs into OpenAI chat messages.
    messages = []
    for user_msg, assistant_msg in history:
        messages.append({'role': 'user', 'content': user_msg.strip()})
        messages.append({'role': 'assistant', 'content': assistant_msg.strip()})
    return messages
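
# Example of the mapping done by history_to_messages (hypothetical values):
# [('Hi', 'Hello!')] -> [{'role': 'user', 'content': 'Hi'},
#                        {'role': 'assistant', 'content': 'Hello!'}]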


def messages_to_history(messages: Messages) -> History:
    # Inverse of history_to_messages: pair up alternating user/assistant
    # messages into Gradio-style (user, assistant) tuples.
    history = []
    for user_msg, assistant_msg in zip(messages[0::2], messages[1::2]):
        history.append((user_msg['content'], assistant_msg['content']))
    return history


def model_chat(query: Optional[str], history: Optional[History]) -> Generator[str, None, None]:
    # Stream the model's reply to `query`, given the chat history shown in the UI.
    if query is None:
        query = ''
    if history is None:
        history = []
    if not query.strip():
        return
    messages = history_to_messages(history)
    messages.append({'role': 'user', 'content': query.strip()})
    gen = oai_client.chat.completions.create(
        model='dicta-il/dictalm2.0-instruct',
        messages=messages,
        temperature=0.7,
        max_tokens=1024,
        top_p=0.9,
        stream=True,
    )
    # Accumulate the streamed deltas and yield the growing answer so the UI
    # can update the assistant message in place.
    full_response = ''
    for completion in gen:
        text = completion.choices[0].delta.content
        full_response += text or ''
        yield full_response
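
# A minimal sanity check, assuming the configured endpoint is reachable and an
# API key is set (the prompt string is only an illustrative placeholder):
#
#     for partial in model_chat('Hello', []):
#         print(partial)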


with gr.Blocks(css='''
    .gr-group {direction: rtl;}
    .chatbot {text-align: right;}
    .dicta-header {
        background-color: var(--input-background-fill);
        border-radius: 10px;
        padding: 20px;
        text-align: center;
        display: flex;
        flex-direction: row;
        align-items: center;
        box-shadow: var(--block-shadow);
        border-color: var(--block-border-color);
        border-width: 1px;
    }

    @media (max-width: 768px) {
        .dicta-header {
            flex-direction: column; /* switch to a vertical layout on mobile devices */
        }
    }

    .chatbot.prose {
        font-size: 1.2em;
    }

    .dicta-logo {
        width: 150px;
        height: auto;
        margin-bottom: 20px;
    }

    .dicta-intro-text {
        margin-bottom: 20px;
        text-align: center;
        display: flex;
        flex-direction: column;
        align-items: center;
        width: 100%;
        font-size: 1.1em;
    }

    textarea {
        font-size: 1.2em;
    }
''', js=CUSTOM_JS) as demo:
    gr.Markdown("""
    <div class="dicta-header">
        <div class="dicta-intro-text">
            <h1>DictaLM 2.0 - Instruct Chat Demo</h1>
            <p>Welcome to the interactive demo of DictaLM-2.0. Explore the capabilities of our model and see how it can assist with your tasks.<br/>
            <span dir='rtl'>ברוכים הבאים לדמו האינטראקטיבי של DictaLM-2.0. חקרו את יכולות המודל שלנו וראו כיצד הוא יכול לסייע לכם במשימותיכם.</span><br/>
            <span dir='rtl'>המודל משוחרר לנחלת הכלל ואפשר להורידו בקישור: <a href="https://huggingface.co/dicta-il/dictalm2.0-instruct">כאן</a></span></p>
        </div>
    </div>
    """)
    interface = gr.ChatInterface(model_chat, fill_height=False)
    interface.chatbot.rtl = True
    interface.textbox.placeholder = "הכנס שאלה בעברית (או באנגלית!)"
    interface.textbox.rtl = True
    interface.textbox.text_align = 'right'
    interface.theme_css += '.gr-group {direction: rtl !important;}'

# Keep the programmatic API closed and launch the demo without a public share link.
demo.queue(api_open=False).launch(max_threads=20, share=False)