import streamlit as st
from langchain.prompts import PromptTemplate
# from langchain.llms import CTransformers
from langchain import HuggingFaceHub
# Function to get a response from the Llama 2 model
def getLlamaresponse(input_text, no_words, blog_style):
    # Local alternative (requires the GGUF model file and the ctransformers package):
    # llm = CTransformers(model='models/llama-2-7b-chat.Q8_0.gguf', model_type='llama',
    #                     config={'max_new_tokens': 256, 'temperature': 0.01})
    llm = HuggingFaceHub(
        repo_id='meta-llama/Llama-2-7b-hf',
        model_kwargs={'max_new_tokens': 256, 'temperature': 0.01}
    )
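    # Note: HuggingFaceHub authenticates via the HUGGINGFACEHUB_API_TOKEN environment
    # variable (or a huggingfacehub_api_token argument); meta-llama/Llama-2-7b-hf is a
    # gated repo, so access must be granted on Hugging Face before this call succeeds.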
    ## Prompt template
    template = """
    Write a blog for a {blog_style} job profile on the topic {input_text} within {no_words} words.
    """
    prompt = PromptTemplate(input_variables=['blog_style', 'input_text', 'no_words'],
                            template=template)
    # Generate a response from Llama 2
    response = llm(prompt.format(blog_style=blog_style, input_text=input_text, no_words=no_words))
    print(response)
    return response

st.set_page_config(page_title="Generate Blogs",
                   page_icon='🤖',
                   layout='centered',
                   initial_sidebar_state='collapsed')

st.header("Generate Blogs 🤖")
input_text = st.text_input("Enter the Blog Topic")

col1, col2 = st.columns([5, 5])
with col1:
    no_words = st.text_input('No. of Words')
with col2:
    blog_style = st.selectbox('Writing the blog for',
                              ('Researchers', 'Data Scientists', 'Common People'), index=0)

submit = st.button('Generate')
if submit:
    st.write(getLlamaresponse(input_text, no_words, blog_style))
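
# Usage sketch (assumptions: the script is saved as app.py and a Hugging Face token with
# access to the gated Llama 2 repo is available in the environment):
#
#   export HUGGINGFACEHUB_API_TOKEN=hf_xxx   # placeholder token, not a real value
#   streamlit run app.py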