# GenBlogDemo / app.py
# Author: Krzysztof Krystian Jankowski
# Last change: updated prompt and settings (commit 8a41d71)
# (Hugging Face page chrome — raw / history / blame / 1.6 kB — converted to comments
#  so the file parses as Python.)
import streamlit as st
from langchain.prompts import PromptTemplate
from langchain_community.llms import CTransformers
@st.cache_resource
def _load_llm():
    """Load the TinyLlama GGUF model once and reuse it across reruns.

    Streamlit re-executes the whole script on every user interaction;
    without caching, the multi-hundred-MB model would be re-downloaded /
    re-loaded on every single "Generate Blog" click.
    """
    return CTransformers(
        model="TheBloke/TinyLlama-1.1B-Chat-v0.3-GGUF",
        model_type='llama',
        config={'max_new_tokens': 256,
                'repetition_penalty': 1.1,
                'temperature': 0.6})


def getLlamaResponse(input_text, no_words, blog_style):
    """Generate a blog post with TinyLlama.

    Args:
        input_text: topic of the blog post.
        no_words: target length in words (interpolated into the prompt;
            the model is not guaranteed to honor it exactly).
        blog_style: writing style, e.g. "Personal" or "Research".

    Returns:
        The raw model completion as a string.
    """
    llm = _load_llm()
    # ChatML-style prompt matching the TinyLlama-Chat training format.
    template = """
<|im_start|>system
You are a helping bot, ghost writer for weblog.<|im_end|>
<|im_start|>user
Write a blog post about the topic: {input_text} in {blog_style} style. The blog should be {no_words} words long.
<|im_end|>
<|im_start|>assistant
"""
    prompt = PromptTemplate(
        input_variables=["blog_style", "input_text", "no_words"],
        template=template)
    # Render the prompt and run a single completion.
    response = llm.invoke(prompt.format(blog_style=blog_style,
                                        input_text=input_text,
                                        no_words=no_words))
    return response
# --- Streamlit UI ---
st.set_page_config(page_title="GenBlog Demo",
                   page_icon="📚",
                   layout="centered",
                   initial_sidebar_state='collapsed')

st.header("GenBlog Demo 📚")

input_text = st.text_input("Enter the Blog Topic")

col1, col2 = st.columns([5, 5])
with col1:
    # number_input rejects non-numeric entry, unlike the previous raw
    # text field which happily passed e.g. "abc" into the prompt.
    no_words = st.number_input("Enter the number of words",
                               min_value=10, max_value=500,
                               value=100, step=10)
with col2:
    blog_style = st.selectbox("Select the Blog Style",
                              ["Personal", "Research", "Story Driven"])

submit = st.button("Generate Blog")
if submit:
    if not input_text.strip():
        # Don't invoke the model on an empty topic.
        st.warning("Please enter a blog topic first.")
    else:
        with st.spinner("Generating..."):
            st.write(getLlamaResponse(input_text, no_words, blog_style))