# GenBlogDemo / app.py
# Author: Krzysztof Krystian Jankowski
import streamlit as st
from langchain.prompts import PromptTemplate
from langchain_community.llms import CTransformers
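
# Model and sampling configuration: a small quantized TinyLlama chat model with
# conservative sampling settings so the generated post stays short and on-topic.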
model="TheBloke/TinyLlama-1.1B-Chat-v0.3-GGUF"
config = {'max_new_tokens': 128,
'repetition_penalty': 1.1,
'temperature':0.4,
'top_k':50,
'top_p':0.9}
def getLlamaResponse(input_text, no_words, blog_style):
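    """Generate a blog post for the given topic, word count and style."""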
    # load the quantized TinyLlama model through ctransformers
    llm = CTransformers(model=model,
                        model_type='llama',
                        config=config)

    # create a prompt
    template = """
    You are a ghostwriter helping to write posts for a weblog. Do not provide any instructions, just write the post.
    The post should be {no_words} words long.
    Write a blog post about the topic: {input_text} in {blog_style} style.
    """
    prompt = PromptTemplate(input_variables=["blog_style", "input_text", "no_words"],
                            template=template)

    # generate the response
    response = llm.invoke(prompt.format(blog_style=blog_style, input_text=input_text, no_words=no_words))
    return response

# Streamlit UI
st.set_page_config(page_title="GenBlog Demo",
                   page_icon="📚",
                   layout="centered",
                   initial_sidebar_state='collapsed')
st.header("GenBlog Demo 📚")
st.write("This is a demo of the GenBlog model. Enter a blog topic, the number of words and the blog style to generate a blog post.")
st.write("Based on the TinyLlama model by TheBloke.")
input_text=st.text_input("Enter the Blog Topic")
col1, col2 = st.columns([5, 5])
with col1:
    no_words = st.text_input("Enter the number of words", value="100")
with col2:
    blog_style = st.selectbox("Select the Blog Style", ["Personal", "Research Paper", "Humor, casual"])
submit=st.button("Generate Blog")
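
# Generate and display the post when the user clicks the button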
if submit:
    st.write(getLlamaResponse(input_text, no_words, blog_style))