import os
from langchain_groq import ChatGroq
from langchain.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import Runnable
from typing import Dict
import gradio as gr # Import Gradio
# Step 3: Set the environment variable for the Groq API Key
os.environ["GROQ_API_KEY"] = "YOUR_GROQ_API_KEY"  # Replace with your own key; never commit real keys to source control
# Step 4: Define helper functions for structured book generation
def create_book_agent(
    model_name: str = "llama-3.1-8b-instant",  # Updated model name
    temperature: float = 0.7,
    max_tokens: int = 16384,  # Increased token limit
    **kwargs
) -> Runnable:
    """Create a LangChain agent (prompt | LLM | output parser) for book writing."""
    prompt_template = ChatPromptTemplate.from_messages([
        ("system", "You are a creative writer. Write high-quality, engaging books for any genre."),
        ("human", "{input}")
    ])
    llm = ChatGroq(model=model_name, temperature=temperature, max_tokens=max_tokens, **kwargs)
    chain = prompt_template | llm | StrOutputParser()
    return chain
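
# A minimal usage sketch (assumes a valid GROQ_API_KEY and network access to the Groq API);
# the object returned above is a LangChain Runnable, so a single prompt can be tested like this:
#
#     agent = create_book_agent(temperature=0.5)
#     opening = agent.invoke({"input": "Write a one-paragraph opening for a mystery novel."})
#     print(opening)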
def generate_chapter(title: str, synopsis: str, agent) -> str:
    """Generate a full chapter given a title and synopsis."""
    query = f"Write a detailed chapter based on the following synopsis:\n\nTitle: {title}\n\nSynopsis: {synopsis}"
    try:
        return agent.invoke({"input": query})
    except Exception as e:
        print(f"An error occurred while generating the chapter: {e}")
        return ""
def write_book(agent, title: str, outline: Dict[str, str]) -> str:
    """
    Generate a complete book.

    Args:
        agent: The LangChain agent for generating text.
        title (str): The title of the book.
        outline (Dict[str, str]): A dictionary with chapter titles as keys and synopses as values.

    Returns:
        str: The full book as a single string.
    """
    book = f"# {title}\n\n"
    for chapter_title, chapter_synopsis in outline.items():
        book += f"## {chapter_title}\n\n"
        chapter_text = generate_chapter(chapter_title, chapter_synopsis, agent)
        book += chapter_text + "\n\n"
    return book
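
# A hedged example of the outline shape write_book() expects (chapter titles map to synopses);
# running it end-to-end would make one Groq API call per chapter:
#
#     sample_outline = {
#         "Chapter 1: The Departure": "The protagonist leaves home and meets a stranger on the road.",
#         "Chapter 2: The Storm": "A storm forces the travelers into an abandoned lighthouse.",
#     }
#     full_text = write_book(create_book_agent(), "The Lighthouse Road", sample_outline)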
# Step 5: Create the agent
book_agent = create_book_agent()
# Step 6: Gradio interface
def gradio_interface():
    """Create a Gradio interface for book generation."""
    with gr.Blocks() as demo:
        gr.Markdown("## Book Generator")
        gr.Markdown("This application was created by iLL-Ai AaronAllton and a team of Groq agents that write books.")  # Updated note
        book_title = gr.Textbox(label="Book Title")
        book_outline = gr.Textbox(label="Book Outline (structured format, e.g. 'Chapter 1: Synopsis 1; Chapter 2: Synopsis 2')")  # Updated prompt
        generate_button = gr.Button("Generate Book")
        output = gr.Textbox(label="Generated Book", interactive=False)

        def generate_book_interface(title, outline):
            try:
                # Normalize the outline input
                outline_dict = {}
                chapters = outline.split(';')  # Split by semicolon for each chapter
                for chapter in chapters:
                    if ':' in chapter:
                        chapter_title, synopsis = chapter.split(':', 1)  # avoid shadowing the book title
                        outline_dict[chapter_title.strip()] = synopsis.strip()
                    elif chapter.strip():
                        # Handle cases where the input might not follow the expected format
                        outline_dict[chapter.strip()] = "No synopsis provided."
                print(f"Processed Outline: {outline_dict}")  # Debug statement
                return write_book(book_agent, title, outline_dict)
            except Exception as e:
                return f"An error occurred: {e}"

        generate_button.click(generate_book_interface, inputs=[book_title, book_outline], outputs=output)

    demo.launch(share=True)


if __name__ == "__main__":
    gradio_interface()
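
# Example outline string for the Gradio textbox (hypothetical input; it is split on ';',
# then each entry on the first ':'):
#   "Chapter 1: A ship sets sail; Chapter 2: The crew finds a stowaway; Chapter 3: Landfall"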