import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Initialize tokenizer and model for Microsoft's Phi-2
tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    "microsoft/phi-2", torch_dtype="auto", device_map="auto", trust_remote_code=True
)

# Function to generate text using the Phi-2 model
def generate_text(prompt):
    with torch.no_grad():
        token_ids = tokenizer.encode(prompt, add_special_tokens=False, return_tensors="pt")
        output_ids = model.generate(
            token_ids.to(model.device),
            max_new_tokens=512,
            do_sample=True,
            temperature=0.4
        )
    # Decode only the newly generated tokens, slicing off the prompt tokens
    return tokenizer.decode(output_ids[0][token_ids.size(1):])

# Streamlit app interface
st.title("Lesson Plan Generator")

# User input for subject and education level
subject = st.text_input("Subject:", "Mathematics")
level = st.text_input("Education Level:", "High School")

# Streamlit session state for storing learning objectives across reruns
if 'learning_objectives' not in st.session_state:
    st.session_state.learning_objectives = ""

# Generate Learning Objectives button
if st.button("Generate Learning Objectives"):
    # Prompt for generating learning objectives
    objectives_prompt = f"Generate learning objectives for a {level} level {subject} lesson."
    learning_objectives = generate_text(objectives_prompt)
    # Save and display the generated objectives
    st.session_state.learning_objectives = learning_objectives.strip()
    st.write(f"### Learning Objectives:\n{learning_objectives.strip()}")

# Generate Lesson Plan button
if st.button("Generate Lesson Plan"):
    # Construct the prompt for the lesson plan
    lesson_plan_prompt = f"""
    Generate a lesson plan for a {level} grade teacher planning to teach their class about {subject}.
    Include objectives, activities, assessment methods, procedure, resources, and notes.
    Use the following objectives:
    {st.session_state.learning_objectives}
    """
    # Generate the lesson plan
    lesson_plan = generate_text(lesson_plan_prompt)
    st.write(f"### Lesson Plan:\n{lesson_plan.strip()}")
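One practical caveat: Streamlit re-executes the whole script on every widget interaction, so the code above reinitializes the Phi-2 weights on each rerun. Below is a minimal sketch of a cached loader, assuming Streamlit 1.18 or newer (which provides st.cache_resource); the helper name load_phi2 is illustrative and not part of the original app.

import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM

@st.cache_resource
def load_phi2():
    # Load the tokenizer and model once per server process; later reruns
    # reuse the cached objects instead of re-initializing them.
    tok = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
    mdl = AutoModelForCausalLM.from_pretrained(
        "microsoft/phi-2", torch_dtype="auto", device_map="auto", trust_remote_code=True
    )
    return tok, mdl

tokenizer, model = load_phi2()

With the cached loader in place, the rest of the app is unchanged, and the script is launched the usual way, for example with streamlit run app.py if it is saved as app.py.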