# app/services/recipe_generator.py
from typing import List, Dict
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import torch
import os

class RecipeGenerator:
    def __init__(self):
        # Set cache directory to a writable location
        os.environ['TRANSFORMERS_CACHE'] = '/tmp/huggingface'
        
        # Create cache directory if it doesn't exist
        os.makedirs('/tmp/huggingface', exist_ok=True)
        
        # Load the recipe-generation model and tokenizer
        try:
            self.tokenizer = AutoTokenizer.from_pretrained("flax-community/t5-recipe-generation", cache_dir='/tmp/huggingface')
            # T5 is an encoder-decoder model, so it must be loaded with a seq2seq class, not a causal-LM class
            self.model = AutoModelForSeq2SeqLM.from_pretrained("flax-community/t5-recipe-generation", cache_dir='/tmp/huggingface')
        except Exception as e:
            print(f"Error loading model: {str(e)}")
            # Provide a fallback or raise the error as needed
            raise

    async def generate(self, ingredients: List[str]) -> Dict[str, List[str]]:
        try:
            # Format ingredients for input; if results are poor, check the model
            # card for the exact prompt format the model was fine-tuned on
            input_text = f"Generate a recipe using these ingredients: {', '.join(ingredients)}"
            
            # Tokenize and generate
            inputs = self.tokenizer(input_text, return_tensors="pt", padding=True)
            outputs = self.model.generate(
                **inputs,  # pass input_ids and attention_mask together
                max_length=512,
                num_return_sequences=1,
                temperature=0.7,   # moderate sampling randomness
                top_p=0.9,         # nucleus sampling
                do_sample=True
            )
            
            # Decode and parse the generated recipe
            generated_text = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
            
            # Parse the generated text into structured format
            lines = generated_text.split('\n')
            title = lines[0] if lines else "Generated Recipe"
            
            # Initialize lists
            ingredients_list = []
            instructions_list = []
            
            # Simple parsing logic
            current_section = None
            for line in lines[1:]:
                if "Ingredients:" in line:
                    current_section = "ingredients"
                elif "Instructions:" in line:
                    current_section = "instructions"
                elif line.strip():
                    if current_section == "ingredients":
                        ingredients_list.append(line.strip())
                    elif current_section == "instructions":
                        instructions_list.append(line.strip())
            
            return {
                "title": title,
                "ingredients": ingredients_list or ["No ingredients generated"],
                "instructions": instructions_list or ["No instructions generated"]
            }
        except Exception as e:
            print(f"Error generating recipe: {str(e)}")
            return {
                "title": "Error Generating Recipe",
                "ingredients": ["Error occurred while generating recipe"],
                "instructions": ["Please try again later"]
            }
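

# app/routers/recipes.py (hypothetical usage sketch, not part of the original service)
# A minimal example of how this service might be exposed over HTTP. FastAPI,
# the route path, and the request model below are assumptions for illustration;
# adapt them to the actual application layout.
from typing import List

from fastapi import APIRouter
from pydantic import BaseModel

from app.services.recipe_generator import RecipeGenerator

router = APIRouter()
generator = RecipeGenerator()  # load the model once at startup; construction is slow


class RecipeRequest(BaseModel):
    ingredients: List[str]


@router.post("/recipes/generate")
async def generate_recipe(request: RecipeRequest):
    # Delegates to the service and returns {"title", "ingredients", "instructions"}
    return await generator.generate(request.ingredients)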