File size: 275 Bytes
4a6aa55
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
from transformers import pipeline
import tensorflow
import torch


def llm_predict(input_text: str, model_name: str) -> str:
    """Generate text from *input_text* with a Hugging Face text2text model.

    Args:
        input_text: The prompt passed to the model.
        model_name: Hub identifier or local path of the model (e.g.
            ``"google/flan-t5-base"``).

    Returns:
        The generated text of the first (and only) returned sequence.

    Note:
        Pipelines are cached per ``model_name`` on the function object, so
        the (expensive) model load happens only once per model rather than
        on every call.
    """
    # Lazily create the per-model pipeline cache on first use.
    cache = getattr(llm_predict, "_pipe_cache", None)
    if cache is None:
        cache = {}
        llm_predict._pipe_cache = cache

    pipe = cache.get(model_name)
    if pipe is None:
        # Loading a pipeline downloads/loads the model weights — do it once.
        pipe = pipeline("text2text-generation", model=model_name)
        cache[model_name] = pipe

    generated_text = pipe(input_text, max_new_tokens=1000)
    return generated_text[0]['generated_text']