import pickle

import gradio as gr
import nltk
from nltk.tokenize import word_tokenize

# Download NLTK data (only the Punkt tokenizer models are strictly needed by word_tokenize).
nltk.download('all')

# Load the trained classifier and the 5,000-word feature vocabulary.
with open('LogisticRegression_classifier.pickle', 'rb') as fp:
    LogisticRegression_classifier = pickle.load(fp)

with open('word_features5k.pickle', 'rb') as fp:
    word_features = pickle.load(fp)


def find_features(news):
    """Turn a news string into a bag-of-words feature dict keyed by the known vocabulary."""
    words = set(word_tokenize(news))
    return {w: (w in words) for w in word_features}


def fn(news):
    """Classify a piece of news text with the pickled classifier."""
    return LogisticRegression_classifier.classify(find_features(news))


# Build and launch the Gradio interface; launch() starts a local server
# (pass share=True to also get a temporary public URL).
iface = gr.Interface(fn=fn, inputs='text', outputs='text')
iface.launch()
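# A minimal usage sketch (not part of the app): the prediction function can be
# exercised directly before launching the interface, e.g.
#
#   print(fn("Scientists discover water on a distant exoplanet"))
#
# The headline above is made up for illustration; the returned label depends
# entirely on how the pickled classifier was trained.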