import matplotlib

matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np
import gradio as gr


def sales_projections(employee_data):
    # Fit a quadratic to each employee's first three months of sales
    # (one row per employee), then project the remaining months of the year.
    sales_data = employee_data.iloc[:, 1:4].astype("int").to_numpy()
    regression_values = np.apply_along_axis(
        # np.array(poly1d) yields the fitted coefficients [a, b, c]
        lambda row: np.array(np.poly1d(np.polyfit([0, 1, 2], row, 2))),
        1,  # apply per row, i.e. one fit per employee
        sales_data,
    )
    projected_months = np.repeat(
        np.expand_dims(np.arange(3, 12), 0), len(sales_data), axis=0
    )
    projected_values = np.array(
        [
            month * month * regression[0] + month * regression[1] + regression[2]
            for month, regression in zip(projected_months, regression_values)
        ]
    )
    plt.plot(projected_values.T)
    plt.legend(employee_data["Name"])
    return employee_data, plt.gcf(), regression_values


demo = gr.Blocks()
with demo:
    with gr.Tabs():
        with gr.TabItem("Greedy Search"):
            gr.Dropdown(
                [
                    "DistilGPT2",
                    "GPT2",
                    "OPT 1.3B",
                    "GPTJ-6B",
                    "T5 small",
                    "T5 base",
                    "T5 large",
                    "T5 3B",
                ]
            )
        with gr.TabItem("Sample"):
            gr.Button("New Tiger")
        with gr.TabItem("Beam Search"):
            gr.Button("New Tiger")
        with gr.TabItem("Benchmark Information"):
            gr.Dataframe(
                headers=["Parameter", "Value"],
                value=[
                    ["Transformers Version", "4.22.dev0"],
                    ["TensorFlow Version", "2.9.1"],
                    ["PyTorch Version", "1.11.0"],
                    ["OS", "22.04 LTS (3090) / Debian 10 (other GPUs)"],
                    ["CUDA", "11.6 (3090) / 11.3 (other GPUs)"],
                    [
                        "Is there code to reproduce?",
                        "Yes -- https://gist.github.com/gante/f0017e3f13ac11b0c02e4e4db351f52f",
                    ],
                ],
            )

demo.launch()
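
# Sketch (assumption, not part of the original script): the Blocks demo above
# never calls sales_projections(). A minimal way to exercise the function is a
# separate gr.Interface; the column headers and sample rows below are invented
# for illustration only. Comment out demo.launch() above and launch this instead.
sales_demo = gr.Interface(
    sales_projections,
    gr.Dataframe(
        headers=["Name", "Jan Sales", "Feb Sales", "Mar Sales"],
        value=[["Jon", 12, 14, 18], ["Alice", 14, 17, 20], ["Sana", 8, 9, 12]],
    ),
    [gr.Dataframe(), gr.Plot(), gr.Dataframe()],
)
# sales_demo.launch()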