himanishprak23 commited on
Commit
fb61f5b
·
verified ·
1 Parent(s): 9117c21

Updated to the correct data path

Browse files
Files changed (1) hide show
  1. app.py +13 -5
app.py CHANGED
@@ -3,6 +3,7 @@ import pandas as pd
3
  from tensorflow.keras.preprocessing.text import Tokenizer
4
  from tensorflow.keras.preprocessing.sequence import pad_sequences
5
  from tensorflow.keras.models import load_model
 
6
  from huggingface_hub import hf_hub_download
7
  import numpy as np
8
  import tensorflow as tf
@@ -16,7 +17,15 @@ rnn_filename = "model_rnn_1.keras"
16
  lstm_model_path = hf_hub_download(repo_id=repo_id, filename=lstm_filename)
17
  rnn_model_path = hf_hub_download(repo_id=repo_id, filename=rnn_filename)
18
 
19
- data_text_path = "/Users/himanishprakash/NLP-Application/code/data_preprocess/df_commentary.csv"
 
 
 
 
 
 
 
 
20
 
21
  # Load the LSTM model
22
  lstm_model = load_model(lstm_model_path)
@@ -24,8 +33,7 @@ lstm_model = load_model(lstm_model_path)
24
  # Load the RNN model
25
  rnn_model = load_model(rnn_model_path)
26
 
27
- # Load the data for tokenizer fitting
28
- data_text = pd.read_csv(data_text_path)
29
 
30
  # Check the embedding layer's input dimension for LSTM
31
  embedding_layer = lstm_model.layers[0]
@@ -162,7 +170,7 @@ def generate_with_all_models(commentary_text, num_words, max_length, temperature
162
  return lstm_output, rnn_output, gpt2_finetuned_output, gpt2_base_output
163
 
164
  # Create the Gradio interface
165
- commentrymodel = gr.Interface(
166
  fn=generate_with_all_models,
167
  inputs=[
168
  gr.Textbox(lines=2, placeholder="Enter commentary text here...", label="Prompt"),
@@ -182,4 +190,4 @@ commentrymodel = gr.Interface(
182
 
183
  # Launch the app
184
  if __name__ == "__main__":
185
- commentrymodel.launch()
 
3
  from tensorflow.keras.preprocessing.text import Tokenizer
4
  from tensorflow.keras.preprocessing.sequence import pad_sequences
5
  from tensorflow.keras.models import load_model
6
+ from datasets import load_dataset
7
  from huggingface_hub import hf_hub_download
8
  import numpy as np
9
  import tensorflow as tf
 
17
  lstm_model_path = hf_hub_download(repo_id=repo_id, filename=lstm_filename)
18
  rnn_model_path = hf_hub_download(repo_id=repo_id, filename=rnn_filename)
19
 
20
+ # Specify the repository and the CSV file name
21
+ # Specify the repository and the CSV file name
22
+ repo_path = "himanishprak23/commentry_Data"
23
+ file_name = "df_commentary_new.csv"
24
+
25
+ # Load the dataset
26
+ dataset = load_dataset(repo_path, data_files=file_name, split='train')
27
+ data_text = dataset.to_pandas()
28
+
29
 
30
  # Load the LSTM model
31
  lstm_model = load_model(lstm_model_path)
 
33
  # Load the RNN model
34
  rnn_model = load_model(rnn_model_path)
35
 
36
+
 
37
 
38
  # Check the embedding layer's input dimension for LSTM
39
  embedding_layer = lstm_model.layers[0]
 
170
  return lstm_output, rnn_output, gpt2_finetuned_output, gpt2_base_output
171
 
172
  # Create the Gradio interface
173
+ iface = gr.Interface(
174
  fn=generate_with_all_models,
175
  inputs=[
176
  gr.Textbox(lines=2, placeholder="Enter commentary text here...", label="Prompt"),
 
190
 
191
  # Launch the app
192
  if __name__ == "__main__":
193
+ iface.launch()