m7n commited on
Commit
90743f0
·
1 Parent(s): b3a9c9a

Added explicit request for the token.

Browse files
Files changed (1) hide show
  1. app.py +24 -9
app.py CHANGED
@@ -1,4 +1,5 @@
1
  import spaces # necessary to run on Zero.
 
2
 
3
  import time
4
  print(f"Starting up: {time.strftime('%Y-%m-%d %H:%M:%S')}")
@@ -110,20 +111,22 @@ def no_op_decorator(func):
110
  # #duration=120
111
 
112
  # @decorator_to_use
113
- @spaces.GPU
114
  def create_embeddings(texts_to_embedd):
115
  """Create embeddings for the input texts using the loaded model."""
116
  return model.encode(texts_to_embedd, show_progress_bar=True, batch_size=192)
117
 
118
 
119
- @spaces.GPU
120
- def predict(text_input, sample_size_slider, reduce_sample_checkbox, sample_reduction_method,
121
- plot_time_checkbox, locally_approximate_publication_date_checkbox,
122
- download_csv_checkbox, download_png_checkbox,citation_graph_checkbox, progress=gr.Progress()):
 
123
  """
124
  Main prediction pipeline that processes OpenAlex queries and creates visualizations.
125
 
126
  Args:
 
127
  text_input (str): OpenAlex query URL
128
  sample_size_slider (int): Maximum number of samples to process
129
  reduce_sample_checkbox (bool): Whether to reduce sample size
@@ -135,6 +138,10 @@ def predict(text_input, sample_size_slider, reduce_sample_checkbox, sample_reduc
135
  Returns:
136
  tuple: (link to visualization, iframe HTML)
137
  """
 
 
 
 
138
  # Check if input is empty or whitespace
139
  print(f"Input: {text_input}")
140
  if not text_input or text_input.isspace():
@@ -629,10 +636,18 @@ with gr.Blocks(theme=theme, css="""
629
  queue=False
630
  ).then(
631
  fn=predict,
632
- inputs=[text_input, sample_size_slider, reduce_sample_checkbox,
633
- sample_reduction_method, plot_time_checkbox,
634
- locally_approximate_publication_date_checkbox,
635
- download_csv_checkbox, download_png_checkbox,citation_graph_checkbox],
 
 
 
 
 
 
 
 
636
  outputs=[html, html_download, csv_download, png_download, cancel_btn]
637
  )
638
 
 
1
  import spaces # necessary to run on Zero.
2
+ from spaces.zero.client import _get_token
3
 
4
  import time
5
  print(f"Starting up: {time.strftime('%Y-%m-%d %H:%M:%S')}")
 
111
  # #duration=120
112
 
113
  # @decorator_to_use
114
+ @spaces.GPU(duration=4*60)
115
  def create_embeddings(texts_to_embedd):
116
  """Create embeddings for the input texts using the loaded model."""
117
  return model.encode(texts_to_embedd, show_progress_bar=True, batch_size=192)
118
 
119
 
120
+ def predict(request: gr.Request, text_input, sample_size_slider, reduce_sample_checkbox,
121
+ sample_reduction_method, plot_time_checkbox,
122
+ locally_approximate_publication_date_checkbox,
123
+ download_csv_checkbox, download_png_checkbox, citation_graph_checkbox,
124
+ progress=gr.Progress()):
125
  """
126
  Main prediction pipeline that processes OpenAlex queries and creates visualizations.
127
 
128
  Args:
129
+ request (gr.Request): Gradio request object
130
  text_input (str): OpenAlex query URL
131
  sample_size_slider (int): Maximum number of samples to process
132
  reduce_sample_checkbox (bool): Whether to reduce sample size
 
138
  Returns:
139
  tuple: (link to visualization, iframe HTML)
140
  """
141
+ # Get the authentication token
142
+ token = _get_token(request)
143
+ print(f"Token: {token}")
144
+ print(f"Request: {request}")
145
  # Check if input is empty or whitespace
146
  print(f"Input: {text_input}")
147
  if not text_input or text_input.isspace():
 
636
  queue=False
637
  ).then(
638
  fn=predict,
639
+ inputs=[
640
+ gr.Request,
641
+ text_input,
642
+ sample_size_slider,
643
+ reduce_sample_checkbox,
644
+ sample_reduction_method,
645
+ plot_time_checkbox,
646
+ locally_approximate_publication_date_checkbox,
647
+ download_csv_checkbox,
648
+ download_png_checkbox,
649
+ citation_graph_checkbox
650
+ ],
651
  outputs=[html, html_download, csv_download, png_download, cancel_btn]
652
  )
653