m7n committed on
Commit
60c1b0a
·
1 Parent(s): fafb001

run without ssr on local

Browse files
Files changed (1) hide show
  1. app.py +23 -9
app.py CHANGED
@@ -1,5 +1,6 @@
1
- import spaces # necessary to run on Zero.
2
- from spaces.zero.client import _get_token
 
3
 
4
  import time
5
  print(f"Starting up: {time.strftime('%Y-%m-%d %H:%M:%S')}")
@@ -41,8 +42,12 @@ from sklearn.neighbors import NearestNeighbors
41
  def is_running_in_hf_space():
42
  return "SPACE_ID" in os.environ
43
 
 
 
 
 
44
  #if is_running_in_hf_space():
45
- import spaces # necessary to run on Zero.
46
  #print(f"Spaces version: {spaces.__version__}")
47
 
48
  import datamapplot
@@ -131,12 +136,18 @@ def no_op_decorator(func):
131
  # decorator_to_use = spaces.GPU() if is_running_in_hf_space() else no_op_decorator
132
  # #duration=120
133
 
134
- # @decorator_to_use
135
- @spaces.GPU(duration=4*60)
136
- def create_embeddings(texts_to_embedd):
137
- """Create embeddings for the input texts using the loaded model."""
138
- return model.encode(texts_to_embedd, show_progress_bar=True, batch_size=192)
139
 
 
 
 
 
 
 
 
 
 
 
 
140
 
141
  def predict(request: gr.Request, text_input, sample_size_slider, reduce_sample_checkbox,
142
  sample_reduction_method, plot_time_checkbox,
@@ -698,7 +709,10 @@ with gr.Blocks(theme=theme, css="""
698
  # demo.launch(server_name="0.0.0.0", server_port=7860, share=True,allowed_paths=["/static"])
699
 
700
  # Mount Gradio app to FastAPI
701
- app = gr.mount_gradio_app(app, demo, path="/",ssr_mode=True)
 
 
 
702
 
703
  # Run both servers
704
  if __name__ == "__main__":
 
1
+
2
+
3
+
4
 
5
  import time
6
  print(f"Starting up: {time.strftime('%Y-%m-%d %H:%M:%S')}")
 
42
  def is_running_in_hf_space():
43
  return "SPACE_ID" in os.environ
44
 
45
+ if is_running_in_hf_space():
46
+ import spaces # necessary to run on Zero.
47
+ from spaces.zero.client import _get_token
48
+
49
  #if is_running_in_hf_space():
50
+ #import spaces # necessary to run on Zero.
51
  #print(f"Spaces version: {spaces.__version__}")
52
 
53
  import datamapplot
 
136
  # decorator_to_use = spaces.GPU() if is_running_in_hf_space() else no_op_decorator
137
  # #duration=120
138
 
 
 
 
 
 
139
 
140
+ if is_running_in_hf_space():
141
+ @spaces.GPU(duration=4*60)
142
+ def create_embeddings(texts_to_embedd):
143
+ """Create embeddings for the input texts using the loaded model."""
144
+ return model.encode(texts_to_embedd, show_progress_bar=True, batch_size=192)
145
+ else:
146
+ def create_embeddings(texts_to_embedd):
147
+ """Create embeddings for the input texts using the loaded model."""
148
+ return model.encode(texts_to_embedd, show_progress_bar=True, batch_size=192)
149
+
150
+
151
 
152
  def predict(request: gr.Request, text_input, sample_size_slider, reduce_sample_checkbox,
153
  sample_reduction_method, plot_time_checkbox,
 
709
  # demo.launch(server_name="0.0.0.0", server_port=7860, share=True,allowed_paths=["/static"])
710
 
711
  # Mount Gradio app to FastAPI
712
+ if is_running_in_hf_space():
713
+ app = gr.mount_gradio_app(app, demo, path="/",ssr_mode=True)
714
+ else:
715
+ app = gr.mount_gradio_app(app, demo, path="/",ssr_mode=False)
716
 
717
  # Run both servers
718
  if __name__ == "__main__":