mgbam committed (verified)
Commit bbec1c1 · 1 Parent(s): ad21232

Update app.py

Files changed (1):
  1. app.py  +12 -5
app.py CHANGED
@@ -1,4 +1,4 @@
-# app.py
+# app.py (REVISED)
 
 from typing import Optional, Dict, List, Tuple
 import gradio as gr
@@ -32,7 +32,8 @@ def generation_code(
     _current_model_name: str,
     enable_search: bool,
     language: str,
-    provider: str
+    provider: str,
+    hf_token: str  # <-- CHANGE 1: Accept the user's HF token from Gradio
 ) -> Tuple[str, History, str, List[Dict[str, str]]]:
     # Initialize inputs
     if query is None:
@@ -60,7 +61,8 @@ def generation_code(
     messages.append({'role': 'user', 'content': final_query})
 
     # Model inference
-    client = get_inference_client(_current_model_name, provider)
+    # <-- CHANGE 2: Pass the user's token to the client constructor
+    client = get_inference_client(_current_model_name, provider, user_token=hf_token)
     resp = client.chat.completions.create(
         model=_current_model_name,
         messages=messages,
@@ -85,6 +87,7 @@
     new_history = _history + [(query, code_str)]
     chat_msgs = history_to_chatbot_messages(new_history)
 
+    # The return values are now correct for the updated UI components
     return code_str, new_history, preview_html, chat_msgs
 
 # Build UI
@@ -130,7 +133,8 @@ with gr.Blocks(theme=gr.themes.Base(), title="AnyCoder - AI Code Generator") as
         with gr.Tab("Preview"):
             preview_out = gr.HTML(label="Live Preview")
         with gr.Tab("History"):
-            chat_out = gr.Chatbot(label="History")
+            # <-- CHANGE 3: Fix the Gradio error by specifying the modern 'messages' format.
+            chat_out = gr.Chatbot(label="History", type="messages")
 
     # Events
     load_btn.click(
@@ -151,6 +155,8 @@ with gr.Blocks(theme=gr.themes.Base(), title="AnyCoder - AI Code Generator") as
         outputs=[model_state]
     )
 
+    # Note: Gradio automatically passes the user's token to any function that
+    # has an 'hf_token' parameter. You do NOT need to add it to the 'inputs' list.
    gen_btn.click(
        fn=generation_code,
        inputs=[
@@ -167,4 +173,5 @@ with gr.Blocks(theme=gr.themes.Base(), title="AnyCoder - AI Code Generator") as
     )
 
 if __name__ == '__main__':
-    demo.queue().launch()
+    # <-- CHANGE 4: Launch with hf_token='must' to require login and get user tokens.
+    demo.queue().launch(hf_token="must")
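
For reference, a minimal sketch of what the updated get_inference_client helper might look like with the new user_token argument. The helper's actual implementation is not part of this diff, so everything below (wrapping huggingface_hub's InferenceClient and falling back to a Space-level HF_TOKEN secret) is an assumption for illustration, not the repo's code.

# Hypothetical sketch of get_inference_client -- not the committed implementation
import os
from typing import Optional

from huggingface_hub import InferenceClient

def get_inference_client(model_name: str, provider: str, user_token: Optional[str] = None) -> InferenceClient:
    # Prefer the logged-in user's token when one is passed in; otherwise fall
    # back to the Space's own HF_TOKEN secret (assumed to be set in the env).
    token = user_token or os.environ.get("HF_TOKEN")
    return InferenceClient(model=model_name, provider=provider, token=token)

In recent huggingface_hub versions this client exposes the OpenAI-compatible chat.completions.create(...) interface that generation_code calls above.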