MadsGalsgaard committed on
Commit 0661764 · verified · 1 Parent(s): 786bf8b

Update app.py

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -296,11 +296,11 @@ model_name = "meta-llama/Meta-Llama-3.1-8B-Instruct"
 # Initialize Inference Client with model and token
 inference_client = InferenceClient()
 
-def chat_completion(prompt):
+def chat_completion(message, history):
     # Pass user input through Hugging Face model
     response = inference_client.chat(
         model=model_name,
-        messages=[{"role": "user", "content": prompt}],
+        messages=[{"role": "user", "content": message}],
         max_tokens=500,
         stream=False
     )
@@ -308,6 +308,7 @@ def chat_completion(prompt):
     # Extract content from the response
     response_text = response['choices'][0]['delta']['content']
 
+    # Return response and updated history
     return response_text
 
 # Create Gradio chat interface
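
The signature change is what lets this function serve as a Gradio chat callback: gr.ChatInterface calls its fn with the new user message plus the running history and expects the reply string back, which is why (prompt) becomes (message, history). A minimal, self-contained sketch of that wiring, offered as an assumption since the actual "Create Gradio chat interface" lines fall outside the hunks shown here (the echo function below merely stands in for the model call in app.py):

import gradio as gr

def chat_completion(message, history):
    # Stand-in for the InferenceClient call in app.py; gr.ChatInterface
    # supplies (message, history) and uses the returned string as the reply.
    return f"echo: {message}"

# ChatInterface handles the chat UI and history bookkeeping itself,
# so the callback only needs to return the assistant's response text.
demo = gr.ChatInterface(fn=chat_completion)

if __name__ == "__main__":
    demo.launch()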