tanveeshsingh committed on
Commit
6a747af
·
1 Parent(s): b9f1d59
Files changed (1) hide show
  1. app.py +5 -2
app.py CHANGED
@@ -21,7 +21,8 @@ def llama_guard_classify(conv_prefix, response):
21
  api_key=LLAMA_API_KEY
22
  )
23
  conv = conv_prefix
24
- conv.append(response)
 
25
  output = client.chat.completions.create(
26
  model=model_name,
27
  messages=conv,
@@ -88,6 +89,8 @@ def process_inputs(category,conv_prefix, response_content):
88
  output = classify_prompt(category,conv_prefix, response)
89
  if category=='response':
90
  llama_output = llama_guard_classify(conv_prefix, response)
 
 
91
  else:
92
  llama_output = 'NA'
93
  return output,llama_output
@@ -208,7 +211,7 @@ with gr.Blocks(css=dark_css) as demo:
208
  # Two text outputs, placed side by side for model outputs
209
  with gr.Row():
210
  with gr.Column():
211
- collinear_output = gr.Textbox(label="Collinear Guard (~3B) Output", lines=3)
212
  with gr.Column():
213
  llama_output = gr.Textbox(label="LLaMA-Guard 3 (8B) Output", lines=3)
214
 
 
21
  api_key=LLAMA_API_KEY
22
  )
23
  conv = conv_prefix
24
+ if response:
25
+ conv.append(response)
26
  output = client.chat.completions.create(
27
  model=model_name,
28
  messages=conv,
 
89
  output = classify_prompt(category,conv_prefix, response)
90
  if category=='response':
91
  llama_output = llama_guard_classify(conv_prefix, response)
92
+ elif category=='prompt':
93
+ llama_output = llama_guard_classify(conv_prefix, None)
94
  else:
95
  llama_output = 'NA'
96
  return output,llama_output
 
211
  # Two text outputs, placed side by side for model outputs
212
  with gr.Row():
213
  with gr.Column():
214
+ collinear_output = gr.Textbox(label="Collinear Guard Nano(~3B) Output", lines=3)
215
  with gr.Column():
216
  llama_output = gr.Textbox(label="LLaMA-Guard 3 (8B) Output", lines=3)
217