kamran-r123 committed on
Commit
f16f571
·
verified ·
1 Parent(s): 18e2b5b

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +1 -4
main.py CHANGED
@@ -6,7 +6,7 @@ import prompt_style
6
 
7
 
8
  model_id = "failspy/Meta-Llama-3-8B-Instruct-abliterated-v3"
9
- client = None
10
 
11
  class Item(BaseModel):
12
  prompt: str
@@ -45,9 +45,6 @@ def generate(item: Item):
45
  seed=item.seed,
46
  )
47
 
48
- if client is None:
49
- client = InferenceClient(token=item.token, model=model_id)
50
-
51
  formatted_prompt = format_prompt(item)
52
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
53
  output = ""
 
6
 
7
 
8
  model_id = "failspy/Meta-Llama-3-8B-Instruct-abliterated-v3"
9
+ client = InferenceClient(model=model_id)
10
 
11
  class Item(BaseModel):
12
  prompt: str
 
45
  seed=item.seed,
46
  )
47
 
 
 
 
48
  formatted_prompt = format_prompt(item)
49
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
50
  output = ""