msy127 committed on
Commit
eae089b
·
1 Parent(s): 53f7e0c

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -5,6 +5,7 @@ from dotenv import load_dotenv
5
  load_dotenv()
6
  import os
7
  hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
 
8
 
9
 
10
  # [선택1] 거대모델 랭체인 Custom LLM (HF InferenceClient) - 70B가 무료!!!, openai보다 성능 안떨어짐 (스트리밍은 아직 안됨)
@@ -32,7 +33,7 @@ class CustomInferenceClient(LLM, KwArgsModel):
32
 
33
  def __init__(self, model_name: str, hf_token: str, kwargs: Optional[Dict[str, Any]] = None):
34
  # inference_client = InferenceClient(model=model_name, token=hf_token)
35
- inference_client = InferenceClient(model=model_name, token=os.getenv("HUGGINGFACEHUB_API_TOKEN"))
36
  super().__init__(
37
  model_name=model_name,
38
  hf_token=hf_token,
 
5
  load_dotenv()
6
  import os
7
  hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
8
+ # os.environ['HUGGINGFACE_TOKEN'] = os.getenv("HUGGINGFACEHUB_API_TOKEN")
9
 
10
 
11
  # [선택1] 거대모델 랭체인 Custom LLM (HF InferenceClient) - 70B가 무료!!!, openai보다 성능 안떨어짐 (스트리밍은 아직 안됨)
 
33
 
34
  def __init__(self, model_name: str, hf_token: str, kwargs: Optional[Dict[str, Any]] = None):
35
  # inference_client = InferenceClient(model=model_name, token=hf_token)
36
+ inference_client = InferenceClient(model=model_name, token=hf_token)
37
  super().__init__(
38
  model_name=model_name,
39
  hf_token=hf_token,