Remove default HF access token
- app.py +1 -2
- global_config.py +0 -2
- helpers/llm_helper.py +2 -8
app.py CHANGED
@@ -184,8 +184,7 @@ with st.sidebar:
     api_key_token = st.text_input(
         label=(
             '3: Paste your API key/access token:\n\n'
-            '*Mandatory* for
-            ' *Optional* for HF Mistral LLMs but still encouraged.\n\n'
+            '*Mandatory* for all providers.'
         ),
         type='password',
         key='api_key_input'
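For reference, a standalone sketch of the updated sidebar field in isolation (only Streamlit is assumed; the rest of the app's sidebar wiring is omitted):

```python
# Minimal sketch of the updated text_input call.
import streamlit as st

with st.sidebar:
    api_key_token = st.text_input(
        label=(
            '3: Paste your API key/access token:\n\n'
            '*Mandatory* for all providers.'
        ),
        type='password',      # masks the token as it is typed
        key='api_key_input',  # also available via st.session_state['api_key_input']
    )
```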
global_config.py CHANGED
@@ -87,8 +87,6 @@ class GlobalConfig:
     LLM_MODEL_MIN_OUTPUT_LENGTH = 100
     LLM_MODEL_MAX_INPUT_LENGTH = 400  # characters
 
-    HUGGINGFACEHUB_API_TOKEN = os.environ.get('HUGGINGFACEHUB_API_TOKEN', '')
-
     LOG_LEVEL = 'DEBUG'
     COUNT_TOKENS = False
     APP_STRINGS_FILE = 'strings.json'
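With this module-level default removed, nothing in the configuration reads `HUGGINGFACEHUB_API_TOKEN` from the environment any more; code that previously leaned on `GlobalConfig.HUGGINGFACEHUB_API_TOKEN` now has to receive the key explicitly. A hedged sketch of that pattern (the function name is illustrative, not from the repo):

```python
# Illustrative only: the key travels as an explicit argument instead of
# being pulled from a module-level environment default.
def build_auth_header(api_key: str) -> dict:
    # Equivalent of the removed HF_API_HEADERS constant, parameterised on the
    # user-supplied key rather than GlobalConfig.HUGGINGFACEHUB_API_TOKEN.
    return {'Authorization': f'Bearer {api_key}'}
```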
helpers/llm_helper.py CHANGED
@@ -22,7 +22,6 @@ LLM_PROVIDER_MODEL_REGEX = re.compile(r'\[(.*?)\](.*)')
 OLLAMA_MODEL_REGEX = re.compile(r'[a-zA-Z0-9._:-]+$')
 # 94 characters long, only containing alphanumeric characters, hyphens, and underscores
 API_KEY_REGEX = re.compile(r'^[a-zA-Z0-9_-]{6,94}$')
-HF_API_HEADERS = {'Authorization': f'Bearer {GlobalConfig.HUGGINGFACEHUB_API_TOKEN}'}
 REQUEST_TIMEOUT = 35
 
 
@@ -95,12 +94,7 @@ def is_valid_llm_provider_model(
     if not provider or not model or provider not in GlobalConfig.VALID_PROVIDERS:
         return False
 
-    if provider in [
-        GlobalConfig.PROVIDER_GOOGLE_GEMINI,
-        GlobalConfig.PROVIDER_COHERE,
-        GlobalConfig.PROVIDER_TOGETHER_AI,
-        GlobalConfig.PROVIDER_AZURE_OPENAI,
-    ] and not api_key:
+    if not api_key:
         return False
 
     if api_key and API_KEY_REGEX.match(api_key) is None:
@@ -150,7 +144,7 @@ def get_langchain_llm(
         temperature=GlobalConfig.LLM_MODEL_TEMPERATURE,
         repetition_penalty=1.03,
         streaming=True,
-        huggingfacehub_api_token=api_key
+        huggingfacehub_api_token=api_key,
         return_full_text=False,
         stop_sequences=['</s>'],
     )
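Taken together, the validation change means an empty key now fails for every provider, not only the four that previously required one. A runnable approximation of the post-commit check (the function signature, provider codes, and model name below are assumptions for illustration, not copied from the repo):

```python
import re

# Same pattern as API_KEY_REGEX in helpers/llm_helper.py.
API_KEY_REGEX = re.compile(r'^[a-zA-Z0-9_-]{6,94}$')
VALID_PROVIDERS = {'hf', 'gg', 'co', 'to', 'az'}  # placeholder provider codes


def is_valid_llm_provider_model(provider: str, model: str, api_key: str) -> bool:
    """Approximation of the validation logic after this commit."""
    if not provider or not model or provider not in VALID_PROVIDERS:
        return False
    if not api_key:  # a key is now mandatory for *all* providers
        return False
    if API_KEY_REGEX.match(api_key) is None:
        return False
    return True


print(is_valid_llm_provider_model('hf', 'mistralai/Mistral-Nemo', ''))        # False: HF is no longer exempt
print(is_valid_llm_provider_model('hf', 'mistralai/Mistral-Nemo', 'a' * 40))  # True
```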