sujalrajpoot committed
Commit 475154a · verified · 1 Parent(s): a69e4a6

Update app.py

Files changed (1):
  app.py  +4 −12
app.py CHANGED
@@ -18,18 +18,7 @@ SPECIAL_API_KEY = os.getenv("SPECIAL_API_KEY")
 ENDPOINT = os.getenv("ENDPOINT")
 SYSTEM_PROMPT = os.getenv("SYSTEM_PROMPT")
 EXCEPTION = os.getenv("EXCEPTION")
-AVAILABLE_MODELS = [
-    "deepseek-r1",
-    "gpt-4-turbo",
-    "gpt-4",
-    "gpt-3.5-turbo",
-    "mistral-7b",
-    "gemini-pro",
-    "claude-3-sonnet",
-    "claude-3-haiku",
-    "llama-3-8b",
-    "llama-3-70b"
-]
+AVAILABLE_MODELS = ["gpt-3.5-turbo","gpt-3.5-turbo-202201","gpt-4o","gpt-4o-2024-05-13","o1-preview","chatgpt-4o-latest","claude-3-5-sonnet","claude-sonnet-3.5","claude-3-5-sonnet-20240620","deepseek-r1","deepseek-llm-67b-chat","llama-3.1-405b","llama-3.1-70b","llama-3.1-8b","meta-llama/Llama-3.2-90B-Vision-Instruct","meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo","meta-llama/Meta-Llama-3.1-8B-Instruct","meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo","Meta-Llama-3.1-405B-Instruct-Turbo","Meta-Llama-3.3-70B-Instruct-Turbo","mistral","mistral-large","mistralai/Mixtral-8x22B-Instruct-v0.1","Qwen/Qwen2.5-72B-Instruc","Qwen/Qwen2.5-Coder-32B-Instruct","Qwen-QwQ-32B-Preview","gemini-pro","gemini-1.5-pro","gemini-1.5-pro-latest","gemini-1.5-flash","blackboxai","blackboxai-pro","openchat/openchat-3.6-8b","dbrx-instruct","Nous-Hermes-2-Mixtral-8x7B-DPO"]
 
 # Track API statistics
 api_usage = {}  # Stores {api_key: {"count": X, "reset_time": timestamp}}
@@ -193,6 +182,9 @@ def chat():
     top_p = data.get("top_p", 1)
     max_tokens = data.get("max_tokens", 4000)
 
+    if model not in AVAILABLE_MODELS:
+        return jsonify({"error": f"Invalid Model. Available Models are: {', '.join(AVAILABLE_MODELS)}"}), 404
+
     api_payload = {
         "messages": [
             {'role': 'system', 'content': SYSTEM_PROMPT},
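
With this change, chat() rejects any request whose "model" value is not in AVAILABLE_MODELS before the upstream payload is built, returning the 404 error shown in the diff. A minimal client-side sketch of that behaviour, assuming the Flask route is /chat, accepts POST JSON, and reads the same fields as chat() (model, temperature, top_p, max_tokens); the base URL, route path, and user-message field name below are placeholders, not taken from the commit:

# Hypothetical client call; the host, route, and "messages" field are
# assumptions for illustration only.
import requests

BASE_URL = "https://example-space.hf.space"  # placeholder host

payload = {
    "model": "gpt-4-turbo",  # dropped from AVAILABLE_MODELS in this commit
    "messages": [{"role": "user", "content": "Hello"}],
    "temperature": 0.7,
    "top_p": 1,
    "max_tokens": 4000,
}

resp = requests.post(f"{BASE_URL}/chat", json=payload, timeout=30)
print(resp.status_code)  # expected: 404 for an unlisted model
print(resp.json())       # {"error": "Invalid Model. Available Models are: ..."}

A request naming a listed model (for example "gpt-4o" or "deepseek-r1") would pass the check and continue into the api_payload construction shown above.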