Update pipeline.json
pipeline.json  CHANGED  (+16 / -17)
@@ -2,10 +2,9 @@
   "api": {
     "models": {
 
-
-      "Mistral-Nemo-Instruct-2407": {
+      "Phi-3.5-mini-instruct": {
         "type": "Text Generation",
-        "api_url": "https://api-inference.huggingface.co/models/…
+        "api_url": "https://api-inference.huggingface.co/models/microsoft/Phi-3.5-mini-instruct/v1/chat/completions",
         "headers": {
           "Content-Type": "application/json",
           "x-use-cache": "false",
@@ -16,9 +15,9 @@
         }
       },
 
-
-        "type": "…
-        "api_url": "https://api-inference.huggingface.co/models/…
+      "Llama-3.2-11B-Vision-Instruct": {
+        "type": "image-text-to-text",
+        "api_url": "https://api-inference.huggingface.co/models/meta-llama/Llama-3.2-11B-Vision-Instruct/v1/chat/completions",
         "headers": {
           "Content-Type": "application/json",
           "x-use-cache": "false",
@@ -29,9 +28,11 @@
         }
       },
 
-
+
+
+      "Meta-Llama-3-8B-Instruct": {
         "type": "Text Generation",
-        "api_url": "https://api-inference.huggingface.co/models/…
+        "api_url": "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct/v1/chat/completions",
         "headers": {
           "Content-Type": "application/json",
           "x-use-cache": "false",
@@ -41,10 +42,10 @@
           }
         }
       },
-
-      "…
-        "type": "…
-        "api_url": "https://api-inference.huggingface.co/models/…
+
+      "Mistral-Nemo-Instruct-2407": {
+        "type": "Text Generation",
+        "api_url": "https://api-inference.huggingface.co/models/mistralai/Mistral-Nemo-Instruct-2407/v1/chat/completions",
         "headers": {
           "Content-Type": "application/json",
           "x-use-cache": "false",
@@ -55,10 +56,9 @@
         }
       },
 
-
-      "Meta-Llama-3-8B-Instruct": {
+      "SmallThinker-3B-Preview": {
         "type": "Text Generation",
-        "api_url": "https://api-inference.huggingface.co/models/…
+        "api_url": "https://api-inference.huggingface.co/models/PowerInfer/SmallThinker-3B-Preview/v1/chat/completions",
         "headers": {
           "Content-Type": "application/json",
           "x-use-cache": "false",
@@ -67,8 +67,7 @@
           "Authorization": "Bearer os.environ.get('auth')"
         }
       }
-    }
-
+    },
 
   }
 },
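After this change, pipeline.json lists five chat-completions endpoints (Phi-3.5-mini-instruct, Llama-3.2-11B-Vision-Instruct, Meta-Llama-3-8B-Instruct, Mistral-Nemo-Instruct-2407, SmallThinker-3B-Preview), each with its own headers block. The sketch below is a hypothetical consumer of the updated file, not code from this Space: it assumes the app loads the JSON, swaps a real token in for the literal "Bearer os.environ.get('auth')" placeholder (here read from an HF_TOKEN environment variable, an assumed name), and POSTs an OpenAI-style chat payload to the entry's api_url. The helper names load_models and chat are illustrative.

```python
import json
import os

import requests  # assumed HTTP client; any equivalent works


def load_models(path="pipeline.json"):
    """Read the model registry that this commit updates."""
    with open(path, encoding="utf-8") as f:
        return json.load(f)["api"]["models"]


def chat(model_name, prompt, path="pipeline.json"):
    """Send an OpenAI-style chat-completions request to the configured endpoint."""
    entry = load_models(path)[model_name]

    # pipeline.json stores the Authorization value as a literal string
    # ("Bearer os.environ.get('auth')"), so a real token has to be substituted
    # at runtime; HF_TOKEN is an assumed variable name, not from this repo.
    headers = dict(entry["headers"])
    headers["Authorization"] = f"Bearer {os.environ['HF_TOKEN']}"

    payload = {
        # The api_url already pins the model; most OpenAI-compatible servers
        # still accept a "model" field, included here for completeness.
        "model": model_name,
        "messages": [{"role": "user", "content": prompt}],
        "max_tokens": 256,
    }
    resp = requests.post(entry["api_url"], headers=headers, json=payload, timeout=60)
    resp.raise_for_status()
    return resp.json()["choices"][0]["message"]["content"]


if __name__ == "__main__":
    print(chat("Phi-3.5-mini-instruct", "Summarize what a chat-completions endpoint does."))
```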