mishig (HF staff) committed
Commit 7705415 · 1 Parent(s): 51a1671

improve widget token placeholder

src/lib/components/InferencePlayground/InferencePlaygroundCodeSnippets.svelte CHANGED
@@ -91,7 +91,7 @@
 	label: "Streaming API",
 	code: `import { HfInference } from "@huggingface/inference"
 
-const inference = new HfInference("your HF token")
+const inference = new HfInference("YOUR_HF_TOKEN")
 
 let out = "";
 
@@ -113,7 +113,7 @@ for await (const chunk of inference.chatCompletionStream({
 	label: "Non-Streaming API",
 	code: `import { HfInference } from '@huggingface/inference'
 
-const inference = new HfInference("your access token")
+const inference = new HfInference("YOUR_HF_TOKEN")
 
 const out = await inference.chatCompletion({
 	model: "${conversation.model.id}",
@@ -154,7 +154,7 @@ console.log(out.choices[0].message);`,
 	label: "Streaming API",
 	code: `from huggingface_hub import InferenceClient
 
-client = InferenceClient(api_key="your HF token")
+client = InferenceClient(api_key="YOUR_HF_TOKEN")
 
 messages = ${formattedMessages({ sep: ",\n\t", start: `[\n\t`, end: `\n]` })}
 
@@ -175,7 +175,7 @@ for chunk in output:
 	code: `from huggingface_hub import InferenceClient
 
 model_id="${conversation.model.id}"
-client = InferenceClient(api_key="your HF token")
+client = InferenceClient(api_key="YOUR_HF_TOKEN")
 
 messages = ${formattedMessages({ sep: ",\n\t", start: `[\n\t`, end: `\n]` })}
 
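For reference, a minimal sketch of what the generated "Streaming API" JS snippet would render to after this commit, once the ${conversation.model.id} interpolation is filled in. The model id, messages, and max_tokens values below are illustrative assumptions, and "YOUR_HF_TOKEN" is still a placeholder to be replaced with a real Hugging Face access token:

import { HfInference } from "@huggingface/inference";

// "YOUR_HF_TOKEN" is the placeholder introduced by this commit; swap it for a
// real Hugging Face access token before running.
const inference = new HfInference("YOUR_HF_TOKEN");

let out = "";

// Stream chat-completion chunks and accumulate the generated text.
// The model id stands in for the ${conversation.model.id} interpolation.
for await (const chunk of inference.chatCompletionStream({
	model: "meta-llama/Meta-Llama-3-8B-Instruct",
	messages: [{ role: "user", content: "Hello, nice to meet you!" }],
	max_tokens: 500,
})) {
	if (chunk.choices && chunk.choices.length > 0) {
		out += chunk.choices[0].delta.content ?? "";
		console.log(chunk.choices[0].delta.content);
	}
}

The all-caps YOUR_HF_TOKEN form reads unambiguously as a placeholder to fill in, which is presumably the point of replacing the previous "your HF token" / "your access token" strings.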