Ƭ InferenceSnippetOptions: { billTo?: string ; streaming?: boolean } & Record\<string, unknown>
Defined in: inference/src/snippets/getInferenceSnippets.ts:17
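The intersection with Record\<string, unknown> keeps the options object open-ended: the two typed fields are optional, and any extra keys are passed through untouched. A minimal sketch, assuming InferenceSnippetOptions is exported from the package root (the import path may differ in your installed version):

```ts
import type { InferenceSnippetOptions } from "@huggingface/inference";

// Typed fields plus arbitrary extra keys, allowed by the Record<string, unknown> part.
const opts: InferenceSnippetOptions = {
  streaming: true,   // request the streaming variant of each snippet
  billTo: "my-org",  // hypothetical organization name to bill usage to
  temperature: 0.7,  // extra, untyped key carried along as-is
};
```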
▸ getInferenceSnippets(model, accessToken, provider, inferenceProviderMapping?, opts?): InferenceSnippet[]
| Name | Type |
|---|---|
| model | ModelDataMinimal |
| accessToken | string |
| provider | "black-forest-labs" \| "cerebras" \| "cohere" \| "fal-ai" \| "fireworks-ai" \| "groq" \| "hf-inference" \| "hyperbolic" \| "nebius" \| "novita" \| "nscale" \| "openai" \| "replicate" \| "sambanova" \| "together" |
| inferenceProviderMapping? | InferenceProviderModelMapping |
| opts? | Record\<string, unknown> |
Returns: InferenceSnippet[]
Defined in: inference/src/snippets/getInferenceSnippets.ts:322
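A hedged usage sketch. The `snippets` namespace import, the exact shape of ModelDataMinimal, and the `language` / `client` / `content` fields on the returned InferenceSnippet objects are assumptions here; check the exports of your installed `@huggingface/inference` and `@huggingface/tasks` versions:

```ts
import { snippets } from "@huggingface/inference";
import type { ModelDataMinimal } from "@huggingface/tasks";

// Assumed minimal model metadata; ModelDataMinimal may expect slightly different fields.
const model = {
  id: "meta-llama/Llama-3.1-8B-Instruct",
  pipeline_tag: "text-generation",
  tags: ["conversational"],
} as ModelDataMinimal;

const generated = snippets.getInferenceSnippets(
  model,
  "hf_xxx",             // placeholder access token
  "hf-inference",       // one of the supported provider ids
  undefined,            // inferenceProviderMapping: omitted here
  { streaming: false }  // opts
);

// Each snippet is assumed to carry a language, a client name, and the code itself.
for (const s of generated) {
  console.log(`--- ${s.language} (${s.client}) ---`);
  console.log(s.content);
}
```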