Ƭ InferenceSnippetOptions: { accessToken?: string ; billTo?: string ; directRequest?: boolean ; endpointUrl?: string ; streaming?: boolean } & Record\<string, unknown>
inference/src/snippets/getInferenceSnippets.ts:17
▸ getInferenceSnippets(model, provider, inferenceProviderMapping?, opts?): InferenceSnippet[]
| Name | Type |
|---|---|
| model | ModelDataMinimal |
| provider | "black-forest-labs" \| "cerebras" \| "cohere" \| "fal-ai" \| "featherless-ai" \| "fireworks-ai" \| "groq" \| "hf-inference" \| "hyperbolic" \| "nebius" \| "novita" \| "nscale" \| "openai" \| "ovhcloud" \| "replicate" \| "sambanova" \| "together" \| "auto" |
| inferenceProviderMapping? | InferenceProviderMappingEntry |
| opts? | Record\<string, unknown> |
**Returns:** InferenceSnippet[]
inference/src/snippets/getInferenceSnippets.ts:370
< > Update on GitHub