import { env } from "$env/dynamic/private";
import type { Model, ModelWithTokenizer } from "$lib/types";
import type { PageServerLoad } from "./$types";
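
// For reference, a rough sketch of the imported types (an assumption — the
// real definitions live in $lib/types and likely carry more fields):
//
//   interface Model {
//     id: string;
//     // ...plus the Hub metadata requested via the expand[] params below
//   }
//   type ModelWithTokenizer = Model & { tokenizerConfig: unknown };

// Module-level cache: lives for the lifetime of the server process and is
// shared across all requests. It is never invalidated, so a restart or
// redeploy is needed to pick up newly published models.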
let cache: { models: ModelWithTokenizer[] } | undefined;

export const load: PageServerLoad = async ({ fetch }) => {
  if (cache) {
    console.log("Skipping load, using in memory cache");
    return cache;
  }
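
  // Hub API query: up to 100 text-generation models tagged "conversational",
  // with inference-provider mappings, config, library, tags, etc. expanded
  // into the response so no per-model follow-up call is needed for metadata.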
  const apiUrl =
    "https://huggingface.co/api/models?pipeline_tag=text-generation&filter=conversational&inference_provider=all&limit=100&expand[]=inferenceProviderMapping&expand[]=config&expand[]=library_name&expand[]=pipeline_tag&expand[]=tags&expand[]=mask_token&expand[]=trendingScore";
  const HF_TOKEN = env.HF_TOKEN;

  const res = await fetch(apiUrl, {
    headers: {
      Authorization: `Bearer ${HF_TOKEN}`,
    },
  });

  if (!res.ok) {
    console.error("Error fetching warm models", res.status, res.statusText);
    return { models: [] };
  }

  const compatibleModels: Model[] = await res.json();
  compatibleModels.sort((a, b) => a.id.toLowerCase().localeCompare(b.id.toLowerCase()));
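
  // Fetch every model's tokenizer_config.json concurrently; each entry
  // resolves to the enriched model, or to null when the file is unavailable.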
  const promises = compatibleModels.map(async model => {
    const configUrl = `https://huggingface.co/${model.id}/raw/main/tokenizer_config.json`;
    const res = await fetch(configUrl, {
      headers: {
        Authorization: `Bearer ${HF_TOKEN}`,
      },
    });

    if (!res.ok) {
      console.error(`Error fetching tokenizer file for ${model.id}`, res.status, res.statusText);
      return null; // Ignore failed requests by returning null
    }

    const tokenizerConfig = await res.json();
    return { ...model, tokenizerConfig } satisfies ModelWithTokenizer;
  });

  // The type predicate keeps the filter type-safe on TypeScript versions that
  // don't narrow `!== null` inside .filter() (inferred narrowing landed in TS 5.5).
  const models: ModelWithTokenizer[] = (await Promise.all(promises)).filter(
    (model): model is ModelWithTokenizer => model !== null
  );
  cache = { models };

  return { models };
};
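
// Usage sketch (an assumption — adjust to wherever this +page.server.ts
// actually lives): the sibling +page.svelte receives the returned object
// as `data`:
//
//   <script lang="ts">
//     import type { PageData } from "./$types";
//     export let data: PageData; // data.models: ModelWithTokenizer[]
//   </script>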