import{_ as e}from"./index-D8Tp3Qg8.js";import{U as a,W as t,Y as n,F as o,_ as s,f as l,o as i,j as d,V as c,$ as r,a0 as m,a1 as p,Z as g}from"./vue-vendor-BhBNFBI8.js";import"./antd-vendor-BaRhxx_v.js";const h={class:"section model-family-section",ref:"sectionRef"},u={class:"model-cards-row"},v=["data-index"],f={class:"model-card-content"},k={style:{"text-align":"center"}},S={class:"model-btns"},b={class:"model-btn-links"},B={style:{cursor:"pointer"},class:"tag-icon"},O=["innerHTML"];const A=e({name:"ModelFamilySection",setup(){const e=[{parameters:"52B",context:"32K"},{parameters:"7B",context:"8K"}],a=["We provide a 100B-token subset randomly sampled from the CCI4.0-M2-v1 dataset, along with the open-source baseline model OpenSeek-Small-v1-Baseline. Both the model and the dataset serve as the project baseline.","OpenSeek-Small-v1 is the first-stage production model of the OpenSeek project. It adopts a sparse Mixture-of-Experts (MoE) architecture with 1.4B total parameters and 0.4B activated parameters, trained on 720B tokens. Compared to models of similar size, it achieves strong performance with less training data, demonstrating the efficiency of the dataset."],t=l([]),n=l(null);let o=null,s=!1;return i((()=>{t.value=new Array(e.length).fill(null),o=new IntersectionObserver((e=>{e.forEach((e=>{if(e.isIntersecting&&!s){s=!0,console.log("Section visible, starting animations");const a=document.querySelectorAll(".model-card");console.log("Found cards:",a.length),a.forEach(((e,a)=>{console.log(`Animating card ${a}`),setTimeout((()=>{e&&(e.classList.add("card-visible"),e.classList.remove("card-hidden"))}),400*a)})),o.unobserve(e.target)}}))}),{threshold:.1,rootMargin:"0px 0px -50px 0px"}),n.value&&(console.log("Observing section"),o.observe(n.value))})),d((()=>{o&&o.disconnect()})),{models:e,modelCards:t,sectionRef:n,modelNames:["OpenSeek-Small-v1-Baseline","OpenSeek-Small-v1"],modelDescriptions:a,modelLinks:[{model:"https://huggingface.co/BAAI/OpenSeek-Small-v1-Baseline",data:"https://huggingface.co/datasets/BAAI/OpenSeek-Pretrain-100B",code:"https://github.com/FlagAI-Open/OpenSeek/tree/main/openseek/baseline",wandb:"https://wandb.ai/openseek-team/OpenSeek-Small-v1-Baseline",evaluation:"https://huggingface.co/BAAI/OpenSeek-Small-v1-Baseline#evalation"},{model:"https://huggingface.co/BAAI/OpenSeek-Small-v1",data:"https://huggingface.co/collections/BAAI/cci40-68199d90bbc798680df16d7c",code:"https://github.com/FlagAI-Open/OpenSeek/tree/main/configs/OpenSeek-Small-v1",wandb:"https://wandb.ai/openseek-team/OpenSeek-Small-v1",evaluation:"https://huggingface.co/BAAI/OpenSeek-Small-v1#benchmark-performance"}],getTagColor:e=>({model:"processing",data:"magenta",code:"geekblue",wandb:"purple",evaluation:"cyan"}[e]||"default"),getTagIcon:e=>({model:"🤗",data:"📊",code:"💻",wandb:"📈",evaluation:"📋"}[e]||""),formatLinkText:e=>e.charAt(0).toUpperCase()+e.slice(1),openLink:e=>{e&&"#"!==e&&window.open(e,"_blank")}}}},[["render",function(e,l,i,d,A,w){const x=c("a-tag");return t(),a("section",h,[l[1]||(l[1]=n("h2",{class:"section-title"},"Model Family",-1)),n("div",u,[(t(!0),a(o,null,s(d.models,((e,i)=>(t(),a("div",{key:i,class:r(["model-card","card-hidden"]),ref_for:!0,ref:e=>{e&&(d.modelCards[i]=e)},"data-index":i},[l[0]||(l[0]=n("div",{class:"model-card-bg"},[n("div",{class:"model-card-glow"}),n("div",{class:"model-card-grid"})],-1)),n("div",f,[n("h3",k,m(d.modelNames[i]),1),n("div",S,[n("div",b,[(t(!0),a(o,null,s(d.modelLinks[i],((e,a)=>(t(),p(x,{key:a,color:d.getTagColor(a),class:"model-link-tag","data-key":a,bordered:!1,onClick:a=>d.openLink(e)},{default:g((()=>[n("span",B,m(d.getTagIcon(a)),1),n("span",null,m(d.formatLinkText(a)),1)])),_:2},1032,["color","data-key","onClick"])))),128))])]),n("p",{class:"model-desc unified-card-text",innerHTML:d.modelDescriptions[i]},null,8,O)])],8,v)))),128))])],512)}],["__scopeId","data-v-fbc74da6"]]);export{A as default};