Raju2024's picture
Upload 1072 files
e3278e4 verified
"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[699],{92699:function(e,n,t){t.d(n,{Z:function(){return k}});var s=t(57437),l=t(2265),o=t(99376),r=t(19250),a=t(20831),i=t(12514),c=t(12485),d=t(18135),m=t(35242),p=t(29706),u=t(77991),h=t(84264),x=t(96761),_=t(23639),g=t(77565),j=t(75957),y=t(51369),b=t(17906),k=e=>{var n;let{accessToken:t,publicPage:k,premiumUser:f}=e,[Z,v]=(0,l.useState)(!1),[w,N]=(0,l.useState)(null),[S,M]=(0,l.useState)(!1),[A,I]=(0,l.useState)(!1),[O,P]=(0,l.useState)(null),C=(0,o.useRouter)();(0,l.useEffect)(()=>{t&&(async()=>{try{let e=await (0,r.kn)(t);console.log("ModelHubData:",e),N(e.data),(0,r.E9)(t,"enable_public_model_hub").then(e=>{console.log("data: ".concat(JSON.stringify(e))),!0==e.field_value&&v(!0)}).catch(e=>{})}catch(e){console.error("There was an error fetching the model data",e)}})()},[t,k]);let U=e=>{P(e),M(!0)},T=async()=>{t&&(0,r.jA)(t,"enable_public_model_hub",!0).then(e=>{I(!0)})},z=()=>{M(!1),I(!1),P(null)},H=()=>{M(!1),I(!1),P(null)},E=e=>{navigator.clipboard.writeText(e)};return(0,s.jsxs)("div",{children:[k&&Z||!1==k?(0,s.jsxs)("div",{className:"w-full m-2 mt-2 p-8",children:[(0,s.jsx)("div",{className:"relative w-full"}),(0,s.jsxs)("div",{className:"flex ".concat(k?"justify-between":"items-center"),children:[(0,s.jsx)(x.Z,{className:"ml-8 text-center ",children:"Model Hub"}),!1==k?f?(0,s.jsx)(a.Z,{className:"ml-4",onClick:()=>T(),children:"✨ Make Public"}):(0,s.jsx)(a.Z,{className:"ml-4",children:(0,s.jsx)("a",{href:"https://forms.gle/W3U4PZpJGFHWtHyA9",target:"_blank",children:"✨ Make Public"})}):(0,s.jsxs)("div",{className:"flex justify-between items-center",children:[(0,s.jsx)("p",{children:"Filter by key:"}),(0,s.jsx)(h.Z,{className:"bg-gray-200 pr-2 pl-2 pt-1 pb-1 text-center",children:"/ui/model_hub?key=<YOUR_KEY>"})]})]}),(0,s.jsx)("div",{className:"grid grid-cols-2 gap-6 sm:grid-cols-3 lg:grid-cols-4 pr-8",children:w&&w.map(e=>(0,s.jsxs)(i.Z,{className:"mt-5 mx-8",children:[(0,s.jsxs)("pre",{className:"flex justify-between",children:[(0,s.jsx)(x.Z,{children:e.model_group}),(0,s.jsx)(j.Z,{title:e.model_group,children:(0,s.jsx)(_.Z,{onClick:()=>E(e.model_group),style:{cursor:"pointer",marginRight:"10px"}})})]}),(0,s.jsxs)("div",{className:"my-5",children:[(0,s.jsxs)(h.Z,{children:["Max Input Tokens:"," ",(null==e?void 0:e.max_input_tokens)?null==e?void 0:e.max_input_tokens:"Unknown"]}),(0,s.jsxs)(h.Z,{children:["Max Output Tokens:"," ",(null==e?void 0:e.max_output_tokens)?null==e?void 0:e.max_output_tokens:"Unknown"]}),(0,s.jsxs)(h.Z,{children:["Input Cost Per 1M Tokens (USD):"," ",(null==e?void 0:e.input_cost_per_token)?"$".concat((1e6*e.input_cost_per_token).toFixed(2)):"Unknown"]}),(0,s.jsxs)(h.Z,{children:["Output Cost Per 1M Tokens (USD):"," ",(null==e?void 0:e.output_cost_per_token)?"$".concat((1e6*e.output_cost_per_token).toFixed(2)):"Unknown"]})]}),(0,s.jsx)("div",{style:{marginTop:"auto",textAlign:"right"},children:(0,s.jsxs)("a",{href:"#",onClick:()=>U(e),style:{color:"#1890ff",fontSize:"smaller"},children:["View more ",(0,s.jsx)(g.Z,{})]})})]},e.model_group))})]}):(0,s.jsxs)(i.Z,{className:"mx-auto max-w-xl mt-10",children:[(0,s.jsx)(h.Z,{className:"text-xl text-center mb-2 text-black",children:"Public Model Hub not enabled."}),(0,s.jsx)("p",{className:"text-base text-center text-slate-800",children:"Ask your proxy admin to enable this on their Admin UI."})]}),(0,s.jsx)(y.Z,{title:"Public Model Hub",width:600,visible:A,footer:null,onOk:z,onCancel:H,children:(0,s.jsxs)("div",{className:"pt-5 pb-5",children:[(0,s.jsxs)("div",{className:"flex justify-between mb-4",children:[(0,s.jsx)(h.Z,{className:"text-base mr-2",children:"Shareable Link:"}),(0,s.jsx)(h.Z,{className:"max-w-sm ml-2 bg-gray-200 pr-2 pl-2 pt-1 pb-1 text-center rounded",children:"<proxy_base_url>/ui/model_hub?key=<YOUR_API_KEY>"})]}),(0,s.jsx)("div",{className:"flex justify-end",children:(0,s.jsx)(a.Z,{onClick:()=>{C.replace("/model_hub?key=".concat(t))},children:"See Page"})})]})}),(0,s.jsx)(y.Z,{title:O&&O.model_group?O.model_group:"Unknown Model",width:800,visible:S,footer:null,onOk:z,onCancel:H,children:O&&(0,s.jsxs)("div",{children:[(0,s.jsx)("p",{className:"mb-4",children:(0,s.jsx)("strong",{children:"Model Information & Usage"})}),(0,s.jsxs)(d.Z,{children:[(0,s.jsxs)(m.Z,{children:[(0,s.jsx)(c.Z,{children:"Model Information"}),(0,s.jsx)(c.Z,{children:"OpenAI Python SDK"}),(0,s.jsx)(c.Z,{children:"Supported OpenAI Params"}),(0,s.jsx)(c.Z,{children:"LlamaIndex"}),(0,s.jsx)(c.Z,{children:"Langchain Py"})]}),(0,s.jsxs)(u.Z,{children:[(0,s.jsx)(p.Z,{children:(0,s.jsxs)(h.Z,{children:[(0,s.jsx)("strong",{children:"Model Group:"}),(0,s.jsx)("pre",{children:JSON.stringify(O,null,2)})]})}),(0,s.jsx)(p.Z,{children:(0,s.jsx)(b.Z,{language:"python",children:'\nimport openai\nclient = openai.OpenAI(\n api_key="your_api_key",\n base_url="http://0.0.0.0:4000" # LiteLLM Proxy is OpenAI compatible, Read More: https://docs.litellm.ai/docs/proxy/user_keys\n)\n\nresponse = client.chat.completions.create(\n model="'.concat(O.model_group,'", # model to send to the proxy\n messages = [\n {\n "role": "user",\n "content": "this is a test request, write a short poem"\n }\n ]\n)\n\nprint(response)\n ')})}),(0,s.jsx)(p.Z,{children:(0,s.jsx)(b.Z,{language:"python",children:"".concat(null===(n=O.supported_openai_params)||void 0===n?void 0:n.map(e=>"".concat(e,"\n")).join(""))})}),(0,s.jsx)(p.Z,{children:(0,s.jsx)(b.Z,{language:"python",children:'\nimport os, dotenv\n\nfrom llama_index.llms import AzureOpenAI\nfrom llama_index.embeddings import AzureOpenAIEmbedding\nfrom llama_index import VectorStoreIndex, SimpleDirectoryReader, ServiceContext\n\nllm = AzureOpenAI(\n engine="'.concat(O.model_group,'", # model_name on litellm proxy\n temperature=0.0,\n azure_endpoint="http://0.0.0.0:4000", # litellm proxy endpoint\n api_key="sk-1234", # litellm proxy API Key\n api_version="2023-07-01-preview",\n)\n\nembed_model = AzureOpenAIEmbedding(\n deployment_name="azure-embedding-model",\n azure_endpoint="http://0.0.0.0:4000",\n api_key="sk-1234",\n api_version="2023-07-01-preview",\n)\n\n\ndocuments = SimpleDirectoryReader("llama_index_data").load_data()\nservice_context = ServiceContext.from_defaults(llm=llm, embed_model=embed_model)\nindex = VectorStoreIndex.from_documents(documents, service_context=service_context)\n\nquery_engine = index.as_query_engine()\nresponse = query_engine.query("What did the author do growing up?")\nprint(response)\n\n ')})}),(0,s.jsx)(p.Z,{children:(0,s.jsx)(b.Z,{language:"python",children:'\nfrom langchain.chat_models import ChatOpenAI\nfrom langchain.prompts.chat import (\n ChatPromptTemplate,\n HumanMessagePromptTemplate,\n SystemMessagePromptTemplate,\n)\nfrom langchain.schema import HumanMessage, SystemMessage\n\nchat = ChatOpenAI(\n openai_api_base="http://0.0.0.0:4000",\n model = "'.concat(O.model_group,'",\n temperature=0.1\n)\n\nmessages = [\n SystemMessage(\n content="You are a helpful assistant that im using to make a test request to."\n ),\n HumanMessage(\n content="test from litellm. tell me why it\'s amazing in 1 sentence"\n ),\n]\nresponse = chat(messages)\n\nprint(response)\n\n ')})})]})]})]})})]})}}}]);