import { Hono } from 'hono'
import type { Context, Next } from 'hono'
import OpenAI from "openai";

// Client for the upstream OpenAI-compatible endpoint this worker proxies to.
const openai = new OpenAI({
  apiKey: "sk-1234",
  baseURL: "https://openai-endpoint.ishaanjaffer0324.workers.dev"
});

// Send a fixed demo request to the upstream endpoint and return the raw completion.
async function call_proxy() {
  const completion = await openai.chat.completions.create({
    messages: [{ role: "system", content: "You are a helpful assistant." }],
    model: "gpt-3.5-turbo",
  });

  return completion;
}

const app = new Hono()

// Middleware for API Key Authentication
const apiKeyAuth = async (c: Context, next: Next) => {
  const apiKey = c.req.header('Authorization');
  if (!apiKey || apiKey !== 'Bearer sk-1234') {
    return c.text('Unauthorized', 401);
  }
  await next();
};
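
// Alternative (untested sketch): Hono ships a bearer-auth middleware that performs
// the same token check as apiKeyAuth above, so the custom middleware could be
// replaced with:
//
//   import { bearerAuth } from 'hono/bearer-auth'
//   app.use('/*', bearerAuth({ token: 'sk-1234' }))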


app.use('/*', apiKeyAuth)


app.get('/', (c) => {
  return c.text('Hello Hono!')
})




// Handler for chat completions
const chatCompletionHandler = async (c: Context) => {
  // Proxy to the upstream endpoint and return the completion as JSON.
  // Note: call_proxy() sends a fixed demo payload, not the client's request body.
  const response = await call_proxy()
  return c.json(response);
};

// Register the above handler for different POST routes with the apiKeyAuth middleware
app.post('/v1/chat/completions', chatCompletionHandler);
app.post('/chat/completions', chatCompletionHandler);

// Example showing how you might handle dynamic segments within the URL
// Here, using ':model*' to capture the rest of the path as a parameter 'model'
app.post('/openai/deployments/:model*/chat/completions', chatCompletionHandler);
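
// Untested sketch of a handler for the route above that forwards the client's own
// request body and the captured ':model*' path segment instead of the fixed demo
// payload. The route and client names mirror the ones above; the body/param handling
// is an assumption, not part of the original example.
//
//   app.post('/openai/deployments/:model*/chat/completions', async (c: Context) => {
//     const body = await c.req.json();                  // client-supplied messages, etc.
//     const model = c.req.param('model') ?? body.model; // prefer the path segment if set
//     const completion = await openai.chat.completions.create({ ...body, model });
//     return c.json(completion);
//   });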


export default app