import os

# The Hugging Face cache location must be set before transformers is imported,
# otherwise the default cache directory has already been resolved.
os.environ["HF_HOME"] = "/app/.cache/huggingface"

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

# === FastAPI setup ===
app = FastAPI()

# Key CORS configuration so browser pages on other origins can call this API
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# === Model loading ===
model = AutoModelForSequenceClassification.from_pretrained("mrm8488/codebert-base-finetuned-detect-insecure-code")
tokenizer = AutoTokenizer.from_pretrained("mrm8488/codebert-base-finetuned-detect-insecure-code")

# === HTTP API endpoint ===
@app.post("/detect")
async def detect(code: str):
    # `code` is a bare str parameter, so FastAPI reads it from the query string.
    try:
        inputs = tokenizer(code[:2000], return_tensors="pt", truncation=True, max_length=512)
        with torch.no_grad():
            outputs = model(**inputs)
        label_id = outputs.logits.argmax().item()
        return {
            "label": int(label_id),  # strictly returns 0/1
            "score": outputs.logits.softmax(dim=-1)[0][label_id].item()
        }
    except Exception as e:
        return {"error": str(e)}