app update
app.py CHANGED

@@ -73,7 +73,7 @@ class Item(BaseModel):
 @app.post("/predict/", response_model=dict)
 async def predict(item: Item):
     model = TransformerEncoder()
-    model
+    model = load_model_to_cpu(model, "model.pth")
     tokenizer = Tokenizer.from_file("tokenizer.json")

     predict_list = predict_fonk(model=model, device=device, example=item.text, tokenizer=tokenizer)
@@ -81,3 +81,7 @@ async def predict(item: Item):
     # The model's output goes here
     # The output format will follow the example below
     return predict_list
+
+
+if __name__=="__main__":
+    uvicorn.run(app,host="0.0.0.0",port=8000)
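With load_model_to_cpu wired in and the uvicorn entry point added, the /predict/ endpoint can be exercised locally. A minimal client sketch follows; the URL assumes a local run of app.py on port 8000, and the keys of the returned dict depend on predict_fonk, which is not shown in this diff.

# Minimal client sketch for the /predict/ endpoint above (not part of the repo).
# Assumptions: the app is running locally via `python app.py`, and Item only
# requires a "text" field, as implied by `example=item.text` in the diff.
import requests

payload = {"text": "example sentence"}
resp = requests.post("http://127.0.0.1:8000/predict/", json=payload, timeout=30)
resp.raise_for_status()
print(resp.json())  # a dict, per response_model=dict; exact keys come from predict_fonk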
model.py CHANGED

@@ -145,6 +145,5 @@ class TransformerEncoder(nn.Module):

 def load_model_to_cpu(model, path="model.pth"):
     checkpoint = torch.load(path, map_location=torch.device('cpu'))
-    model.load_state_dict(checkpoint
-
-    return model, epoch
+    model.load_state_dict(checkpoint)
+    return model
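For context on the model.py fix, the sketch below shows the save/load round trip the corrected helper assumes: model.pth must hold a bare state_dict, since load_state_dict(checkpoint) is called directly and the old return model, epoch (which implied an epoch entry in the checkpoint) is gone. The no-argument TransformerEncoder() call and the import path from model.py are assumptions mirroring app.py.

# Sketch of the round trip load_model_to_cpu() now expects (assumption:
# model.pth was written with torch.save(model.state_dict(), ...), i.e. a
# bare state_dict rather than a {"model": ..., "epoch": ...} wrapper).
import torch
from model import TransformerEncoder, load_model_to_cpu  # import path assumed from model.py

model = TransformerEncoder()
torch.save(model.state_dict(), "model.pth")              # save weights only

restored = load_model_to_cpu(TransformerEncoder(), "model.pth")  # weights mapped to CPU
restored.eval()                                          # inference mode for /predict/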