Update cord_inference.py
cord_inference.py CHANGED (+3 -3)
@@ -9,9 +9,9 @@ labels = ["O", "B-MENU.NM", "B-MENU.NUM", "B-MENU.UNITPRICE", "B-MENU.CNT", "B-M
 id2label = {v: k for v, k in enumerate(labels)}
 label2id = {k: v for v, k in enumerate(labels)}
 
-tokenizer = LayoutLMv3TokenizerFast.from_pretrained("
-processor = LayoutLMv3Processor.from_pretrained("
-model = LayoutLMv3ForTokenClassification.from_pretrained("
+tokenizer = LayoutLMv3TokenizerFast.from_pretrained("nielsr/layoutlmv3-finetuned-cord", apply_ocr=False)
+processor = LayoutLMv3Processor.from_pretrained("nielsr/layoutlmv3-finetuned-cord", apply_ocr=False)
+model = LayoutLMv3ForTokenClassification.from_pretrained("nielsr/layoutlmv3-finetuned-cord")
 
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 model.to(device)
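For context, a minimal inference sketch using the updated checkpoint follows. Because the processor is loaded with apply_ocr=False, words and bounding boxes must be supplied externally (e.g. from an OCR engine); the image path, words, and boxes below are hypothetical placeholders and are not part of this commit.

# Minimal sketch, assuming externally provided OCR words and boxes
# (boxes normalized to the 0-1000 scale LayoutLMv3 expects).
import torch
from PIL import Image
from transformers import LayoutLMv3Processor, LayoutLMv3ForTokenClassification

processor = LayoutLMv3Processor.from_pretrained("nielsr/layoutlmv3-finetuned-cord", apply_ocr=False)
model = LayoutLMv3ForTokenClassification.from_pretrained("nielsr/layoutlmv3-finetuned-cord")

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)

image = Image.open("receipt.png").convert("RGB")      # hypothetical example image
words = ["TOTAL", "9.99"]                             # hypothetical OCR output
boxes = [[100, 800, 300, 850], [600, 800, 750, 850]]  # hypothetical boxes, 0-1000 scale

# The processor tokenizes the words, aligns the boxes, and prepares pixel values.
encoding = processor(image, words, boxes=boxes, return_tensors="pt").to(device)

with torch.no_grad():
    outputs = model(**encoding)

# Token-level predictions (includes special tokens and subword pieces).
predictions = outputs.logits.argmax(-1).squeeze().tolist()
predicted_labels = [model.config.id2label[p] for p in predictions]
print(predicted_labels)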