Added ONNX inference Code
README.md
CHANGED
@@ -80,6 +80,48 @@ loaded_model = AutoModelForSequenceClassification.from_pretrained(model_name)
 
 
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
+text = """
+PROGRAM Triangle
+IMPLICIT NONE
+REAL :: a, b, c, Area
+PRINT *, 'Welcome, please enter the&
+&lengths of the 3 sides.'
+READ *, a, b, c
+PRINT *, 'Triangle''s area: ', Area(a,b,c)
+END PROGRAM Triangle
+FUNCTION Area(x,y,z)
+IMPLICIT NONE
+REAL :: Area ! function type
+REAL, INTENT( IN ) :: x, y, z
+REAL :: theta, height
+theta = ACOS((x**2+y**2-z**2)/(2.0*x*y))
+height = x*SIN(theta); Area = 0.5*y*height
+END FUNCTION Area
+
+"""
+inputs = loaded_tokenizer(text, return_tensors="pt", truncation=True)
+with torch.no_grad():
+    logits = loaded_model(**inputs).logits
+predicted_class_id = logits.argmax().item()
+loaded_model.config.id2label[predicted_class_id]
+```
+
+Optimum with ONNX
+
+Loading the model requires the 🤗 Optimum library to be installed.
+```shell
+pip install transformers optimum[onnxruntime] optimum
+```
+
+```python
+model_path = "philomath-1209/programming-language-identification"
+
+from transformers import pipeline, AutoTokenizer
+from optimum.onnxruntime import ORTModelForSequenceClassification
+
+tokenizer = AutoTokenizer.from_pretrained(model_path)
+model = ORTModelForSequenceClassification.from_pretrained(model_path, export=True)
+
 text = """
 PROGRAM Triangle
 IMPLICIT NONE
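The hunk ends inside the second ```python``` block, so the ONNX inference call itself is not visible above. For orientation, here is a minimal sketch of how the exported model is typically used, assuming the `model_path`, `tokenizer`, and `model` objects from the added lines; the use of `pipeline` and the example input string are illustrative and not taken from the commit:

```python
# Minimal sketch (not part of the commit): run the ONNX-exported classifier
# through the standard transformers pipeline API.
from transformers import AutoTokenizer, pipeline
from optimum.onnxruntime import ORTModelForSequenceClassification

model_path = "philomath-1209/programming-language-identification"
tokenizer = AutoTokenizer.from_pretrained(model_path)
# export=True converts the PyTorch checkpoint to ONNX on the fly;
# model.save_pretrained("onnx/") would persist the exported weights for reuse.
model = ORTModelForSequenceClassification.from_pretrained(model_path, export=True)

# ORT models are drop-in compatible with transformers pipelines.
classifier = pipeline("text-classification", model=model, tokenizer=tokenizer)

# Illustrative input; any source-code snippet works.
snippet = "def add(a, b):\n    return a + b"
print(classifier(snippet))  # prints the predicted language label and its score
```

The same result can be obtained by mirroring the PyTorch example above (tokenize, take `logits.argmax()`, look up `model.config.id2label`); the pipeline form is simply shorter.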