from fairseq.models.roberta import RobertaModel
import nltk
import sys

# download nltk's punkt tokenizer if it is not already installed
try:
    nltk.data.find("tokenizers/punkt")
except LookupError:
    nltk.download("punkt")

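# download the VADER sentiment lexicon if it is not already installed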
try:
    nltk.data.find("sentiment/vader_lexicon.zip")
except LookupError:
    nltk.download("vader_lexicon")

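# locations of the COSMIC feature-extraction code and trained ERC model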
PATH_TO_COSMIC = "./Model/COSMIC"
EXTRACTORS_PATH = PATH_TO_COSMIC + "/feature_extraction"
EPIK_MODEL_DIR = PATH_TO_COSMIC + "/erc_training"

sys.path.append(PATH_TO_COSMIC)
sys.path.append(EXTRACTORS_PATH)
sys.path.append(EPIK_MODEL_DIR)
sys.path.append(".")
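
# CSKFeatureExtractor wraps COMET to extract commonsense knowledge features;
# predict_epik provides the COSMIC argument parser and model loader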
from Model.COSMIC.feature_extraction.comet.csk_feature_extract import (
    CSKFeatureExtractor,
)

from Model.COSMIC.erc_training.predict_epik import parse_cosmic_args, load_model

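# load the fine-tuned RoBERTa checkpoint used to extract utterance-level features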
roberta = RobertaModel.from_pretrained(
    EXTRACTORS_PATH + "/checkpoints/epik/",
    checkpoint_file="checkpoint_best.pt",
    data_name_or_path="../../epik-bin",
)
roberta.eval()

# device=0 to run on the GPU, device="cpu" to run on the CPU
comet = CSKFeatureExtractor(dir=EXTRACTORS_PATH, device="cpu")

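# parse the command-line arguments expected by the COSMIC training/prediction code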
cosmic_args = parse_cosmic_args()

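# load the trained COSMIC emotion-recognition (ERC) model from its best checkpoint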
COSMIC_MODEL = load_model(EPIK_MODEL_DIR + "/epik/best_model.pt", cosmic_args)

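# DeBERTa: make the package importable, then build its config/tokenizer and load the pretrained model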
PATH_TO_DEBERTA = "./Model/DeBERTa"
sys.path.append(PATH_TO_DEBERTA)

from Model.DeBERTa.deberta import load_model as load_deberta_model, deberta_init  # aliased so it does not shadow COSMIC's load_model

cfg, tokenizer = deberta_init()
deberta_model = load_deberta_model(cfg, PATH_TO_DEBERTA)