Joshua Lochner committed on
Commit
bdfb4b1
·
1 Parent(s): df05196

Remove unused imports

Browse files
Files changed (5) hide show
  1. src/evaluate.py +2 -7
  2. src/predict.py +2 -10
  3. src/segment.py +0 -1
  4. src/train.py +2 -4
  5. src/utils.py +0 -4
src/evaluate.py CHANGED
@@ -4,14 +4,9 @@ import re
4
  import requests
5
  from model import get_model_tokenizer
6
  from utils import jaccard
7
- from datasets import load_dataset
8
- from transformers import (
9
- AutoModelForSeq2SeqLM,
10
- AutoTokenizer,
11
- HfArgumentParser
12
- )
13
  from preprocess import DatasetArguments, get_words
14
- from shared import device, GeneralArguments
15
  from predict import ClassifierArguments, predict, TrainingOutputArguments
16
  from segment import extract_segment, word_start, word_end, SegmentationArguments, add_labels_to_words
17
  import pandas as pd
 
4
  import requests
5
  from model import get_model_tokenizer
6
  from utils import jaccard
7
+ from transformers import HfArgumentParser
 
 
 
 
 
8
  from preprocess import DatasetArguments, get_words
9
+ from shared import GeneralArguments
10
  from predict import ClassifierArguments, predict, TrainingOutputArguments
11
  from segment import extract_segment, word_start, word_end, SegmentationArguments, add_labels_to_words
12
  import pandas as pd
src/predict.py CHANGED
@@ -1,6 +1,5 @@
1
- from shared import START_SEGMENT_TEMPLATE, END_SEGMENT_TEMPLATE
2
  from utils import re_findall
3
- from shared import OutputArguments
4
  from typing import Optional
5
  from segment import (
6
  generate_segments,
@@ -15,19 +14,12 @@ from segment import (
15
  import preprocess
16
  from errors import TranscriptError
17
  from model import get_classifier_vectorizer, get_model_tokenizer
18
- from transformers import (
19
- AutoModelForSeq2SeqLM,
20
- AutoTokenizer,
21
- HfArgumentParser
22
- )
23
  from transformers.trainer_utils import get_last_checkpoint
24
  from dataclasses import dataclass, field
25
- from shared import device
26
  import logging
27
 
28
- import re
29
 
30
- from shared import seconds_to_time
31
  @dataclass
32
  class TrainingOutputArguments:
33
 
 
 
1
  from utils import re_findall
2
+ from shared import START_SEGMENT_TEMPLATE, END_SEGMENT_TEMPLATE, OutputArguments, device, seconds_to_time
3
  from typing import Optional
4
  from segment import (
5
  generate_segments,
 
14
  import preprocess
15
  from errors import TranscriptError
16
  from model import get_classifier_vectorizer, get_model_tokenizer
17
+ from transformers import HfArgumentParser
 
 
 
 
18
  from transformers.trainer_utils import get_last_checkpoint
19
  from dataclasses import dataclass, field
 
20
  import logging
21
 
 
22
 
 
23
  @dataclass
24
  class TrainingOutputArguments:
25
 
src/segment.py CHANGED
@@ -1,5 +1,4 @@
1
  import preprocess
2
- from shared import CustomTokens
3
  from dataclasses import dataclass, field
4
 
5
 
 
1
  import preprocess
 
2
  from dataclasses import dataclass, field
3
 
4
 
src/train.py CHANGED
@@ -1,6 +1,6 @@
1
  from preprocess import load_datasets, DatasetArguments
2
  from predict import ClassifierArguments, SEGMENT_MATCH_RE, CATEGORIES
3
- from shared import CustomTokens, device, GeneralArguments, OutputArguments
4
  from model import ModelArguments
5
  import transformers
6
  import logging
@@ -14,9 +14,7 @@ from transformers import (
14
  DataCollatorForSeq2Seq,
15
  HfArgumentParser,
16
  Seq2SeqTrainer,
17
- Seq2SeqTrainingArguments,
18
- AutoTokenizer,
19
- AutoModelForSeq2SeqLM
20
  )
21
 
22
  from transformers.trainer_utils import get_last_checkpoint
 
1
  from preprocess import load_datasets, DatasetArguments
2
  from predict import ClassifierArguments, SEGMENT_MATCH_RE, CATEGORIES
3
+ from shared import CustomTokens, GeneralArguments, OutputArguments
4
  from model import ModelArguments
5
  import transformers
6
  import logging
 
14
  DataCollatorForSeq2Seq,
15
  HfArgumentParser,
16
  Seq2SeqTrainer,
17
+ Seq2SeqTrainingArguments
 
 
18
  )
19
 
20
  from transformers.trainer_utils import get_last_checkpoint
src/utils.py CHANGED
@@ -1,14 +1,10 @@
1
  import re
2
-
3
  import os
4
  import signal
5
  import logging
6
- import sys
7
  from time import sleep, time
8
- from random import random, randint
9
  from multiprocessing import JoinableQueue, Event, Process
10
  from queue import Empty
11
- from typing import Optional
12
 
13
  logger = logging.getLogger(__name__)
14
 
 
1
  import re
 
2
  import os
3
  import signal
4
  import logging
 
5
  from time import sleep, time
 
6
  from multiprocessing import JoinableQueue, Event, Process
7
  from queue import Empty
 
8
 
9
  logger = logging.getLogger(__name__)
10