from transformers import PretrainedConfig


class EmuruConfig(PretrainedConfig):
    """Configuration for the Emuru model, bundling the Hub IDs of its
    pretrained components with its core hyperparameters."""

    model_type = "emuru"  # Unique identifier for this model type

    def __init__(self,
                 t5_config='google-t5/t5-large',                # Hub ID of the T5 backbone
                 vae_config='blowing-up-groundhogs/emuru_vae',  # Hub ID of the pretrained VAE
                 tokenizer_config='google/byt5-small',          # Hub ID of the tokenizer
                 slices_per_query=1,                            # latent slices consumed per query
                 vae_channels=1,                                # channels in the VAE latent space
                 **kwargs):
        super().__init__(**kwargs)
        self.t5_config = t5_config
        self.vae_config = vae_config
        self.tokenizer_config = tokenizer_config
        self.slices_per_query = slices_per_query
        self.vae_channels = vae_channels
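

# ----------------------------------------------------------------------
# Example usage (an illustrative sketch, not part of the original file).
# Because EmuruConfig subclasses PretrainedConfig, it inherits the standard
# serialization API from transformers; "emuru_checkpoint" below is a
# hypothetical local directory chosen for this example.
if __name__ == "__main__":
    config = EmuruConfig(slices_per_query=2)
    config.save_pretrained("emuru_checkpoint")  # writes config.json
    reloaded = EmuruConfig.from_pretrained("emuru_checkpoint")
    assert reloaded.slices_per_query == 2
    print(reloaded.model_type)  # -> "emuru"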