# discrete_weights / config.yaml
# Uploaded by Hack90 (commit 4b3c600, verified)
---
# Hydra composition order: this config first, then swap the launcher
# to the submitit SLURM plugin.
defaults:
  - _self_
  - override hydra/launcher: submitit_slurm
# Cluster resources; referenced below via ${compute.ngpus} / ${compute.nodes}.
compute:
  ngpus: 1
  nodes: 1
logging:
  log_freq: 100
  # LR logging cadence mirrors the general log frequency.
  log_lr_every: ${logging.log_freq}
  log_file_name: stdout.log
  enable_wandb: true  # canonical lowercase boolean (yamllint `truthy`)
  entity: flows
  project: flow_matching
  group: null
# Hugging Face dataset ids for train/validation splits.
data:
  train: DNA-LLM/experiment_one_viral_genomes_train_set_v2
  valid: DNA-LLM/experiment_one_viral_genomes_val_set_v2
  cache_dir: /huggingface/
  num_workers: 8
training:
  batch_size: 64
  snapshot: 2000  # checkpoint interval (iterations)
  eval_freq: 20000  # validation interval (iterations)
  perplexity_freq: 200000  # perplexity eval interval (iterations)
  seed: 42
eval:
  batch_size: 64
  sample_batch_size: 16
  perplexity: true  # canonical lowercase boolean (yamllint `truthy`)
  perplexity_batch_size: 16
optim:
  weight_decay: 0.03
  optimizer: AdamW
  # NOTE: PyYAML's float resolver requires a mantissa dot, so the original
  # `3e-4` / `1e-8` load as *strings*; `3.0e-4` / `1.0e-8` load as floats.
  lr: 3.0e-4
  beta1: 0.9
  beta2: 0.95
  eps: 1.0e-8
  warmup: 2500
  grad_clip: 1.0
  eta_min_ratio: 0.1  # cosine-schedule floor as a fraction of lr — TODO confirm against trainer
  fused: false
  n_iters: 1000000
  log_lr_every: ${logging.log_lr_every}
flow:
  source_distribution: uniform # [uniform, mask]
  loss_function: cross_entropy # [cross_entropy, generalized_kl]
  exponent: 1.0  # canonical float form (was `1.`)
  scheduler_type: polynomial
  sampling_steps: 2048
# Transformer hyperparameters; `length` matches flow.sampling_steps (2048).
model:
  hidden_size: 768
  cond_dim: 128
  length: 2048
  n_blocks: 12
  n_heads: 12
  dropout: 0.1
  compile: true
# Base output directory, interpolated into hydra.run.dir / hydra.sweep.dir below.
hydra_dir: /user/hassanahmed.hassan/u12592/.project/dir.lustre-grete/learning-nucleoTIDEs/flow_matching-main/model_runs
hydra:
  run:
    dir: ${hydra_dir}/${now:%Y.%m.%d}/${now:%H%M%S}
  sweep:
    dir: ${hydra_dir}/${now:%Y.%m.%d}/${now:%H%M%S}
    subdir: ${hydra.job.num}
  # submitit_slurm launcher settings (see `defaults` override above).
  launcher:
    max_num_timeout: 100000
    timeout_min: 4320
    partition: learn
    qos: null  # TODO: change it to your own qos
    gpus_per_node: ${compute.ngpus}
    mem_gb: 1760
    cpus_per_task: 32
    nodes: ${compute.nodes}