# @package _group_

common:
  fp16: true
  log_format: json
  log_interval: 200
  tensorboard_logdir: tblog
  seed: 1337

checkpoint:
  save_interval: 1000000
  keep_last_epochs: 5
  save_interval_updates: 1000
  keep_interval_updates_pattern: 10000
  keep_interval_updates: 5
  best_checkpoint_metric: accuracy
  maximize_best_checkpoint_metric: true

distributed_training:
  ddp_backend: c10d
  find_unused_parameters: true
  distributed_world_size: 1
  nprocs_per_node: 8

criterion:
  _name: "label_smoothed_cross_entropy"

task:
  _name: "translation_from_jst"

dataset:
  num_workers: 0
  max_tokens: 4096
  skip_invalid_size_inputs_valid_test: true
  validate_after_updates: ${model.freeze_finetune_updates}
  validate_interval: ${checkpoint.save_interval}
  validate_interval_updates: ${checkpoint.save_interval_updates}
  train_subset: train_clean_100
  valid_subset: dev_clean
  required_batch_size_multiple: 1

optimizer:
  _name: adam
  adam_betas: (0.9,0.98)
  adam_eps: 1e-06
  weight_decay: 0.0

lr_scheduler:
  _name: tri_stage
  phase_ratio: [0.1, 0.4, 0.5]
  final_lr_scale: 0.05

model:
  _name: hubert_t2c
  w2v_path: ???
  layerdrop: 0.1
  decoder_layerdrop: 0.1
  activation_dropout: 0.1
  feature_grad_mult: 0.0
  freeze_finetune_updates: 0

hydra:
  job:
    config:
      override_dirname:
        kv_sep: '-'
        item_sep: '__'
        exclude_keys:
          - run
          - task.data
          - task.label_dir
          - model.w2v_path
          - dataset.train_subset
          - dataset.valid_subset
  run:
    dir: ???
  sweep:
    dir: ???
    subdir: ${hydra.job.config_name}__${hydra.job.override_dirname}
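
# Usage sketch (an assumption, not part of the original config): a file shaped
# like this is typically consumed via fairseq-hydra-train. The config name,
# directories, and data paths below are hypothetical placeholders; the values
# marked ??? above (model.w2v_path, hydra.run.dir, hydra.sweep.dir) are
# mandatory and must be overridden on the command line, e.g.:
#
#   fairseq-hydra-train \
#     --config-dir /path/to/config_dir \
#     --config-name <this_config> \
#     task.data=/path/to/data \
#     model.w2v_path=/path/to/pretrained_checkpoint.pt \
#     hydra.run.dir=/path/to/output_dir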