nr_frozen_epochs: 0.3
keep_embeddings_frozen: true
optimizer: AdamW
warmup_steps: 0
encoder_learning_rate: 1.0e-06
learning_rate: 1.5e-05
layerwise_decay: 0.95
encoder_model: XLM-RoBERTa
pretrained_model: microsoft/infoxlm-large
pool: avg
layer: mix
layer_transformation: sparsemax
layer_norm: false
loss: mse
dropout: 0.1
batch_size: 1
train_data:
- /content/COMET/data/train.csv
validation_data:
- /content/COMET/data/val.csv
class_identifier: unified_metric
load_pretrained_weights: true
local_files_only: false
sent_layer: mix
word_layer: 24
hidden_sizes:
- 3072
- 1024
activations: Tanh
final_activation: null
input_segments:
- mt
- src
word_level_training: false
loss_lambda: 0.65
error_labels:
- minor
- major
cross_entropy_weights: null