{
  "base_model": "gpt2",
  "vocab_size": 50257,
  "context_length": 1024,
  "emb_dim": 768,
  "n_layers": 12,
  "n_heads": 12,
  "num_classes": 2
}