Zainab984 committed on
Commit
4771115
·
1 Parent(s): 16abcf6

Upload config

Browse files
Files changed (1) hide show
  1. config.json +8 -5
config.json CHANGED
@@ -1,15 +1,20 @@
1
  {
2
  "_name_or_path": "prajjwal1/bert-tiny",
3
- "architectures": [
4
- "BertForSequenceClassification"
5
- ],
6
  "attention_probs_dropout_prob": 0.1,
7
  "classifier_dropout": null,
8
  "hidden_act": "gelu",
9
  "hidden_dropout_prob": 0.1,
10
  "hidden_size": 128,
 
 
 
 
11
  "initializer_range": 0.02,
12
  "intermediate_size": 512,
 
 
 
 
13
  "layer_norm_eps": 1e-12,
14
  "max_position_embeddings": 512,
15
  "model_type": "bert",
@@ -17,8 +22,6 @@
17
  "num_hidden_layers": 2,
18
  "pad_token_id": 0,
19
  "position_embedding_type": "absolute",
20
- "problem_type": "single_label_classification",
21
- "torch_dtype": "float32",
22
  "transformers_version": "4.35.2",
23
  "type_vocab_size": 2,
24
  "use_cache": true,
 
1
  {
2
  "_name_or_path": "prajjwal1/bert-tiny",
 
 
 
3
  "attention_probs_dropout_prob": 0.1,
4
  "classifier_dropout": null,
5
  "hidden_act": "gelu",
6
  "hidden_dropout_prob": 0.1,
7
  "hidden_size": 128,
8
+ "id2label": {
9
+ "0": "Taken",
10
+ "1": "Not Taken"
11
+ },
12
  "initializer_range": 0.02,
13
  "intermediate_size": 512,
14
+ "label2id": {
15
+ "Not Taken": 1,
16
+ "Taken": 0
17
+ },
18
  "layer_norm_eps": 1e-12,
19
  "max_position_embeddings": 512,
20
  "model_type": "bert",
 
22
  "num_hidden_layers": 2,
23
  "pad_token_id": 0,
24
  "position_embedding_type": "absolute",
 
 
25
  "transformers_version": "4.35.2",
26
  "type_vocab_size": 2,
27
  "use_cache": true,