Upload folder using huggingface_hub
- model.safetensors +1 -1
- optimizer.pt +1 -1
- scheduler.pt +1 -1
- trainer_state.json +2412 -4
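The commit message above indicates the checkpoint folder was pushed with the `huggingface_hub` client. A minimal sketch of such an upload is shown below; the local folder path and repository id are placeholders, not values taken from this commit, and it assumes you are already authenticated (e.g. via `huggingface-cli login`).

```python
# Hypothetical example: push a local training-output folder to the Hub.
# folder_path and repo_id are placeholders; the real values are not shown in this diff.
from huggingface_hub import upload_folder

upload_folder(
    folder_path="./output/checkpoint",      # local folder with model.safetensors, optimizer.pt, ...
    repo_id="your-username/your-model",     # target model repository on the Hub
    commit_message="Upload folder using huggingface_hub",
)
```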
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ac758eace2f5b8381943498de0e1a2959379d124ae34d0ffdf0156645a347cb9
 size 2815117504
optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:82f5c6ab3621acc9799b13e859b13528a921eb159860e69defb7f88c275d7163
 size 2486357050
scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cc113a37cb5f7a0ce0d70f0ebfc357409819d441107b4df4d6919c2244a21866
 size 1064
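The three binary files above are stored through Git LFS, so the diff only shows their pointer files (spec version, `oid sha256:`, and byte size); this commit swaps in new object hashes while the sizes stay unchanged. The `oid` is the SHA-256 digest of the actual file content, so a locally pulled file can be checked against the pointer. A small sketch, assuming the file has already been downloaded next to the script:

```python
# Sketch: verify a locally downloaded LFS object against the oid in its pointer file.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

print(sha256_of("model.safetensors"))
# Expected from the pointer above:
# ac758eace2f5b8381943498de0e1a2959379d124ae34d0ffdf0156645a347cb9
```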
trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 1.
   "eval_steps": 500,
-  "global_step":
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -4907,6 +4907,2414 @@
       "learning_rate": 5.379780262160237e-06,
       "loss": 1.7222,
       "step": 700
     }
   ],
   "logging_steps": 1,
@@ -4921,12 +7329,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
       },
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 10,
   "trial_name": null,
   "trial_params": null
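The bulk of this commit is the extended log in `trainer_state.json`: the previous state ended its log at step 700, and the updated state reaches global_step 1044 at epoch ≈ 2.0, with each added entry recording `epoch`, `grad_norm`, `learning_rate`, `loss`, and `step`. A small sketch of reading the loss curve back out of the updated file, assuming a local copy of `trainer_state.json` (these key names follow the standard `transformers` Trainer state format):

```python
# Sketch: extract step/loss pairs from a downloaded trainer_state.json.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

print(state["global_step"], state["epoch"])          # e.g. 1044 and ~2.0 after this commit
for entry in state["log_history"]:
    if "loss" in entry:                               # skip any eval-only entries
        print(entry["step"], entry["loss"], entry.get("learning_rate"))
```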
 {
   "best_metric": null,
   "best_model_checkpoint": null,
+  "epoch": 1.9990426041168023,
   "eval_steps": 500,
+  "global_step": 1044,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
       "learning_rate": 5.379780262160237e-06,
       "loss": 1.7222,
       "step": 700
+    },
+    {
+      "epoch": 1.3422690282431786,
+      "grad_norm": 1.734375,
+      "learning_rate": 5.3516886967272485e-06,
+      "loss": 1.7227,
+      "step": 701
+    },
+    {
+      "epoch": 1.344183820009574,
+      "grad_norm": 1.75,
+      "learning_rate": 5.323643845318135e-06,
+      "loss": 1.7426,
+      "step": 702
+    },
+    {
+      "epoch": 1.3460986117759695,
+      "grad_norm": 1.6875,
+      "learning_rate": 5.295645989774565e-06,
+      "loss": 1.7319,
+      "step": 703
+    },
+    {
+      "epoch": 1.3480134035423648,
+      "grad_norm": 1.75,
+      "learning_rate": 5.26769541146593e-06,
+      "loss": 1.6958,
+      "step": 704
+    },
+    {
+      "epoch": 1.34992819530876,
+      "grad_norm": 1.734375,
+      "learning_rate": 5.239792391286492e-06,
+      "loss": 1.682,
+      "step": 705
+    },
4946 |
+
{
|
4947 |
+
"epoch": 1.3518429870751556,
|
4948 |
+
"grad_norm": 1.65625,
|
4949 |
+
"learning_rate": 5.211937209652567e-06,
|
4950 |
+
"loss": 1.6474,
|
4951 |
+
"step": 706
|
4952 |
+
},
|
4953 |
+
{
|
4954 |
+
"epoch": 1.353757778841551,
|
4955 |
+
"grad_norm": 1.734375,
|
4956 |
+
"learning_rate": 5.1841301464997206e-06,
|
4957 |
+
"loss": 1.6762,
|
4958 |
+
"step": 707
|
4959 |
+
},
|
4960 |
+
{
|
4961 |
+
"epoch": 1.3556725706079464,
|
4962 |
+
"grad_norm": 1.7734375,
|
4963 |
+
"learning_rate": 5.156371481279928e-06,
|
4964 |
+
"loss": 1.6729,
|
4965 |
+
"step": 708
|
4966 |
+
},
|
4967 |
+
{
|
4968 |
+
"epoch": 1.3575873623743417,
|
4969 |
+
"grad_norm": 1.7109375,
|
4970 |
+
"learning_rate": 5.128661492958793e-06,
|
4971 |
+
"loss": 1.7092,
|
4972 |
+
"step": 709
|
4973 |
+
},
|
4974 |
+
{
|
4975 |
+
"epoch": 1.3595021541407373,
|
4976 |
+
"grad_norm": 1.7890625,
|
4977 |
+
"learning_rate": 5.101000460012731e-06,
|
4978 |
+
"loss": 1.6943,
|
4979 |
+
"step": 710
|
4980 |
+
},
|
4981 |
+
{
|
4982 |
+
"epoch": 1.3614169459071326,
|
4983 |
+
"grad_norm": 1.671875,
|
4984 |
+
"learning_rate": 5.073388660426164e-06,
|
4985 |
+
"loss": 1.6758,
|
4986 |
+
"step": 711
|
4987 |
+
},
|
4988 |
+
{
|
4989 |
+
"epoch": 1.363331737673528,
|
4990 |
+
"grad_norm": 1.78125,
|
4991 |
+
"learning_rate": 5.04582637168874e-06,
|
4992 |
+
"loss": 1.671,
|
4993 |
+
"step": 712
|
4994 |
+
},
|
4995 |
+
{
|
4996 |
+
"epoch": 1.3652465294399234,
|
4997 |
+
"grad_norm": 1.7109375,
|
4998 |
+
"learning_rate": 5.018313870792544e-06,
|
4999 |
+
"loss": 1.6675,
|
5000 |
+
"step": 713
|
5001 |
+
},
|
5002 |
+
{
|
5003 |
+
"epoch": 1.3671613212063187,
|
5004 |
+
"grad_norm": 1.7109375,
|
5005 |
+
"learning_rate": 4.990851434229295e-06,
|
5006 |
+
"loss": 1.7074,
|
5007 |
+
"step": 714
|
5008 |
+
},
|
5009 |
+
{
|
5010 |
+
"epoch": 1.3690761129727143,
|
5011 |
+
"grad_norm": 1.7265625,
|
5012 |
+
"learning_rate": 4.9634393379875986e-06,
|
5013 |
+
"loss": 1.6558,
|
5014 |
+
"step": 715
|
5015 |
+
},
|
5016 |
+
{
|
5017 |
+
"epoch": 1.3709909047391096,
|
5018 |
+
"grad_norm": 1.703125,
|
5019 |
+
"learning_rate": 4.936077857550141e-06,
|
5020 |
+
"loss": 1.7064,
|
5021 |
+
"step": 716
|
5022 |
+
},
|
5023 |
+
{
|
5024 |
+
"epoch": 1.372905696505505,
|
5025 |
+
"grad_norm": 1.640625,
|
5026 |
+
"learning_rate": 4.908767267890952e-06,
|
5027 |
+
"loss": 1.646,
|
5028 |
+
"step": 717
|
5029 |
+
},
|
5030 |
+
{
|
5031 |
+
"epoch": 1.3748204882719004,
|
5032 |
+
"grad_norm": 1.7109375,
|
5033 |
+
"learning_rate": 4.8815078434726075e-06,
|
5034 |
+
"loss": 1.7207,
|
5035 |
+
"step": 718
|
5036 |
+
},
|
5037 |
+
{
|
5038 |
+
"epoch": 1.3767352800382957,
|
5039 |
+
"grad_norm": 1.734375,
|
5040 |
+
"learning_rate": 4.854299858243505e-06,
|
5041 |
+
"loss": 1.7459,
|
5042 |
+
"step": 719
|
5043 |
+
},
|
5044 |
+
{
|
5045 |
+
"epoch": 1.3786500718046912,
|
5046 |
+
"grad_norm": 1.6796875,
|
5047 |
+
"learning_rate": 4.827143585635085e-06,
|
5048 |
+
"loss": 1.677,
|
5049 |
+
"step": 720
|
5050 |
+
},
|
5051 |
+
{
|
5052 |
+
"epoch": 1.3805648635710868,
|
5053 |
+
"grad_norm": 1.796875,
|
5054 |
+
"learning_rate": 4.800039298559101e-06,
|
5055 |
+
"loss": 1.682,
|
5056 |
+
"step": 721
|
5057 |
+
},
|
5058 |
+
{
|
5059 |
+
"epoch": 1.382479655337482,
|
5060 |
+
"grad_norm": 1.7265625,
|
5061 |
+
"learning_rate": 4.772987269404855e-06,
|
5062 |
+
"loss": 1.6784,
|
5063 |
+
"step": 722
|
5064 |
+
},
|
5065 |
+
{
|
5066 |
+
"epoch": 1.3843944471038774,
|
5067 |
+
"grad_norm": 1.703125,
|
5068 |
+
"learning_rate": 4.745987770036494e-06,
|
5069 |
+
"loss": 1.7062,
|
5070 |
+
"step": 723
|
5071 |
+
},
|
5072 |
+
{
|
5073 |
+
"epoch": 1.386309238870273,
|
5074 |
+
"grad_norm": 1.6953125,
|
5075 |
+
"learning_rate": 4.719041071790238e-06,
|
5076 |
+
"loss": 1.6879,
|
5077 |
+
"step": 724
|
5078 |
+
},
|
5079 |
+
{
|
5080 |
+
"epoch": 1.3882240306366682,
|
5081 |
+
"grad_norm": 1.6875,
|
5082 |
+
"learning_rate": 4.692147445471687e-06,
|
5083 |
+
"loss": 1.7091,
|
5084 |
+
"step": 725
|
5085 |
+
},
|
5086 |
+
{
|
5087 |
+
"epoch": 1.3901388224030637,
|
5088 |
+
"grad_norm": 1.7265625,
|
5089 |
+
"learning_rate": 4.665307161353073e-06,
|
5090 |
+
"loss": 1.721,
|
5091 |
+
"step": 726
|
5092 |
+
},
|
5093 |
+
{
|
5094 |
+
"epoch": 1.392053614169459,
|
5095 |
+
"grad_norm": 1.671875,
|
5096 |
+
"learning_rate": 4.638520489170572e-06,
|
5097 |
+
"loss": 1.6974,
|
5098 |
+
"step": 727
|
5099 |
+
},
|
5100 |
+
{
|
5101 |
+
"epoch": 1.3939684059358544,
|
5102 |
+
"grad_norm": 1.71875,
|
5103 |
+
"learning_rate": 4.611787698121558e-06,
|
5104 |
+
"loss": 1.7036,
|
5105 |
+
"step": 728
|
5106 |
+
},
|
5107 |
+
{
|
5108 |
+
"epoch": 1.39588319770225,
|
5109 |
+
"grad_norm": 1.7109375,
|
5110 |
+
"learning_rate": 4.585109056861936e-06,
|
5111 |
+
"loss": 1.6918,
|
5112 |
+
"step": 729
|
5113 |
+
},
|
5114 |
+
{
|
5115 |
+
"epoch": 1.3977979894686454,
|
5116 |
+
"grad_norm": 1.75,
|
5117 |
+
"learning_rate": 4.558484833503407e-06,
|
5118 |
+
"loss": 1.6522,
|
5119 |
+
"step": 730
|
5120 |
+
},
|
5121 |
+
{
|
5122 |
+
"epoch": 1.3997127812350407,
|
5123 |
+
"grad_norm": 1.6953125,
|
5124 |
+
"learning_rate": 4.531915295610805e-06,
|
5125 |
+
"loss": 1.682,
|
5126 |
+
"step": 731
|
5127 |
+
},
|
5128 |
+
{
|
5129 |
+
"epoch": 1.401627573001436,
|
5130 |
+
"grad_norm": 1.7109375,
|
5131 |
+
"learning_rate": 4.505400710199376e-06,
|
5132 |
+
"loss": 1.7143,
|
5133 |
+
"step": 732
|
5134 |
+
},
|
5135 |
+
{
|
5136 |
+
"epoch": 1.4035423647678316,
|
5137 |
+
"grad_norm": 1.78125,
|
5138 |
+
"learning_rate": 4.478941343732125e-06,
|
5139 |
+
"loss": 1.7184,
|
5140 |
+
"step": 733
|
5141 |
+
},
|
5142 |
+
{
|
5143 |
+
"epoch": 1.4054571565342269,
|
5144 |
+
"grad_norm": 1.7734375,
|
5145 |
+
"learning_rate": 4.452537462117123e-06,
|
5146 |
+
"loss": 1.7365,
|
5147 |
+
"step": 734
|
5148 |
+
},
|
5149 |
+
{
|
5150 |
+
"epoch": 1.4073719483006224,
|
5151 |
+
"grad_norm": 1.7578125,
|
5152 |
+
"learning_rate": 4.426189330704826e-06,
|
5153 |
+
"loss": 1.6457,
|
5154 |
+
"step": 735
|
5155 |
+
},
|
5156 |
+
{
|
5157 |
+
"epoch": 1.4092867400670177,
|
5158 |
+
"grad_norm": 1.703125,
|
5159 |
+
"learning_rate": 4.3998972142854334e-06,
|
5160 |
+
"loss": 1.6866,
|
5161 |
+
"step": 736
|
5162 |
+
},
|
5163 |
+
{
|
5164 |
+
"epoch": 1.411201531833413,
|
5165 |
+
"grad_norm": 1.6796875,
|
5166 |
+
"learning_rate": 4.373661377086195e-06,
|
5167 |
+
"loss": 1.6959,
|
5168 |
+
"step": 737
|
5169 |
+
},
|
5170 |
+
{
|
5171 |
+
"epoch": 1.4131163235998085,
|
5172 |
+
"grad_norm": 1.71875,
|
5173 |
+
"learning_rate": 4.3474820827687894e-06,
|
5174 |
+
"loss": 1.7323,
|
5175 |
+
"step": 738
|
5176 |
+
},
|
5177 |
+
{
|
5178 |
+
"epoch": 1.4150311153662039,
|
5179 |
+
"grad_norm": 1.6953125,
|
5180 |
+
"learning_rate": 4.321359594426644e-06,
|
5181 |
+
"loss": 1.6982,
|
5182 |
+
"step": 739
|
5183 |
+
},
|
5184 |
+
{
|
5185 |
+
"epoch": 1.4169459071325994,
|
5186 |
+
"grad_norm": 1.671875,
|
5187 |
+
"learning_rate": 4.295294174582315e-06,
|
5188 |
+
"loss": 1.6859,
|
5189 |
+
"step": 740
|
5190 |
+
},
|
5191 |
+
{
|
5192 |
+
"epoch": 1.4188606988989947,
|
5193 |
+
"grad_norm": 1.6953125,
|
5194 |
+
"learning_rate": 4.2692860851848295e-06,
|
5195 |
+
"loss": 1.6616,
|
5196 |
+
"step": 741
|
5197 |
+
},
|
5198 |
+
{
|
5199 |
+
"epoch": 1.4207754906653902,
|
5200 |
+
"grad_norm": 1.75,
|
5201 |
+
"learning_rate": 4.243335587607074e-06,
|
5202 |
+
"loss": 1.6983,
|
5203 |
+
"step": 742
|
5204 |
+
},
|
5205 |
+
{
|
5206 |
+
"epoch": 1.4226902824317855,
|
5207 |
+
"grad_norm": 1.6875,
|
5208 |
+
"learning_rate": 4.217442942643138e-06,
|
5209 |
+
"loss": 1.6405,
|
5210 |
+
"step": 743
|
5211 |
+
},
|
5212 |
+
{
|
5213 |
+
"epoch": 1.424605074198181,
|
5214 |
+
"grad_norm": 1.65625,
|
5215 |
+
"learning_rate": 4.191608410505732e-06,
|
5216 |
+
"loss": 1.6706,
|
5217 |
+
"step": 744
|
5218 |
+
},
|
5219 |
+
{
|
5220 |
+
"epoch": 1.4265198659645764,
|
5221 |
+
"grad_norm": 1.875,
|
5222 |
+
"learning_rate": 4.165832250823534e-06,
|
5223 |
+
"loss": 1.7247,
|
5224 |
+
"step": 745
|
5225 |
+
},
|
5226 |
+
{
|
5227 |
+
"epoch": 1.4284346577309717,
|
5228 |
+
"grad_norm": 1.7578125,
|
5229 |
+
"learning_rate": 4.140114722638609e-06,
|
5230 |
+
"loss": 1.713,
|
5231 |
+
"step": 746
|
5232 |
+
},
|
5233 |
+
{
|
5234 |
+
"epoch": 1.4303494494973672,
|
5235 |
+
"grad_norm": 1.671875,
|
5236 |
+
"learning_rate": 4.114456084403785e-06,
|
5237 |
+
"loss": 1.7519,
|
5238 |
+
"step": 747
|
5239 |
+
},
|
5240 |
+
{
|
5241 |
+
"epoch": 1.4322642412637625,
|
5242 |
+
"grad_norm": 1.765625,
|
5243 |
+
"learning_rate": 4.088856593980078e-06,
|
5244 |
+
"loss": 1.7403,
|
5245 |
+
"step": 748
|
5246 |
+
},
|
5247 |
+
{
|
5248 |
+
"epoch": 1.434179033030158,
|
5249 |
+
"grad_norm": 1.65625,
|
5250 |
+
"learning_rate": 4.06331650863407e-06,
|
5251 |
+
"loss": 1.6786,
|
5252 |
+
"step": 749
|
5253 |
+
},
|
5254 |
+
{
|
5255 |
+
"epoch": 1.4360938247965533,
|
5256 |
+
"grad_norm": 1.7265625,
|
5257 |
+
"learning_rate": 4.03783608503536e-06,
|
5258 |
+
"loss": 1.6476,
|
5259 |
+
"step": 750
|
5260 |
+
},
|
5261 |
+
{
|
5262 |
+
"epoch": 1.4380086165629486,
|
5263 |
+
"grad_norm": 1.796875,
|
5264 |
+
"learning_rate": 4.0124155792539496e-06,
|
5265 |
+
"loss": 1.8036,
|
5266 |
+
"step": 751
|
5267 |
+
},
|
5268 |
+
{
|
5269 |
+
"epoch": 1.4399234083293442,
|
5270 |
+
"grad_norm": 1.703125,
|
5271 |
+
"learning_rate": 3.987055246757701e-06,
|
5272 |
+
"loss": 1.7387,
|
5273 |
+
"step": 752
|
5274 |
+
},
|
5275 |
+
{
|
5276 |
+
"epoch": 1.4418382000957397,
|
5277 |
+
"grad_norm": 1.6875,
|
5278 |
+
"learning_rate": 3.961755342409737e-06,
|
5279 |
+
"loss": 1.7148,
|
5280 |
+
"step": 753
|
5281 |
+
},
|
5282 |
+
{
|
5283 |
+
"epoch": 1.443752991862135,
|
5284 |
+
"grad_norm": 1.6484375,
|
5285 |
+
"learning_rate": 3.936516120465914e-06,
|
5286 |
+
"loss": 1.621,
|
5287 |
+
"step": 754
|
5288 |
+
},
|
5289 |
+
{
|
5290 |
+
"epoch": 1.4456677836285303,
|
5291 |
+
"grad_norm": 1.65625,
|
5292 |
+
"learning_rate": 3.911337834572235e-06,
|
5293 |
+
"loss": 1.6647,
|
5294 |
+
"step": 755
|
5295 |
+
},
|
5296 |
+
{
|
5297 |
+
"epoch": 1.4475825753949259,
|
5298 |
+
"grad_norm": 1.75,
|
5299 |
+
"learning_rate": 3.886220737762328e-06,
|
5300 |
+
"loss": 1.6833,
|
5301 |
+
"step": 756
|
5302 |
+
},
|
5303 |
+
{
|
5304 |
+
"epoch": 1.4494973671613212,
|
5305 |
+
"grad_norm": 1.734375,
|
5306 |
+
"learning_rate": 3.861165082454888e-06,
|
5307 |
+
"loss": 1.7302,
|
5308 |
+
"step": 757
|
5309 |
+
},
|
5310 |
+
{
|
5311 |
+
"epoch": 1.4514121589277167,
|
5312 |
+
"grad_norm": 1.6953125,
|
5313 |
+
"learning_rate": 3.836171120451131e-06,
|
5314 |
+
"loss": 1.7396,
|
5315 |
+
"step": 758
|
5316 |
+
},
|
5317 |
+
{
|
5318 |
+
"epoch": 1.453326950694112,
|
5319 |
+
"grad_norm": 1.7734375,
|
5320 |
+
"learning_rate": 3.811239102932289e-06,
|
5321 |
+
"loss": 1.7763,
|
5322 |
+
"step": 759
|
5323 |
+
},
|
5324 |
+
{
|
5325 |
+
"epoch": 1.4552417424605073,
|
5326 |
+
"grad_norm": 1.7109375,
|
5327 |
+
"learning_rate": 3.7863692804570707e-06,
|
5328 |
+
"loss": 1.734,
|
5329 |
+
"step": 760
|
5330 |
+
},
|
5331 |
+
{
|
5332 |
+
"epoch": 1.4571565342269028,
|
5333 |
+
"grad_norm": 1.734375,
|
5334 |
+
"learning_rate": 3.761561902959139e-06,
|
5335 |
+
"loss": 1.6783,
|
5336 |
+
"step": 761
|
5337 |
+
},
|
5338 |
+
{
|
5339 |
+
"epoch": 1.4590713259932984,
|
5340 |
+
"grad_norm": 1.703125,
|
5341 |
+
"learning_rate": 3.7368172197446007e-06,
|
5342 |
+
"loss": 1.6689,
|
5343 |
+
"step": 762
|
5344 |
+
},
|
5345 |
+
{
|
5346 |
+
"epoch": 1.4609861177596937,
|
5347 |
+
"grad_norm": 1.7265625,
|
5348 |
+
"learning_rate": 3.7121354794895216e-06,
|
5349 |
+
"loss": 1.6886,
|
5350 |
+
"step": 763
|
5351 |
+
},
|
5352 |
+
{
|
5353 |
+
"epoch": 1.462900909526089,
|
5354 |
+
"grad_norm": 1.65625,
|
5355 |
+
"learning_rate": 3.6875169302373938e-06,
|
5356 |
+
"loss": 1.6309,
|
5357 |
+
"step": 764
|
5358 |
+
},
|
5359 |
+
{
|
5360 |
+
"epoch": 1.4648157012924845,
|
5361 |
+
"grad_norm": 1.7421875,
|
5362 |
+
"learning_rate": 3.6629618193966744e-06,
|
5363 |
+
"loss": 1.7063,
|
5364 |
+
"step": 765
|
5365 |
+
},
|
5366 |
+
{
|
5367 |
+
"epoch": 1.4667304930588798,
|
5368 |
+
"grad_norm": 1.7265625,
|
5369 |
+
"learning_rate": 3.6384703937382714e-06,
|
5370 |
+
"loss": 1.7162,
|
5371 |
+
"step": 766
|
5372 |
+
},
|
5373 |
+
{
|
5374 |
+
"epoch": 1.4686452848252753,
|
5375 |
+
"grad_norm": 1.71875,
|
5376 |
+
"learning_rate": 3.6140428993930922e-06,
|
5377 |
+
"loss": 1.7338,
|
5378 |
+
"step": 767
|
5379 |
+
},
|
5380 |
+
{
|
5381 |
+
"epoch": 1.4705600765916707,
|
5382 |
+
"grad_norm": 1.734375,
|
5383 |
+
"learning_rate": 3.589679581849539e-06,
|
5384 |
+
"loss": 1.6977,
|
5385 |
+
"step": 768
|
5386 |
+
},
|
5387 |
+
{
|
5388 |
+
"epoch": 1.472474868358066,
|
5389 |
+
"grad_norm": 1.71875,
|
5390 |
+
"learning_rate": 3.5653806859510743e-06,
|
5391 |
+
"loss": 1.6789,
|
5392 |
+
"step": 769
|
5393 |
+
},
|
5394 |
+
{
|
5395 |
+
"epoch": 1.4743896601244615,
|
5396 |
+
"grad_norm": 1.7421875,
|
5397 |
+
"learning_rate": 3.5411464558937302e-06,
|
5398 |
+
"loss": 1.6898,
|
5399 |
+
"step": 770
|
5400 |
+
},
|
5401 |
+
{
|
5402 |
+
"epoch": 1.4763044518908568,
|
5403 |
+
"grad_norm": 1.703125,
|
5404 |
+
"learning_rate": 3.5169771352236782e-06,
|
5405 |
+
"loss": 1.7292,
|
5406 |
+
"step": 771
|
5407 |
+
},
|
5408 |
+
{
|
5409 |
+
"epoch": 1.4782192436572523,
|
5410 |
+
"grad_norm": 1.703125,
|
5411 |
+
"learning_rate": 3.4928729668347616e-06,
|
5412 |
+
"loss": 1.7046,
|
5413 |
+
"step": 772
|
5414 |
+
},
|
5415 |
+
{
|
5416 |
+
"epoch": 1.4801340354236476,
|
5417 |
+
"grad_norm": 1.734375,
|
5418 |
+
"learning_rate": 3.4688341929660776e-06,
|
5419 |
+
"loss": 1.7028,
|
5420 |
+
"step": 773
|
5421 |
+
},
|
5422 |
+
{
|
5423 |
+
"epoch": 1.4820488271900432,
|
5424 |
+
"grad_norm": 1.6875,
|
5425 |
+
"learning_rate": 3.444861055199512e-06,
|
5426 |
+
"loss": 1.6834,
|
5427 |
+
"step": 774
|
5428 |
+
},
|
5429 |
+
{
|
5430 |
+
"epoch": 1.4839636189564385,
|
5431 |
+
"grad_norm": 1.6875,
|
5432 |
+
"learning_rate": 3.420953794457349e-06,
|
5433 |
+
"loss": 1.7299,
|
5434 |
+
"step": 775
|
5435 |
+
},
|
5436 |
+
{
|
5437 |
+
"epoch": 1.485878410722834,
|
5438 |
+
"grad_norm": 1.6640625,
|
5439 |
+
"learning_rate": 3.397112650999811e-06,
|
5440 |
+
"loss": 1.6711,
|
5441 |
+
"step": 776
|
5442 |
+
},
|
5443 |
+
{
|
5444 |
+
"epoch": 1.4877932024892293,
|
5445 |
+
"grad_norm": 1.7265625,
|
5446 |
+
"learning_rate": 3.37333786442268e-06,
|
5447 |
+
"loss": 1.6332,
|
5448 |
+
"step": 777
|
5449 |
+
},
|
5450 |
+
{
|
5451 |
+
"epoch": 1.4897079942556246,
|
5452 |
+
"grad_norm": 1.671875,
|
5453 |
+
"learning_rate": 3.349629673654858e-06,
|
5454 |
+
"loss": 1.673,
|
5455 |
+
"step": 778
|
5456 |
+
},
|
5457 |
+
{
|
5458 |
+
"epoch": 1.4916227860220201,
|
5459 |
+
"grad_norm": 1.65625,
|
5460 |
+
"learning_rate": 3.32598831695599e-06,
|
5461 |
+
"loss": 1.6594,
|
5462 |
+
"step": 779
|
5463 |
+
},
|
5464 |
+
{
|
5465 |
+
"epoch": 1.4935375777884154,
|
5466 |
+
"grad_norm": 1.671875,
|
5467 |
+
"learning_rate": 3.3024140319140617e-06,
|
5468 |
+
"loss": 1.6547,
|
5469 |
+
"step": 780
|
5470 |
+
},
|
5471 |
+
{
|
5472 |
+
"epoch": 1.495452369554811,
|
5473 |
+
"grad_norm": 1.6875,
|
5474 |
+
"learning_rate": 3.2789070554430003e-06,
|
5475 |
+
"loss": 1.6371,
|
5476 |
+
"step": 781
|
5477 |
+
},
|
5478 |
+
{
|
5479 |
+
"epoch": 1.4973671613212063,
|
5480 |
+
"grad_norm": 1.7578125,
|
5481 |
+
"learning_rate": 3.2554676237803117e-06,
|
5482 |
+
"loss": 1.7326,
|
5483 |
+
"step": 782
|
5484 |
+
},
|
5485 |
+
{
|
5486 |
+
"epoch": 1.4992819530876016,
|
5487 |
+
"grad_norm": 1.6953125,
|
5488 |
+
"learning_rate": 3.2320959724847e-06,
|
5489 |
+
"loss": 1.7118,
|
5490 |
+
"step": 783
|
5491 |
+
},
|
5492 |
+
{
|
5493 |
+
"epoch": 1.5011967448539971,
|
5494 |
+
"grad_norm": 1.7109375,
|
5495 |
+
"learning_rate": 3.2087923364336904e-06,
|
5496 |
+
"loss": 1.8037,
|
5497 |
+
"step": 784
|
5498 |
+
},
|
5499 |
+
{
|
5500 |
+
"epoch": 1.5031115366203927,
|
5501 |
+
"grad_norm": 1.703125,
|
5502 |
+
"learning_rate": 3.1855569498212857e-06,
|
5503 |
+
"loss": 1.7526,
|
5504 |
+
"step": 785
|
5505 |
+
},
|
5506 |
+
{
|
5507 |
+
"epoch": 1.505026328386788,
|
5508 |
+
"grad_norm": 1.703125,
|
5509 |
+
"learning_rate": 3.1623900461555933e-06,
|
5510 |
+
"loss": 1.665,
|
5511 |
+
"step": 786
|
5512 |
+
},
|
5513 |
+
{
|
5514 |
+
"epoch": 1.5069411201531833,
|
5515 |
+
"grad_norm": 1.7890625,
|
5516 |
+
"learning_rate": 3.1392918582565037e-06,
|
5517 |
+
"loss": 1.6528,
|
5518 |
+
"step": 787
|
5519 |
+
},
|
5520 |
+
{
|
5521 |
+
"epoch": 1.5088559119195788,
|
5522 |
+
"grad_norm": 1.703125,
|
5523 |
+
"learning_rate": 3.1162626182533207e-06,
|
5524 |
+
"loss": 1.7152,
|
5525 |
+
"step": 788
|
5526 |
+
},
|
5527 |
+
{
|
5528 |
+
"epoch": 1.510770703685974,
|
5529 |
+
"grad_norm": 1.6796875,
|
5530 |
+
"learning_rate": 3.093302557582457e-06,
|
5531 |
+
"loss": 1.661,
|
5532 |
+
"step": 789
|
5533 |
+
},
|
5534 |
+
{
|
5535 |
+
"epoch": 1.5126854954523696,
|
5536 |
+
"grad_norm": 1.6953125,
|
5537 |
+
"learning_rate": 3.070411906985088e-06,
|
5538 |
+
"loss": 1.7286,
|
5539 |
+
"step": 790
|
5540 |
+
},
|
5541 |
+
{
|
5542 |
+
"epoch": 1.514600287218765,
|
5543 |
+
"grad_norm": 1.703125,
|
5544 |
+
"learning_rate": 3.0475908965048374e-06,
|
5545 |
+
"loss": 1.7394,
|
5546 |
+
"step": 791
|
5547 |
+
},
|
5548 |
+
{
|
5549 |
+
"epoch": 1.5165150789851602,
|
5550 |
+
"grad_norm": 1.6796875,
|
5551 |
+
"learning_rate": 3.0248397554854813e-06,
|
5552 |
+
"loss": 1.6925,
|
5553 |
+
"step": 792
|
5554 |
+
},
|
5555 |
+
{
|
5556 |
+
"epoch": 1.5184298707515558,
|
5557 |
+
"grad_norm": 1.6640625,
|
5558 |
+
"learning_rate": 3.002158712568615e-06,
|
5559 |
+
"loss": 1.6607,
|
5560 |
+
"step": 793
|
5561 |
+
},
|
5562 |
+
{
|
5563 |
+
"epoch": 1.5203446625179513,
|
5564 |
+
"grad_norm": 1.71875,
|
5565 |
+
"learning_rate": 2.979547995691383e-06,
|
5566 |
+
"loss": 1.6779,
|
5567 |
+
"step": 794
|
5568 |
+
},
|
5569 |
+
{
|
5570 |
+
"epoch": 1.5222594542843466,
|
5571 |
+
"grad_norm": 1.7265625,
|
5572 |
+
"learning_rate": 2.9570078320841644e-06,
|
5573 |
+
"loss": 1.7353,
|
5574 |
+
"step": 795
|
5575 |
+
},
|
5576 |
+
{
|
5577 |
+
"epoch": 1.524174246050742,
|
5578 |
+
"grad_norm": 1.703125,
|
5579 |
+
"learning_rate": 2.9345384482683148e-06,
|
5580 |
+
"loss": 1.7012,
|
5581 |
+
"step": 796
|
5582 |
+
},
|
5583 |
+
{
|
5584 |
+
"epoch": 1.5260890378171372,
|
5585 |
+
"grad_norm": 1.65625,
|
5586 |
+
"learning_rate": 2.9121400700538593e-06,
|
5587 |
+
"loss": 1.6287,
|
5588 |
+
"step": 797
|
5589 |
+
},
|
5590 |
+
{
|
5591 |
+
"epoch": 1.5280038295835328,
|
5592 |
+
"grad_norm": 1.6953125,
|
5593 |
+
"learning_rate": 2.8898129225372564e-06,
|
5594 |
+
"loss": 1.6926,
|
5595 |
+
"step": 798
|
5596 |
+
},
|
5597 |
+
{
|
5598 |
+
"epoch": 1.5299186213499283,
|
5599 |
+
"grad_norm": 1.71875,
|
5600 |
+
"learning_rate": 2.867557230099104e-06,
|
5601 |
+
"loss": 1.6822,
|
5602 |
+
"step": 799
|
5603 |
+
},
|
5604 |
+
{
|
5605 |
+
"epoch": 1.5318334131163236,
|
5606 |
+
"grad_norm": 1.6640625,
|
5607 |
+
"learning_rate": 2.845373216401913e-06,
|
5608 |
+
"loss": 1.7203,
|
5609 |
+
"step": 800
|
5610 |
+
},
|
5611 |
+
{
|
5612 |
+
"epoch": 1.533748204882719,
|
5613 |
+
"grad_norm": 1.71875,
|
5614 |
+
"learning_rate": 2.823261104387833e-06,
|
5615 |
+
"loss": 1.7103,
|
5616 |
+
"step": 801
|
5617 |
+
},
|
5618 |
+
{
|
5619 |
+
"epoch": 1.5356629966491144,
|
5620 |
+
"grad_norm": 1.703125,
|
5621 |
+
"learning_rate": 2.801221116276436e-06,
|
5622 |
+
"loss": 1.6772,
|
5623 |
+
"step": 802
|
5624 |
+
},
|
5625 |
+
{
|
5626 |
+
"epoch": 1.53757778841551,
|
5627 |
+
"grad_norm": 1.6953125,
|
5628 |
+
"learning_rate": 2.7792534735624687e-06,
|
5629 |
+
"loss": 1.7132,
|
5630 |
+
"step": 803
|
5631 |
+
},
|
5632 |
+
{
|
5633 |
+
"epoch": 1.5394925801819053,
|
5634 |
+
"grad_norm": 1.75,
|
5635 |
+
"learning_rate": 2.757358397013625e-06,
|
5636 |
+
"loss": 1.8025,
|
5637 |
+
"step": 804
|
5638 |
+
},
|
5639 |
+
{
|
5640 |
+
"epoch": 1.5414073719483006,
|
5641 |
+
"grad_norm": 1.6796875,
|
5642 |
+
"learning_rate": 2.7355361066683393e-06,
|
5643 |
+
"loss": 1.6785,
|
5644 |
+
"step": 805
|
5645 |
+
},
|
5646 |
+
{
|
5647 |
+
"epoch": 1.5433221637146959,
|
5648 |
+
"grad_norm": 1.703125,
|
5649 |
+
"learning_rate": 2.7137868218335674e-06,
|
5650 |
+
"loss": 1.6791,
|
5651 |
+
"step": 806
|
5652 |
+
},
|
5653 |
+
{
|
5654 |
+
"epoch": 1.5452369554810914,
|
5655 |
+
"grad_norm": 1.734375,
|
5656 |
+
"learning_rate": 2.692110761082577e-06,
|
5657 |
+
"loss": 1.7253,
|
5658 |
+
"step": 807
|
5659 |
+
},
|
5660 |
+
{
|
5661 |
+
"epoch": 1.547151747247487,
|
5662 |
+
"grad_norm": 1.6796875,
|
5663 |
+
"learning_rate": 2.670508142252766e-06,
|
5664 |
+
"loss": 1.7035,
|
5665 |
+
"step": 808
|
5666 |
+
},
|
5667 |
+
{
|
5668 |
+
"epoch": 1.5490665390138822,
|
5669 |
+
"grad_norm": 1.7421875,
|
5670 |
+
"learning_rate": 2.648979182443454e-06,
|
5671 |
+
"loss": 1.7488,
|
5672 |
+
"step": 809
|
5673 |
+
},
|
5674 |
+
{
|
5675 |
+
"epoch": 1.5509813307802776,
|
5676 |
+
"grad_norm": 1.671875,
|
5677 |
+
"learning_rate": 2.6275240980137272e-06,
|
5678 |
+
"loss": 1.704,
|
5679 |
+
"step": 810
|
5680 |
+
},
|
5681 |
+
{
|
5682 |
+
"epoch": 1.552896122546673,
|
5683 |
+
"grad_norm": 1.7109375,
|
5684 |
+
"learning_rate": 2.6061431045802286e-06,
|
5685 |
+
"loss": 1.7235,
|
5686 |
+
"step": 811
|
5687 |
+
},
|
5688 |
+
{
|
5689 |
+
"epoch": 1.5548109143130686,
|
5690 |
+
"grad_norm": 1.6875,
|
5691 |
+
"learning_rate": 2.5848364170150307e-06,
|
5692 |
+
"loss": 1.6652,
|
5693 |
+
"step": 812
|
5694 |
+
},
|
5695 |
+
{
|
5696 |
+
"epoch": 1.556725706079464,
|
5697 |
+
"grad_norm": 1.703125,
|
5698 |
+
"learning_rate": 2.563604249443438e-06,
|
5699 |
+
"loss": 1.6524,
|
5700 |
+
"step": 813
|
5701 |
+
},
|
5702 |
+
{
|
5703 |
+
"epoch": 1.5586404978458592,
|
5704 |
+
"grad_norm": 1.71875,
|
5705 |
+
"learning_rate": 2.542446815241867e-06,
|
5706 |
+
"loss": 1.697,
|
5707 |
+
"step": 814
|
5708 |
+
},
|
5709 |
+
{
|
5710 |
+
"epoch": 1.5605552896122545,
|
5711 |
+
"grad_norm": 1.640625,
|
5712 |
+
"learning_rate": 2.521364327035678e-06,
|
5713 |
+
"loss": 1.6973,
|
5714 |
+
"step": 815
|
5715 |
+
},
|
5716 |
+
{
|
5717 |
+
"epoch": 1.56247008137865,
|
5718 |
+
"grad_norm": 1.6953125,
|
5719 |
+
"learning_rate": 2.5003569966970574e-06,
|
5720 |
+
"loss": 1.7513,
|
5721 |
+
"step": 816
|
5722 |
+
},
|
5723 |
+
{
|
5724 |
+
"epoch": 1.5643848731450456,
|
5725 |
+
"grad_norm": 1.6796875,
|
5726 |
+
"learning_rate": 2.4794250353428707e-06,
|
5727 |
+
"loss": 1.6145,
|
5728 |
+
"step": 817
|
5729 |
+
},
|
5730 |
+
{
|
5731 |
+
"epoch": 1.566299664911441,
|
5732 |
+
"grad_norm": 1.7890625,
|
5733 |
+
"learning_rate": 2.458568653332557e-06,
|
5734 |
+
"loss": 1.7153,
|
5735 |
+
"step": 818
|
5736 |
+
},
|
5737 |
+
{
|
5738 |
+
"epoch": 1.5682144566778362,
|
5739 |
+
"grad_norm": 1.6953125,
|
5740 |
+
"learning_rate": 2.437788060266002e-06,
|
5741 |
+
"loss": 1.6331,
|
5742 |
+
"step": 819
|
5743 |
+
},
|
5744 |
+
{
|
5745 |
+
"epoch": 1.5701292484442317,
|
5746 |
+
"grad_norm": 1.671875,
|
5747 |
+
"learning_rate": 2.4170834649814366e-06,
|
5748 |
+
"loss": 1.6747,
|
5749 |
+
"step": 820
|
5750 |
+
},
|
5751 |
+
{
|
5752 |
+
"epoch": 1.572044040210627,
|
5753 |
+
"grad_norm": 1.671875,
|
5754 |
+
"learning_rate": 2.3964550755533468e-06,
|
5755 |
+
"loss": 1.6055,
|
5756 |
+
"step": 821
|
5757 |
+
},
|
5758 |
+
{
|
5759 |
+
"epoch": 1.5739588319770226,
|
5760 |
+
"grad_norm": 1.6796875,
|
5761 |
+
"learning_rate": 2.375903099290362e-06,
|
5762 |
+
"loss": 1.6992,
|
5763 |
+
"step": 822
|
5764 |
+
},
|
5765 |
+
{
|
5766 |
+
"epoch": 1.5758736237434179,
|
5767 |
+
"grad_norm": 1.65625,
|
5768 |
+
"learning_rate": 2.355427742733197e-06,
|
5769 |
+
"loss": 1.6433,
|
5770 |
+
"step": 823
|
5771 |
+
},
|
5772 |
+
{
|
5773 |
+
"epoch": 1.5777884155098132,
|
5774 |
+
"grad_norm": 1.6875,
|
5775 |
+
"learning_rate": 2.335029211652552e-06,
|
5776 |
+
"loss": 1.7133,
|
5777 |
+
"step": 824
|
5778 |
+
},
|
5779 |
+
{
|
5780 |
+
"epoch": 1.5797032072762087,
|
5781 |
+
"grad_norm": 1.6875,
|
5782 |
+
"learning_rate": 2.314707711047063e-06,
|
5783 |
+
"loss": 1.7327,
|
5784 |
+
"step": 825
|
5785 |
+
},
|
5786 |
+
{
|
5787 |
+
"epoch": 1.5816179990426043,
|
5788 |
+
"grad_norm": 1.6796875,
|
5789 |
+
"learning_rate": 2.294463445141233e-06,
|
5790 |
+
"loss": 1.7631,
|
5791 |
+
"step": 826
|
5792 |
+
},
|
5793 |
+
{
|
5794 |
+
"epoch": 1.5835327908089996,
|
5795 |
+
"grad_norm": 1.6953125,
|
5796 |
+
"learning_rate": 2.2742966173833835e-06,
|
5797 |
+
"loss": 1.7577,
|
5798 |
+
"step": 827
|
5799 |
+
},
|
5800 |
+
{
|
5801 |
+
"epoch": 1.5854475825753949,
|
5802 |
+
"grad_norm": 1.6875,
|
5803 |
+
"learning_rate": 2.254207430443599e-06,
|
5804 |
+
"loss": 1.7896,
|
5805 |
+
"step": 828
|
5806 |
+
},
|
5807 |
+
{
|
5808 |
+
"epoch": 1.5873623743417902,
|
5809 |
+
"grad_norm": 1.7109375,
|
5810 |
+
"learning_rate": 2.2341960862117118e-06,
|
5811 |
+
"loss": 1.6763,
|
5812 |
+
"step": 829
|
5813 |
+
},
|
5814 |
+
{
|
5815 |
+
"epoch": 1.5892771661081857,
|
5816 |
+
"grad_norm": 1.640625,
|
5817 |
+
"learning_rate": 2.214262785795248e-06,
|
5818 |
+
"loss": 1.6878,
|
5819 |
+
"step": 830
|
5820 |
+
},
|
5821 |
+
{
|
5822 |
+
"epoch": 1.5911919578745812,
|
5823 |
+
"grad_norm": 1.640625,
|
5824 |
+
"learning_rate": 2.1944077295174284e-06,
|
5825 |
+
"loss": 1.6532,
|
5826 |
+
"step": 831
|
5827 |
+
},
|
5828 |
+
{
|
5829 |
+
"epoch": 1.5931067496409765,
|
5830 |
+
"grad_norm": 1.765625,
|
5831 |
+
"learning_rate": 2.174631116915137e-06,
|
5832 |
+
"loss": 1.7702,
|
5833 |
+
"step": 832
|
5834 |
+
},
|
5835 |
+
{
|
5836 |
+
"epoch": 1.5950215414073718,
|
5837 |
+
"grad_norm": 1.6796875,
|
5838 |
+
"learning_rate": 2.1549331467369327e-06,
|
5839 |
+
"loss": 1.7115,
|
5840 |
+
"step": 833
|
5841 |
+
},
|
5842 |
+
{
|
5843 |
+
"epoch": 1.5969363331737674,
|
5844 |
+
"grad_norm": 1.6484375,
|
5845 |
+
"learning_rate": 2.1353140169410347e-06,
|
5846 |
+
"loss": 1.6486,
|
5847 |
+
"step": 834
|
5848 |
+
},
|
5849 |
+
{
|
5850 |
+
"epoch": 1.598851124940163,
|
5851 |
+
"grad_norm": 1.6484375,
|
5852 |
+
"learning_rate": 2.1157739246933507e-06,
|
5853 |
+
"loss": 1.7097,
|
5854 |
+
"step": 835
|
5855 |
+
},
|
5856 |
+
{
|
5857 |
+
"epoch": 1.6007659167065582,
|
5858 |
+
"grad_norm": 1.6875,
|
5859 |
+
"learning_rate": 2.0963130663654785e-06,
|
5860 |
+
"loss": 1.7174,
|
5861 |
+
"step": 836
|
5862 |
+
},
|
5863 |
+
{
|
5864 |
+
"epoch": 1.6026807084729535,
|
5865 |
+
"grad_norm": 1.671875,
|
5866 |
+
"learning_rate": 2.0769316375327497e-06,
|
5867 |
+
"loss": 1.6954,
|
5868 |
+
"step": 837
|
5869 |
+
},
|
5870 |
+
{
|
5871 |
+
"epoch": 1.6045955002393488,
|
5872 |
+
"grad_norm": 1.671875,
|
5873 |
+
"learning_rate": 2.0576298329722445e-06,
|
5874 |
+
"loss": 1.6773,
|
5875 |
+
"step": 838
|
5876 |
+
},
|
5877 |
+
{
|
5878 |
+
"epoch": 1.6065102920057444,
|
5879 |
+
"grad_norm": 1.65625,
|
5880 |
+
"learning_rate": 2.038407846660855e-06,
|
5881 |
+
"loss": 1.6202,
|
5882 |
+
"step": 839
|
5883 |
+
},
|
5884 |
+
{
|
5885 |
+
"epoch": 1.6084250837721399,
|
5886 |
+
"grad_norm": 1.71875,
|
5887 |
+
"learning_rate": 2.019265871773316e-06,
|
5888 |
+
"loss": 1.7177,
|
5889 |
+
"step": 840
|
5890 |
+
},
|
5891 |
+
{
|
5892 |
+
"epoch": 1.6103398755385352,
|
5893 |
+
"grad_norm": 1.6875,
|
5894 |
+
"learning_rate": 2.0002041006802843e-06,
|
5895 |
+
"loss": 1.7824,
|
5896 |
+
"step": 841
|
5897 |
+
},
|
5898 |
+
{
|
5899 |
+
"epoch": 1.6122546673049305,
|
5900 |
+
"grad_norm": 1.6953125,
|
5901 |
+
"learning_rate": 1.981222724946383e-06,
|
5902 |
+
"loss": 1.7061,
|
5903 |
+
"step": 842
|
5904 |
+
},
|
5905 |
+
{
|
5906 |
+
"epoch": 1.614169459071326,
|
5907 |
+
"grad_norm": 1.6875,
|
5908 |
+
"learning_rate": 1.9623219353283005e-06,
|
5909 |
+
"loss": 1.7551,
|
5910 |
+
"step": 843
|
5911 |
+
},
|
5912 |
+
{
|
5913 |
+
"epoch": 1.6160842508377216,
|
5914 |
+
"grad_norm": 1.6953125,
|
5915 |
+
"learning_rate": 1.943501921772848e-06,
|
5916 |
+
"loss": 1.7165,
|
5917 |
+
"step": 844
|
5918 |
+
},
|
5919 |
+
{
|
5920 |
+
"epoch": 1.6179990426041169,
|
5921 |
+
"grad_norm": 1.6796875,
|
5922 |
+
"learning_rate": 1.9247628734150725e-06,
|
5923 |
+
"loss": 1.6687,
|
5924 |
+
"step": 845
|
5925 |
+
},
|
5926 |
+
{
|
5927 |
+
"epoch": 1.6199138343705122,
|
5928 |
+
"grad_norm": 1.6640625,
|
5929 |
+
"learning_rate": 1.9061049785763419e-06,
|
5930 |
+
"loss": 1.6888,
|
5931 |
+
"step": 846
|
5932 |
+
},
|
5933 |
+
{
|
5934 |
+
"epoch": 1.6218286261369075,
|
5935 |
+
"grad_norm": 1.6484375,
|
5936 |
+
"learning_rate": 1.8875284247624625e-06,
|
5937 |
+
"loss": 1.6674,
|
5938 |
+
"step": 847
|
5939 |
+
},
|
5940 |
+
{
|
5941 |
+
"epoch": 1.623743417903303,
|
5942 |
+
"grad_norm": 1.734375,
|
5943 |
+
"learning_rate": 1.8690333986617827e-06,
|
5944 |
+
"loss": 1.7384,
|
5945 |
+
"step": 848
|
5946 |
+
},
|
5947 |
+
{
|
5948 |
+
"epoch": 1.6256582096696985,
|
5949 |
+
"grad_norm": 1.65625,
|
5950 |
+
"learning_rate": 1.8506200861433287e-06,
|
5951 |
+
"loss": 1.6367,
|
5952 |
+
"step": 849
|
5953 |
+
},
|
5954 |
+
{
|
5955 |
+
"epoch": 1.6275730014360938,
|
5956 |
+
"grad_norm": 1.6640625,
|
5957 |
+
"learning_rate": 1.832288672254936e-06,
|
5958 |
+
"loss": 1.6592,
|
5959 |
+
"step": 850
|
5960 |
+
},
|
5961 |
+
{
|
5962 |
+
"epoch": 1.6294877932024892,
|
5963 |
+
"grad_norm": 1.6953125,
|
5964 |
+
"learning_rate": 1.8140393412213719e-06,
|
5965 |
+
"loss": 1.7263,
|
5966 |
+
"step": 851
|
5967 |
+
},
|
5968 |
+
{
|
5969 |
+
"epoch": 1.6314025849688847,
|
5970 |
+
"grad_norm": 1.671875,
|
5971 |
+
"learning_rate": 1.7958722764425119e-06,
|
5972 |
+
"loss": 1.6543,
|
5973 |
+
"step": 852
|
5974 |
+
},
|
5975 |
+
{
|
5976 |
+
"epoch": 1.63331737673528,
|
5977 |
+
"grad_norm": 1.671875,
|
5978 |
+
"learning_rate": 1.7777876604914712e-06,
|
5979 |
+
"loss": 1.7082,
|
5980 |
+
"step": 853
|
5981 |
+
},
|
5982 |
+
{
|
5983 |
+
"epoch": 1.6352321685016755,
|
5984 |
+
"grad_norm": 1.6953125,
|
5985 |
+
"learning_rate": 1.7597856751127919e-06,
|
5986 |
+
"loss": 1.7153,
|
5987 |
+
"step": 854
|
5988 |
+
},
|
5989 |
+
{
|
5990 |
+
"epoch": 1.6371469602680708,
|
5991 |
+
"grad_norm": 1.6796875,
|
5992 |
+
"learning_rate": 1.7418665012205927e-06,
|
5993 |
+
"loss": 1.65,
|
5994 |
+
"step": 855
|
5995 |
+
},
|
5996 |
+
{
|
5997 |
+
"epoch": 1.6390617520344661,
|
5998 |
+
"grad_norm": 1.6875,
|
5999 |
+
"learning_rate": 1.7240303188967767e-06,
|
6000 |
+
"loss": 1.6985,
|
6001 |
+
"step": 856
|
6002 |
+
},
|
6003 |
+
{
|
6004 |
+
"epoch": 1.6409765438008617,
|
6005 |
+
"grad_norm": 1.703125,
|
6006 |
+
"learning_rate": 1.7062773073891958e-06,
|
6007 |
+
"loss": 1.6766,
|
6008 |
+
"step": 857
|
6009 |
+
},
|
6010 |
+
{
|
6011 |
+
"epoch": 1.6428913355672572,
|
6012 |
+
"grad_norm": 1.6875,
|
6013 |
+
"learning_rate": 1.6886076451098766e-06,
|
6014 |
+
"loss": 1.6786,
|
6015 |
+
"step": 858
|
6016 |
+
},
|
6017 |
+
{
|
6018 |
+
"epoch": 1.6448061273336525,
|
6019 |
+
"grad_norm": 1.6953125,
|
6020 |
+
"learning_rate": 1.6710215096331971e-06,
|
6021 |
+
"loss": 1.7329,
|
6022 |
+
"step": 859
|
6023 |
+
},
|
6024 |
+
{
|
6025 |
+
"epoch": 1.6467209191000478,
|
6026 |
+
"grad_norm": 1.6953125,
|
6027 |
+
"learning_rate": 1.6535190776941323e-06,
|
6028 |
+
"loss": 1.7428,
|
6029 |
+
"step": 860
|
6030 |
+
},
|
6031 |
+
{
|
6032 |
+
"epoch": 1.6486357108664431,
|
6033 |
+
"grad_norm": 1.671875,
|
6034 |
+
"learning_rate": 1.6361005251864525e-06,
|
6035 |
+
"loss": 1.6936,
|
6036 |
+
"step": 861
|
6037 |
+
},
|
6038 |
+
{
|
6039 |
+
"epoch": 1.6505505026328386,
|
6040 |
+
"grad_norm": 1.6875,
|
6041 |
+
"learning_rate": 1.6187660271609773e-06,
|
6042 |
+
"loss": 1.7386,
|
6043 |
+
"step": 862
|
6044 |
+
},
|
6045 |
+
{
|
6046 |
+
"epoch": 1.6524652943992342,
|
6047 |
+
"grad_norm": 1.7734375,
|
6048 |
+
"learning_rate": 1.6015157578237939e-06,
|
6049 |
+
"loss": 1.7213,
|
6050 |
+
"step": 863
|
6051 |
+
},
|
6052 |
+
{
|
6053 |
+
"epoch": 1.6543800861656295,
|
6054 |
+
"grad_norm": 1.75,
|
6055 |
+
"learning_rate": 1.584349890534531e-06,
|
6056 |
+
"loss": 1.6877,
|
6057 |
+
"step": 864
|
6058 |
+
},
|
6059 |
+
{
|
6060 |
+
"epoch": 1.6562948779320248,
|
6061 |
+
"grad_norm": 1.6953125,
|
6062 |
+
"learning_rate": 1.5672685978045931e-06,
|
6063 |
+
"loss": 1.7153,
|
6064 |
+
"step": 865
|
6065 |
+
},
|
6066 |
+
{
|
6067 |
+
"epoch": 1.6582096696984203,
|
6068 |
+
"grad_norm": 1.734375,
|
6069 |
+
"learning_rate": 1.5502720512954472e-06,
|
6070 |
+
"loss": 1.7155,
|
6071 |
+
"step": 866
|
6072 |
+
},
|
6073 |
+
{
|
6074 |
+
"epoch": 1.6601244614648158,
|
6075 |
+
"grad_norm": 1.671875,
|
6076 |
+
"learning_rate": 1.5333604218168785e-06,
|
6077 |
+
"loss": 1.7235,
|
6078 |
+
"step": 867
|
6079 |
+
},
|
6080 |
+
{
|
6081 |
+
"epoch": 1.6620392532312112,
|
6082 |
+
"grad_norm": 1.6484375,
|
6083 |
+
"learning_rate": 1.5165338793252937e-06,
|
6084 |
+
"loss": 1.6423,
|
6085 |
+
"step": 868
|
6086 |
+
},
|
6087 |
+
{
|
6088 |
+
"epoch": 1.6639540449976065,
|
6089 |
+
"grad_norm": 1.765625,
|
6090 |
+
"learning_rate": 1.4997925929219937e-06,
|
6091 |
+
"loss": 1.7088,
|
6092 |
+
"step": 869
|
6093 |
+
},
|
6094 |
+
{
|
6095 |
+
"epoch": 1.6658688367640018,
|
6096 |
+
"grad_norm": 1.65625,
|
6097 |
+
"learning_rate": 1.483136730851492e-06,
|
6098 |
+
"loss": 1.6486,
|
6099 |
+
"step": 870
|
6100 |
+
},
|
6101 |
+
{
|
6102 |
+
"epoch": 1.6677836285303973,
|
6103 |
+
"grad_norm": 1.6796875,
|
6104 |
+
"learning_rate": 1.4665664604998053e-06,
|
6105 |
+
"loss": 1.6938,
|
6106 |
+
"step": 871
|
6107 |
+
},
|
6108 |
+
{
|
6109 |
+
"epoch": 1.6696984202967928,
|
6110 |
+
"grad_norm": 1.671875,
|
6111 |
+
"learning_rate": 1.4500819483927898e-06,
|
6112 |
+
"loss": 1.6819,
|
6113 |
+
"step": 872
|
6114 |
+
},
|
6115 |
+
{
|
6116 |
+
"epoch": 1.6716132120631881,
|
6117 |
+
"grad_norm": 1.6953125,
|
6118 |
+
"learning_rate": 1.4336833601944577e-06,
|
6119 |
+
"loss": 1.6385,
|
6120 |
+
"step": 873
|
6121 |
+
},
|
6122 |
+
{
|
6123 |
+
"epoch": 1.6735280038295834,
|
6124 |
+
"grad_norm": 1.6875,
|
6125 |
+
"learning_rate": 1.4173708607053071e-06,
|
6126 |
+
"loss": 1.6798,
|
6127 |
+
"step": 874
|
6128 |
+
},
|
6129 |
+
{
|
6130 |
+
"epoch": 1.675442795595979,
|
6131 |
+
"grad_norm": 1.6796875,
|
6132 |
+
"learning_rate": 1.4011446138606822e-06,
|
6133 |
+
"loss": 1.7299,
|
6134 |
+
"step": 875
|
6135 |
+
},
|
6136 |
+
{
|
6137 |
+
"epoch": 1.6773575873623745,
|
6138 |
+
"grad_norm": 1.71875,
|
6139 |
+
"learning_rate": 1.3850047827291057e-06,
|
6140 |
+
"loss": 1.715,
|
6141 |
+
"step": 876
|
6142 |
+
},
|
6143 |
+
{
|
6144 |
+
"epoch": 1.6792723791287698,
|
6145 |
+
"grad_norm": 1.6953125,
|
6146 |
+
"learning_rate": 1.3689515295106626e-06,
|
6147 |
+
"loss": 1.6405,
|
6148 |
+
"step": 877
|
6149 |
+
},
|
6150 |
+
{
|
6151 |
+
"epoch": 1.6811871708951651,
|
6152 |
+
"grad_norm": 1.65625,
|
6153 |
+
"learning_rate": 1.352985015535344e-06,
|
6154 |
+
"loss": 1.7398,
|
6155 |
+
"step": 878
|
6156 |
+
},
|
6157 |
+
{
|
6158 |
+
"epoch": 1.6831019626615604,
|
6159 |
+
"grad_norm": 1.671875,
|
6160 |
+
"learning_rate": 1.3371054012614527e-06,
|
6161 |
+
"loss": 1.6731,
|
6162 |
+
"step": 879
|
6163 |
+
},
|
6164 |
+
{
|
6165 |
+
"epoch": 1.685016754427956,
|
6166 |
+
"grad_norm": 1.6953125,
|
6167 |
+
"learning_rate": 1.3213128462739656e-06,
|
6168 |
+
"loss": 1.6651,
|
6169 |
+
"step": 880
|
6170 |
+
},
|
6171 |
+
{
|
6172 |
+
"epoch": 1.6869315461943515,
|
6173 |
+
"grad_norm": 1.6640625,
|
6174 |
+
"learning_rate": 1.3056075092829546e-06,
|
6175 |
+
"loss": 1.6424,
|
6176 |
+
"step": 881
|
6177 |
+
},
|
6178 |
+
{
|
6179 |
+
"epoch": 1.6888463379607468,
|
6180 |
+
"grad_norm": 1.6796875,
|
6181 |
+
"learning_rate": 1.2899895481219672e-06,
|
6182 |
+
"loss": 1.6476,
|
6183 |
+
"step": 882
|
6184 |
+
},
|
6185 |
+
{
|
6186 |
+
"epoch": 1.690761129727142,
|
6187 |
+
"grad_norm": 1.7109375,
|
6188 |
+
"learning_rate": 1.2744591197464618e-06,
|
6189 |
+
"loss": 1.7422,
|
6190 |
+
"step": 883
|
6191 |
+
},
|
6192 |
+
{
|
6193 |
+
"epoch": 1.6926759214935376,
|
6194 |
+
"grad_norm": 1.6640625,
|
6195 |
+
"learning_rate": 1.2590163802322108e-06,
|
6196 |
+
"loss": 1.6761,
|
6197 |
+
"step": 884
|
6198 |
+
},
|
6199 |
+
{
|
6200 |
+
"epoch": 1.694590713259933,
|
6201 |
+
"grad_norm": 1.6953125,
|
6202 |
+
"learning_rate": 1.2436614847737526e-06,
|
6203 |
+
"loss": 1.7296,
|
6204 |
+
"step": 885
|
6205 |
+
},
|
6206 |
+
{
|
6207 |
+
"epoch": 1.6965055050263285,
|
6208 |
+
"grad_norm": 1.6640625,
|
6209 |
+
"learning_rate": 1.2283945876828107e-06,
|
6210 |
+
"loss": 1.671,
|
6211 |
+
"step": 886
|
6212 |
+
},
|
6213 |
+
{
|
6214 |
+
"epoch": 1.6984202967927238,
|
6215 |
+
"grad_norm": 1.6875,
|
6216 |
+
"learning_rate": 1.2132158423867645e-06,
|
6217 |
+
"loss": 1.7288,
|
6218 |
+
"step": 887
|
6219 |
+
},
|
6220 |
+
{
|
6221 |
+
"epoch": 1.700335088559119,
|
6222 |
+
"grad_norm": 1.734375,
|
6223 |
+
"learning_rate": 1.198125401427085e-06,
|
6224 |
+
"loss": 1.7769,
|
6225 |
+
"step": 888
|
6226 |
+
},
|
6227 |
+
{
|
6228 |
+
"epoch": 1.7022498803255146,
|
6229 |
+
"grad_norm": 1.6953125,
|
6230 |
+
"learning_rate": 1.1831234164578242e-06,
|
6231 |
+
"loss": 1.7237,
|
6232 |
+
"step": 889
|
6233 |
+
},
|
6234 |
+
{
|
6235 |
+
"epoch": 1.7041646720919101,
|
6236 |
+
"grad_norm": 1.7421875,
|
6237 |
+
"learning_rate": 1.1682100382440686e-06,
|
6238 |
+
"loss": 1.7282,
|
6239 |
+
"step": 890
|
6240 |
+
},
|
6241 |
+
{
|
6242 |
+
"epoch": 1.7060794638583054,
|
6243 |
+
"grad_norm": 1.7578125,
|
6244 |
+
"learning_rate": 1.1533854166604486e-06,
|
6245 |
+
"loss": 1.7385,
|
6246 |
+
"step": 891
|
6247 |
+
},
|
6248 |
+
{
|
6249 |
+
"epoch": 1.7079942556247008,
|
6250 |
+
"grad_norm": 1.6875,
|
6251 |
+
"learning_rate": 1.1386497006896058e-06,
|
6252 |
+
"loss": 1.6813,
|
6253 |
+
"step": 892
|
6254 |
+
},
|
6255 |
+
{
|
6256 |
+
"epoch": 1.709909047391096,
|
6257 |
+
"grad_norm": 1.6953125,
|
6258 |
+
"learning_rate": 1.1240030384207202e-06,
|
6259 |
+
"loss": 1.7269,
|
6260 |
+
"step": 893
|
6261 |
+
},
|
6262 |
+
{
|
6263 |
+
"epoch": 1.7118238391574916,
|
6264 |
+
"grad_norm": 1.6796875,
|
6265 |
+
"learning_rate": 1.1094455770480017e-06,
|
6266 |
+
"loss": 1.6812,
|
6267 |
+
"step": 894
|
6268 |
+
},
|
6269 |
+
{
|
6270 |
+
"epoch": 1.7137386309238871,
|
6271 |
+
"grad_norm": 1.671875,
|
6272 |
+
"learning_rate": 1.0949774628692278e-06,
|
6273 |
+
"loss": 1.6795,
|
6274 |
+
"step": 895
|
6275 |
+
},
|
6276 |
+
{
|
6277 |
+
"epoch": 1.7156534226902824,
|
6278 |
+
"grad_norm": 1.6484375,
|
6279 |
+
"learning_rate": 1.0805988412842638e-06,
|
6280 |
+
"loss": 1.7112,
|
6281 |
+
"step": 896
|
6282 |
+
},
|
6283 |
+
{
|
6284 |
+
"epoch": 1.7175682144566777,
|
6285 |
+
"grad_norm": 1.6796875,
|
6286 |
+
"learning_rate": 1.0663098567935981e-06,
|
6287 |
+
"loss": 1.7174,
|
6288 |
+
"step": 897
|
6289 |
+
},
|
6290 |
+
{
|
6291 |
+
"epoch": 1.7194830062230733,
|
6292 |
+
"grad_norm": 1.6796875,
|
6293 |
+
"learning_rate": 1.0521106529969016e-06,
|
6294 |
+
"loss": 1.6405,
|
6295 |
+
"step": 898
|
6296 |
+
},
|
6297 |
+
{
|
6298 |
+
"epoch": 1.7213977979894688,
|
6299 |
+
"grad_norm": 1.6953125,
|
6300 |
+
"learning_rate": 1.0380013725915783e-06,
|
6301 |
+
"loss": 1.7008,
|
6302 |
+
"step": 899
|
6303 |
+
},
|
6304 |
+
{
|
6305 |
+
"epoch": 1.723312589755864,
|
6306 |
+
"grad_norm": 1.78125,
|
6307 |
+
"learning_rate": 1.0239821573713228e-06,
|
6308 |
+
"loss": 1.6936,
|
6309 |
+
"step": 900
|
6310 |
+
},
|
6311 |
+
{
|
6312 |
+
"epoch": 1.7252273815222594,
|
6313 |
+
"grad_norm": 1.75,
|
6314 |
+
"learning_rate": 1.0100531482247155e-06,
|
6315 |
+
"loss": 1.7914,
|
6316 |
+
"step": 901
|
6317 |
+
},
|
6318 |
+
{
|
6319 |
+
"epoch": 1.7271421732886547,
|
6320 |
+
"grad_norm": 1.640625,
|
6321 |
+
"learning_rate": 9.962144851337863e-07,
|
6322 |
+
"loss": 1.654,
|
6323 |
+
"step": 902
|
6324 |
+
},
|
6325 |
+
{
|
6326 |
+
"epoch": 1.7290569650550502,
|
6327 |
+
"grad_norm": 1.7578125,
|
6328 |
+
"learning_rate": 9.824663071726204e-07,
|
6329 |
+
"loss": 1.7272,
|
6330 |
+
"step": 903
|
6331 |
+
},
|
6332 |
+
{
|
6333 |
+
"epoch": 1.7309717568214458,
|
6334 |
+
"grad_norm": 1.671875,
|
6335 |
+
"learning_rate": 9.688087525059575e-07,
|
6336 |
+
"loss": 1.7288,
|
6337 |
+
"step": 904
|
6338 |
+
},
|
6339 |
+
{
|
6340 |
+
"epoch": 1.732886548587841,
|
6341 |
+
"grad_norm": 1.6484375,
|
6342 |
+
"learning_rate": 9.55241958387796e-07,
|
6343 |
+
"loss": 1.6932,
|
6344 |
+
"step": 905
|
6345 |
+
},
|
6346 |
+
{
|
6347 |
+
"epoch": 1.7348013403542364,
|
+        "grad_norm": 1.6640625,
+        "learning_rate": 9.417660611600299e-07,
+        "loss": 1.6952,
+        "step": 906
+      },
+    { "epoch": 1.736716132120632, "grad_norm": 1.6796875, "learning_rate": 9.283811962510603e-07, "loss": 1.7722, "step": 907 },
+    { "epoch": 1.7386309238870274, "grad_norm": 1.6953125, "learning_rate": 9.150874981744507e-07, "loss": 1.6527, "step": 908 },
+    { "epoch": 1.7405457156534228, "grad_norm": 1.6875, "learning_rate": 9.018851005275586e-07, "loss": 1.7071, "step": 909 },
+    { "epoch": 1.742460507419818, "grad_norm": 1.7109375, "learning_rate": 8.887741359902113e-07, "loss": 1.7559, "step": 910 },
+    { "epoch": 1.7443752991862134, "grad_norm": 1.703125, "learning_rate": 8.757547363233543e-07, "loss": 1.6998, "step": 911 },
+    { "epoch": 1.746290090952609, "grad_norm": 1.65625, "learning_rate": 8.628270323677424e-07, "loss": 1.593, "step": 912 },
+    { "epoch": 1.7482048827190044, "grad_norm": 1.765625, "learning_rate": 8.499911540426131e-07, "loss": 1.7532, "step": 913 },
+    { "epoch": 1.7501196744853997, "grad_norm": 1.6796875, "learning_rate": 8.372472303443924e-07, "loss": 1.696, "step": 914 },
+    { "epoch": 1.752034466251795, "grad_norm": 1.703125, "learning_rate": 8.245953893453829e-07, "loss": 1.7278, "step": 915 },
+    { "epoch": 1.7539492580181906, "grad_norm": 1.6953125, "learning_rate": 8.120357581924931e-07, "loss": 1.7215, "step": 916 },
+    { "epoch": 1.7558640497845859, "grad_norm": 1.6640625, "learning_rate": 7.99568463105953e-07, "loss": 1.6702, "step": 917 },
+    { "epoch": 1.7577788415509814, "grad_norm": 1.65625, "learning_rate": 7.87193629378038e-07, "loss": 1.6721, "step": 918 },
+    { "epoch": 1.7596936333173767, "grad_norm": 1.6796875, "learning_rate": 7.749113813718234e-07, "loss": 1.7008, "step": 919 },
+    { "epoch": 1.761608425083772, "grad_norm": 1.6328125, "learning_rate": 7.627218425199278e-07, "loss": 1.6697, "step": 920 },
+    { "epoch": 1.7635232168501676, "grad_norm": 1.734375, "learning_rate": 7.506251353232663e-07, "loss": 1.7305, "step": 921 },
+    { "epoch": 1.765438008616563, "grad_norm": 1.75, "learning_rate": 7.386213813498344e-07, "loss": 1.7425, "step": 922 },
+    { "epoch": 1.7673528003829584, "grad_norm": 1.7421875, "learning_rate": 7.267107012334707e-07, "loss": 1.7188, "step": 923 },
+    { "epoch": 1.7692675921493537, "grad_norm": 1.7109375, "learning_rate": 7.148932146726572e-07, "loss": 1.668, "step": 924 },
+    { "epoch": 1.771182383915749, "grad_norm": 1.6953125, "learning_rate": 7.031690404293046e-07, "loss": 1.6772, "step": 925 },
+    { "epoch": 1.7730971756821445, "grad_norm": 1.6953125, "learning_rate": 6.915382963275741e-07, "loss": 1.704, "step": 926 },
+    { "epoch": 1.77501196744854, "grad_norm": 1.65625, "learning_rate": 6.800010992526729e-07, "loss": 1.6764, "step": 927 },
+    { "epoch": 1.7769267592149354, "grad_norm": 1.6796875, "learning_rate": 6.685575651497022e-07, "loss": 1.7017, "step": 928 },
+    { "epoch": 1.7788415509813307, "grad_norm": 1.671875, "learning_rate": 6.572078090224721e-07, "loss": 1.7396, "step": 929 },
+    { "epoch": 1.7807563427477262, "grad_norm": 1.671875, "learning_rate": 6.459519449323592e-07, "loss": 1.7057, "step": 930 },
+    { "epoch": 1.7826711345141217, "grad_norm": 1.703125, "learning_rate": 6.347900859971534e-07, "loss": 1.6723, "step": 931 },
+    { "epoch": 1.784585926280517, "grad_norm": 1.7109375, "learning_rate": 6.237223443899221e-07, "loss": 1.7167, "step": 932 },
+    { "epoch": 1.7865007180469124, "grad_norm": 1.7734375, "learning_rate": 6.127488313378894e-07, "loss": 1.7802, "step": 933 },
+    { "epoch": 1.7884155098133077, "grad_norm": 1.6953125, "learning_rate": 6.018696571213045e-07, "loss": 1.6742, "step": 934 },
+    { "epoch": 1.7903303015797032, "grad_norm": 1.6875, "learning_rate": 5.910849310723499e-07, "loss": 1.7288, "step": 935 },
+    { "epoch": 1.7922450933460987, "grad_norm": 1.65625, "learning_rate": 5.803947615740291e-07, "loss": 1.6607, "step": 936 },
+    { "epoch": 1.794159885112494, "grad_norm": 1.6796875, "learning_rate": 5.697992560590882e-07, "loss": 1.7431, "step": 937 },
+    { "epoch": 1.7960746768788893, "grad_norm": 1.6796875, "learning_rate": 5.59298521008923e-07, "loss": 1.6916, "step": 938 },
+    { "epoch": 1.7979894686452849, "grad_norm": 1.6875, "learning_rate": 5.488926619525248e-07, "loss": 1.6678, "step": 939 },
+    { "epoch": 1.7999042604116804, "grad_norm": 1.703125, "learning_rate": 5.385817834654095e-07, "loss": 1.673, "step": 940 },
+    { "epoch": 1.8018190521780757, "grad_norm": 1.6796875, "learning_rate": 5.283659891685656e-07, "loss": 1.7494, "step": 941 },
+    { "epoch": 1.803733843944471, "grad_norm": 1.671875, "learning_rate": 5.18245381727418e-07, "loss": 1.667, "step": 942 },
+    { "epoch": 1.8056486357108663, "grad_norm": 1.6796875, "learning_rate": 5.082200628507994e-07, "loss": 1.7009, "step": 943 },
+    { "epoch": 1.8075634274772618, "grad_norm": 1.6640625, "learning_rate": 4.982901332899159e-07, "loss": 1.681, "step": 944 },
+    { "epoch": 1.8094782192436574, "grad_norm": 1.671875, "learning_rate": 4.884556928373462e-07, "loss": 1.6666, "step": 945 },
+    { "epoch": 1.8113930110100527, "grad_norm": 1.6953125, "learning_rate": 4.787168403260323e-07, "loss": 1.6944, "step": 946 },
+    { "epoch": 1.813307802776448, "grad_norm": 1.6953125, "learning_rate": 4.690736736282908e-07, "loss": 1.6373, "step": 947 },
+    { "epoch": 1.8152225945428435, "grad_norm": 1.6484375, "learning_rate": 4.595262896548236e-07, "loss": 1.709, "step": 948 },
+    { "epoch": 1.8171373863092388, "grad_norm": 1.6875, "learning_rate": 4.500747843537523e-07, "loss": 1.7181, "step": 949 },
+    { "epoch": 1.8190521780756344, "grad_norm": 1.71875, "learning_rate": 4.407192527096404e-07, "loss": 1.6296, "step": 950 },
+    { "epoch": 1.8209669698420297, "grad_norm": 1.7265625, "learning_rate": 4.3145978874255757e-07, "loss": 1.6935, "step": 951 },
+    { "epoch": 1.822881761608425, "grad_norm": 1.71875, "learning_rate": 4.222964855071154e-07, "loss": 1.6726, "step": 952 },
+    { "epoch": 1.8247965533748205, "grad_norm": 1.671875, "learning_rate": 4.1322943509154887e-07, "loss": 1.6841, "step": 953 },
+    { "epoch": 1.826711345141216, "grad_norm": 1.703125, "learning_rate": 4.042587286167754e-07, "loss": 1.6808, "step": 954 },
+    { "epoch": 1.8286261369076113, "grad_norm": 1.7265625, "learning_rate": 3.953844562354936e-07, "loss": 1.7918, "step": 955 },
+    { "epoch": 1.8305409286740066, "grad_norm": 1.71875, "learning_rate": 3.8660670713126735e-07, "loss": 1.7129, "step": 956 },
+    { "epoch": 1.832455720440402, "grad_norm": 1.6796875, "learning_rate": 3.7792556951763424e-07, "loss": 1.6843, "step": 957 },
+    { "epoch": 1.8343705122067975, "grad_norm": 1.7109375, "learning_rate": 3.6934113063721634e-07, "loss": 1.7275, "step": 958 },
+    { "epoch": 1.836285303973193, "grad_norm": 1.6875, "learning_rate": 3.6085347676084736e-07, "loss": 1.7158, "step": 959 },
+    { "epoch": 1.8382000957395883, "grad_norm": 1.625, "learning_rate": 3.5246269318669924e-07, "loss": 1.6048, "step": 960 },
+    { "epoch": 1.8401148875059836, "grad_norm": 1.875, "learning_rate": 3.441688642394292e-07, "loss": 1.6986, "step": 961 },
+    { "epoch": 1.8420296792723792, "grad_norm": 1.6640625, "learning_rate": 3.359720732693361e-07, "loss": 1.6519, "step": 962 },
+    { "epoch": 1.8439444710387747, "grad_norm": 1.6796875, "learning_rate": 3.2787240265151674e-07, "loss": 1.7094, "step": 963 },
+    { "epoch": 1.84585926280517, "grad_norm": 1.671875, "learning_rate": 3.1986993378503526e-07, "loss": 1.678, "step": 964 },
+    { "epoch": 1.8477740545715653, "grad_norm": 1.6640625, "learning_rate": 3.11964747092115e-07, "loss": 1.6928, "step": 965 },
+    { "epoch": 1.8496888463379606, "grad_norm": 1.6796875, "learning_rate": 3.041569220173235e-07, "loss": 1.7484, "step": 966 },
+    { "epoch": 1.8516036381043561, "grad_norm": 1.75, "learning_rate": 2.9644653702677553e-07, "loss": 1.7354, "step": 967 },
+    { "epoch": 1.8535184298707517, "grad_norm": 1.6875, "learning_rate": 2.888336696073435e-07, "loss": 1.6876, "step": 968 },
+    { "epoch": 1.855433221637147, "grad_norm": 1.671875, "learning_rate": 2.8131839626588056e-07, "loss": 1.6774, "step": 969 },
+    { "epoch": 1.8573480134035423, "grad_norm": 1.6875, "learning_rate": 2.7390079252845205e-07, "loss": 1.6936, "step": 970 },
+    { "epoch": 1.8592628051699378, "grad_norm": 1.6953125, "learning_rate": 2.6658093293957187e-07, "loss": 1.7162, "step": 971 },
+    { "epoch": 1.8611775969363333, "grad_norm": 1.6796875, "learning_rate": 2.5935889106146305e-07, "loss": 1.7387, "step": 972 },
+    { "epoch": 1.8630923887027286, "grad_norm": 1.6875, "learning_rate": 2.522347394733049e-07, "loss": 1.6896, "step": 973 },
+    { "epoch": 1.865007180469124, "grad_norm": 1.671875, "learning_rate": 2.45208549770517e-07, "loss": 1.7292, "step": 974 },
+    { "epoch": 1.8669219722355193, "grad_norm": 1.6796875, "learning_rate": 2.382803925640309e-07, "loss": 1.7433, "step": 975 },
+    { "epoch": 1.8688367640019148, "grad_norm": 1.7109375, "learning_rate": 2.314503374795829e-07, "loss": 1.6975, "step": 976 },
+    { "epoch": 1.8707515557683103, "grad_norm": 1.703125, "learning_rate": 2.247184531570168e-07, "loss": 1.7252, "step": 977 },
+    { "epoch": 1.8726663475347056, "grad_norm": 1.71875, "learning_rate": 2.1808480724959004e-07, "loss": 1.7407, "step": 978 },
+    { "epoch": 1.874581139301101, "grad_norm": 1.6796875, "learning_rate": 2.1154946642329644e-07, "loss": 1.7345, "step": 979 },
+    { "epoch": 1.8764959310674965, "grad_norm": 1.703125, "learning_rate": 2.051124963561979e-07, "loss": 1.7497, "step": 980 },
+    { "epoch": 1.8784107228338918, "grad_norm": 1.6953125, "learning_rate": 1.9877396173775598e-07, "loss": 1.7163, "step": 981 },
+    { "epoch": 1.8803255146002873, "grad_norm": 1.6875, "learning_rate": 1.9253392626819468e-07, "loss": 1.6973, "step": 982 },
+    { "epoch": 1.8822403063666826, "grad_norm": 1.6796875, "learning_rate": 1.8639245265784866e-07, "loss": 1.6974, "step": 983 },
+    { "epoch": 1.884155098133078, "grad_norm": 1.7109375, "learning_rate": 1.8034960262654276e-07, "loss": 1.7159, "step": 984 },
+    { "epoch": 1.8860698898994734, "grad_norm": 1.6484375, "learning_rate": 1.744054369029591e-07, "loss": 1.6499, "step": 985 },
+    { "epoch": 1.887984681665869, "grad_norm": 1.671875, "learning_rate": 1.6856001522404296e-07, "loss": 1.7192, "step": 986 },
+    { "epoch": 1.8898994734322643, "grad_norm": 1.6796875, "learning_rate": 1.6281339633438698e-07, "loss": 1.7233, "step": 987 },
+    { "epoch": 1.8918142651986596, "grad_norm": 1.6875, "learning_rate": 1.5716563798565232e-07, "loss": 1.7212, "step": 988 },
+    { "epoch": 1.893729056965055, "grad_norm": 1.6953125, "learning_rate": 1.5161679693598274e-07, "loss": 1.6976, "step": 989 },
+    { "epoch": 1.8956438487314504, "grad_norm": 1.7109375, "learning_rate": 1.4616692894943274e-07, "loss": 1.7309, "step": 990 },
+    { "epoch": 1.897558640497846, "grad_norm": 1.734375, "learning_rate": 1.4081608879541241e-07, "loss": 1.7336, "step": 991 },
+    { "epoch": 1.8994734322642413, "grad_norm": 1.65625, "learning_rate": 1.3556433024813353e-07, "loss": 1.7123, "step": 992 },
+    { "epoch": 1.9013882240306366, "grad_norm": 1.671875, "learning_rate": 1.304117060860688e-07, "loss": 1.6961, "step": 993 },
+    { "epoch": 1.903303015797032, "grad_norm": 1.6640625, "learning_rate": 1.2535826809142339e-07, "loss": 1.6591, "step": 994 },
+    { "epoch": 1.9052178075634276, "grad_norm": 1.6796875, "learning_rate": 1.2040406704961316e-07, "loss": 1.6986, "step": 995 },
+    { "epoch": 1.907132599329823, "grad_norm": 1.71875, "learning_rate": 1.15549152748754e-07, "loss": 1.6602, "step": 996 },
+    { "epoch": 1.9090473910962182, "grad_norm": 1.6484375, "learning_rate": 1.1079357397916435e-07, "loss": 1.6829, "step": 997 },
+    { "epoch": 1.9109621828626135, "grad_norm": 1.6640625, "learning_rate": 1.061373785328701e-07, "loss": 1.688, "step": 998 },
+    { "epoch": 1.912876974629009, "grad_norm": 1.671875, "learning_rate": 1.015806132031305e-07, "loss": 1.7246, "step": 999 },
+    { "epoch": 1.9147917663954046, "grad_norm": 1.6796875, "learning_rate": 9.712332378395861e-08, "loss": 1.7222, "step": 1000 },
+    { "epoch": 1.9167065581618, "grad_norm": 1.6640625, "learning_rate": 9.276555506967378e-08, "loss": 1.7414, "step": 1001 },
+    { "epoch": 1.9186213499281952, "grad_norm": 1.6796875, "learning_rate": 8.850735085443763e-08, "loss": 1.6952, "step": 1002 },
+    { "epoch": 1.9205361416945907, "grad_norm": 1.671875, "learning_rate": 8.434875393182662e-08, "loss": 1.671, "step": 1003 },
+    { "epoch": 1.9224509334609863, "grad_norm": 1.671875, "learning_rate": 8.028980609439241e-08, "loss": 1.6717, "step": 1004 },
+    { "epoch": 1.9243657252273816, "grad_norm": 1.703125, "learning_rate": 7.633054813324769e-08, "loss": 1.6772, "step": 1005 },
+    { "epoch": 1.926280516993777, "grad_norm": 1.6640625, "learning_rate": 7.247101983765104e-08, "loss": 1.7314, "step": 1006 },
+    { "epoch": 1.9281953087601722, "grad_norm": 1.6484375, "learning_rate": 6.871125999461604e-08, "loss": 1.5927, "step": 1007 },
+    { "epoch": 1.9301101005265677, "grad_norm": 1.671875, "learning_rate": 6.505130638850831e-08, "loss": 1.6729, "step": 1008 },
+    { "epoch": 1.9320248922929633, "grad_norm": 1.6640625, "learning_rate": 6.14911958006792e-08, "loss": 1.7042, "step": 1009 },
+    { "epoch": 1.9339396840593586, "grad_norm": 1.703125, "learning_rate": 5.803096400908703e-08, "loss": 1.8112, "step": 1010 },
+    { "epoch": 1.9358544758257539, "grad_norm": 1.71875, "learning_rate": 5.46706457879409e-08, "loss": 1.7101, "step": 1011 },
+    { "epoch": 1.9377692675921494, "grad_norm": 1.671875, "learning_rate": 5.141027490735195e-08, "loss": 1.782, "step": 1012 },
+    { "epoch": 1.9396840593585447, "grad_norm": 1.6640625, "learning_rate": 4.824988413299037e-08, "loss": 1.6928, "step": 1013 },
+    { "epoch": 1.9415988511249402, "grad_norm": 1.7734375, "learning_rate": 4.5189505225762266e-08, "loss": 1.7131, "step": 1014 },
+    { "epoch": 1.9435136428913355, "grad_norm": 1.6875, "learning_rate": 4.2229168941484434e-08, "loss": 1.7265, "step": 1015 },
+    { "epoch": 1.9454284346577309, "grad_norm": 1.65625, "learning_rate": 3.9368905030578994e-08, "loss": 1.6424, "step": 1016 },
+    { "epoch": 1.9473432264241264, "grad_norm": 1.71875, "learning_rate": 3.6608742237769227e-08, "loss": 1.7135, "step": 1017 },
+    { "epoch": 1.949258018190522, "grad_norm": 1.7109375, "learning_rate": 3.394870830180197e-08, "loss": 1.7292, "step": 1018 },
+    { "epoch": 1.9511728099569172, "grad_norm": 1.671875, "learning_rate": 3.1388829955153466e-08, "loss": 1.6791, "step": 1019 },
+    { "epoch": 1.9530876017233125, "grad_norm": 1.71875, "learning_rate": 2.892913292377508e-08, "loss": 1.6906, "step": 1020 },
+    { "epoch": 1.9550023934897078, "grad_norm": 1.671875, "learning_rate": 2.656964192682354e-08, "loss": 1.698, "step": 1021 },
+    { "epoch": 1.9569171852561034, "grad_norm": 1.6953125, "learning_rate": 2.431038067642111e-08, "loss": 1.6888, "step": 1022 },
+    { "epoch": 1.958831977022499, "grad_norm": 1.6875, "learning_rate": 2.2151371877412452e-08, "loss": 1.6964, "step": 1023 },
+    { "epoch": 1.9607467687888942, "grad_norm": 1.671875, "learning_rate": 2.0092637227134836e-08, "loss": 1.6933, "step": 1024 },
+    { "epoch": 1.9626615605552895, "grad_norm": 1.6796875, "learning_rate": 1.8134197415207165e-08, "loss": 1.689, "step": 1025 },
+    { "epoch": 1.964576352321685, "grad_norm": 1.6328125, "learning_rate": 1.627607212331572e-08, "loss": 1.6823, "step": 1026 },
+    { "epoch": 1.9664911440880806, "grad_norm": 1.6953125, "learning_rate": 1.451828002501654e-08, "loss": 1.7396, "step": 1027 },
+    { "epoch": 1.9684059358544759, "grad_norm": 1.640625, "learning_rate": 1.286083878555111e-08, "loss": 1.6459, "step": 1028 },
+    { "epoch": 1.9703207276208712, "grad_norm": 1.671875, "learning_rate": 1.1303765061668748e-08, "loss": 1.6741, "step": 1029 },
+    { "epoch": 1.9722355193872665, "grad_norm": 1.6796875, "learning_rate": 9.847074501456722e-09, "loss": 1.6616, "step": 1030 },
+    { "epoch": 1.974150311153662, "grad_norm": 1.65625, "learning_rate": 8.490781744181498e-09, "loss": 1.7448, "step": 1031 },
+    { "epoch": 1.9760651029200575, "grad_norm": 1.6640625, "learning_rate": 7.234900420147739e-09, "loss": 1.6673, "step": 1032 },
+    { "epoch": 1.9779798946864529, "grad_norm": 1.65625, "learning_rate": 6.079443150556197e-09, "loss": 1.7032, "step": 1033 },
+    { "epoch": 1.9798946864528482, "grad_norm": 1.6640625, "learning_rate": 5.0244215473782556e-09, "loss": 1.6774, "step": 1034 },
+    { "epoch": 1.9818094782192437, "grad_norm": 1.71875, "learning_rate": 4.069846213238249e-09, "loss": 1.7234, "step": 1035 },
+    { "epoch": 1.9837242699856392, "grad_norm": 1.734375, "learning_rate": 3.2157267413113203e-09, "loss": 1.6599, "step": 1036 },
+    { "epoch": 1.9856390617520345, "grad_norm": 1.6875, "learning_rate": 2.4620717152201713e-09, "loss": 1.7449, "step": 1037 },
+    { "epoch": 1.9875538535184298, "grad_norm": 1.640625, "learning_rate": 1.8088887089551255e-09, "loss": 1.6684, "step": 1038 },
+    { "epoch": 1.9894686452848251, "grad_norm": 1.6875, "learning_rate": 1.256184286793083e-09, "loss": 1.641, "step": 1039 },
+    { "epoch": 1.9913834370512207, "grad_norm": 1.6796875, "learning_rate": 8.039640032342366e-10, "loss": 1.7045, "step": 1040 },
+    { "epoch": 1.9932982288176162, "grad_norm": 1.65625, "learning_rate": 4.522324029465619e-10, "loss": 1.7222, "step": 1041 },
+    { "epoch": 1.9952130205840115, "grad_norm": 1.71875, "learning_rate": 2.0099302071807658e-10, "loss": 1.6946, "step": 1042 },
+    { "epoch": 1.9971278123504068, "grad_norm": 1.6796875, "learning_rate": 5.024838142464461e-11, "loss": 1.7026, "step": 1043 },
+    {
+      "epoch": 1.9990426041168023,
+      "grad_norm": 1.671875,
+      "learning_rate": 0.0,
+      "loss": 1.7507,
+      "step": 1044
    }
  ],
  "logging_steps": 1,
…
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
+        "should_training_stop": true
      },
      "attributes": {}
    }
  },
+  "total_flos": 6.28007906539733e+17,
  "train_batch_size": 10,
  "trial_name": null,
  "trial_params": null
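
For reference, each added line above is one record of the trainer's log history, the schema that Hugging Face `transformers` writes into trainer_state.json while training ("epoch", "grad_norm", "learning_rate", "loss", "step"). The snippet below is a minimal, illustrative sketch (not part of this commit) for inspecting the updated state after downloading it; it assumes a local copy of the file under its usual name and the standard "log_history" key for these records.

import json

# Minimal sketch: summarize the training curve recorded in trainer_state.json.
# Assumes the file is in the current directory and uses the "log_history"
# layout shown in the diff above.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only records that carry a training loss (other record types may not).
history = [h for h in state["log_history"] if "loss" in h]
first, last = history[0], history[-1]

print(f"logged steps : {len(history)}")
print(f"final step   : {last['step']} (epoch {last['epoch']:.4f})")
print(f"loss         : {first['loss']:.4f} -> {last['loss']:.4f}")
print(f"learning rate: {first['learning_rate']:.3e} -> {last['learning_rate']:.3e}")
print(f"total FLOs   : {state.get('total_flos')}")

Consistent with the final record above, the learning rate reaches 0.0 at step 1044 and "should_training_stop" flips to true, i.e. the run completed its schedule rather than being interrupted.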