Training in progress, step 12375
- logs/attn_loss_fn=None, attn_weight=0, gradient_accumulation_steps=1, hs_loss_fn=mse, hs_weight=2.0, learning_rate=0.0004, lr_scheduler_kwargs=__num_cycles___4_, lr_scheduler_type=cosine_with_restarts, max/events.out.tfevents.1723832112.93d6cbb3ad53 +3 -0
- logs/attn_loss_fn=None, attn_weight=0, gradient_accumulation_steps=1, hs_loss_fn=mse, hs_weight=2.0, learning_rate=0.0004, lr_scheduler_type=cosine_with_restarts, max_grad_norm=None, num_cycles=4, optim=pa/events.out.tfevents.1723830017.93d6cbb3ad53 +3 -0
- model.safetensors +1 -1
logs/attn_loss_fn=None, attn_weight=0, gradient_accumulation_steps=1, hs_loss_fn=mse, hs_weight=2.0, learning_rate=0.0004, lr_scheduler_kwargs=__num_cycles___4_, lr_scheduler_type=cosine_with_restarts, max/events.out.tfevents.1723832112.93d6cbb3ad53
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c8c2ba97f711d07fcb552d1d3a3ad87f0df28b7d1dc31d76e7b64cb284ca6e7f
+size 5185087
logs/attn_loss_fn=None, attn_weight=0, gradient_accumulation_steps=1, hs_loss_fn=mse, hs_weight=2.0, learning_rate=0.0004, lr_scheduler_type=cosine_with_restarts, max_grad_norm=None, num_cycles=4, optim=pa/events.out.tfevents.1723830017.93d6cbb3ad53
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:80a89f7469dc262155f189c57ab240ef2a1df380df11739ba072a2ea1cf9fb20
+size 4931125
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d0fb2a2484cd2ddfc1ca74f378aecd493ed96dc95efbcd19968d8b21725ce360
 size 137033984
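
The run names embedded in the log directory paths record the training hyperparameters: learning_rate=0.0004, gradient_accumulation_steps=1, cosine_with_restarts scheduling with num_cycles=4, max_grad_norm=None, plus custom loss settings (hs_loss_fn=mse, hs_weight=2.0, attn_loss_fn=None, attn_weight=0) that appear to come from the project's own training script rather than from standard Trainer arguments. As a rough sketch only, assuming a standard 🤗 Transformers Trainer setup, those shared hyperparameters could be configured as below; the output/log paths and the checkpoint cadence are placeholders, and the custom hidden-state/attention loss terms and the truncated optim=pa... setting are not reproduced here.

```python
# Hypothetical reconstruction of the run configuration implied by the log
# directory names in this commit; NOT the repository's actual training script.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="out",                          # placeholder path
    logging_dir="logs",                        # TensorBoard event files (as uploaded here) land under this dir
    report_to="tensorboard",
    learning_rate=4e-4,                        # learning_rate=0.0004
    gradient_accumulation_steps=1,             # gradient_accumulation_steps=1
    lr_scheduler_type="cosine_with_restarts",  # lr_scheduler_type=cosine_with_restarts
    lr_scheduler_kwargs={"num_cycles": 4},     # lr_scheduler_kwargs={"num_cycles": 4}
    max_grad_norm=None,                        # max_grad_norm=None disables gradient clipping in a standard Trainer
    save_steps=12375,                          # assumption: checkpoint cadence matching "step 12375" is a guess
)
```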