DuongTrongChi committed
Commit 6670c4e
1 Parent(s): 073be20

Training in progress, step 289, checkpoint

last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:afd25c170355ea632cfa5da978f90f702ff16dbc4d51afcebf4989ad9d0f333b
+oid sha256:e5ba6698fd44d93d0d95a7529def37a9cede5e4d09940a0ab3a40fd14872cf1f
 size 100198584
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:73e0747b869366afbdebd37788920b02982df01eccc71e682aec61e6a7645b33
-size 50675156
+oid sha256:4758eb51404ba0b3f4b0b36fcf9c00f2cdf4fdf570c90bf394f1816ab1cc5a08
+size 50675604
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d1c9368a0cddac000b7996e7cc550ed94e1405683912f0e0601527c458e30db8
+oid sha256:7e7a65992fdf54fe4538029cd8ec72e942bb69b362a7899f495d5668d43ce184
 size 1064
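
The three binary checkpoint files above change only through their Git LFS pointers: the repository tracks a version line, a sha256 oid, and a byte size, while the blobs themselves live in LFS storage. The adapter and scheduler blobs keep their sizes and only their hashes change; the optimizer state grows slightly. A minimal sketch for checking a fetched blob against its pointer (the file paths here are hypothetical):

```python
import hashlib
from pathlib import Path


def parse_lfs_pointer(pointer_text: str) -> dict:
    """Split the 'key value' lines of a Git LFS pointer into a dict."""
    fields = {}
    for line in pointer_text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields


def verify_lfs_object(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against the size and sha256 oid in its pointer."""
    fields = parse_lfs_pointer(Path(pointer_path).read_text())
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = Path(blob_path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid


# Hypothetical paths: the pointer as stored in git history, and the fetched blob.
print(verify_lfs_object("adapter_model.safetensors.pointer", "adapter_model.safetensors"))
```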
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.3404255319148936,
+  "epoch": 0.4222445438772715,
   "eval_steps": 500,
-  "global_step": 233,
+  "global_step": 289,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -1638,6 +1638,398 @@
       "learning_rate": 1.5445205479452056e-05,
       "loss": 1.1876,
       "step": 233
+    },
+    {
+      "epoch": 0.3418865856999361,
+      "grad_norm": 0.09288407117128372,
+      "learning_rate": 1.541095890410959e-05,
+      "loss": 1.2163,
+      "step": 234
+    },
+    {
+      "epoch": 0.34334763948497854,
+      "grad_norm": 0.09983450174331665,
+      "learning_rate": 1.5376712328767125e-05,
+      "loss": 1.2118,
+      "step": 235
+    },
+    {
+      "epoch": 0.344808693270021,
+      "grad_norm": 0.10181832313537598,
+      "learning_rate": 1.5342465753424658e-05,
+      "loss": 1.2189,
+      "step": 236
+    },
+    {
+      "epoch": 0.34626974705506347,
+      "grad_norm": 0.10253550857305527,
+      "learning_rate": 1.5308219178082195e-05,
+      "loss": 1.2415,
+      "step": 237
+    },
+    {
+      "epoch": 0.34773080084010594,
+      "grad_norm": 0.09938843548297882,
+      "learning_rate": 1.5273972602739728e-05,
+      "loss": 1.2391,
+      "step": 238
+    },
+    {
+      "epoch": 0.3491918546251484,
+      "grad_norm": 0.09904040396213531,
+      "learning_rate": 1.523972602739726e-05,
+      "loss": 1.147,
+      "step": 239
+    },
+    {
+      "epoch": 0.35065290841019087,
+      "grad_norm": 0.1011345386505127,
+      "learning_rate": 1.5205479452054797e-05,
+      "loss": 1.2801,
+      "step": 240
+    },
+    {
+      "epoch": 0.35211396219523333,
+      "grad_norm": 0.10546337813138962,
+      "learning_rate": 1.517123287671233e-05,
+      "loss": 1.2179,
+      "step": 241
+    },
+    {
+      "epoch": 0.3535750159802758,
+      "grad_norm": 0.09379958361387253,
+      "learning_rate": 1.5136986301369865e-05,
+      "loss": 1.2078,
+      "step": 242
+    },
+    {
+      "epoch": 0.35503606976531826,
+      "grad_norm": 0.09210502356290817,
+      "learning_rate": 1.5102739726027398e-05,
+      "loss": 1.2126,
+      "step": 243
+    },
+    {
+      "epoch": 0.3564971235503607,
+      "grad_norm": 0.0911347046494484,
+      "learning_rate": 1.5068493150684933e-05,
+      "loss": 1.2353,
+      "step": 244
+    },
+    {
+      "epoch": 0.35795817733540314,
+      "grad_norm": 0.10343588888645172,
+      "learning_rate": 1.5034246575342466e-05,
+      "loss": 1.2063,
+      "step": 245
+    },
+    {
+      "epoch": 0.3594192311204456,
+      "grad_norm": 0.10774116218090057,
+      "learning_rate": 1.5000000000000002e-05,
+      "loss": 1.1895,
+      "step": 246
+    },
+    {
+      "epoch": 0.36088028490548807,
+      "grad_norm": 0.09309092164039612,
+      "learning_rate": 1.4965753424657537e-05,
+      "loss": 1.2661,
+      "step": 247
+    },
+    {
+      "epoch": 0.36234133869053053,
+      "grad_norm": 0.09687670320272446,
+      "learning_rate": 1.493150684931507e-05,
+      "loss": 1.2154,
+      "step": 248
+    },
+    {
+      "epoch": 0.363802392475573,
+      "grad_norm": 0.09125279635190964,
+      "learning_rate": 1.4897260273972605e-05,
+      "loss": 1.2367,
+      "step": 249
+    },
+    {
+      "epoch": 0.36526344626061547,
+      "grad_norm": 0.11119771748781204,
+      "learning_rate": 1.4863013698630138e-05,
+      "loss": 1.1032,
+      "step": 250
+    },
+    {
+      "epoch": 0.36672450004565793,
+      "grad_norm": 0.11136704683303833,
+      "learning_rate": 1.4828767123287672e-05,
+      "loss": 1.1679,
+      "step": 251
+    },
+    {
+      "epoch": 0.3681855538307004,
+      "grad_norm": 0.11276744306087494,
+      "learning_rate": 1.4794520547945205e-05,
+      "loss": 1.1268,
+      "step": 252
+    },
+    {
+      "epoch": 0.36964660761574286,
+      "grad_norm": 0.10903234779834747,
+      "learning_rate": 1.4760273972602742e-05,
+      "loss": 1.1681,
+      "step": 253
+    },
+    {
+      "epoch": 0.3711076614007853,
+      "grad_norm": 0.09586647897958755,
+      "learning_rate": 1.4726027397260275e-05,
+      "loss": 1.2486,
+      "step": 254
+    },
+    {
+      "epoch": 0.3725687151858278,
+      "grad_norm": 0.09748208522796631,
+      "learning_rate": 1.469178082191781e-05,
+      "loss": 1.3186,
+      "step": 255
+    },
+    {
+      "epoch": 0.37402976897087026,
+      "grad_norm": 0.10351759195327759,
+      "learning_rate": 1.4657534246575344e-05,
+      "loss": 1.1344,
+      "step": 256
+    },
+    {
+      "epoch": 0.3754908227559127,
+      "grad_norm": 0.11112543940544128,
+      "learning_rate": 1.4623287671232877e-05,
+      "loss": 1.2433,
+      "step": 257
+    },
+    {
+      "epoch": 0.3769518765409552,
+      "grad_norm": 0.09291627258062363,
+      "learning_rate": 1.4589041095890412e-05,
+      "loss": 1.1717,
+      "step": 258
+    },
+    {
+      "epoch": 0.3784129303259976,
+      "grad_norm": 0.11696401238441467,
+      "learning_rate": 1.4554794520547945e-05,
+      "loss": 1.1889,
+      "step": 259
+    },
+    {
+      "epoch": 0.37987398411104006,
+      "grad_norm": 0.09728217869997025,
+      "learning_rate": 1.4520547945205482e-05,
+      "loss": 1.1756,
+      "step": 260
+    },
+    {
+      "epoch": 0.38133503789608253,
+      "grad_norm": 0.10936015099287033,
+      "learning_rate": 1.4486301369863015e-05,
+      "loss": 1.1526,
+      "step": 261
+    },
+    {
+      "epoch": 0.382796091681125,
+      "grad_norm": 0.09887027740478516,
+      "learning_rate": 1.445205479452055e-05,
+      "loss": 1.1556,
+      "step": 262
+    },
+    {
+      "epoch": 0.38425714546616746,
+      "grad_norm": 0.09080694615840912,
+      "learning_rate": 1.4417808219178084e-05,
+      "loss": 1.1592,
+      "step": 263
+    },
+    {
+      "epoch": 0.3857181992512099,
+      "grad_norm": 0.09273724257946014,
+      "learning_rate": 1.4383561643835617e-05,
+      "loss": 1.1994,
+      "step": 264
+    },
+    {
+      "epoch": 0.3871792530362524,
+      "grad_norm": 0.10300930589437485,
+      "learning_rate": 1.4349315068493152e-05,
+      "loss": 1.2292,
+      "step": 265
+    },
+    {
+      "epoch": 0.38864030682129486,
+      "grad_norm": 0.10504985600709915,
+      "learning_rate": 1.4315068493150685e-05,
+      "loss": 1.1623,
+      "step": 266
+    },
+    {
+      "epoch": 0.3901013606063373,
+      "grad_norm": 0.10123489052057266,
+      "learning_rate": 1.4280821917808221e-05,
+      "loss": 1.1435,
+      "step": 267
+    },
+    {
+      "epoch": 0.3915624143913798,
+      "grad_norm": 0.09777438640594482,
+      "learning_rate": 1.4246575342465754e-05,
+      "loss": 1.2014,
+      "step": 268
+    },
+    {
+      "epoch": 0.39302346817642225,
+      "grad_norm": 0.10096925497055054,
+      "learning_rate": 1.421232876712329e-05,
+      "loss": 1.243,
+      "step": 269
+    },
+    {
+      "epoch": 0.3944845219614647,
+      "grad_norm": 0.11357256770133972,
+      "learning_rate": 1.4178082191780822e-05,
+      "loss": 1.1434,
+      "step": 270
+    },
+    {
+      "epoch": 0.3959455757465072,
+      "grad_norm": 0.09454260766506195,
+      "learning_rate": 1.4143835616438357e-05,
+      "loss": 1.257,
+      "step": 271
+    },
+    {
+      "epoch": 0.39740662953154965,
+      "grad_norm": 0.0995330736041069,
+      "learning_rate": 1.4109589041095892e-05,
+      "loss": 1.2173,
+      "step": 272
+    },
+    {
+      "epoch": 0.3988676833165921,
+      "grad_norm": 0.09766160696744919,
+      "learning_rate": 1.4075342465753425e-05,
+      "loss": 1.1523,
+      "step": 273
+    },
+    {
+      "epoch": 0.4003287371016346,
+      "grad_norm": 0.09799221158027649,
+      "learning_rate": 1.4041095890410961e-05,
+      "loss": 1.2785,
+      "step": 274
+    },
+    {
+      "epoch": 0.401789790886677,
+      "grad_norm": 0.11043940484523773,
+      "learning_rate": 1.4006849315068494e-05,
+      "loss": 1.1715,
+      "step": 275
+    },
+    {
+      "epoch": 0.40325084467171946,
+      "grad_norm": 0.09611232578754425,
+      "learning_rate": 1.3972602739726029e-05,
+      "loss": 1.1689,
+      "step": 276
+    },
+    {
+      "epoch": 0.4047118984567619,
+      "grad_norm": 0.0990489274263382,
+      "learning_rate": 1.3938356164383562e-05,
+      "loss": 1.2633,
+      "step": 277
+    },
+    {
+      "epoch": 0.4061729522418044,
+      "grad_norm": 0.09898124635219574,
+      "learning_rate": 1.3904109589041097e-05,
+      "loss": 1.2649,
+      "step": 278
+    },
+    {
+      "epoch": 0.40763400602684685,
+      "grad_norm": 0.10052936524152756,
+      "learning_rate": 1.3869863013698633e-05,
+      "loss": 1.1938,
+      "step": 279
+    },
+    {
+      "epoch": 0.4090950598118893,
+      "grad_norm": 0.10725940018892288,
+      "learning_rate": 1.3835616438356164e-05,
+      "loss": 1.2371,
+      "step": 280
+    },
+    {
+      "epoch": 0.4105561135969318,
+      "grad_norm": 0.10019299387931824,
+      "learning_rate": 1.3801369863013701e-05,
+      "loss": 1.2738,
+      "step": 281
+    },
+    {
+      "epoch": 0.41201716738197425,
+      "grad_norm": 0.10612376034259796,
+      "learning_rate": 1.3767123287671234e-05,
+      "loss": 1.1666,
+      "step": 282
+    },
+    {
+      "epoch": 0.4134782211670167,
+      "grad_norm": 0.1012573391199112,
+      "learning_rate": 1.3732876712328769e-05,
+      "loss": 1.2489,
+      "step": 283
+    },
+    {
+      "epoch": 0.4149392749520592,
+      "grad_norm": 0.10012490302324295,
+      "learning_rate": 1.3698630136986302e-05,
+      "loss": 1.2934,
+      "step": 284
+    },
+    {
+      "epoch": 0.41640032873710164,
+      "grad_norm": 0.09684241563081741,
+      "learning_rate": 1.3664383561643836e-05,
+      "loss": 1.2547,
+      "step": 285
+    },
+    {
+      "epoch": 0.4178613825221441,
+      "grad_norm": 0.10791518539190292,
+      "learning_rate": 1.363013698630137e-05,
+      "loss": 1.2118,
+      "step": 286
+    },
+    {
+      "epoch": 0.4193224363071866,
+      "grad_norm": 0.10714226961135864,
+      "learning_rate": 1.3595890410958906e-05,
+      "loss": 1.218,
+      "step": 287
+    },
+    {
+      "epoch": 0.42078349009222904,
+      "grad_norm": 0.10130172967910767,
+      "learning_rate": 1.356164383561644e-05,
+      "loss": 1.2244,
+      "step": 288
+    },
+    {
+      "epoch": 0.4222445438772715,
+      "grad_norm": 0.0985652357339859,
+      "learning_rate": 1.3527397260273974e-05,
+      "loss": 1.2228,
+      "step": 289
     }
   ],
   "logging_steps": 1,
@@ -1657,7 +2049,7 @@
       "attributes": {}
     }
   },
-  "total_flos": 2.619657311583191e+17,
+  "total_flos": 3.2530992653758464e+17,
   "train_batch_size": 4,
   "trial_name": null,
   "trial_params": null