{
    "model_card": {
        "Date & Time": "2024-07-22T14:22:53.866930",
        "Model Card": [
            "https://huggingface.co/google/t5-v1_1-base"
        ],
        "License Information": [
            "apache-2.0"
        ],
        "Citation Information": [
            "\n@inproceedings{Wolf_Transformers_State-of-the-Art_Natural_2020,\n  author = {Wolf, Thomas and Debut, Lysandre and Sanh, Victor and Chaumond, Julien",
            "\n@Misc{peft,\n  title =        {PEFT: State-of-the-art Parameter-Efficient Fine-Tuning methods},\n  author =       {Sourab Mangrulkar and Sylvain Gugger and Lysandre Debut and Younes"
        ]
    },
    "data_card": {
        "Generate Research Paper Abstracts": {
            "Date & Time": "2024-07-22T13:38:20.754417",
            "Model Name": [
                "openai-community/gpt2"
            ],
            "Model Card": [
                "https://huggingface.co/openai-community/gpt2"
            ],
            "License Information": [
                "mit"
            ],
            "Citation Information": [
                "\n@inproceedings{Wolf_Transformers_State-of-the-Art_Natural_2020,\n  author = {Wolf, Thomas and Debut, Lysandre and Sanh, Victor and Chaumond, Julien",
                "@article{radford2019language,\n  title={Language Models are Unsupervised Multitask Learners},\n  author={Radford, Alec and Wu, Jeff and Child, Rewon and Luan, David and Amodei, Dario and Sutskever, Ilya},\n  year={2019}\n}"
            ]
        },
        "Generate LinkedIn post from Abstracts": {
            "Date & Time": "2024-07-22T13:41:35.938694",
            "Model Name": [
                "openai-community/gpt2"
            ],
            "Model Card": [
                "https://huggingface.co/openai-community/gpt2"
            ],
            "License Information": [
                "mit"
            ],
            "Citation Information": [
                "\n@inproceedings{Wolf_Transformers_State-of-the-Art_Natural_2020,\n  author = {Wolf, Thomas and Debut, Lysandre and Sanh, Victor and Chaumond, Julien",
                "@article{radford2019language,\n  title={Language Models are Unsupervised Multitask Learners},\n  author={Radford, Alec and Wu, Jeff and Child, Rewon and Luan, David and Amodei, Dario and Sutskever, Ilya},\n  year={2019}\n}"
            ]
        },
        "Generate LinkedIn post from Abstracts (train split)": {
            "Date & Time": "2024-07-22T13:41:35.979386"
        }
    },
    "__version__": "0.28.0",
    "datetime": "2024-07-22T13:41:38.886949",
    "type": "TrainHFFineTune",
    "name": "Train an Abstract => LinkedIn Post Model",
    "version": 1.0,
    "fingerprint": "39a75dc45318b92e",
    "req_versions": {
        "dill": "0.3.8",
        "sqlitedict": "2.1.0",
        "torch": "2.3.1",
        "numpy": "1.26.4",
        "transformers": "4.42.4",
        "datasets": "2.20.0",
        "huggingface_hub": "0.24.0",
        "accelerate": "0.32.1",
        "peft": "0.11.1",
        "tiktoken": "0.7.0",
        "tokenizers": "0.19.1",
        "openai": "1.37.0",
        "ctransformers": "0.2.27",
        "optimum": "1.21.2",
        "bitsandbytes": "0.42.0",
        "litellm": "1.31.14",
        "trl": "0.8.1",
        "setfit": "1.0.3"
    },
    "interpreter": "3.10.10 (main, Jul 17 2023, 08:26:46) [Clang 14.0.3 (clang-1403.0.22.14.1)]"
}