{
    "os": "Windows-10-10.0.19041-SP0",
    "python": "3.8.10",
    "heartbeatAt": "2023-04-22T07:52:57.094978",
    "startedAt": "2023-04-22T07:52:55.495061",
    "docker": null,
    "cuda": null,
    "args": [
        "--do_train",
        "--train_file",
        ".\\datasets\\Zettels\\train.json",
        "--validation_file",
        ".\\datasets\\Zettels\\dev.json",
        "--prompt_column",
        "content",
        "--response_column",
        "summary",
        "--overwrite_cache",
        "--model_name_or_path",
        "..\\models\\chatglm-6b-int4",
        "--output_dir",
        "output\\adgen-chatglm-6b-pt-128-2e-2",
        "--overwrite_output_dir",
        "--max_source_length",
        "64",
        "--max_target_length",
        "64",
        "--per_device_train_batch_size",
        "1",
        "--per_device_eval_batch_size",
        "1",
        "--gradient_accumulation_steps",
        "16",
        "--predict_with_generate",
        "--max_steps",
        "1000",
        "--logging_steps",
        "10",
        "--save_steps",
        "10",
        "--learning_rate",
        "2e-2",
        "--pre_seq_len",
        "128",
        "--quantization_bit",
        "4"
    ],
    "state": "running",
    "program": "main.py",
    "codePath": "ptuning\\main.py",
    "git": {
        "remote": "https://github.com/THUDM/ChatGLM-6B",
        "commit": "01e6313abf4122d789d6e68128856af52847b355"
    },
    "cpu_count": 6,
    "cpu_count_logical": 12,
    "cpu_freq": {
        "current": 2592.0,
        "min": 0.0,
        "max": 2592.0
    },
    "cpu_freq_per_core": [
        {
            "current": 2592.0,
            "min": 0.0,
            "max": 2592.0
        }
    ],
    "disk": {
        "total": 500.32030868530273,
        "used": 239.2071533203125
    },
    "gpu": "NVIDIA GeForce RTX 2060",
    "gpu_count": 1,
    "gpu_devices": [
        {
            "name": "NVIDIA GeForce RTX 2060",
            "memory_total": 6442450944
        }
    ],
    "memory": {
        "total": 63.87089538574219
    }
}