superbigtree committed
Commit 8b2294d
1 Parent(s): 0a3d2e3

Model save

README.md CHANGED
@@ -2,7 +2,7 @@
  tags:
  - generated_from_trainer
  datasets:
- - ydshieh/coco_dataset_script
+ - coco_dataset_script
  model-index:
  - name: clip-roberta-finetuned
    results: []
@@ -13,9 +13,7 @@ should probably proofread and complete it, then remove this comment. -->

  # clip-roberta-finetuned

- This model was trained from scratch on the ydshieh/coco_dataset_script 2017 dataset.
- It achieves the following results on the evaluation set:
- - Loss: 1.5655
+ This model was trained from scratch on the coco_dataset_script dataset.

  ## Model description
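
The README update drops the ydshieh/ namespace from the dataset reference and removes the reported evaluation loss, and the same commit swaps in new weights (see the model.safetensors pointer below). As a quick smoke test of the refreshed checkpoint, something like the sketch below should work; the repo id superbigtree/clip-roberta-finetuned is inferred from the committer and model name and is not stated anywhere in the diff.

```python
# Minimal sketch: load the updated checkpoint from the Hub and sanity-check it.
# Assumption: the model lives at "superbigtree/clip-roberta-finetuned"; adjust the
# repo id if the actual namespace differs.
from transformers import AutoModel, AutoTokenizer

repo_id = "superbigtree/clip-roberta-finetuned"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModel.from_pretrained(repo_id)

print(type(model).__name__)    # e.g. VisionTextDualEncoderModel for a CLIP + RoBERTa pairing
print(model.num_parameters())  # rough cross-check against the ~852 MB safetensors file
```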
 
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5ae90a56ad5fa99b23589a23d4776c009005c5b2fa9aee61e2775a96fa80772d
+ oid sha256:6ef5ec3a3c1aac37becbf41c636d1ca0a4b90c6a259779c6e57369689386d29a
  size 851603588
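
model.safetensors is tracked with Git LFS, so the commit only rewrites the pointer file: the LFS spec version, the object's SHA-256 (oid), and its size in bytes. A local copy of the new weights can be checked against the pointer with a short sketch like the one below (the file path is an assumption):

```python
# Sketch: verify a downloaded model.safetensors against the LFS pointer in this commit.
# EXPECTED_OID and EXPECTED_SIZE are copied from the pointer above; the local path is assumed.
import hashlib
import os

EXPECTED_OID = "6ef5ec3a3c1aac37becbf41c636d1ca0a4b90c6a259779c6e57369689386d29a"
EXPECTED_SIZE = 851603588  # bytes

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file through SHA-256 so the whole checkpoint never sits in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

path = "model.safetensors"  # assumed local path
print("size matches:  ", os.path.getsize(path) == EXPECTED_SIZE)
print("sha256 matches:", sha256_of(path) == EXPECTED_OID)
```

The tfevents pointer below uses the same format, so the same check applies there.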
runs/Feb13_00-22-07_28fc6ffbaa77/events.out.tfevents.1707783741.28fc6ffbaa77.51283.0 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:067ceb23420b3009ddda0ba78cadbaecfd4ac66abc00a189e46bc24700a9afb2
- size 9286
+ oid sha256:6298a0c0c30f468f32318579787335e7d8fe91d6c4f4d3a0f66297c1139783fc
+ size 9640
tokenizer.json CHANGED
@@ -1,21 +1,7 @@
  {
    "version": "1.0",
-   "truncation": {
-     "direction": "Right",
-     "max_length": 128,
-     "strategy": "LongestFirst",
-     "stride": 0
-   },
-   "padding": {
-     "strategy": {
-       "Fixed": 128
-     },
-     "direction": "Right",
-     "pad_to_multiple_of": null,
-     "pad_id": 1,
-     "pad_type_id": 0,
-     "pad_token": "<pad>"
-   },
+   "truncation": null,
+   "padding": null,
    "added_tokens": [
      {
        "id": 0,