---
base_model: Snowflake/snowflake-arctic-embed-m
datasets: []
language:
- en
library_name: sentence-transformers
license: apache-2.0
metrics:
- cosine_accuracy@1
- cosine_accuracy@3
- cosine_accuracy@5
- cosine_accuracy@10
- cosine_precision@1
- cosine_precision@3
- cosine_precision@5
- cosine_precision@10
- cosine_recall@1
- cosine_recall@3
- cosine_recall@5
- cosine_recall@10
- cosine_ndcg@10
- cosine_mrr@10
- cosine_map@100
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:1490
- loss:MatryoshkaLoss
- loss:MultipleNegativesRankingLoss
widget:
- source_sentence: What is the RESOURCE NAME for the kubernetes-cluster in the ZenML
    documentation?
  sentences:
  - '                                                 ┃┠──────────────────┼─────────────────────────────────────────────────────────────────────┨


    ┃ RESOURCE TYPES   │ 🌀 kubernetes-cluster                                               ┃


    ┠──────────────────┼─────────────────────────────────────────────────────────────────────┨


    ┃ RESOURCE NAME    │ arn:aws:eks:us-east-1:715803424590:cluster/zenhacks-cluster         ┃


    ┠──────────────────┼─────────────────────────────────────────────────────────────────────┨


    ┃ SECRET ID        │                                                                     ┃


    ┠──────────────────┼─────────────────────────────────────────────────────────────────────┨


    ┃ SESSION DURATION │ N/A                                                                 ┃


    ┠──────────────────┼─────────────────────────────────────────────────────────────────────┨


    ┃ EXPIRES IN       │ 11h59m57s                                                           ┃


    ┠──────────────────┼─────────────────────────────────────────────────────────────────────┨


    ┃ OWNER            │ default                                                             ┃


    ┠──────────────────┼─────────────────────────────────────────────────────────────────────┨


    ┃ WORKSPACE        │ default                                                             ┃


    ┠──────────────────┼─────────────────────────────────────────────────────────────────────┨


    ┃ SHARED           │ ➖                                                                  ┃


    ┠──────────────────┼─────────────────────────────────────────────────────────────────────┨


    ┃ CREATED_AT       │ 2023-06-16 10:17:46.931091                                          ┃


    ┠──────────────────┼─────────────────────────────────────────────────────────────────────┨


    ┃ UPDATED_AT       │ 2023-06-16 10:17:46.931094                                          ┃


    ┗━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛


    Configuration'
  - 'urns it with the configuration of the cloud stack.Based on the stack info and
    pipeline specification, the client builds and pushes an image to the container
    registry. The image contains the environment needed to execute the pipeline and
    the code of the steps.


    The client creates a run in the orchestrator. For example, in the case of the
    Skypilot orchestrator, it creates a virtual machine in the cloud with some commands
    to pull and run a Docker image from the specified container registry.


    The orchestrator pulls the appropriate image from the container registry as it''s
    executing the pipeline (each step has an image).


    As each pipeline runs, it stores artifacts physically in the artifact store. Of
    course, this artifact store needs to be some form of cloud storage.


    As each pipeline runs, it reports status back to the ZenML server and optionally
    queries the server for metadata.


    Provisioning and registering a Skypilot orchestrator alongside a container registry


    While there are detailed docs on how to set up a Skypilot orchestrator and a container
    registry on each public cloud, we have put the most relevant details here for
    convenience:


    In order to launch a pipeline on AWS with the SkyPilot orchestrator, the first
    thing that you need to do is to install the AWS and Skypilot integrations:


    zenml integration install aws skypilot_aws -y


    Before we start registering any components, there is another step that we have
    to execute. As we explained in the previous section, components such as orchestrators
    and container registries often require you to set up the right permissions. In
    ZenML, this process is simplified with the use of Service Connectors. For this
    example, we need to use the IAM role authentication method of our AWS service
    connector:


    AWS_PROFILE=<AWS_PROFILE> zenml service-connector register cloud_connector --type
    aws --auto-configure


    Once the service connector is set up, we can register a Skypilot orchestrator:


    zenml orchestrator register skypilot_orchestrator -f vm_aws'
  - 'pose -f /path/to/docker-compose.yml -p zenml up -dYou need to visit the ZenML
    dashboard at http://localhost:8080 to activate the server by creating an initial
    admin account. You can then connect your client to the server with the web login
    flow:


    zenml connect --url http://localhost:8080


    Tearing down the installation is as simple as running:


    docker-compose -p zenml down


    Database backup and recovery


    An automated database backup and recovery feature is enabled by default for all
    Docker deployments. The ZenML server will automatically back up the database in-memory
    before every database schema migration and restore it if the migration fails.


    The database backup automatically created by the ZenML server is only temporary
    and only used as an immediate recovery in case of database migration failures.
    It is not meant to be used as a long-term backup solution. If you need to back
    up your database for long-term storage, you should use a dedicated backup solution.


    Several database backup strategies are supported, depending on where and how the
    backup is stored. The strategy can be configured by means of the ZENML_STORE_BACKUP_STRATEGY
    environment variable:


    disabled - no backup is performed


    in-memory - the database schema and data are stored in memory. This is the fastest
    backup strategy, but the backup is not persisted across container restarts, so
    no manual intervention is possible in case the automatic DB recovery fails after
    a failed DB migration. Adequate memory resources should be allocated to the ZenML
    server container when using this backup strategy with larger databases. This is
    the default backup strategy.'
- source_sentence: What are the benefits of deploying ZenML to a production environment?
  sentences:
  - 'graph that includes custom TRANSFORMER and ROUTER.If you are looking for a more
    easy way to deploy your models locally, you can use the MLflow Model Deployer
    flavor.


    How to deploy it?


    ZenML provides a Seldon Core flavor build on top of the Seldon Core Integration
    to allow you to deploy and use your models in a production-grade environment.
    In order to use the integration you need to install it on your local machine to
    be able to register a Seldon Core Model deployer with ZenML and add it to your
    stack:


    zenml integration install seldon -y


    To deploy and make use of the Seldon Core integration we need to have the following
    prerequisites:


    access to a Kubernetes cluster. This can be configured using the kubernetes_context
    configuration attribute to point to a local kubectl context or an in-cluster configuration,
    but the recommended approach is to use a Service Connector to link the Seldon
    Deployer Stack Component to a Kubernetes cluster.


    Seldon Core needs to be preinstalled and running in the target Kubernetes cluster.
    Check out the official Seldon Core installation instructions or the EKS installation
    example below.


    models deployed with Seldon Core need to be stored in some form of persistent
    shared storage that is accessible from the Kubernetes cluster where Seldon Core
    is installed (e.g. AWS S3, GCS, Azure Blob Storage, etc.). You can use one of
    the supported remote artifact store flavors to store your models as part of your
    stack. For a smoother experience running Seldon Core with a cloud artifact store,
    we also recommend configuring explicit credentials for the artifact store. The
    Seldon Core model deployer knows how to automatically convert those credentials
    in the format needed by Seldon Core model servers to authenticate to the storage
    back-end where models are stored.


    Since the Seldon Model Deployer is interacting with the Seldon Core model server
    deployed on a Kubernetes cluster, you need to provide a set of configuration parameters.
    These parameters are:'
  - 'S Secrets Manager accounts or regions may be used.Always make sure that the backup
    Secrets Store is configured to use a different location than the primary Secrets
    Store. The location can be different in terms of the Secrets Store back-end type
    (e.g. internal database vs. AWS Secrets Manager) or the actual location of the
    Secrets Store back-end (e.g. different AWS Secrets Manager account or region,
    GCP Secret Manager project or Azure Key Vault''s vault).


    Using the same location for both the primary and backup Secrets Store will not
    provide any additional benefits and may even result in unexpected behavior.


    When a backup secrets store is in use, the ZenML Server will always attempt to
    read and write secret values from/to the primary Secrets Store first while ensuring
    to keep the backup Secrets Store in sync. If the primary Secrets Store is unreachable,
    if the secret values are not found there or any otherwise unexpected error occurs,
    the ZenML Server falls back to reading and writing from/to the backup Secrets
    Store. Only if the backup Secrets Store is also unavailable, the ZenML Server
    will return an error.


    In addition to the hidden backup operations, users can also explicitly trigger
    a backup operation by using the zenml secret backup CLI command. This command
    will attempt to read all secrets from the primary Secrets Store and write them
    to the backup Secrets Store. Similarly, the zenml secret restore CLI command can
    be used to restore secrets from the backup Secrets Store to the primary Secrets
    Store. These CLI commands are useful for migrating secrets from one Secrets Store
    to another.


    Secrets migration strategy


    Sometimes you may need to change the external provider or location where secrets
    values are stored by the Secrets Store. The immediate implication of this is that
    the ZenML server will no longer be able to access existing secrets with the new
    configuration until they are also manually copied to the new location. Some examples
    of such changes include:'
  - '🤔Deploying ZenML


    Why do we need to deploy ZenML?


    Moving your ZenML Server to a production environment offers several benefits over
    staying local:


    Scalability: Production environments are designed to handle large-scale workloads,
    allowing your models to process more data and deliver faster results.


    Reliability: Production-grade infrastructure ensures high availability and fault
    tolerance, minimizing downtime and ensuring consistent performance.


    Collaboration: A shared production environment enables seamless collaboration
    between team members, making it easier to iterate on models and share insights.


    Despite these advantages, transitioning to production can be challenging due to
    the complexities involved in setting up the needed infrastructure.


    ZenML Server


    When you first get started with ZenML, it relies with the following architecture
    on your machine.


    The SQLite database that you can see in this diagram is used to store information
    about pipelines, pipeline runs, stacks, and other configurations. Users can run
    the zenml up command to spin up a local REST server to serve the dashboard. The
    diagram for this looks as follows:


    In Scenario 2, the zenml up command implicitly connects the client to the server.


    Currently the ZenML server supports a legacy and a brand-new version of the dashboard.
    To use the legacy version simply use the following command zenml up --legacy


    In order to move into production, the ZenML server needs to be deployed somewhere
    centrally so that the different cloud stack components can read from and write
    to the server. Additionally, this also allows all your team members to connect
    to it and share stacks and pipelines.


    Deploying a ZenML Server'
- source_sentence: What is the tenant_id value in the configuration section?
  sentences:
  - '─────────────────────────────────────────────────┨┃ OWNER            │ default                                                                        ┃


    ┠──────────────────┼────────────────────────────────────────────────────────────────────────────────┨


    ┃ WORKSPACE        │ default                                                                        ┃


    ┠──────────────────┼────────────────────────────────────────────────────────────────────────────────┨


    ┃ SHARED           │ ➖                                                                             ┃


    ┠──────────────────┼────────────────────────────────────────────────────────────────────────────────┨


    ┃ CREATED_AT       │ 2023-06-20 19:16:26.802374                                                     ┃


    ┠──────────────────┼────────────────────────────────────────────────────────────────────────────────┨


    ┃ UPDATED_AT       │ 2023-06-20 19:16:26.802378                                                     ┃


    ┗━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛


    Configuration


    ┏━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓


    ┃ PROPERTY      │ VALUE                                ┃


    ┠───────────────┼──────────────────────────────────────┨


    ┃ tenant_id     │ a79ff333-8f45-4a74-a42e-68871c17b7fb ┃


    ┠───────────────┼──────────────────────────────────────┨


    ┃ client_id     │ 8926254a-8c3f-430a-a2fd-bdab234d491e ┃


    ┠───────────────┼──────────────────────────────────────┨


    ┃ client_secret │ [HIDDEN]                             ┃


    ┗━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛


    Azure Access Token


    Uses temporary Azure access tokens explicitly configured by the user or auto-configured
    from a local environment.'
  - ' should pick the one that best fits your use case.If you already have one or
    more GCP Service Connectors configured in your ZenML deployment, you can check
    which of them can be used to access generic GCP resources like the GCP Image Builder
    required for your GCP Image Builder by running e.g.:


    zenml service-connector list-resources --resource-type gcp-generic


    Example Command Output


    The following ''gcp-generic'' resources can be accessed by service connectors
    configured in your workspace:


    ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┓


    ┃             CONNECTOR ID             │ CONNECTOR NAME │ CONNECTOR TYPE │ RESOURCE
    TYPE  │ RESOURCE NAMES ┃


    ┠──────────────────────────────────────┼────────────────┼────────────────┼────────────────┼────────────────┨


    ┃ bfdb657d-d808-47e7-9974-9ba6e4919d83 │ gcp-generic    │ 🔵 gcp         │ 🔵 gcp-generic
    │ zenml-core     ┃


    ┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┛


    After having set up or decided on a GCP Service Connector to use to authenticate
    to GCP, you can register the GCP Image Builder as follows:


    zenml image-builder register <IMAGE_BUILDER_NAME> \


    --flavor=gcp \


    --cloud_builder_image=<BUILDER_IMAGE_NAME> \


    --network=<DOCKER_NETWORK> \


    --build_timeout=<BUILD_TIMEOUT_IN_SECONDS>


    # Connect the GCP Image Builder to GCP via a GCP Service Connector


    zenml image-builder connect <IMAGE_BUILDER_NAME> -i


    A non-interactive version that connects the GCP Image Builder to a target GCP
    Service Connector:


    zenml image-builder connect <IMAGE_BUILDER_NAME> --connector <CONNECTOR_ID>


    Example Command Output


    $ zenml image-builder connect gcp-image-builder --connector gcp-generic


    Successfully connected image builder `gcp-image-builder` to the following resources:


    ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┓'
  - 'gistry or even more than one type of AWS resource:zenml service-connector register
    --type aws -i


    A non-interactive CLI example that leverages the AWS CLI configuration on your
    local machine to auto-configure an AWS Service Connector targeting an ECR registry
    is:


    zenml service-connector register <CONNECTOR_NAME> --type aws --resource-type docker-registry
    --auto-configure


    Example Command Output


    $ zenml service-connector register aws-us-east-1 --type aws --resource-type docker-registry
    --auto-configure


    ⠸ Registering service connector ''aws-us-east-1''...


    Successfully registered service connector `aws-us-east-1` with access to the following
    resources:


    ┏━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓


    ┃   RESOURCE TYPE    │ RESOURCE NAMES                               ┃


    ┠────────────────────┼──────────────────────────────────────────────┨


    ┃ 🐳 docker-registry │ 715803424590.dkr.ecr.us-east-1.amazonaws.com ┃


    ┗━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┛


    Note: Please remember to grant the entity associated with your AWS credentials
    permissions to read and write to one or more ECR repositories as well as to list
    accessible ECR repositories. For a full list of permissions required to use an
    AWS Service Connector to access an ECR registry, please refer to the AWS Service
    Connector ECR registry resource type documentation or read the documentation available
    in the interactive CLI commands and dashboard. The AWS Service Connector supports
    many different authentication methods with different levels of security and convenience.
    You should pick the one that best fits your use case.


    If you already have one or more AWS Service Connectors configured in your ZenML
    deployment, you can check which of them can be used to access the ECR registry
    you want to use for your AWS Container Registry by running e.g.:


    zenml service-connector list-resources --connector-type aws --resource-type docker-registry


    Example Command Output'
- source_sentence: How can I customize the Docker settings for individual steps in
    a ZenML pipeline?
  sentences:
  - '🌎Environment Variables


    How to control ZenML behavior with environmental variables.


    There are a few pre-defined environmental variables that can be used to control
    the behavior of ZenML. See the list below with default values and options:


    Logging verbosity


    export ZENML_LOGGING_VERBOSITY=INFO


    Choose from INFO, WARN, ERROR, CRITICAL, DEBUG.


    Disable step logs


    Usually, ZenML stores step logs in the artifact store, but this can sometimes
    cause performance bottlenecks, especially if the code utilizes progress bars.


    If you want to configure whether logged output from steps is stored or not, set
    the ZENML_DISABLE_STEP_LOGS_STORAGE environment variable to true. Note that this
    will mean that logs from your steps will no longer be stored and thus won''t be
    visible on the dashboard anymore.


    export ZENML_DISABLE_STEP_LOGS_STORAGE=false


    ZenML repository path


    To configure where ZenML will install and look for its repository, set the environment
    variable ZENML_REPOSITORY_PATH.


    export ZENML_REPOSITORY_PATH=/path/to/somewhere


    Analytics


    Please see our full page on what analytics are tracked and how you can opt out,
    but the quick summary is that you can set this to false if you want to opt out
    of analytics.


    export ZENML_ANALYTICS_OPT_IN=false


    Debug mode


    Setting to true switches to developer mode:


    export ZENML_DEBUG=true


    Active stack


    Setting the ZENML_ACTIVE_STACK_ID to a specific UUID will make the corresponding
    stack the active stack:


    export ZENML_ACTIVE_STACK_ID=<UUID-OF-YOUR-STACK>


    Prevent pipeline execution


    When true, this prevents a pipeline from executing:


    export ZENML_PREVENT_PIPELINE_EXECUTION=false


    Disable rich traceback


    Set to false to disable the rich traceback:


    export ZENML_ENABLE_RICH_TRACEBACK=true


    Disable colourful logging


    If you wish to disable colourful logging, set the following environment variable:


    ZENML_LOGGING_COLORS_DISABLED=true'
  - 'pd.Series(model.predict(data))


    return predictionsHowever, this approach has the downside that if the step is
    cached, then it could lead to unexpected results. You could simply disable the
    cache in the above step or the corresponding pipeline. However, one other way
    of achieving this would be to resolve the artifact at the pipeline level:


    from typing_extensions import Annotated


    from zenml import get_pipeline_context, pipeline, Model


    from zenml.enums import ModelStages


    import pandas as pd


    from sklearn.base import ClassifierMixin


    @step


    def predict(


    model: ClassifierMixin,


    data: pd.DataFrame,


    ) -> Annotated[pd.Series, "predictions"]:


    predictions = pd.Series(model.predict(data))


    return predictions


    @pipeline(


    model=Model(


    name="iris_classifier",


    # Using the production stage


    version=ModelStages.PRODUCTION,


    ),


    def do_predictions():


    # model name and version are derived from pipeline context


    model = get_pipeline_context().model


    inference_data = load_data()


    predict(


    # Here, we load in the `trained_model` from a trainer step


    model=model.get_model_artifact("trained_model"),


    data=inference_data,


    if __name__ == "__main__":


    do_predictions()


    Ultimately, both approaches are fine. You should decide which one to use based
    on your own preferences.


    PreviousLoad artifacts into memory


    NextVisualizing artifacts


    Last updated 15 days ago'
  - 'Docker settings on a step


    You have the option to customize the Docker settings at a step level.


    By default every step of a pipeline uses the same Docker image that is defined
    at the pipeline level. Sometimes your steps will have special requirements that
    make it necessary to define a different Docker image for one or many steps. This
    can easily be accomplished by adding the DockerSettings to the step decorator
    directly.


    from zenml import step


    from zenml.config import DockerSettings


    @step(


    settings={


    "docker": DockerSettings(


    parent_image="pytorch/pytorch:1.12.1-cuda11.3-cudnn8-runtime"


    def training(...):


    ...


    Alternatively, this can also be done within the configuration file.


    steps:


    training:


    settings:


    docker:


    parent_image: pytorch/pytorch:2.2.0-cuda11.8-cudnn8-runtime


    required_integrations:


    gcp


    github


    requirements:


    zenml  # Make sure to include ZenML for other parent images


    numpy


    PreviousDocker settings on a pipeline


    NextSpecify pip dependencies and apt packages


    Last updated 19 days ago'
- source_sentence: How do I configure the Kubernetes Service Connector to connect
    ZenML to Kubernetes clusters?
  sentences:
  - 'Kubernetes Service Connector


    Configuring Kubernetes Service Connectors to connect ZenML to Kubernetes clusters.


    The ZenML Kubernetes service connector facilitates authenticating and connecting
    to a Kubernetes cluster. The connector can be used to access to any generic Kubernetes
    cluster by providing pre-authenticated Kubernetes python clients to Stack Components
    that are linked to it and also allows configuring the local Kubernetes CLI (i.e.
    kubectl).


    Prerequisites


    The Kubernetes Service Connector is part of the Kubernetes ZenML integration.
    You can either install the entire integration or use a pypi extra to install it
    independently of the integration:


    pip install "zenml[connectors-kubernetes]" installs only prerequisites for the
    Kubernetes Service Connector Type


    zenml integration install kubernetes installs the entire Kubernetes ZenML integration


    A local Kubernetes CLI (i.e. kubectl ) and setting up local kubectl configuration
    contexts is not required to access Kubernetes clusters in your Stack Components
    through the Kubernetes Service Connector.


    $ zenml service-connector list-types --type kubernetes


    ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━┯━━━━━━━━┓


    ┃             NAME             │ TYPE          │ RESOURCE TYPES        │ AUTH
    METHODS │ LOCAL │ REMOTE ┃


    ┠──────────────────────────────┼───────────────┼───────────────────────┼──────────────┼───────┼────────┨


    ┃ Kubernetes Service Connector │ 🌀 kubernetes │ 🌀 kubernetes-cluster │ password     │
    ✅    │ ✅     ┃


    ┃                              │               │                       │ token        │       │        ┃


    ┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━┷━━━━━━━━┛


    Resource Types


    The Kubernetes Service Connector only supports authenticating to and granting
    access to a generic Kubernetes cluster. This type of resource is identified by
    the kubernetes-cluster Resource Type.'
  - 'to the container registry.


    Authentication MethodsIntegrating and using an Azure Container Registry in your
    pipelines is not possible without employing some form of authentication. If you''re
    looking for a quick way to get started locally, you can use the Local Authentication
    method. However, the recommended way to authenticate to the Azure cloud platform
    is through an Azure Service Connector. This is particularly useful if you are
    configuring ZenML stacks that combine the Azure Container Registry with other
    remote stack components also running in Azure.


    This method uses the Docker client authentication available in the environment
    where the ZenML code is running. On your local machine, this is the quickest way
    to configure an Azure Container Registry. You don''t need to supply credentials
    explicitly when you register the Azure Container Registry, as it leverages the
    local credentials and configuration that the Azure CLI and Docker client store
    on your local machine. However, you will need to install and set up the Azure
    CLI on your machine as a prerequisite, as covered in the Azure CLI documentation,
    before you register the Azure Container Registry.


    With the Azure CLI installed and set up with credentials, you need to login to
    the container registry so Docker can pull and push images:


    # Fill your REGISTRY_NAME in the placeholder in the following command.


    # You can find the REGISTRY_NAME as part of your registry URI: `<REGISTRY_NAME>.azurecr.io`


    az acr login --name=<REGISTRY_NAME>


    Stacks using the Azure Container Registry set up with local authentication are
    not portable across environments. To make ZenML pipelines fully portable, it is
    recommended to use an Azure Service Connector to link your Azure Container Registry
    to the remote ACR registry.'
  - 'he Post-execution workflow has changed as follows:The get_pipelines and get_pipeline
    methods have been moved out of the Repository (i.e. the new Client ) class and
    lie directly in the post_execution module now. To use the user has to do:


    from zenml.post_execution import get_pipelines, get_pipeline


    New methods to directly get a run have been introduced: get_run and get_unlisted_runs
    method has been introduced to get unlisted runs.


    Usage remains largely similar. Please read the new docs for post-execution to
    inform yourself of what further has changed.


    How to migrate: Replace all post-execution workflows from the paradigm of Repository.get_pipelines
    or Repository.get_pipeline_run to the corresponding post_execution methods.


    📡Future Changes


    While this rehaul is big and will break previous releases, we do have some more
    work left to do. However we also expect this to be the last big rehaul of ZenML
    before our 1.0.0 release, and no other release will be so hard breaking as this
    one. Currently planned future breaking changes are:


    Following the metadata store, the secrets manager stack component might move out
    of the stack.


    ZenML StepContext might be deprecated.


    🐞 Reporting Bugs


    While we have tried our best to document everything that has changed, we realize
    that mistakes can be made and smaller changes overlooked. If this is the case,
    or you encounter a bug at any time, the ZenML core team and community are available
    around the clock on the growing Slack community.


    For bug reports, please also consider submitting a GitHub Issue.


    Lastly, if the new changes have left you desiring a feature, then consider adding
    it to our public feature voting board. Before doing so, do check what is already
    on there and consider upvoting the features you desire the most.


    PreviousMigration guide


    NextMigration guide 0.23.0 → 0.30.0


    Last updated 12 days ago'
model-index:
- name: zenml/finetuned-snowflake-arctic-embed-m
  results:
  - task:
      type: information-retrieval
      name: Information Retrieval
    dataset:
      name: dim 384
      type: dim_384
    metrics:
    - type: cosine_accuracy@1
      value: 0.3614457831325301
      name: Cosine Accuracy@1
    - type: cosine_accuracy@3
      value: 0.6024096385542169
      name: Cosine Accuracy@3
    - type: cosine_accuracy@5
      value: 0.6987951807228916
      name: Cosine Accuracy@5
    - type: cosine_accuracy@10
      value: 0.7831325301204819
      name: Cosine Accuracy@10
    - type: cosine_precision@1
      value: 0.3614457831325301
      name: Cosine Precision@1
    - type: cosine_precision@3
      value: 0.2008032128514056
      name: Cosine Precision@3
    - type: cosine_precision@5
      value: 0.1397590361445783
      name: Cosine Precision@5
    - type: cosine_precision@10
      value: 0.07831325301204817
      name: Cosine Precision@10
    - type: cosine_recall@1
      value: 0.3614457831325301
      name: Cosine Recall@1
    - type: cosine_recall@3
      value: 0.6024096385542169
      name: Cosine Recall@3
    - type: cosine_recall@5
      value: 0.6987951807228916
      name: Cosine Recall@5
    - type: cosine_recall@10
      value: 0.7831325301204819
      name: Cosine Recall@10
    - type: cosine_ndcg@10
      value: 0.5756072832948543
      name: Cosine Ndcg@10
    - type: cosine_mrr@10
      value: 0.5091365461847391
      name: Cosine Mrr@10
    - type: cosine_map@100
      value: 0.5165480061197206
      name: Cosine Map@100
  - task:
      type: information-retrieval
      name: Information Retrieval
    dataset:
      name: dim 256
      type: dim_256
    metrics:
    - type: cosine_accuracy@1
      value: 0.3674698795180723
      name: Cosine Accuracy@1
    - type: cosine_accuracy@3
      value: 0.6144578313253012
      name: Cosine Accuracy@3
    - type: cosine_accuracy@5
      value: 0.6987951807228916
      name: Cosine Accuracy@5
    - type: cosine_accuracy@10
      value: 0.7710843373493976
      name: Cosine Accuracy@10
    - type: cosine_precision@1
      value: 0.3674698795180723
      name: Cosine Precision@1
    - type: cosine_precision@3
      value: 0.2048192771084337
      name: Cosine Precision@3
    - type: cosine_precision@5
      value: 0.1397590361445783
      name: Cosine Precision@5
    - type: cosine_precision@10
      value: 0.07710843373493974
      name: Cosine Precision@10
    - type: cosine_recall@1
      value: 0.3674698795180723
      name: Cosine Recall@1
    - type: cosine_recall@3
      value: 0.6144578313253012
      name: Cosine Recall@3
    - type: cosine_recall@5
      value: 0.6987951807228916
      name: Cosine Recall@5
    - type: cosine_recall@10
      value: 0.7710843373493976
      name: Cosine Recall@10
    - type: cosine_ndcg@10
      value: 0.5732430988480587
      name: Cosine Ndcg@10
    - type: cosine_mrr@10
      value: 0.509569229298145
      name: Cosine Mrr@10
    - type: cosine_map@100
      value: 0.5167702755195493
      name: Cosine Map@100
  - task:
      type: information-retrieval
      name: Information Retrieval
    dataset:
      name: dim 128
      type: dim_128
    metrics:
    - type: cosine_accuracy@1
      value: 0.29518072289156627
      name: Cosine Accuracy@1
    - type: cosine_accuracy@3
      value: 0.5542168674698795
      name: Cosine Accuracy@3
    - type: cosine_accuracy@5
      value: 0.6506024096385542
      name: Cosine Accuracy@5
    - type: cosine_accuracy@10
      value: 0.7469879518072289
      name: Cosine Accuracy@10
    - type: cosine_precision@1
      value: 0.29518072289156627
      name: Cosine Precision@1
    - type: cosine_precision@3
      value: 0.18473895582329317
      name: Cosine Precision@3
    - type: cosine_precision@5
      value: 0.1301204819277108
      name: Cosine Precision@5
    - type: cosine_precision@10
      value: 0.07469879518072288
      name: Cosine Precision@10
    - type: cosine_recall@1
      value: 0.29518072289156627
      name: Cosine Recall@1
    - type: cosine_recall@3
      value: 0.5542168674698795
      name: Cosine Recall@3
    - type: cosine_recall@5
      value: 0.6506024096385542
      name: Cosine Recall@5
    - type: cosine_recall@10
      value: 0.7469879518072289
      name: Cosine Recall@10
    - type: cosine_ndcg@10
      value: 0.5199227959343978
      name: Cosine Ndcg@10
    - type: cosine_mrr@10
      value: 0.44722939376553855
      name: Cosine Mrr@10
    - type: cosine_map@100
      value: 0.4541483656933914
      name: Cosine Map@100
  - task:
      type: information-retrieval
      name: Information Retrieval
    dataset:
      name: dim 64
      type: dim_64
    metrics:
    - type: cosine_accuracy@1
      value: 0.28313253012048195
      name: Cosine Accuracy@1
    - type: cosine_accuracy@3
      value: 0.5180722891566265
      name: Cosine Accuracy@3
    - type: cosine_accuracy@5
      value: 0.5843373493975904
      name: Cosine Accuracy@5
    - type: cosine_accuracy@10
      value: 0.6746987951807228
      name: Cosine Accuracy@10
    - type: cosine_precision@1
      value: 0.28313253012048195
      name: Cosine Precision@1
    - type: cosine_precision@3
      value: 0.17269076305220882
      name: Cosine Precision@3
    - type: cosine_precision@5
      value: 0.11686746987951806
      name: Cosine Precision@5
    - type: cosine_precision@10
      value: 0.06746987951807228
      name: Cosine Precision@10
    - type: cosine_recall@1
      value: 0.28313253012048195
      name: Cosine Recall@1
    - type: cosine_recall@3
      value: 0.5180722891566265
      name: Cosine Recall@3
    - type: cosine_recall@5
      value: 0.5843373493975904
      name: Cosine Recall@5
    - type: cosine_recall@10
      value: 0.6746987951807228
      name: Cosine Recall@10
    - type: cosine_ndcg@10
      value: 0.47987356927913916
      name: Cosine Ndcg@10
    - type: cosine_mrr@10
      value: 0.4177519602218399
      name: Cosine Mrr@10
    - type: cosine_map@100
      value: 0.4261749847732839
      name: Cosine Map@100
---

# zenml/finetuned-snowflake-arctic-embed-m

This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [Snowflake/snowflake-arctic-embed-m](https://huggingface.co/Snowflake/snowflake-arctic-embed-m). It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.

## Model Details

### Model Description
- **Model Type:** Sentence Transformer
- **Base model:** [Snowflake/snowflake-arctic-embed-m](https://huggingface.co/Snowflake/snowflake-arctic-embed-m) <!-- at revision 71bc94c8f9ea1e54fba11167004205a65e5da2cc -->
- **Maximum Sequence Length:** 512 tokens
- **Output Dimensionality:** 768 dimensions
- **Similarity Function:** Cosine Similarity
<!-- - **Training Dataset:** Unknown -->
- **Language:** en
- **License:** apache-2.0

### Model Sources

- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)

### Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
  (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
  (2): Normalize()
)
```
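
For reference, the same module stack can be assembled by hand with the `sentence_transformers.models` API. The snippet below is a minimal sketch that mirrors the architecture listed above; it is not how this checkpoint was produced, and loading the published model directly (see the Usage section) is the normal route.

```python
from sentence_transformers import SentenceTransformer, models

# Rebuild the stack above by hand: BERT encoder -> CLS pooling -> L2 normalization.
transformer = models.Transformer("Snowflake/snowflake-arctic-embed-m", max_seq_length=512)
pooling = models.Pooling(transformer.get_word_embedding_dimension(), pooling_mode="cls")
normalize = models.Normalize()

model = SentenceTransformer(modules=[transformer, pooling, normalize])
print(model)  # prints a module stack equivalent to the one shown above
```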

## Usage

### Direct Usage (Sentence Transformers)

First install the Sentence Transformers library:

```bash
pip install -U sentence-transformers
```

Then you can load this model and run inference.
```python
from sentence_transformers import SentenceTransformer

# Download from the 🤗 Hub
model = SentenceTransformer("zenml/finetuned-snowflake-arctic-embed-m")
# Run inference
sentences = [
    'How do I configure the Kubernetes Service Connector to connect ZenML to Kubernetes clusters?',
    'Kubernetes Service Connector\n\nConfiguring Kubernetes Service Connectors to connect ZenML to Kubernetes clusters.\n\nThe ZenML Kubernetes service connector facilitates authenticating and connecting to a Kubernetes cluster. The connector can be used to access to any generic Kubernetes cluster by providing pre-authenticated Kubernetes python clients to Stack Components that are linked to it and also allows configuring the local Kubernetes CLI (i.e. kubectl).\n\nPrerequisites\n\nThe Kubernetes Service Connector is part of the Kubernetes ZenML integration. You can either install the entire integration or use a pypi extra to install it independently of the integration:\n\npip install "zenml[connectors-kubernetes]" installs only prerequisites for the Kubernetes Service Connector Type\n\nzenml integration install kubernetes installs the entire Kubernetes ZenML integration\n\nA local Kubernetes CLI (i.e. kubectl ) and setting up local kubectl configuration contexts is not required to access Kubernetes clusters in your Stack Components through the Kubernetes Service Connector.\n\n$ zenml service-connector list-types --type kubernetes\n\n┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━┯━━━━━━━━┓\n\n┃             NAME             │ TYPE          │ RESOURCE TYPES        │ AUTH METHODS │ LOCAL │ REMOTE ┃\n\n┠──────────────────────────────┼───────────────┼───────────────────────┼──────────────┼───────┼────────┨\n\n┃ Kubernetes Service Connector │ 🌀 kubernetes │ 🌀 kubernetes-cluster │ password     │ ✅    │ ✅     ┃\n\n┃                              │               │                       │ token        │       │        ┃\n\n┗━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━┷━━━━━━━━┛\n\nResource Types\n\nThe Kubernetes Service Connector only supports authenticating to and granting access to a generic Kubernetes cluster. This type of resource is identified by the kubernetes-cluster Resource Type.',
    'he Post-execution workflow has changed as follows:The get_pipelines and get_pipeline methods have been moved out of the Repository (i.e. the new Client ) class and lie directly in the post_execution module now. To use the user has to do:\n\nfrom zenml.post_execution import get_pipelines, get_pipeline\n\nNew methods to directly get a run have been introduced: get_run and get_unlisted_runs method has been introduced to get unlisted runs.\n\nUsage remains largely similar. Please read the new docs for post-execution to inform yourself of what further has changed.\n\nHow to migrate: Replace all post-execution workflows from the paradigm of Repository.get_pipelines or Repository.get_pipeline_run to the corresponding post_execution methods.\n\n📡Future Changes\n\nWhile this rehaul is big and will break previous releases, we do have some more work left to do. However we also expect this to be the last big rehaul of ZenML before our 1.0.0 release, and no other release will be so hard breaking as this one. Currently planned future breaking changes are:\n\nFollowing the metadata store, the secrets manager stack component might move out of the stack.\n\nZenML StepContext might be deprecated.\n\n🐞 Reporting Bugs\n\nWhile we have tried our best to document everything that has changed, we realize that mistakes can be made and smaller changes overlooked. If this is the case, or you encounter a bug at any time, the ZenML core team and community are available around the clock on the growing Slack community.\n\nFor bug reports, please also consider submitting a GitHub Issue.\n\nLastly, if the new changes have left you desiring a feature, then consider adding it to our public feature voting board. Before doing so, do check what is already on there and consider upvoting the features you desire the most.\n\nPreviousMigration guide\n\nNextMigration guide 0.23.0 → 0.30.0\n\nLast updated 12 days ago',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
```
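
Because this model was trained with MatryoshkaLoss (see Training Details below), its embeddings can also be truncated to a smaller dimension at load time. A minimal sketch, using a placeholder model id rather than this model's actual id:

```python
from sentence_transformers import SentenceTransformer

# Placeholder id — substitute the model id used in the snippet above.
model = SentenceTransformer("your-username/your-model-id", truncate_dim=128)

embeddings = model.encode([
    "How do I register a GCS Artifact Store using the ZenML CLI?",
    "Which Service Connector do I need for a Kubernetes cluster?",
])
print(embeddings.shape)
# (2, 128)
```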

<!--
### Direct Usage (Transformers)

<details><summary>Click to see the direct usage in Transformers</summary>

</details>
-->

<!--
### Downstream Usage (Sentence Transformers)

You can finetune this model on your own dataset.

<details><summary>Click to expand</summary>

</details>
-->

<!--
### Out-of-Scope Use

*List how the model may foreseeably be misused and address what users ought not to do with the model.*
-->

## Evaluation

### Metrics

#### Information Retrieval
* Dataset: `dim_384`
* Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator)

| Metric              | Value      |
|:--------------------|:-----------|
| cosine_accuracy@1   | 0.3614     |
| cosine_accuracy@3   | 0.6024     |
| cosine_accuracy@5   | 0.6988     |
| cosine_accuracy@10  | 0.7831     |
| cosine_precision@1  | 0.3614     |
| cosine_precision@3  | 0.2008     |
| cosine_precision@5  | 0.1398     |
| cosine_precision@10 | 0.0783     |
| cosine_recall@1     | 0.3614     |
| cosine_recall@3     | 0.6024     |
| cosine_recall@5     | 0.6988     |
| cosine_recall@10    | 0.7831     |
| cosine_ndcg@10      | 0.5756     |
| cosine_mrr@10       | 0.5091     |
| **cosine_map@100**  | **0.5165** |

#### Information Retrieval
* Dataset: `dim_256`
* Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator)

| Metric              | Value      |
|:--------------------|:-----------|
| cosine_accuracy@1   | 0.3675     |
| cosine_accuracy@3   | 0.6145     |
| cosine_accuracy@5   | 0.6988     |
| cosine_accuracy@10  | 0.7711     |
| cosine_precision@1  | 0.3675     |
| cosine_precision@3  | 0.2048     |
| cosine_precision@5  | 0.1398     |
| cosine_precision@10 | 0.0771     |
| cosine_recall@1     | 0.3675     |
| cosine_recall@3     | 0.6145     |
| cosine_recall@5     | 0.6988     |
| cosine_recall@10    | 0.7711     |
| cosine_ndcg@10      | 0.5732     |
| cosine_mrr@10       | 0.5096     |
| **cosine_map@100**  | **0.5168** |

#### Information Retrieval
* Dataset: `dim_128`
* Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator)

| Metric              | Value      |
|:--------------------|:-----------|
| cosine_accuracy@1   | 0.2952     |
| cosine_accuracy@3   | 0.5542     |
| cosine_accuracy@5   | 0.6506     |
| cosine_accuracy@10  | 0.747      |
| cosine_precision@1  | 0.2952     |
| cosine_precision@3  | 0.1847     |
| cosine_precision@5  | 0.1301     |
| cosine_precision@10 | 0.0747     |
| cosine_recall@1     | 0.2952     |
| cosine_recall@3     | 0.5542     |
| cosine_recall@5     | 0.6506     |
| cosine_recall@10    | 0.747      |
| cosine_ndcg@10      | 0.5199     |
| cosine_mrr@10       | 0.4472     |
| **cosine_map@100**  | **0.4541** |

#### Information Retrieval
* Dataset: `dim_64`
* Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator)

| Metric              | Value      |
|:--------------------|:-----------|
| cosine_accuracy@1   | 0.2831     |
| cosine_accuracy@3   | 0.5181     |
| cosine_accuracy@5   | 0.5843     |
| cosine_accuracy@10  | 0.6747     |
| cosine_precision@1  | 0.2831     |
| cosine_precision@3  | 0.1727     |
| cosine_precision@5  | 0.1169     |
| cosine_precision@10 | 0.0675     |
| cosine_recall@1     | 0.2831     |
| cosine_recall@3     | 0.5181     |
| cosine_recall@5     | 0.5843     |
| cosine_recall@10    | 0.6747     |
| cosine_ndcg@10      | 0.4799     |
| cosine_mrr@10       | 0.4178     |
| **cosine_map@100**  | **0.4262** |
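
The four tables above come from `InformationRetrievalEvaluator` runs at each Matryoshka dimension. A hedged sketch of how such an evaluation can be reproduced follows; the queries, corpus, and model id below are placeholders, not the actual evaluation split:

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import InformationRetrievalEvaluator, SequentialEvaluator

# Placeholder data — the real evaluation used held-out question/documentation-chunk
# pairs like the ones described under Training Details.
corpus = {
    "doc1": "The GCS Artifact Store flavor is provided by the GCP ZenML integration ...",
    "doc2": "The Kubernetes Service Connector facilitates authenticating to a cluster ...",
}
queries = {"q1": "How do I register a GCS Artifact Store using the ZenML CLI?"}
relevant_docs = {"q1": {"doc1"}}

# One evaluator per Matryoshka dimension, run sequentially.
evaluators = [
    InformationRetrievalEvaluator(
        queries=queries,
        corpus=corpus,
        relevant_docs=relevant_docs,
        name=f"dim_{dim}",
        truncate_dim=dim,  # truncate embeddings to `dim` before scoring
    )
    for dim in [384, 256, 128, 64]
]
evaluator = SequentialEvaluator(evaluators)

model = SentenceTransformer("your-username/your-model-id")  # placeholder id
print(evaluator(model))
```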

<!--
## Bias, Risks and Limitations

*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
-->

<!--
### Recommendations

*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
-->

## Training Details

### Training Dataset

#### Unnamed Dataset


* Size: 1,490 training samples
* Columns: <code>positive</code> and <code>anchor</code>
* Approximate statistics based on the first 1000 samples:
  |         | positive                                                                         | anchor                                                                               |
  |:--------|:---------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|
  | type    | string                                                                           | string                                                                               |
  | details | <ul><li>min: 9 tokens</li><li>mean: 21.2 tokens</li><li>max: 49 tokens</li></ul> | <ul><li>min: 21 tokens</li><li>mean: 376.51 tokens</li><li>max: 512 tokens</li></ul> |
* Samples:
  | positive                                                                                                                  | anchor                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                              |
  |:--------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>How is the verification process different for multi-instance and single-instance Service Connectors?</code>         | <code>ing resources:<br><br>┏━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┓┃ RESOURCE TYPE │ RESOURCE NAMES ┃<br><br>┠───────────────┼────────────────┨<br><br>┃ 📦 s3-bucket  │ s3://zenfiles  ┃<br><br>┗━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┛<br><br>The following might help understand the difference between scopes:<br><br>the difference between a multi-instance and a multi-type Service Connector is that the Resource Type scope is locked to a particular value during configuration for the multi-instance Service Connector<br><br>similarly, the difference between a multi-instance and a multi-type Service Connector is that the Resource Name (Resource ID) scope is locked to a particular value during configuration for the single-instance Service Connector<br><br>Service Connector Verification<br><br>When registering Service Connectors, the authentication configuration and credentials are automatically verified to ensure that they can indeed be used to gain access to the target resources:<br><br>for multi-type Service Connectors, this verification means checking that the configured credentials can be used to authenticate successfully to the remote service, as well as listing all resources that the credentials have permission to access for each Resource Type supported by the Service Connector Type.<br><br>for multi-instance Service Connectors, this verification step means listing all resources that the credentials have permission to access in addition to validating that the credentials can be used to authenticate to the target service or platform.<br><br>for single-instance Service Connectors, the verification step simply checks that the configured credentials have permission to access the target resource.<br><br>The verification can also be performed later on an already registered Service Connector. Furthermore, for multi-type and multi-instance Service Connectors, the verification operation can be scoped to a Resource Type and a Resource Name.<br><br>The following shows how a multi-type, a multi-instance and a single-instance Service Connector can be verified with multiple scopes after registration.</code> |
  | <code>What are the benefits of using the SkyPilot VM Orchestrator in ZenML for running machine learning workloads?</code> | <code>Skypilot VM Orchestrator<br><br>Orchestrating your pipelines to run on VMs using SkyPilot.<br><br>The SkyPilot VM Orchestrator is an integration provided by ZenML that allows you to provision and manage virtual machines (VMs) on any cloud provider supported by the SkyPilot framework. This integration is designed to simplify the process of running machine learning workloads on the cloud, offering cost savings, high GPU availability, and managed execution, We recommend using the SkyPilot VM Orchestrator if you need access to GPUs for your workloads, but don't want to deal with the complexities of managing cloud infrastructure or expensive managed solutions.<br><br>This component is only meant to be used within the context of a remote ZenML deployment scenario. Usage with a local ZenML deployment may lead to unexpected behavior!<br><br>SkyPilot VM Orchestrator is currently supported only for Python 3.8 and 3.9.<br><br>When to use it<br><br>You should use the SkyPilot VM Orchestrator if:<br><br>you want to maximize cost savings by leveraging spot VMs and auto-picking the cheapest VM/zone/region/cloud.<br><br>you want to ensure high GPU availability by provisioning VMs in all zones/regions/clouds you have access to.<br><br>you don't need a built-in UI of the orchestrator. (You can still use ZenML's Dashboard to view and monitor your pipelines/artifacts.)<br><br>you're not willing to maintain Kubernetes-based solutions or pay for managed solutions like Sagemaker.<br><br>How it works<br><br>The orchestrator leverages the SkyPilot framework to handle the provisioning and scaling of VMs. It automatically manages the process of launching VMs for your pipelines, with support for both on-demand and managed spot VMs. While you can select the VM type you want to use, the orchestrator also includes an optimizer that automatically selects the cheapest VM/zone/region/cloud for your workloads. Finally, the orchestrator includes an autostop feature that cleans up idle clusters, preventing unnecessary cloud costs.</code>                                                                          |
  | <code>How do I register a GCS Artifact Store using the ZenML CLI?</code>                                                  | <code>se Python <3.11 together with the GCP integration.The GCS Artifact Store flavor is provided by the GCP ZenML integration, you need to install it on your local machine to be able to register a GCS Artifact Store and add it to your stack:<br><br>zenml integration install gcp -y<br><br>The only configuration parameter mandatory for registering a GCS Artifact Store is the root path URI, which needs to point to a GCS bucket and take the form gs://bucket-name. Please read the Google Cloud Storage documentation on how to configure a GCS bucket.<br><br>With the URI to your GCS bucket known, registering a GCS Artifact Store can be done as follows:<br><br># Register the GCS artifact store<br><br>zenml artifact-store register gs_store -f gcp --path=gs://bucket-name<br><br># Register and set a stack with the new artifact store<br><br>zenml stack register custom_stack -a gs_store ... --set<br><br>Depending on your use case, however, you may also need to provide additional configuration parameters pertaining to authentication to match your deployment scenario.<br><br>Infrastructure Deployment<br><br>A GCS Artifact Store can be deployed directly from the ZenML CLI:<br><br>zenml artifact-store deploy gcs_artifact_store --flavor=gcp --provider=gcp ...<br><br>You can pass other configurations specific to the stack components as key-value arguments. If you don't provide a name, a random one is generated for you. For more information about how to work use the CLI for this, please refer to the dedicated documentation section.<br><br>Authentication Methods<br><br>Integrating and using a GCS Artifact Store in your pipelines is not possible without employing some form of authentication. If you're looking for a quick way to get started locally, you can use the Implicit Authentication method. However, the recommended way to authenticate to the GCP cloud platform is through a GCP Service Connector. This is particularly useful if you are configuring ZenML stacks that combine the GCS Artifact Store with other remote stack components also running in GCP.</code>                                           |
* Loss: [<code>MatryoshkaLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#matryoshkaloss) with these parameters:
  ```json
  {
      "loss": "MultipleNegativesRankingLoss",
      "matryoshka_dims": [
          384,
          256,
          128,
          64
      ],
      "matryoshka_weights": [
          1,
          1,
          1,
          1
      ],
      "n_dims_per_step": -1
  }
  ```
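
The JSON above corresponds to wrapping `MultipleNegativesRankingLoss` inside `MatryoshkaLoss`. A minimal sketch of how this loss can be constructed in Sentence Transformers; the base model id here is a hypothetical placeholder, not necessarily the base model this card was trained from:

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.losses import MatryoshkaLoss, MultipleNegativesRankingLoss

# Hypothetical base model id used only for illustration.
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# MultipleNegativesRankingLoss treats the other pairs in a batch as in-batch
# negatives; MatryoshkaLoss re-applies it on embeddings truncated to each of
# the listed dimensions, weighted equally as in the JSON above.
base_loss = MultipleNegativesRankingLoss(model)
loss = MatryoshkaLoss(
    model,
    base_loss,
    matryoshka_dims=[384, 256, 128, 64],
    matryoshka_weights=[1, 1, 1, 1],
    n_dims_per_step=-1,  # use every dimension at every training step
)
```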

### Training Hyperparameters
#### Non-Default Hyperparameters

- `eval_strategy`: epoch
- `per_device_train_batch_size`: 32
- `per_device_eval_batch_size`: 16
- `gradient_accumulation_steps`: 16
- `learning_rate`: 2e-05
- `num_train_epochs`: 4
- `lr_scheduler_type`: cosine
- `warmup_ratio`: 0.1
- `bf16`: True
- `tf32`: True
- `load_best_model_at_end`: True
- `optim`: adamw_torch_fused
- `batch_sampler`: no_duplicates
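
For orientation, a sketch of how the non-default values above map onto `SentenceTransformerTrainingArguments`; `output_dir` and `save_strategy` are assumptions not listed in this card:

```python
from sentence_transformers import SentenceTransformerTrainingArguments
from sentence_transformers.training_args import BatchSamplers

args = SentenceTransformerTrainingArguments(
    output_dir="finetuned-matryoshka",          # hypothetical output directory
    num_train_epochs=4,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=16,
    gradient_accumulation_steps=16,
    learning_rate=2e-5,
    lr_scheduler_type="cosine",
    warmup_ratio=0.1,
    bf16=True,
    tf32=True,
    optim="adamw_torch_fused",
    eval_strategy="epoch",
    save_strategy="epoch",                      # assumed; needed for load_best_model_at_end
    load_best_model_at_end=True,
    batch_sampler=BatchSamplers.NO_DUPLICATES,  # avoid duplicate samples within a batch
)
```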

#### All Hyperparameters
<details><summary>Click to expand</summary>

- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: epoch
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 32
- `per_device_eval_batch_size`: 16
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 16
- `eval_accumulation_steps`: None
- `learning_rate`: 2e-05
- `weight_decay`: 0.0
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1.0
- `num_train_epochs`: 4
- `max_steps`: -1
- `lr_scheduler_type`: cosine
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.1
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: True
- `logging_nan_inf_filter`: True
- `save_safetensors`: True
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 42
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: True
- `fp16`: False
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: True
- `local_rank`: 0
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: False
- `dataloader_num_workers`: 0
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: True
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: True
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch_fused
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: None
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: False
- `resume_from_checkpoint`: None
- `hub_model_id`: None
- `hub_strategy`: every_save
- `hub_private_repo`: False
- `hub_always_push`: False
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`: 
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `dispatch_batches`: None
- `split_batches`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `batch_sampler`: no_duplicates
- `multi_dataset_batch_sampler`: proportional

</details>

### Training Logs
| Epoch      | Step  | dim_128_cosine_map@100 | dim_256_cosine_map@100 | dim_384_cosine_map@100 | dim_64_cosine_map@100 |
|:----------:|:-----:|:----------------------:|:----------------------:|:----------------------:|:---------------------:|
| 0.6667     | 1     | 0.4134                 | 0.4621                 | 0.4641                 | 0.3385                |
| 2.0        | 3     | 0.4522                 | 0.5063                 | 0.5112                 | 0.4202                |
| **2.6667** | **4** | **0.4541**             | **0.5168**             | **0.5165**             | **0.4262**            |

* The bold row denotes the saved checkpoint.

### Framework Versions
- Python: 3.10.14
- Sentence Transformers: 3.0.1
- Transformers: 4.41.2
- PyTorch: 2.3.1+cu121
- Accelerate: 0.31.0
- Datasets: 2.19.1
- Tokenizers: 0.19.1

## Citation

### BibTeX

#### Sentence Transformers
```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```

#### MatryoshkaLoss
```bibtex
@misc{kusupati2024matryoshka,
    title={Matryoshka Representation Learning}, 
    author={Aditya Kusupati and Gantavya Bhatt and Aniket Rege and Matthew Wallingford and Aditya Sinha and Vivek Ramanujan and William Howard-Snyder and Kaifeng Chen and Sham Kakade and Prateek Jain and Ali Farhadi},
    year={2024},
    eprint={2205.13147},
    archivePrefix={arXiv},
    primaryClass={cs.LG}
}
```

#### MultipleNegativesRankingLoss
```bibtex
@misc{henderson2017efficient,
    title={Efficient Natural Language Response Suggestion for Smart Reply}, 
    author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil},
    year={2017},
    eprint={1705.00652},
    archivePrefix={arXiv},
    primaryClass={cs.CL}
}
```

<!--
## Glossary

*Clearly define terms in order to be accessible across audiences.*
-->

<!--
## Model Card Authors

*Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
-->

<!--
## Model Card Contact

*Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
-->