[gnd-tib-core-xtransformer-en]
name = GND-tib-core XTransformer English
language = en
backend = xtransformer
analyzer = snowball(english)
vocab = gnd-tib-core
batch_size = 16
truncate_length = 256
learning_rate = 0.0001
num_train_epochs = 3
max_leaf_size = 1600
nr_splits = 256
threshold = 0.015
cn = 0.52
cp = 5.33
bootstrap_model = weighted-linear
loss_function = weighted-squared-hinge
max_active_matching_labels = 500
negative_sampling = tfn+man
warmup_steps = 200
ensemble_method = concat-only
post_processor = l3-hinge
cost_sensitive_ranker = True
rel_mode = induce
rel_norm = l1
neg_mining_chain = tfn
model_shortcut = FacebookAI/xlm-roberta-base
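
# Usage sketch (kept as comments so this block remains a valid projects.cfg
# section). This assumes the section lives in an Annif projects.cfg and that
# training/evaluation corpora exist at the hypothetical paths shown below;
# adjust project ID and paths to your setup. The standard Annif CLI commands
# for training, evaluating, and getting subject suggestions would be:
#
#   annif train gnd-tib-core-xtransformer-en data/tib-core-train.tsv
#   annif eval gnd-tib-core-xtransformer-en data/tib-core-test.tsv
#   annif suggest gnd-tib-core-xtransformer-en < document.txt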