Arthur Câmara
committed on
Commit 624d635 · 1 Parent(s): be8ddac
Readme fix
README.md CHANGED
```diff
@@ -9087,12 +9087,6 @@ passages = [
 tokenizer = AutoTokenizer.from_pretrained("zeta-alpha-ai/Zeta-Alpha-E5-Mistral")
 model = AutoModel.from_pretrained("zeta-alpha-ai/Zeta-Alpha-E5-Mistral")
 
-max_length = 4096
-input_texts = queries + passages
-batch_dict = tokenizer(input_texts, max_length=max_length, padding=True, truncation=True, return_tensors="pt")
-outputs = model(**batch_dict)
-embeddings = last_token_pool(outputs.last_hidden_state, batch_dict['attention_mask'])
-
 # get the embeddings
 max_length = 4096
 input_texts = queries + passages
```
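
For context, the embedding example being deduplicated here relies on a `last_token_pool` helper and on `queries`/`passages` lists defined earlier in the README (not shown in this diff). A minimal sketch of how the corrected section presumably fits together, assuming the usual E5-Mistral-style last-token pooling and placeholder inputs, looks roughly like this:

```python
import torch
from torch import Tensor
from transformers import AutoModel, AutoTokenizer


def last_token_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor:
    # Pool the hidden state of each sequence's last real (non-padding) token.
    # With left padding the last position is always real; with right padding,
    # index each sequence by its own length instead.
    left_padding = (attention_mask[:, -1].sum() == attention_mask.shape[0])
    if left_padding:
        return last_hidden_states[:, -1]
    sequence_lengths = attention_mask.sum(dim=1) - 1
    batch_size = last_hidden_states.shape[0]
    return last_hidden_states[
        torch.arange(batch_size, device=last_hidden_states.device), sequence_lengths
    ]


# Hypothetical placeholders; the real README defines queries and passages above this hunk.
queries = ["example query"]
passages = ["example passage"]

tokenizer = AutoTokenizer.from_pretrained("zeta-alpha-ai/Zeta-Alpha-E5-Mistral")
model = AutoModel.from_pretrained("zeta-alpha-ai/Zeta-Alpha-E5-Mistral")

# get the embeddings
max_length = 4096
input_texts = queries + passages
batch_dict = tokenizer(
    input_texts, max_length=max_length, padding=True, truncation=True, return_tensors="pt"
)
with torch.no_grad():
    outputs = model(**batch_dict)
embeddings = last_token_pool(outputs.last_hidden_state, batch_dict["attention_mask"])
```

This is only a sketch of the post-fix flow: the fix itself simply drops the duplicated block that ran tokenization and pooling before the `# get the embeddings` comment.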