Update configuration_nvembed.py (transformers + sentence-transformers) and Infinity usage
Pull request #23, opened by michaelfeil
Files changed:
- README.md (+8 / -0)
- configuration_nvembed.py (+2 / -0)
README.md
CHANGED
@@ -2107,6 +2107,14 @@ scores = (query_embeddings @ passage_embeddings.T) * 100
 print(scores.tolist())
 ```
 
+### Usage (Infinity)
+
+Usage via [Infinity, MIT License](https://github.com/michaelfeil/infinity).
+```bash
+docker run -it --gpus all -v ./data:/app/.cache -p 7997:7997 michaelf34/infinity:0.0.70 \
+ v2 --model-id nvidia/NV-Embed-v2 --revision "refs/pr/23" --batch-size 8
+```
+
 ## License
 This model should not be used for any commercial purpose. Refer the [license](https://spdx.org/licenses/CC-BY-NC-4.0) for the detailed terms.
 
configuration_nvembed.py
CHANGED
@@ -76,6 +76,8 @@ class LatentAttentionConfig(PretrainedConfig):
         self.latent_dim = latent_dim
         self.cross_dim_head = cross_dim_head
 
+        super().__init__(**kwargs)
+
 
 class BidirectionalMistralConfig(MistralConfig):
     model_type = BIDIR_MISTRAL_TYPE