Add new SentenceTransformer model.

Files changed:
- README.md +219 -441
- config.json +1 -1
- model.safetensors +1 -1

README.md CHANGED
- sentence-similarity
- feature-extraction
- dataset_size:100K<n<1M
- loss:CachedMultipleNegativesRankingLoss
base_model: nomic-ai/nomic-embed-text-v1.5
metrics:
- cosine_accuracy

- euclidean_accuracy
- max_accuracy
widget:
- source_sentence: 'search_query: shark'
  sentences:
  - 'search_query: skull'
  - 'search_query: car picture frame'
  - 'search_query: cartera de guchi'
- source_sentence: 'search_query: aolvo'
  sentences:
  - 'search_query: laço homem'
  - 'search_query: vdi to hdmi cable'
  - 'search_query: beads without holes'
- source_sentence: 'search_query: 赤色のカバン'
  sentences:
  - 'search_query: 結婚式 ガーランド'
  - 'search_query: remaches zapatero'
  - 'search_query: small feaux potted plants'
- source_sentence: 'search_query: vipkid'
  sentences:
  - 'search_query: ceiling lamps for kids'
  - 'search_query: apple あいふぉんケース 12'
  - 'search_query: zapatos zaragoza mujer'
- source_sentence: 'search_query: お布団バッグ'
  sentences:
  - 'search_query: 足なしソファー'
  - 'search_query: all color handbag'
  - 'search_query: tundra black out emblems'
pipeline_tag: sentence-similarity
model-index:
- name: SentenceTransformer based on nomic-ai/nomic-embed-text-v1.5

      type: triplet-esci
    metrics:
    - type: cosine_accuracy
      value: 0.787
      name: Cosine Accuracy
    - type: dot_accuracy
      value: 0.22
      name: Dot Accuracy
    - type: manhattan_accuracy
      value: 0.762
      name: Manhattan Accuracy
    - type: euclidean_accuracy
      value: 0.768
      name: Euclidean Accuracy
    - type: max_accuracy
      value: 0.787
      name: Max Accuracy
---
model = SentenceTransformer("sentence_transformers_model_id")
# Run inference
sentences = [
    'search_query: お布団バッグ',
    'search_query: 足なしソファー',
    'search_query: all color handbag',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
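For illustration, the embeddings produced above can be compared with the cosine-similarity helper from sentence-transformers. This is a minimal sketch rather than part of the original card; `sentence_transformers_model_id` is the same placeholder used in the snippet above.

```python
from sentence_transformers import SentenceTransformer, util

# Placeholder id from the snippet above; replace with the actual repository name.
model = SentenceTransformer("sentence_transformers_model_id")

sentences = [
    'search_query: お布団バッグ',
    'search_query: 足なしソファー',
    'search_query: all color handbag',
]
embeddings = model.encode(sentences)

# Pairwise cosine similarities between the three queries (a 3x3 tensor).
scores = util.cos_sim(embeddings, embeddings)
print(scores)
```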

| Metric               | Value     |
|:---------------------|:----------|
| **cosine_accuracy**  | **0.787** |
| dot_accuracy         | 0.22      |
| manhattan_accuracy   | 0.762     |
| euclidean_accuracy   | 0.768     |
| max_accuracy         | 0.787     |
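These accuracies come from the card's triplet-esci evaluation. As a hedged sketch (not the card's actual evaluation script), triplet accuracy of this kind is typically computed with sentence-transformers' `TripletEvaluator`; the three lists below are illustrative placeholders drawn from the sample rows shown later in this card, not the real evaluation split.

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import TripletEvaluator

model = SentenceTransformer("sentence_transformers_model_id")  # placeholder id

# Illustrative triplets; the reported numbers use the held-out ESCI triplets.
anchors   = ["search_query: gucci belts for women"]
positives = ["search_document: Gucci Women's Gg0027o 50Mm Optical Glasses, Gucci, Havana"]
negatives = ["search_document: Gucci G-Gucci Gold PVD Women's Watch(Model:YA125511), Gucci, PVD/Brown"]

evaluator = TripletEvaluator(anchors, positives, negatives, name="triplet-esci")
print(evaluator(model))  # accuracy metrics of the kind tabulated above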

<!--
## Bias, Risks and Limitations
| | anchor | positive | negative |
|:--------|:----------|:----------|:----------|
| type | string | string | string |
| details | <ul><li>min: 7 tokens</li><li>mean: 12.11 tokens</li><li>max: 47 tokens</li></ul> | <ul><li>min: 17 tokens</li><li>mean: 49.91 tokens</li><li>max: 166 tokens</li></ul> | <ul><li>min: 20 tokens</li><li>mean: 50.64 tokens</li><li>max: 152 tokens</li></ul> |
* Samples:
| anchor | positive | negative |
|:---------|:----------|:----------|
| <code>search_query: blー5c</code> | <code>search_document: [EnergyPower] TECSUN PL-368 電池2個セット SSB・同期検波・長波 [交換用バッテリーBL-5C付] デジタルDSPポケット短波ラジオ 超小型 長・中波用外付アンテナ 10キー ポータブルBCL受信機 FMステレオ/LW/MW/SW ワールドバンドレシーバー 850局プリセットメモリー シグナルメーター USB充電 スリープタイマー アラー, TECSUN, PL-368 電池+セット [ブラック]</code> | <code>search_document: RADIWOWで作る SIHUADON R108 ポータブル BCL短波ラジオAM FM LW SW 航空無線 DSPレシーバー LCD 良好屋内および屋外アクティビティの両親への贈り物, RADIWOW, グレー</code> |
| <code>search_query: かわいいロングtシャツ</code> | <code>search_document: レディース ロンt 半袖 tシャツ オーバーサイズ コットン スリット 大きいサイズ 白 シャツ ビッグシルエット ワンピース シャツワンピ ロングtシャツ おおきいサイズ 夏 ピンク カジュアル カップ付き カーディガン キラキラ キャミソール キャミ サテン シンプル シニア シフォン シースルー シ, Sleeping Sheep(スリーピング シープ), ホワイト</code> | <code>search_document: Perkisboby スポーツウェア レディース ヨガウェア 4点セット 上下セット 5点セットウェア フィットネス 2点セット ジャージ スポーツブラ パンツ パーカー 半袖 ハーフパンツ, Perkisboby, 2点セット-グレー</code> |
| <code>search_query: iphone xr otterbox symmetry case</code> | <code>search_document: Symmetry Clear Series Case for iPhone XR (ONLY) Symmetry Case for iPhone XR Symmetry Case - Clear, VTSOU, Clear</code> | <code>search_document: OtterBox Symmetry Series Case for Apple iPhone XS Max - Tonic Violet / Purple, OtterBox, Tonic Violet / Purple</code> |
* Loss: [<code>CachedMultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedmultiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 20.0,
      "similarity_fct": "cos_sim"
  }
  ```
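The JSON block above only records the loss parameters. A rough sketch of how the same configuration would be built in sentence-transformers (this is not the card's actual training code, and `mini_batch_size` is an illustrative choice):

```python
from sentence_transformers import SentenceTransformer, losses, util

# The base model named in the card's frontmatter; nomic models need trust_remote_code.
model = SentenceTransformer("nomic-ai/nomic-embed-text-v1.5", trust_remote_code=True)

# scale=20.0 and cosine similarity mirror the parameters listed above;
# mini_batch_size only bounds memory use and is assumed here, not taken from the card.
loss = losses.CachedMultipleNegativesRankingLoss(
    model,
    scale=20.0,
    similarity_fct=util.cos_sim,
    mini_batch_size=32,
)
```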

| | anchor | positive | negative |
|:--------|:----------|:----------|:----------|
| type | string | string | string |
| details | <ul><li>min: 7 tokens</li><li>mean: 12.13 tokens</li><li>max: 49 tokens</li></ul> | <ul><li>min: 15 tokens</li><li>mean: 50.76 tokens</li><li>max: 173 tokens</li></ul> | <ul><li>min: 18 tokens</li><li>mean: 54.25 tokens</li><li>max: 161 tokens</li></ul> |
* Samples:
| anchor | positive | negative |
|:---------|:----------|:----------|
| <code>search_query: snack vending machine</code> | <code>search_document: Red All Metal Triple Compartment Commercial Vending Machine for 1 inch Gumballs, 1 inch Toy Capsules, Bouncy Balls, Candy, Nuts with Stand by American Gumball Company, American Gumball Company, CANDY RED</code> | <code>search_document: Vending Machine Halloween Costume - Funny Snack Food Adult Men & Women Outfits, Hauntlook, Multicolored</code> |
| <code>search_query: slim credit card holder without id window</code> | <code>search_document: Banuce Top Grain Leather Card Holder for Women Men Unisex ID Credit Card Case Slim Card Wallet Black, Banuce, 1 ID + 5 Card Slots: Black</code> | <code>search_document: Mens Wallet RFID Genuine Leather Bifold Wallets For Men, ID Window 16 Card Holders Gift Box, Swallowmall, Black Stripe</code> |
| <code>search_query: gucci belts for women</code> | <code>search_document: Gucci Women's Gg0027o 50Mm Optical Glasses, Gucci, Havana</code> | <code>search_document: Gucci G-Gucci Gold PVD Women's Watch(Model:YA125511), Gucci, PVD/Brown</code> |
* Loss: [<code>CachedMultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedmultiplenegativesrankingloss) with these parameters:
  ```json
  {
      "scale": 20.0,
      "similarity_fct": "cos_sim"
  }
  ```

- `per_device_train_batch_size`: 4
- `per_device_eval_batch_size`: 4
- `gradient_accumulation_steps`: 2
- `learning_rate`: 1e-06
- `lr_scheduler_type`: cosine
- `warmup_ratio`: 0.1
- `dataloader_drop_last`: True
- `dataloader_num_workers`: 4
- `dataloader_prefetch_factor`: 2
- `load_best_model_at_end`: True
- `batch_sampler`: no_duplicates
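For orientation, a minimal sketch of how the non-default values above could be passed to the sentence-transformers trainer. This is not the original training script; the output directory and the evaluation/saving cadence (matched to the 1,000-step validation entries in the training log further down) are assumptions.

```python
from sentence_transformers.training_args import (
    BatchSamplers,
    SentenceTransformerTrainingArguments,
)

args = SentenceTransformerTrainingArguments(
    output_dir="models/nomic-embed-text-esci",   # assumed, mirrors the checkpoint path in config.json
    num_train_epochs=3,
    per_device_train_batch_size=4,
    per_device_eval_batch_size=4,
    gradient_accumulation_steps=2,
    learning_rate=1e-06,
    lr_scheduler_type="cosine",
    warmup_ratio=0.1,
    dataloader_drop_last=True,
    dataloader_num_workers=4,
    dataloader_prefetch_factor=2,
    eval_strategy="steps",
    eval_steps=1000,    # the training log reports a validation loss every 1,000 steps
    save_strategy="steps",
    save_steps=1000,    # assumed so that load_best_model_at_end can restore the best checkpoint
    load_best_model_at_end=True,
    batch_sampler=BatchSamplers.NO_DUPLICATES,
)
```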

#### All Hyperparameters

- `per_device_eval_batch_size`: 4
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 2
- `eval_accumulation_steps`: None
- `learning_rate`: 1e-06
- `weight_decay`: 0.0
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999

- `max_grad_norm`: 1.0
- `num_train_epochs`: 3
- `max_steps`: -1
- `lr_scheduler_type`: cosine
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.1
- `warmup_steps`: 0

- `debug`: []
- `dataloader_drop_last`: True
- `dataloader_num_workers`: 4
- `dataloader_prefetch_factor`: 2
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: True
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0

| Epoch | Step | Training Loss | loss | triplet-esci_cosine_accuracy |
|:------:|:-----:|:-------------:|:------:|:----------------------------:|
| 0.008 | 100 | 0.7191 | - | - |
| 0.016 | 200 | 0.6917 | - | - |
| 0.024 | 300 | 0.7129 | - | - |
| 0.032 | 400 | 0.6826 | - | - |
| 0.04 | 500 | 0.7317 | - | - |
| 0.048 | 600 | 0.7237 | - | - |
| 0.056 | 700 | 0.6904 | - | - |
| 0.064 | 800 | 0.6815 | - | - |
| 0.072 | 900 | 0.6428 | - | - |
| 0.08 | 1000 | 0.6561 | 0.6741 | 0.74 |
| 0.088 | 1100 | 0.6097 | - | - |
| 0.096 | 1200 | 0.6426 | - | - |
| 0.104 | 1300 | 0.618 | - | - |
| 0.112 | 1400 | 0.6346 | - | - |
| 0.12 | 1500 | 0.611 | - | - |
| 0.128 | 1600 | 0.6092 | - | - |
| 0.136 | 1700 | 0.6512 | - | - |
| 0.144 | 1800 | 0.646 | - | - |
| 0.152 | 1900 | 0.6584 | - | - |
| 0.16 | 2000 | 0.6403 | 0.6411 | 0.747 |
| 0.168 | 2100 | 0.5882 | - | - |
| 0.176 | 2200 | 0.6361 | - | - |
| 0.184 | 2300 | 0.5641 | - | - |
| 0.192 | 2400 | 0.5734 | - | - |
| 0.2 | 2500 | 0.6156 | - | - |
| 0.208 | 2600 | 0.6252 | - | - |
| 0.216 | 2700 | 0.634 | - | - |
| 0.224 | 2800 | 0.5743 | - | - |
| 0.232 | 2900 | 0.5222 | - | - |
| 0.24 | 3000 | 0.5604 | 0.6180 | 0.765 |
| 0.248 | 3100 | 0.5864 | - | - |
| 0.256 | 3200 | 0.5541 | - | - |
| 0.264 | 3300 | 0.5661 | - | - |
| 0.272 | 3400 | 0.5493 | - | - |
| 0.28 | 3500 | 0.556 | - | - |
| 0.288 | 3600 | 0.56 | - | - |
| 0.296 | 3700 | 0.5552 | - | - |
| 0.304 | 3800 | 0.5833 | - | - |
| 0.312 | 3900 | 0.5578 | - | - |
| 0.32 | 4000 | 0.5495 | 0.6009 | 0.769 |
| 0.328 | 4100 | 0.5245 | - | - |
| 0.336 | 4200 | 0.477 | - | - |
| 0.344 | 4300 | 0.5536 | - | - |
| 0.352 | 4400 | 0.5493 | - | - |
| 0.36 | 4500 | 0.532 | - | - |
| 0.368 | 4600 | 0.5341 | - | - |
| 0.376 | 4700 | 0.528 | - | - |
| 0.384 | 4800 | 0.5574 | - | - |
| 0.392 | 4900 | 0.4953 | - | - |
| 0.4 | 5000 | 0.5365 | 0.5969 | 0.779 |
| 0.408 | 5100 | 0.4835 | - | - |
| 0.416 | 5200 | 0.4573 | - | - |
| 0.424 | 5300 | 0.5554 | - | - |
| 0.432 | 5400 | 0.5623 | - | - |
| 0.44 | 5500 | 0.5955 | - | - |
| 0.448 | 5600 | 0.5086 | - | - |
| 0.456 | 5700 | 0.5081 | - | - |
| 0.464 | 5800 | 0.4829 | - | - |
| 0.472 | 5900 | 0.5066 | - | - |
| 0.48 | 6000 | 0.4997 | 0.5920 | 0.776 |
| 0.488 | 6100 | 0.5075 | - | - |
| 0.496 | 6200 | 0.5051 | - | - |
| 0.504 | 6300 | 0.5019 | - | - |
| 0.512 | 6400 | 0.4774 | - | - |
| 0.52 | 6500 | 0.4975 | - | - |
| 0.528 | 6600 | 0.4756 | - | - |
| 0.536 | 6700 | 0.4656 | - | - |
| 0.544 | 6800 | 0.4671 | - | - |
| 0.552 | 6900 | 0.4646 | - | - |
| 0.56 | 7000 | 0.5595 | 0.5853 | 0.777 |
| 0.568 | 7100 | 0.4812 | - | - |
| 0.576 | 7200 | 0.506 | - | - |
| 0.584 | 7300 | 0.49 | - | - |
| 0.592 | 7400 | 0.464 | - | - |
| 0.6 | 7500 | 0.441 | - | - |
| 0.608 | 7600 | 0.4492 | - | - |
| 0.616 | 7700 | 0.457 | - | - |
| 0.624 | 7800 | 0.493 | - | - |
| 0.632 | 7900 | 0.4174 | - | - |
| 0.64 | 8000 | 0.4686 | 0.5809 | 0.785 |
| 0.648 | 8100 | 0.4529 | - | - |
| 0.656 | 8200 | 0.4784 | - | - |
| 0.664 | 8300 | 0.4697 | - | - |
| 0.672 | 8400 | 0.4489 | - | - |
| 0.68 | 8500 | 0.4439 | - | - |
| 0.688 | 8600 | 0.4063 | - | - |
| 0.696 | 8700 | 0.4634 | - | - |
| 0.704 | 8800 | 0.4446 | - | - |
| 0.712 | 8900 | 0.4725 | - | - |
| 0.72 | 9000 | 0.3954 | 0.5769 | 0.781 |
| 0.728 | 9100 | 0.4536 | - | - |
| 0.736 | 9200 | 0.4583 | - | - |
| 0.744 | 9300 | 0.4415 | - | - |
| 0.752 | 9400 | 0.4716 | - | - |
| 0.76 | 9500 | 0.4393 | - | - |
| 0.768 | 9600 | 0.4332 | - | - |
| 0.776 | 9700 | 0.4236 | - | - |
| 0.784 | 9800 | 0.4021 | - | - |
| 0.792 | 9900 | 0.4324 | - | - |
| 0.8 | 10000 | 0.4197 | 0.5796 | 0.78 |
| 0.808 | 10100 | 0.4576 | - | - |
| 0.816 | 10200 | 0.4238 | - | - |
| 0.824 | 10300 | 0.4468 | - | - |
| 0.832 | 10400 | 0.4301 | - | - |
| 0.84 | 10500 | 0.414 | - | - |
| 0.848 | 10600 | 0.4563 | - | - |
| 0.856 | 10700 | 0.4212 | - | - |
| 0.864 | 10800 | 0.3905 | - | - |
| 0.872 | 10900 | 0.4384 | - | - |
| 0.88 | 11000 | 0.3474 | 0.5709 | 0.788 |
| 0.888 | 11100 | 0.4396 | - | - |
| 0.896 | 11200 | 0.3819 | - | - |
| 0.904 | 11300 | 0.3748 | - | - |
| 0.912 | 11400 | 0.4217 | - | - |
| 0.92 | 11500 | 0.3893 | - | - |
| 0.928 | 11600 | 0.3835 | - | - |
| 0.936 | 11700 | 0.4303 | - | - |
| 0.944 | 11800 | 0.4274 | - | - |
| 0.952 | 11900 | 0.4089 | - | - |
| 0.96 | 12000 | 0.4009 | 0.5710 | 0.786 |
| 0.968 | 12100 | 0.3832 | - | - |
| 0.976 | 12200 | 0.3543 | - | - |
| 0.984 | 12300 | 0.4866 | - | - |
| 0.992 | 12400 | 0.4531 | - | - |
| 1.0 | 12500 | 0.3728 | - | - |
| 1.008 | 12600 | 0.386 | - | - |
| 1.016 | 12700 | 0.3622 | - | - |
| 1.024 | 12800 | 0.4013 | - | - |
| 1.032 | 12900 | 0.3543 | - | - |
| 1.04 | 13000 | 0.3918 | 0.5712 | 0.792 |
| 1.048 | 13100 | 0.3961 | - | - |
| 1.056 | 13200 | 0.3804 | - | - |
| 1.064 | 13300 | 0.4049 | - | - |
| 1.072 | 13400 | 0.3374 | - | - |
| 1.08 | 13500 | 0.3746 | - | - |
| 1.088 | 13600 | 0.3162 | - | - |
| 1.096 | 13700 | 0.3536 | - | - |
| 1.104 | 13800 | 0.3101 | - | - |
| 1.112 | 13900 | 0.3704 | - | - |
| 1.12 | 14000 | 0.3412 | 0.5758 | 0.788 |
| 1.1280 | 14100 | 0.342 | - | - |
| 1.1360 | 14200 | 0.383 | - | - |
| 1.144 | 14300 | 0.3554 | - | - |
| 1.152 | 14400 | 0.4013 | - | - |
| 1.16 | 14500 | 0.3486 | - | - |
| 1.168 | 14600 | 0.3367 | - | - |
| 1.176 | 14700 | 0.3737 | - | - |
| 1.184 | 14800 | 0.319 | - | - |
| 1.192 | 14900 | 0.3211 | - | - |
| 1.2 | 15000 | 0.3284 | 0.5804 | 0.787 |

</details>

}
```

#### CachedMultipleNegativesRankingLoss
```bibtex
@misc{gao2021scaling,
    title={Scaling Deep Contrastive Learning Batch Size under Memory Limited Setup},
    author={Luyu Gao and Yunyi Zhang and Jiawei Han and Jamie Callan},
    year={2021},
    eprint={2101.06983},
    archivePrefix={arXiv},
    primaryClass={cs.LG}
}
```
config.json CHANGED

{
-   "_name_or_path": "models/nomic-embed-text-esci/checkpoint-
+   "_name_or_path": "models/nomic-embed-text-esci/checkpoint-15000",
    "activation_function": "swiglu",
    "architectures": [
      "NomicBertModel"
model.safetensors CHANGED

version https://git-lfs.github.com/spec/v1
- oid sha256:
+ oid sha256:c50404df18f7d56454c1d2e200beeea9386fbbbb465f583137054b0f476a2b4d
size 546938168