diff --git a/1_Pooling/config.json b/1_Pooling/config.json
deleted file mode 100644
index a97f8d140b6aee43dfac9fc4521b2842657c5608..0000000000000000000000000000000000000000
--- a/1_Pooling/config.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "word_embedding_dimension": 384,
- "pooling_mode_cls_token": false,
- "pooling_mode_mean_tokens": true,
- "pooling_mode_max_tokens": false,
- "pooling_mode_mean_sqrt_len_tokens": false,
- "pooling_mode_weightedmean_tokens": false,
- "pooling_mode_lasttoken": false,
- "include_prompt": true
-}
\ No newline at end of file
diff --git a/checkpoint-1000/1_Pooling/config.json b/checkpoint-1000/1_Pooling/config.json
deleted file mode 100644
index a97f8d140b6aee43dfac9fc4521b2842657c5608..0000000000000000000000000000000000000000
--- a/checkpoint-1000/1_Pooling/config.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "word_embedding_dimension": 384,
- "pooling_mode_cls_token": false,
- "pooling_mode_mean_tokens": true,
- "pooling_mode_max_tokens": false,
- "pooling_mode_mean_sqrt_len_tokens": false,
- "pooling_mode_weightedmean_tokens": false,
- "pooling_mode_lasttoken": false,
- "include_prompt": true
-}
\ No newline at end of file
diff --git a/checkpoint-1000/README.md b/checkpoint-1000/README.md
deleted file mode 100644
index 3bc5608974f42cafdc8ed1ae922876e7c1ae90e5..0000000000000000000000000000000000000000
--- a/checkpoint-1000/README.md
+++ /dev/null
@@ -1,466 +0,0 @@
----
-language:
-- en
-license: apache-2.0
-tags:
-- sentence-transformers
-- sentence-similarity
-- feature-extraction
-- generated_from_trainer
-- dataset_size:2130621
-- loss:ContrastiveLoss
-base_model: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2
-widget:
-- source_sentence: Kim Chol-sam
- sentences:
- - Stankevich Sergey Nikolayevich
- - Kim Chin-So’k
- - Julen Lopetegui Agote
-- source_sentence: دينا بنت عبد الحميد
- sentences:
- - Alexia van Amsberg
- - Anthony Nicholas Colin Maitland Biddulph, 5th Baron Biddulph
- - Dina bint Abdul-Hamíd
-- source_sentence: Մուհամեդ բեն Նաիֆ Ալ Սաուդ
- sentences:
- - Karpov Anatoly Evgenyevich
- - GNPower Mariveles Coal Plant [former]
- - Muhammed bin Nayef bin Abdul Aziz Al Saud
-- source_sentence: Edward Gnehm
- sentences:
- - Шауэрте, Хартмут
- - Ханзада Филипп, Эдинбург герцогі
- - AFX
-- source_sentence: Schori i Lidingö
- sentences:
- - Yordan Canev
- - ကားပေါ့ အန်နာတိုလီ
- - BYSTROV, Mikhail Ivanovich
-pipeline_tag: sentence-similarity
-library_name: sentence-transformers
-metrics:
-- cosine_accuracy
-- cosine_accuracy_threshold
-- cosine_f1
-- cosine_f1_threshold
-- cosine_precision
-- cosine_recall
-- cosine_ap
-- cosine_mcc
-model-index:
-- name: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2-name-matcher-original
- results:
- - task:
- type: binary-classification
- name: Binary Classification
- dataset:
- name: sentence transformers paraphrase multilingual MiniLM L12 v2
- type: sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2
- metrics:
- - type: cosine_accuracy
- value: 0.9817931272716349
- name: Cosine Accuracy
- - type: cosine_accuracy_threshold
- value: 0.7197962999343872
- name: Cosine Accuracy Threshold
- - type: cosine_f1
- value: 0.9722373310278887
- name: Cosine F1
- - type: cosine_f1_threshold
- value: 0.7091608047485352
- name: Cosine F1 Threshold
- - type: cosine_precision
- value: 0.9675121928984912
- name: Cosine Precision
- - type: cosine_recall
- value: 0.9770088489465266
- name: Cosine Recall
- - type: cosine_ap
- value: 0.9944127523785896
- name: Cosine Ap
- - type: cosine_mcc
- value: 0.9587183163648803
- name: Cosine Mcc
----
-
-# sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2-name-matcher-original
-
-This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2). It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
-
-## Model Details
-
-### Model Description
-- **Model Type:** Sentence Transformer
-- **Base model:** [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2)
-- **Maximum Sequence Length:** 128 tokens
-- **Output Dimensionality:** 384 dimensions
-- **Similarity Function:** Cosine Similarity
-
-- **Language:** en
-- **License:** apache-2.0
-
-### Model Sources
-
-- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
-- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
-- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
-
-### Full Model Architecture
-
-```
-SentenceTransformer(
- (0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
- (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
-)
-```
-
-## Usage
-
-### Direct Usage (Sentence Transformers)
-
-First install the Sentence Transformers library:
-
-```bash
-pip install -U sentence-transformers
-```
-
-Then you can load this model and run inference.
-```python
-from sentence_transformers import SentenceTransformer
-
-# Download from the 🤗 Hub
-model = SentenceTransformer("sentence_transformers_model_id")
-# Run inference
-sentences = [
- 'Schori i Lidingö',
- 'Yordan Canev',
- 'ကားပေါ့ အန်နာတိုလီ',
-]
-embeddings = model.encode(sentences)
-print(embeddings.shape)
-# [3, 384]
-
-# Get the similarity scores for the embeddings
-similarities = model.similarity(embeddings, embeddings)
-print(similarities.shape)
-# [3, 3]
-```
-
-
-
-
-
-
-
-## Evaluation
-
-### Metrics
-
-#### Binary Classification
-
-* Dataset: `sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2`
-* Evaluated with [BinaryClassificationEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.BinaryClassificationEvaluator)
-
-| Metric | Value |
-|:--------------------------|:-----------|
-| cosine_accuracy | 0.9818 |
-| cosine_accuracy_threshold | 0.7198 |
-| cosine_f1 | 0.9722 |
-| cosine_f1_threshold | 0.7092 |
-| cosine_precision | 0.9675 |
-| cosine_recall | 0.977 |
-| **cosine_ap** | **0.9944** |
-| cosine_mcc | 0.9587 |
-
-
-
-
-
-## Training Details
-
-### Training Dataset
-
-#### Unnamed Dataset
-
-* Size: 2,130,621 training samples
-* Columns: sentence1, sentence2, and label
-* Approximate statistics based on the first 1000 samples:
- | | sentence1 | sentence2 | label |
- |:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------|
- | type | string | string | float |
- | details | min: 3 tokens, mean: 9.32 tokens, max: 57 tokens | min: 3 tokens, mean: 9.16 tokens, max: 54 tokens | min: 0.0, mean: 0.34, max: 1.0 |
-* Samples:
- | sentence1 | sentence2 | label |
- |:----------------------------------|:------------------------------------|:-----------------|
- | 캐스린 설리번 | Kathryn D. Sullivanová | 1.0 |
- | ଶିବରାଜ ଅଧାଲରାଓ ପାଟିଲ | Aleksander Lubocki | 0.0 |
- | Пырванов, Георги | アナトーリー・セルジュコフ | 0.0 |
-* Loss: [ContrastiveLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
- ```json
- {
- "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
- "margin": 0.5,
- "size_average": true
- }
- ```
-
-### Evaluation Dataset
-
-#### Unnamed Dataset
-
-* Size: 2,663,276 evaluation samples
-* Columns: sentence1, sentence2, and label
-* Approximate statistics based on the first 1000 samples:
- | | sentence1 | sentence2 | label |
- |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------|
- | type | string | string | float |
- | details | min: 3 tokens, mean: 9.34 tokens, max: 102 tokens | min: 4 tokens, mean: 9.11 tokens, max: 100 tokens | min: 0.0, mean: 0.33, max: 1.0 |
-* Samples:
- | sentence1 | sentence2 | label |
- |:--------------------------------------|:---------------------------------------|:-----------------|
- | Ева Херман | I Xuan Karlos | 0.0 |
- | Кличков Андрій Євгенович | Андрэй Яўгенавіч Клычкоў | 1.0 |
- | Кинах А. | Senator John Hickenlooper | 0.0 |
-* Loss: [ContrastiveLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
- ```json
- {
- "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
- "margin": 0.5,
- "size_average": true
- }
- ```
-
-### Training Hyperparameters
-#### Non-Default Hyperparameters
-
-- `eval_strategy`: steps
-- `per_device_train_batch_size`: 1000
-- `per_device_eval_batch_size`: 1000
-- `gradient_accumulation_steps`: 4
-- `learning_rate`: 3e-05
-- `weight_decay`: 0.01
-- `num_train_epochs`: 8
-- `warmup_ratio`: 0.1
-- `fp16_opt_level`: O0
-- `load_best_model_at_end`: True
-- `optim`: adafactor
-
-#### All Hyperparameters
-Click to expand
-
-- `overwrite_output_dir`: False
-- `do_predict`: False
-- `eval_strategy`: steps
-- `prediction_loss_only`: True
-- `per_device_train_batch_size`: 1000
-- `per_device_eval_batch_size`: 1000
-- `per_gpu_train_batch_size`: None
-- `per_gpu_eval_batch_size`: None
-- `gradient_accumulation_steps`: 4
-- `eval_accumulation_steps`: None
-- `torch_empty_cache_steps`: None
-- `learning_rate`: 3e-05
-- `weight_decay`: 0.01
-- `adam_beta1`: 0.9
-- `adam_beta2`: 0.999
-- `adam_epsilon`: 1e-08
-- `max_grad_norm`: 1.0
-- `num_train_epochs`: 8
-- `max_steps`: -1
-- `lr_scheduler_type`: linear
-- `lr_scheduler_kwargs`: {}
-- `warmup_ratio`: 0.1
-- `warmup_steps`: 0
-- `log_level`: passive
-- `log_level_replica`: warning
-- `log_on_each_node`: True
-- `logging_nan_inf_filter`: True
-- `save_safetensors`: True
-- `save_on_each_node`: False
-- `save_only_model`: False
-- `restore_callback_states_from_checkpoint`: False
-- `no_cuda`: False
-- `use_cpu`: False
-- `use_mps_device`: False
-- `seed`: 42
-- `data_seed`: None
-- `jit_mode_eval`: False
-- `use_ipex`: False
-- `bf16`: False
-- `fp16`: False
-- `fp16_opt_level`: O0
-- `half_precision_backend`: auto
-- `bf16_full_eval`: False
-- `fp16_full_eval`: False
-- `tf32`: None
-- `local_rank`: 0
-- `ddp_backend`: None
-- `tpu_num_cores`: None
-- `tpu_metrics_debug`: False
-- `debug`: []
-- `dataloader_drop_last`: False
-- `dataloader_num_workers`: 0
-- `dataloader_prefetch_factor`: None
-- `past_index`: -1
-- `disable_tqdm`: False
-- `remove_unused_columns`: True
-- `label_names`: None
-- `load_best_model_at_end`: True
-- `ignore_data_skip`: False
-- `fsdp`: []
-- `fsdp_min_num_params`: 0
-- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
-- `tp_size`: 0
-- `fsdp_transformer_layer_cls_to_wrap`: None
-- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
-- `deepspeed`: None
-- `label_smoothing_factor`: 0.0
-- `optim`: adafactor
-- `optim_args`: None
-- `adafactor`: False
-- `group_by_length`: False
-- `length_column_name`: length
-- `ddp_find_unused_parameters`: None
-- `ddp_bucket_cap_mb`: None
-- `ddp_broadcast_buffers`: False
-- `dataloader_pin_memory`: True
-- `dataloader_persistent_workers`: False
-- `skip_memory_metrics`: True
-- `use_legacy_prediction_loop`: False
-- `push_to_hub`: False
-- `resume_from_checkpoint`: None
-- `hub_model_id`: None
-- `hub_strategy`: every_save
-- `hub_private_repo`: None
-- `hub_always_push`: False
-- `gradient_checkpointing`: False
-- `gradient_checkpointing_kwargs`: None
-- `include_inputs_for_metrics`: False
-- `include_for_metrics`: []
-- `eval_do_concat_batches`: True
-- `fp16_backend`: auto
-- `push_to_hub_model_id`: None
-- `push_to_hub_organization`: None
-- `mp_parameters`:
-- `auto_find_batch_size`: False
-- `full_determinism`: False
-- `torchdynamo`: None
-- `ray_scope`: last
-- `ddp_timeout`: 1800
-- `torch_compile`: False
-- `torch_compile_backend`: None
-- `torch_compile_mode`: None
-- `include_tokens_per_second`: False
-- `include_num_input_tokens_seen`: False
-- `neftune_noise_alpha`: None
-- `optim_target_modules`: None
-- `batch_eval_metrics`: False
-- `eval_on_start`: False
-- `use_liger_kernel`: False
-- `eval_use_gather_object`: False
-- `average_tokens_across_devices`: False
-- `prompts`: None
-- `batch_sampler`: batch_sampler
-- `multi_dataset_batch_sampler`: proportional
-
-
-
-### Training Logs
-| Epoch | Step | Training Loss | Validation Loss | sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap |
-|:------:|:----:|:-------------:|:---------------:|:---------------------------------------------------------------------:|
-| -1 | -1 | - | - | 0.7140 |
-| 0.1877 | 100 | - | 0.0125 | 0.8849 |
-| 0.3754 | 200 | - | 0.0090 | 0.9369 |
-| 0.5631 | 300 | - | 0.0068 | 0.9630 |
-| 0.7508 | 400 | - | 0.0052 | 0.9774 |
-| 0.9385 | 500 | 0.0409 | 0.0040 | 0.9845 |
-| 1.1276 | 600 | - | 0.0033 | 0.9887 |
-| 1.3153 | 700 | - | 0.0028 | 0.9911 |
-| 1.5031 | 800 | - | 0.0026 | 0.9927 |
-| 1.6908 | 900 | - | 0.0022 | 0.9938 |
-| 1.8785 | 1000 | 0.0131 | 0.0022 | 0.9944 |
-
-
-### Framework Versions
-- Python: 3.12.9
-- Sentence Transformers: 3.4.1
-- Transformers: 4.51.3
-- PyTorch: 2.7.0+cu126
-- Accelerate: 1.6.0
-- Datasets: 3.6.0
-- Tokenizers: 0.21.1
-
-## Citation
-
-### BibTeX
-
-#### Sentence Transformers
-```bibtex
-@inproceedings{reimers-2019-sentence-bert,
- title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
- author = "Reimers, Nils and Gurevych, Iryna",
- booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
- month = "11",
- year = "2019",
- publisher = "Association for Computational Linguistics",
- url = "https://arxiv.org/abs/1908.10084",
-}
-```
-
-#### ContrastiveLoss
-```bibtex
-@inproceedings{hadsell2006dimensionality,
- author={Hadsell, R. and Chopra, S. and LeCun, Y.},
- booktitle={2006 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'06)},
- title={Dimensionality Reduction by Learning an Invariant Mapping},
- year={2006},
- volume={2},
- number={},
- pages={1735-1742},
- doi={10.1109/CVPR.2006.100}
-}
-```
-
-
-
-
-
-
\ No newline at end of file
diff --git a/checkpoint-1000/config.json b/checkpoint-1000/config.json
deleted file mode 100644
index 26e48501fdf44110239e00ad4d438aee8679504a..0000000000000000000000000000000000000000
--- a/checkpoint-1000/config.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
- "architectures": [
- "BertModel"
- ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
- "gradient_checkpointing": false,
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 384,
- "initializer_range": 0.02,
- "intermediate_size": 1536,
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 0,
- "position_embedding_type": "absolute",
- "torch_dtype": "float32",
- "transformers_version": "4.51.3",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 250037
-}
diff --git a/checkpoint-1000/config_sentence_transformers.json b/checkpoint-1000/config_sentence_transformers.json
deleted file mode 100644
index dcf436801f55bd22a257de2aad7eef5cfd06efaa..0000000000000000000000000000000000000000
--- a/checkpoint-1000/config_sentence_transformers.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "__version__": {
- "sentence_transformers": "3.4.1",
- "transformers": "4.51.3",
- "pytorch": "2.7.0+cu126"
- },
- "prompts": {},
- "default_prompt_name": null,
- "similarity_fn_name": "cosine"
-}
\ No newline at end of file
diff --git a/checkpoint-1000/model.safetensors b/checkpoint-1000/model.safetensors
deleted file mode 100644
index 73640d8f7f70e27e135807c6ab63bffc4c1cd512..0000000000000000000000000000000000000000
--- a/checkpoint-1000/model.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:53cdf706594f9c2e35f539f5023cae863f9d5c0e8588348281d86e7ac79b4662
-size 470637416
diff --git a/checkpoint-1000/modules.json b/checkpoint-1000/modules.json
deleted file mode 100644
index f7640f94e81bb7f4f04daf1668850b38763a13d9..0000000000000000000000000000000000000000
--- a/checkpoint-1000/modules.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "idx": 0,
- "name": "0",
- "path": "",
- "type": "sentence_transformers.models.Transformer"
- },
- {
- "idx": 1,
- "name": "1",
- "path": "1_Pooling",
- "type": "sentence_transformers.models.Pooling"
- }
-]
\ No newline at end of file
diff --git a/checkpoint-1000/optimizer.pt b/checkpoint-1000/optimizer.pt
deleted file mode 100644
index b8c58ea739043f210901f0ab0fb962d19e7958fc..0000000000000000000000000000000000000000
--- a/checkpoint-1000/optimizer.pt
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:c108fa814d36d19a8e9c702a9800909a0bdbcc7bbc32071418d14ec158efbaf5
-size 1715019
diff --git a/checkpoint-1000/rng_state.pth b/checkpoint-1000/rng_state.pth
deleted file mode 100644
index 7db666e6775b0f053ef0cbb331ebaa54f8182cd0..0000000000000000000000000000000000000000
--- a/checkpoint-1000/rng_state.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:31530f34a96cd557f736a4c9e2dbdab66da89f3ee40e3c858c87c688d4a1b9a1
-size 14645
diff --git a/checkpoint-1000/scheduler.pt b/checkpoint-1000/scheduler.pt
deleted file mode 100644
index c026cbfbd78e0d251a49db6ad9188412f9899f74..0000000000000000000000000000000000000000
--- a/checkpoint-1000/scheduler.pt
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:e8985e0cd69062d78f38bea6c82894c697cf2eff7e9a24bf93fa0da194c1b5e7
-size 1465
diff --git a/checkpoint-1000/sentence_bert_config.json b/checkpoint-1000/sentence_bert_config.json
deleted file mode 100644
index 5fd10429389515d3e5cccdeda08cae5fea1ae82e..0000000000000000000000000000000000000000
--- a/checkpoint-1000/sentence_bert_config.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "max_seq_length": 128,
- "do_lower_case": false
-}
\ No newline at end of file
diff --git a/checkpoint-1000/special_tokens_map.json b/checkpoint-1000/special_tokens_map.json
deleted file mode 100644
index b1879d702821e753ffe4245048eee415d54a9385..0000000000000000000000000000000000000000
--- a/checkpoint-1000/special_tokens_map.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "bos_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "cls_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "eos_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "mask_token": {
- "content": "",
- "lstrip": true,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "pad_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "sep_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "unk_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- }
-}
diff --git a/checkpoint-1000/tokenizer.json b/checkpoint-1000/tokenizer.json
deleted file mode 100644
index e3420945e193cc0791136cdc6e5cd69801c838af..0000000000000000000000000000000000000000
--- a/checkpoint-1000/tokenizer.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:cad551d5600a84242d0973327029452a1e3672ba6313c2a3c3d69c4310e12719
-size 17082987
diff --git a/checkpoint-1000/tokenizer_config.json b/checkpoint-1000/tokenizer_config.json
deleted file mode 100644
index facf4436a8f11c26085c16a14f4e576853927a9e..0000000000000000000000000000000000000000
--- a/checkpoint-1000/tokenizer_config.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "added_tokens_decoder": {
- "0": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "1": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "2": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "3": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "250001": {
- "content": "",
- "lstrip": true,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- }
- },
- "bos_token": "",
- "clean_up_tokenization_spaces": false,
- "cls_token": "",
- "do_lower_case": true,
- "eos_token": "",
- "extra_special_tokens": {},
- "mask_token": "",
- "max_length": 128,
- "model_max_length": 128,
- "pad_to_multiple_of": null,
- "pad_token": "",
- "pad_token_type_id": 0,
- "padding_side": "right",
- "sep_token": "",
- "stride": 0,
- "strip_accents": null,
- "tokenize_chinese_chars": true,
- "tokenizer_class": "BertTokenizer",
- "truncation_side": "right",
- "truncation_strategy": "longest_first",
- "unk_token": ""
-}
diff --git a/checkpoint-1000/trainer_state.json b/checkpoint-1000/trainer_state.json
deleted file mode 100644
index 59d5f0faee0a3010b3bfe7f0c72eebe378c700d3..0000000000000000000000000000000000000000
--- a/checkpoint-1000/trainer_state.json
+++ /dev/null
@@ -1,217 +0,0 @@
-{
- "best_global_step": 1000,
- "best_metric": 0.002240537665784359,
- "best_model_checkpoint": "data/fine-tuned-sbert-sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2-original-adafactor/checkpoint-1000",
- "epoch": 1.8784608165180665,
- "eval_steps": 100,
- "global_step": 1000,
- "is_hyper_param_search": false,
- "is_local_process_zero": true,
- "is_world_process_zero": true,
- "log_history": [
- {
- "epoch": 0.18770530267480057,
- "eval_loss": 0.012530049309134483,
- "eval_runtime": 812.6802,
- "eval_samples_per_second": 3277.151,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.8778235859541618,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7128396034240723,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.8848748516159781,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.812583495899967,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.6880456209182739,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.7185793630359445,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.7900823930955021,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.8364038065429271,
- "eval_steps_per_second": 3.278,
- "step": 100
- },
- {
- "epoch": 0.37541060534960113,
- "eval_loss": 0.009013425558805466,
- "eval_runtime": 792.9843,
- "eval_samples_per_second": 3358.548,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9164113424048541,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7378441095352173,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9368603114664952,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.8729798695775446,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7272344827651978,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.8103205315460159,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.8605654745268148,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.8857576838544123,
- "eval_steps_per_second": 3.359,
- "step": 200
- },
- {
- "epoch": 0.5631159080244017,
- "eval_loss": 0.006819029338657856,
- "eval_runtime": 809.9704,
- "eval_samples_per_second": 3288.115,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9398298338890391,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7449667453765869,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9629957356284182,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9088032597499417,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7449667453765869,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.864029341509194,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.8990159430733201,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9188060251084542,
- "eval_steps_per_second": 3.289,
- "step": 300
- },
- {
- "epoch": 0.7508212106992023,
- "eval_loss": 0.005150709766894579,
- "eval_runtime": 797.9199,
- "eval_samples_per_second": 3337.773,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9560016220600163,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7553268671035767,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9774059659768239,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9333702119012406,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7449506521224976,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9005457325671423,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.916037892637527,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9513710688929036,
- "eval_steps_per_second": 3.339,
- "step": 400
- },
- {
- "epoch": 0.9385265133740028,
- "grad_norm": 0.17396493256092072,
- "learning_rate": 2.9428198433420364e-05,
- "loss": 0.0409,
- "step": 500
- },
- {
- "epoch": 0.9385265133740028,
- "eval_loss": 0.003973629325628281,
- "eval_runtime": 809.4532,
- "eval_samples_per_second": 3290.216,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9655950557207654,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7622435092926025,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9845099503823473,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9477742208778024,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7535413503646851,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9221773981286795,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9367750202319935,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9590347859107281,
- "eval_steps_per_second": 3.291,
- "step": 500
- },
- {
- "epoch": 1.1276396058188645,
- "eval_loss": 0.0032712339889258146,
- "eval_runtime": 793.7573,
- "eval_samples_per_second": 3355.277,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9712722657775374,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7610360383987427,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9887055977101925,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9564087809158087,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7610177993774414,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9350876149915242,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9471753898932449,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9658239646502422,
- "eval_steps_per_second": 3.356,
- "step": 600
- },
- {
- "epoch": 1.3153449084936648,
- "eval_loss": 0.0028166945558041334,
- "eval_runtime": 815.1943,
- "eval_samples_per_second": 3267.044,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9751246583160614,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7577522993087769,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9911117019106511,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9621558129059113,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7424367666244507,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.943665667488554,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9536134909690983,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9708525597505264,
- "eval_steps_per_second": 3.268,
- "step": 700
- },
- {
- "epoch": 1.5030502111684654,
- "eval_loss": 0.0026242006570100784,
- "eval_runtime": 805.7115,
- "eval_samples_per_second": 3305.496,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9782673995974888,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7254683971405029,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9927214598054878,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9669240257663667,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7145971059799194,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9507846488068235,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9597660102710608,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9741896137072368,
- "eval_steps_per_second": 3.306,
- "step": 800
- },
- {
- "epoch": 1.690755513843266,
- "eval_loss": 0.002248650649562478,
- "eval_runtime": 818.5338,
- "eval_samples_per_second": 3253.715,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9801973506353069,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7349117994308472,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9938133122786723,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9698356230196407,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7348856329917908,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9551340483533577,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9641228578901284,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9756164919507957,
- "eval_steps_per_second": 3.255,
- "step": 900
- },
- {
- "epoch": 1.8784608165180665,
- "grad_norm": 0.07541557401418686,
- "learning_rate": 2.5511749347258486e-05,
- "loss": 0.0131,
- "step": 1000
- },
- {
- "epoch": 1.8784608165180665,
- "eval_loss": 0.002240537665784359,
- "eval_runtime": 803.6286,
- "eval_samples_per_second": 3314.063,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9817931272716349,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7197962999343872,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9944127523785896,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9722373310278887,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7091608047485352,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9587183163648803,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9675121928984912,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9770088489465266,
- "eval_steps_per_second": 3.315,
- "step": 1000
- }
- ],
- "logging_steps": 500,
- "max_steps": 4256,
- "num_input_tokens_seen": 0,
- "num_train_epochs": 8,
- "save_steps": 100,
- "stateful_callbacks": {
- "EarlyStoppingCallback": {
- "args": {
- "early_stopping_patience": 1,
- "early_stopping_threshold": 0.0
- },
- "attributes": {
- "early_stopping_patience_counter": 0
- }
- },
- "TrainerControl": {
- "args": {
- "should_epoch_stop": false,
- "should_evaluate": false,
- "should_log": false,
- "should_save": true,
- "should_training_stop": false
- },
- "attributes": {}
- }
- },
- "total_flos": 0.0,
- "train_batch_size": 1000,
- "trial_name": null,
- "trial_params": null
-}
diff --git a/checkpoint-1000/training_args.bin b/checkpoint-1000/training_args.bin
deleted file mode 100644
index f6aba0195c0abacac26202ea40cbb6012662a9ff..0000000000000000000000000000000000000000
--- a/checkpoint-1000/training_args.bin
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9339753774865faea550d7da93688221ca0f43171c16e3034645a2149992c8a6
-size 6033
diff --git a/checkpoint-1000/unigram.json b/checkpoint-1000/unigram.json
deleted file mode 100644
index 2faa9ec874108d53a017ff2c7ab98d155fb21a82..0000000000000000000000000000000000000000
--- a/checkpoint-1000/unigram.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:da145b5e7700ae40f16691ec32a0b1fdc1ee3298db22a31ea55f57a966c4a65d
-size 14763260
diff --git a/checkpoint-1100/1_Pooling/config.json b/checkpoint-1100/1_Pooling/config.json
deleted file mode 100644
index a97f8d140b6aee43dfac9fc4521b2842657c5608..0000000000000000000000000000000000000000
--- a/checkpoint-1100/1_Pooling/config.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "word_embedding_dimension": 384,
- "pooling_mode_cls_token": false,
- "pooling_mode_mean_tokens": true,
- "pooling_mode_max_tokens": false,
- "pooling_mode_mean_sqrt_len_tokens": false,
- "pooling_mode_weightedmean_tokens": false,
- "pooling_mode_lasttoken": false,
- "include_prompt": true
-}
\ No newline at end of file
diff --git a/checkpoint-1100/README.md b/checkpoint-1100/README.md
deleted file mode 100644
index b854316e034d2e39fdf0901261d0f057f057bd3d..0000000000000000000000000000000000000000
--- a/checkpoint-1100/README.md
+++ /dev/null
@@ -1,467 +0,0 @@
----
-language:
-- en
-license: apache-2.0
-tags:
-- sentence-transformers
-- sentence-similarity
-- feature-extraction
-- generated_from_trainer
-- dataset_size:2130621
-- loss:ContrastiveLoss
-base_model: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2
-widget:
-- source_sentence: Kim Chol-sam
- sentences:
- - Stankevich Sergey Nikolayevich
- - Kim Chin-So’k
- - Julen Lopetegui Agote
-- source_sentence: دينا بنت عبد الحميد
- sentences:
- - Alexia van Amsberg
- - Anthony Nicholas Colin Maitland Biddulph, 5th Baron Biddulph
- - Dina bint Abdul-Hamíd
-- source_sentence: Մուհամեդ բեն Նաիֆ Ալ Սաուդ
- sentences:
- - Karpov Anatoly Evgenyevich
- - GNPower Mariveles Coal Plant [former]
- - Muhammed bin Nayef bin Abdul Aziz Al Saud
-- source_sentence: Edward Gnehm
- sentences:
- - Шауэрте, Хартмут
- - Ханзада Филипп, Эдинбург герцогі
- - AFX
-- source_sentence: Schori i Lidingö
- sentences:
- - Yordan Canev
- - ကားပေါ့ အန်နာတိုလီ
- - BYSTROV, Mikhail Ivanovich
-pipeline_tag: sentence-similarity
-library_name: sentence-transformers
-metrics:
-- cosine_accuracy
-- cosine_accuracy_threshold
-- cosine_f1
-- cosine_f1_threshold
-- cosine_precision
-- cosine_recall
-- cosine_ap
-- cosine_mcc
-model-index:
-- name: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2-name-matcher-original
- results:
- - task:
- type: binary-classification
- name: Binary Classification
- dataset:
- name: sentence transformers paraphrase multilingual MiniLM L12 v2
- type: sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2
- metrics:
- - type: cosine_accuracy
- value: 0.9828594815415578
- name: Cosine Accuracy
- - type: cosine_accuracy_threshold
- value: 0.7552986741065979
- name: Cosine Accuracy Threshold
- - type: cosine_f1
- value: 0.973889221813201
- name: Cosine F1
- - type: cosine_f1_threshold
- value: 0.7401974201202393
- name: Cosine F1 Threshold
- - type: cosine_precision
- value: 0.9661201195760486
- name: Cosine Precision
- - type: cosine_recall
- value: 0.9817842882294052
- name: Cosine Recall
- - type: cosine_ap
- value: 0.9950493119597241
- name: Cosine Ap
- - type: cosine_mcc
- value: 0.9611601510291333
- name: Cosine Mcc
----
-
-# sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2-name-matcher-original
-
-This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2). It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
-
-## Model Details
-
-### Model Description
-- **Model Type:** Sentence Transformer
-- **Base model:** [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2)
-- **Maximum Sequence Length:** 128 tokens
-- **Output Dimensionality:** 384 dimensions
-- **Similarity Function:** Cosine Similarity
-
-- **Language:** en
-- **License:** apache-2.0
-
-### Model Sources
-
-- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
-- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
-- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
-
-### Full Model Architecture
-
-```
-SentenceTransformer(
- (0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
- (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
-)
-```
-
-## Usage
-
-### Direct Usage (Sentence Transformers)
-
-First install the Sentence Transformers library:
-
-```bash
-pip install -U sentence-transformers
-```
-
-Then you can load this model and run inference.
-```python
-from sentence_transformers import SentenceTransformer
-
-# Download from the 🤗 Hub
-model = SentenceTransformer("sentence_transformers_model_id")
-# Run inference
-sentences = [
- 'Schori i Lidingö',
- 'Yordan Canev',
- 'ကားပေါ့ အန်နာတိုလီ',
-]
-embeddings = model.encode(sentences)
-print(embeddings.shape)
-# [3, 384]
-
-# Get the similarity scores for the embeddings
-similarities = model.similarity(embeddings, embeddings)
-print(similarities.shape)
-# [3, 3]
-```
-
-
-
-
-
-
-
-## Evaluation
-
-### Metrics
-
-#### Binary Classification
-
-* Dataset: `sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2`
-* Evaluated with [BinaryClassificationEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.BinaryClassificationEvaluator)
-
-| Metric | Value |
-|:--------------------------|:----------|
-| cosine_accuracy | 0.9829 |
-| cosine_accuracy_threshold | 0.7553 |
-| cosine_f1 | 0.9739 |
-| cosine_f1_threshold | 0.7402 |
-| cosine_precision | 0.9661 |
-| cosine_recall | 0.9818 |
-| **cosine_ap** | **0.995** |
-| cosine_mcc | 0.9612 |
-
-
-
-
-
-## Training Details
-
-### Training Dataset
-
-#### Unnamed Dataset
-
-* Size: 2,130,621 training samples
-* Columns: sentence1, sentence2, and label
-* Approximate statistics based on the first 1000 samples:
- | | sentence1 | sentence2 | label |
- |:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------|
- | type | string | string | float |
- | details | min: 3 tokens, mean: 9.32 tokens, max: 57 tokens | min: 3 tokens, mean: 9.16 tokens, max: 54 tokens | min: 0.0, mean: 0.34, max: 1.0 |
-* Samples:
- | sentence1 | sentence2 | label |
- |:----------------------------------|:------------------------------------|:-----------------|
- | 캐스린 설리번 | Kathryn D. Sullivanová | 1.0 |
- | ଶିବରାଜ ଅଧାଲରାଓ ପାଟିଲ | Aleksander Lubocki | 0.0 |
- | Пырванов, Георги | アナトーリー・セルジュコフ | 0.0 |
-* Loss: [ContrastiveLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
- ```json
- {
- "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
- "margin": 0.5,
- "size_average": true
- }
- ```
-
-### Evaluation Dataset
-
-#### Unnamed Dataset
-
-* Size: 2,663,276 evaluation samples
-* Columns: sentence1, sentence2, and label
-* Approximate statistics based on the first 1000 samples:
- | | sentence1 | sentence2 | label |
- |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------|
- | type | string | string | float |
- | details | min: 3 tokens, mean: 9.34 tokens, max: 102 tokens | min: 4 tokens, mean: 9.11 tokens, max: 100 tokens | min: 0.0, mean: 0.33, max: 1.0 |
-* Samples:
- | sentence1 | sentence2 | label |
- |:--------------------------------------|:---------------------------------------|:-----------------|
- | Ева Херман | I Xuan Karlos | 0.0 |
- | Кличков Андрій Євгенович | Андрэй Яўгенавіч Клычкоў | 1.0 |
- | Кинах А. | Senator John Hickenlooper | 0.0 |
-* Loss: [ContrastiveLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
- ```json
- {
- "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
- "margin": 0.5,
- "size_average": true
- }
- ```
-
-### Training Hyperparameters
-#### Non-Default Hyperparameters
-
-- `eval_strategy`: steps
-- `per_device_train_batch_size`: 1000
-- `per_device_eval_batch_size`: 1000
-- `gradient_accumulation_steps`: 4
-- `learning_rate`: 3e-05
-- `weight_decay`: 0.01
-- `num_train_epochs`: 8
-- `warmup_ratio`: 0.1
-- `fp16_opt_level`: O0
-- `load_best_model_at_end`: True
-- `optim`: adafactor
-
-#### All Hyperparameters
-Click to expand
-
-- `overwrite_output_dir`: False
-- `do_predict`: False
-- `eval_strategy`: steps
-- `prediction_loss_only`: True
-- `per_device_train_batch_size`: 1000
-- `per_device_eval_batch_size`: 1000
-- `per_gpu_train_batch_size`: None
-- `per_gpu_eval_batch_size`: None
-- `gradient_accumulation_steps`: 4
-- `eval_accumulation_steps`: None
-- `torch_empty_cache_steps`: None
-- `learning_rate`: 3e-05
-- `weight_decay`: 0.01
-- `adam_beta1`: 0.9
-- `adam_beta2`: 0.999
-- `adam_epsilon`: 1e-08
-- `max_grad_norm`: 1.0
-- `num_train_epochs`: 8
-- `max_steps`: -1
-- `lr_scheduler_type`: linear
-- `lr_scheduler_kwargs`: {}
-- `warmup_ratio`: 0.1
-- `warmup_steps`: 0
-- `log_level`: passive
-- `log_level_replica`: warning
-- `log_on_each_node`: True
-- `logging_nan_inf_filter`: True
-- `save_safetensors`: True
-- `save_on_each_node`: False
-- `save_only_model`: False
-- `restore_callback_states_from_checkpoint`: False
-- `no_cuda`: False
-- `use_cpu`: False
-- `use_mps_device`: False
-- `seed`: 42
-- `data_seed`: None
-- `jit_mode_eval`: False
-- `use_ipex`: False
-- `bf16`: False
-- `fp16`: False
-- `fp16_opt_level`: O0
-- `half_precision_backend`: auto
-- `bf16_full_eval`: False
-- `fp16_full_eval`: False
-- `tf32`: None
-- `local_rank`: 0
-- `ddp_backend`: None
-- `tpu_num_cores`: None
-- `tpu_metrics_debug`: False
-- `debug`: []
-- `dataloader_drop_last`: False
-- `dataloader_num_workers`: 0
-- `dataloader_prefetch_factor`: None
-- `past_index`: -1
-- `disable_tqdm`: False
-- `remove_unused_columns`: True
-- `label_names`: None
-- `load_best_model_at_end`: True
-- `ignore_data_skip`: False
-- `fsdp`: []
-- `fsdp_min_num_params`: 0
-- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
-- `tp_size`: 0
-- `fsdp_transformer_layer_cls_to_wrap`: None
-- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
-- `deepspeed`: None
-- `label_smoothing_factor`: 0.0
-- `optim`: adafactor
-- `optim_args`: None
-- `adafactor`: False
-- `group_by_length`: False
-- `length_column_name`: length
-- `ddp_find_unused_parameters`: None
-- `ddp_bucket_cap_mb`: None
-- `ddp_broadcast_buffers`: False
-- `dataloader_pin_memory`: True
-- `dataloader_persistent_workers`: False
-- `skip_memory_metrics`: True
-- `use_legacy_prediction_loop`: False
-- `push_to_hub`: False
-- `resume_from_checkpoint`: None
-- `hub_model_id`: None
-- `hub_strategy`: every_save
-- `hub_private_repo`: None
-- `hub_always_push`: False
-- `gradient_checkpointing`: False
-- `gradient_checkpointing_kwargs`: None
-- `include_inputs_for_metrics`: False
-- `include_for_metrics`: []
-- `eval_do_concat_batches`: True
-- `fp16_backend`: auto
-- `push_to_hub_model_id`: None
-- `push_to_hub_organization`: None
-- `mp_parameters`:
-- `auto_find_batch_size`: False
-- `full_determinism`: False
-- `torchdynamo`: None
-- `ray_scope`: last
-- `ddp_timeout`: 1800
-- `torch_compile`: False
-- `torch_compile_backend`: None
-- `torch_compile_mode`: None
-- `include_tokens_per_second`: False
-- `include_num_input_tokens_seen`: False
-- `neftune_noise_alpha`: None
-- `optim_target_modules`: None
-- `batch_eval_metrics`: False
-- `eval_on_start`: False
-- `use_liger_kernel`: False
-- `eval_use_gather_object`: False
-- `average_tokens_across_devices`: False
-- `prompts`: None
-- `batch_sampler`: batch_sampler
-- `multi_dataset_batch_sampler`: proportional
-
-
-
-### Training Logs
-| Epoch | Step | Training Loss | Validation Loss | sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap |
-|:------:|:----:|:-------------:|:---------------:|:---------------------------------------------------------------------:|
-| -1 | -1 | - | - | 0.7140 |
-| 0.1877 | 100 | - | 0.0125 | 0.8849 |
-| 0.3754 | 200 | - | 0.0090 | 0.9369 |
-| 0.5631 | 300 | - | 0.0068 | 0.9630 |
-| 0.7508 | 400 | - | 0.0052 | 0.9774 |
-| 0.9385 | 500 | 0.0409 | 0.0040 | 0.9845 |
-| 1.1276 | 600 | - | 0.0033 | 0.9887 |
-| 1.3153 | 700 | - | 0.0028 | 0.9911 |
-| 1.5031 | 800 | - | 0.0026 | 0.9927 |
-| 1.6908 | 900 | - | 0.0022 | 0.9938 |
-| 1.8785 | 1000 | 0.0131 | 0.0022 | 0.9944 |
-| 2.0676 | 1100 | - | 0.0019 | 0.9950 |
-
-
-### Framework Versions
-- Python: 3.12.9
-- Sentence Transformers: 3.4.1
-- Transformers: 4.51.3
-- PyTorch: 2.7.0+cu126
-- Accelerate: 1.6.0
-- Datasets: 3.6.0
-- Tokenizers: 0.21.1
-
-## Citation
-
-### BibTeX
-
-#### Sentence Transformers
-```bibtex
-@inproceedings{reimers-2019-sentence-bert,
- title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
- author = "Reimers, Nils and Gurevych, Iryna",
- booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
- month = "11",
- year = "2019",
- publisher = "Association for Computational Linguistics",
- url = "https://arxiv.org/abs/1908.10084",
-}
-```
-
-#### ContrastiveLoss
-```bibtex
-@inproceedings{hadsell2006dimensionality,
- author={Hadsell, R. and Chopra, S. and LeCun, Y.},
- booktitle={2006 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'06)},
- title={Dimensionality Reduction by Learning an Invariant Mapping},
- year={2006},
- volume={2},
- number={},
- pages={1735-1742},
- doi={10.1109/CVPR.2006.100}
-}
-```
-
-
-
-
-
-
\ No newline at end of file
diff --git a/checkpoint-1100/config.json b/checkpoint-1100/config.json
deleted file mode 100644
index 26e48501fdf44110239e00ad4d438aee8679504a..0000000000000000000000000000000000000000
--- a/checkpoint-1100/config.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
- "architectures": [
- "BertModel"
- ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
- "gradient_checkpointing": false,
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 384,
- "initializer_range": 0.02,
- "intermediate_size": 1536,
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 0,
- "position_embedding_type": "absolute",
- "torch_dtype": "float32",
- "transformers_version": "4.51.3",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 250037
-}
diff --git a/checkpoint-1100/config_sentence_transformers.json b/checkpoint-1100/config_sentence_transformers.json
deleted file mode 100644
index dcf436801f55bd22a257de2aad7eef5cfd06efaa..0000000000000000000000000000000000000000
--- a/checkpoint-1100/config_sentence_transformers.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "__version__": {
- "sentence_transformers": "3.4.1",
- "transformers": "4.51.3",
- "pytorch": "2.7.0+cu126"
- },
- "prompts": {},
- "default_prompt_name": null,
- "similarity_fn_name": "cosine"
-}
\ No newline at end of file
diff --git a/checkpoint-1100/model.safetensors b/checkpoint-1100/model.safetensors
deleted file mode 100644
index b873dde2ea0dcd9a4564f3ed6586ee6cf87d4af4..0000000000000000000000000000000000000000
--- a/checkpoint-1100/model.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:763540a5075ed486170f85323b5ee9b40182439ea8f51d889e8674424cce13c2
-size 470637416
diff --git a/checkpoint-1100/modules.json b/checkpoint-1100/modules.json
deleted file mode 100644
index f7640f94e81bb7f4f04daf1668850b38763a13d9..0000000000000000000000000000000000000000
--- a/checkpoint-1100/modules.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "idx": 0,
- "name": "0",
- "path": "",
- "type": "sentence_transformers.models.Transformer"
- },
- {
- "idx": 1,
- "name": "1",
- "path": "1_Pooling",
- "type": "sentence_transformers.models.Pooling"
- }
-]
\ No newline at end of file
diff --git a/checkpoint-1100/optimizer.pt b/checkpoint-1100/optimizer.pt
deleted file mode 100644
index 03ef458f7ffb9345dd2acfe855ed8260ddc45d46..0000000000000000000000000000000000000000
--- a/checkpoint-1100/optimizer.pt
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:e0711e7f2b2b4728583424781a68346e4cb105f82c5b3e33e835ff6603b1b546
-size 1715019
diff --git a/checkpoint-1100/rng_state.pth b/checkpoint-1100/rng_state.pth
deleted file mode 100644
index daf3e104f28697a178a58c60d9ce3a52fcceacaa..0000000000000000000000000000000000000000
--- a/checkpoint-1100/rng_state.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:d533d8579fdbb2634c2232f32ae13c2e79a071512c8f417a9f5453a5c0587c27
-size 14645
diff --git a/checkpoint-1100/scheduler.pt b/checkpoint-1100/scheduler.pt
deleted file mode 100644
index df1b0fb5337749ce5e08702b420610e645c9e08f..0000000000000000000000000000000000000000
--- a/checkpoint-1100/scheduler.pt
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2a3efc25b9c32ace074d8642ed698ba4f27854c75c4022587a44f288f2399a9b
-size 1465
diff --git a/checkpoint-1100/sentence_bert_config.json b/checkpoint-1100/sentence_bert_config.json
deleted file mode 100644
index 5fd10429389515d3e5cccdeda08cae5fea1ae82e..0000000000000000000000000000000000000000
--- a/checkpoint-1100/sentence_bert_config.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "max_seq_length": 128,
- "do_lower_case": false
-}
\ No newline at end of file
diff --git a/checkpoint-1100/special_tokens_map.json b/checkpoint-1100/special_tokens_map.json
deleted file mode 100644
index b1879d702821e753ffe4245048eee415d54a9385..0000000000000000000000000000000000000000
--- a/checkpoint-1100/special_tokens_map.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "bos_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "cls_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "eos_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "mask_token": {
- "content": "",
- "lstrip": true,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "pad_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "sep_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "unk_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- }
-}
diff --git a/checkpoint-1100/tokenizer.json b/checkpoint-1100/tokenizer.json
deleted file mode 100644
index e3420945e193cc0791136cdc6e5cd69801c838af..0000000000000000000000000000000000000000
--- a/checkpoint-1100/tokenizer.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:cad551d5600a84242d0973327029452a1e3672ba6313c2a3c3d69c4310e12719
-size 17082987
diff --git a/checkpoint-1100/tokenizer_config.json b/checkpoint-1100/tokenizer_config.json
deleted file mode 100644
index facf4436a8f11c26085c16a14f4e576853927a9e..0000000000000000000000000000000000000000
--- a/checkpoint-1100/tokenizer_config.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "added_tokens_decoder": {
- "0": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "1": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "2": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "3": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "250001": {
- "content": "",
- "lstrip": true,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- }
- },
- "bos_token": "",
- "clean_up_tokenization_spaces": false,
- "cls_token": "",
- "do_lower_case": true,
- "eos_token": "",
- "extra_special_tokens": {},
- "mask_token": "",
- "max_length": 128,
- "model_max_length": 128,
- "pad_to_multiple_of": null,
- "pad_token": "",
- "pad_token_type_id": 0,
- "padding_side": "right",
- "sep_token": "",
- "stride": 0,
- "strip_accents": null,
- "tokenize_chinese_chars": true,
- "tokenizer_class": "BertTokenizer",
- "truncation_side": "right",
- "truncation_strategy": "longest_first",
- "unk_token": ""
-}
diff --git a/checkpoint-1100/trainer_state.json b/checkpoint-1100/trainer_state.json
deleted file mode 100644
index c1ec352f7a38fdedbedf50fefef94348e203cdd4..0000000000000000000000000000000000000000
--- a/checkpoint-1100/trainer_state.json
+++ /dev/null
@@ -1,233 +0,0 @@
-{
- "best_global_step": 1100,
- "best_metric": 0.0018734760815277696,
- "best_model_checkpoint": "data/fine-tuned-sbert-sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2-original-adafactor/checkpoint-1100",
- "epoch": 2.0675739089629284,
- "eval_steps": 100,
- "global_step": 1100,
- "is_hyper_param_search": false,
- "is_local_process_zero": true,
- "is_world_process_zero": true,
- "log_history": [
- {
- "epoch": 0.18770530267480057,
- "eval_loss": 0.012530049309134483,
- "eval_runtime": 812.6802,
- "eval_samples_per_second": 3277.151,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.8778235859541618,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7128396034240723,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.8848748516159781,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.812583495899967,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.6880456209182739,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.7185793630359445,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.7900823930955021,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.8364038065429271,
- "eval_steps_per_second": 3.278,
- "step": 100
- },
- {
- "epoch": 0.37541060534960113,
- "eval_loss": 0.009013425558805466,
- "eval_runtime": 792.9843,
- "eval_samples_per_second": 3358.548,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9164113424048541,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7378441095352173,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9368603114664952,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.8729798695775446,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7272344827651978,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.8103205315460159,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.8605654745268148,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.8857576838544123,
- "eval_steps_per_second": 3.359,
- "step": 200
- },
- {
- "epoch": 0.5631159080244017,
- "eval_loss": 0.006819029338657856,
- "eval_runtime": 809.9704,
- "eval_samples_per_second": 3288.115,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9398298338890391,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7449667453765869,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9629957356284182,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9088032597499417,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7449667453765869,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.864029341509194,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.8990159430733201,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9188060251084542,
- "eval_steps_per_second": 3.289,
- "step": 300
- },
- {
- "epoch": 0.7508212106992023,
- "eval_loss": 0.005150709766894579,
- "eval_runtime": 797.9199,
- "eval_samples_per_second": 3337.773,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9560016220600163,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7553268671035767,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9774059659768239,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9333702119012406,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7449506521224976,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9005457325671423,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.916037892637527,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9513710688929036,
- "eval_steps_per_second": 3.339,
- "step": 400
- },
- {
- "epoch": 0.9385265133740028,
- "grad_norm": 0.17396493256092072,
- "learning_rate": 2.9428198433420364e-05,
- "loss": 0.0409,
- "step": 500
- },
- {
- "epoch": 0.9385265133740028,
- "eval_loss": 0.003973629325628281,
- "eval_runtime": 809.4532,
- "eval_samples_per_second": 3290.216,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9655950557207654,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7622435092926025,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9845099503823473,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9477742208778024,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7535413503646851,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9221773981286795,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9367750202319935,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9590347859107281,
- "eval_steps_per_second": 3.291,
- "step": 500
- },
- {
- "epoch": 1.1276396058188645,
- "eval_loss": 0.0032712339889258146,
- "eval_runtime": 793.7573,
- "eval_samples_per_second": 3355.277,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9712722657775374,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7610360383987427,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9887055977101925,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9564087809158087,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7610177993774414,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9350876149915242,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9471753898932449,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9658239646502422,
- "eval_steps_per_second": 3.356,
- "step": 600
- },
- {
- "epoch": 1.3153449084936648,
- "eval_loss": 0.0028166945558041334,
- "eval_runtime": 815.1943,
- "eval_samples_per_second": 3267.044,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9751246583160614,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7577522993087769,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9911117019106511,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9621558129059113,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7424367666244507,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.943665667488554,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9536134909690983,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9708525597505264,
- "eval_steps_per_second": 3.268,
- "step": 700
- },
- {
- "epoch": 1.5030502111684654,
- "eval_loss": 0.0026242006570100784,
- "eval_runtime": 805.7115,
- "eval_samples_per_second": 3305.496,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9782673995974888,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7254683971405029,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9927214598054878,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9669240257663667,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7145971059799194,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9507846488068235,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9597660102710608,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9741896137072368,
- "eval_steps_per_second": 3.306,
- "step": 800
- },
- {
- "epoch": 1.690755513843266,
- "eval_loss": 0.002248650649562478,
- "eval_runtime": 818.5338,
- "eval_samples_per_second": 3253.715,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9801973506353069,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7349117994308472,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9938133122786723,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9698356230196407,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7348856329917908,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9551340483533577,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9641228578901284,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9756164919507957,
- "eval_steps_per_second": 3.255,
- "step": 900
- },
- {
- "epoch": 1.8784608165180665,
- "grad_norm": 0.07541557401418686,
- "learning_rate": 2.5511749347258486e-05,
- "loss": 0.0131,
- "step": 1000
- },
- {
- "epoch": 1.8784608165180665,
- "eval_loss": 0.002240537665784359,
- "eval_runtime": 803.6286,
- "eval_samples_per_second": 3314.063,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9817931272716349,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7197962999343872,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9944127523785896,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9722373310278887,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7091608047485352,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9587183163648803,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9675121928984912,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9770088489465266,
- "eval_steps_per_second": 3.315,
- "step": 1000
- },
- {
- "epoch": 2.0675739089629284,
- "eval_loss": 0.0018734760815277696,
- "eval_runtime": 807.0812,
- "eval_samples_per_second": 3299.886,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9828594815415578,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7552986741065979,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9950493119597241,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.973889221813201,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7401974201202393,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9611601510291333,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9661201195760486,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9817842882294052,
- "eval_steps_per_second": 3.301,
- "step": 1100
- }
- ],
- "logging_steps": 500,
- "max_steps": 4256,
- "num_input_tokens_seen": 0,
- "num_train_epochs": 8,
- "save_steps": 100,
- "stateful_callbacks": {
- "EarlyStoppingCallback": {
- "args": {
- "early_stopping_patience": 1,
- "early_stopping_threshold": 0.0
- },
- "attributes": {
- "early_stopping_patience_counter": 0
- }
- },
- "TrainerControl": {
- "args": {
- "should_epoch_stop": false,
- "should_evaluate": false,
- "should_log": false,
- "should_save": true,
- "should_training_stop": false
- },
- "attributes": {}
- }
- },
- "total_flos": 0.0,
- "train_batch_size": 1000,
- "trial_name": null,
- "trial_params": null
-}
diff --git a/checkpoint-1100/training_args.bin b/checkpoint-1100/training_args.bin
deleted file mode 100644
index f6aba0195c0abacac26202ea40cbb6012662a9ff..0000000000000000000000000000000000000000
--- a/checkpoint-1100/training_args.bin
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9339753774865faea550d7da93688221ca0f43171c16e3034645a2149992c8a6
-size 6033
diff --git a/checkpoint-1100/unigram.json b/checkpoint-1100/unigram.json
deleted file mode 100644
index 2faa9ec874108d53a017ff2c7ab98d155fb21a82..0000000000000000000000000000000000000000
--- a/checkpoint-1100/unigram.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:da145b5e7700ae40f16691ec32a0b1fdc1ee3298db22a31ea55f57a966c4a65d
-size 14763260
diff --git a/checkpoint-1200/1_Pooling/config.json b/checkpoint-1200/1_Pooling/config.json
deleted file mode 100644
index a97f8d140b6aee43dfac9fc4521b2842657c5608..0000000000000000000000000000000000000000
--- a/checkpoint-1200/1_Pooling/config.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "word_embedding_dimension": 384,
- "pooling_mode_cls_token": false,
- "pooling_mode_mean_tokens": true,
- "pooling_mode_max_tokens": false,
- "pooling_mode_mean_sqrt_len_tokens": false,
- "pooling_mode_weightedmean_tokens": false,
- "pooling_mode_lasttoken": false,
- "include_prompt": true
-}
\ No newline at end of file
diff --git a/checkpoint-1200/README.md b/checkpoint-1200/README.md
deleted file mode 100644
index 242cb95f5c93fd570dc1df42c4d441e6d9b8df43..0000000000000000000000000000000000000000
--- a/checkpoint-1200/README.md
+++ /dev/null
@@ -1,468 +0,0 @@
----
-language:
-- en
-license: apache-2.0
-tags:
-- sentence-transformers
-- sentence-similarity
-- feature-extraction
-- generated_from_trainer
-- dataset_size:2130621
-- loss:ContrastiveLoss
-base_model: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2
-widget:
-- source_sentence: Kim Chol-sam
- sentences:
- - Stankevich Sergey Nikolayevich
- - Kim Chin-So’k
- - Julen Lopetegui Agote
-- source_sentence: دينا بنت عبد الحميد
- sentences:
- - Alexia van Amsberg
- - Anthony Nicholas Colin Maitland Biddulph, 5th Baron Biddulph
- - Dina bint Abdul-Hamíd
-- source_sentence: Մուհամեդ բեն Նաիֆ Ալ Սաուդ
- sentences:
- - Karpov Anatoly Evgenyevich
- - GNPower Mariveles Coal Plant [former]
- - Muhammed bin Nayef bin Abdul Aziz Al Saud
-- source_sentence: Edward Gnehm
- sentences:
- - Шауэрте, Хартмут
- - Ханзада Филипп, Эдинбург герцогі
- - AFX
-- source_sentence: Schori i Lidingö
- sentences:
- - Yordan Canev
- - ကားပေါ့ အန်နာတိုလီ
- - BYSTROV, Mikhail Ivanovich
-pipeline_tag: sentence-similarity
-library_name: sentence-transformers
-metrics:
-- cosine_accuracy
-- cosine_accuracy_threshold
-- cosine_f1
-- cosine_f1_threshold
-- cosine_precision
-- cosine_recall
-- cosine_ap
-- cosine_mcc
-model-index:
-- name: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2-name-matcher-original
- results:
- - task:
- type: binary-classification
- name: Binary Classification
- dataset:
- name: sentence transformers paraphrase multilingual MiniLM L12 v2
- type: sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2
- metrics:
- - type: cosine_accuracy
- value: 0.9843050674356433
- name: Cosine Accuracy
- - type: cosine_accuracy_threshold
- value: 0.742120623588562
- name: Cosine Accuracy Threshold
- - type: cosine_f1
- value: 0.9760932477723254
- name: Cosine F1
- - type: cosine_f1_threshold
- value: 0.742120623588562
- name: Cosine F1 Threshold
- - type: cosine_precision
- value: 0.9703216856372878
- name: Cosine Precision
- - type: cosine_recall
- value: 0.9819338803033267
- name: Cosine Recall
- - type: cosine_ap
- value: 0.9955554741842152
- name: Cosine Ap
- - type: cosine_mcc
- value: 0.964449493634366
- name: Cosine Mcc
----
-
-# sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2-name-matcher-original
-
-This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2). It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
-
-## Model Details
-
-### Model Description
-- **Model Type:** Sentence Transformer
-- **Base model:** [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2)
-- **Maximum Sequence Length:** 128 tokens
-- **Output Dimensionality:** 384 dimensions
-- **Similarity Function:** Cosine Similarity
-
-- **Language:** en
-- **License:** apache-2.0
-
-### Model Sources
-
-- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
-- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
-- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
-
-### Full Model Architecture
-
-```
-SentenceTransformer(
- (0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
- (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
-)
-```
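The module stack above (a BertModel transformer followed by mean pooling over 384-dimensional token embeddings) can also be assembled by hand with the public `sentence-transformers` modules API. A minimal sketch, assuming the base model id as a stand-in for the checkpoint directory:

```python
from sentence_transformers import SentenceTransformer, models

# Rebuild the Transformer + mean-pooling stack described above.
# The base model id is an assumption; loading the checkpoint directory works the same way.
word_embedding = models.Transformer(
    "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2",
    max_seq_length=128,
)
pooling = models.Pooling(
    word_embedding.get_word_embedding_dimension(),  # 384
    pooling_mode_mean_tokens=True,
)
model = SentenceTransformer(modules=[word_embedding, pooling])
```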
-
-## Usage
-
-### Direct Usage (Sentence Transformers)
-
-First install the Sentence Transformers library:
-
-```bash
-pip install -U sentence-transformers
-```
-
-Then you can load this model and run inference.
-```python
-from sentence_transformers import SentenceTransformer
-
-# Download from the 🤗 Hub
-model = SentenceTransformer("sentence_transformers_model_id")
-# Run inference
-sentences = [
- 'Schori i Lidingö',
- 'Yordan Canev',
- 'ကားပေါ့ အန်နာတိုလီ',
-]
-embeddings = model.encode(sentences)
-print(embeddings.shape)
-# [3, 384]
-
-# Get the similarity scores for the embeddings
-similarities = model.similarity(embeddings, embeddings)
-print(similarities.shape)
-# [3, 3]
-```
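Because the evaluation below reports a cosine accuracy threshold of roughly 0.74 for this checkpoint, the embeddings are typically used for thresholded name matching. A hedged sketch follows; the placeholder model id and the example pairs are illustrative, and the threshold should be re-validated on your own data:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("sentence_transformers_model_id")  # placeholder id, as above

# Illustrative name pairs (no labels implied).
pairs = [
    ("Kim Chol-sam", "Kim Chin-So'k"),
    ("Edward Gnehm", "AFX"),
]
left = model.encode([a for a, _ in pairs])
right = model.encode([b for _, b in pairs])

scores = model.similarity_pairwise(left, right)  # one cosine score per pair
matches = scores >= 0.74                         # threshold taken from the evaluation below
print(scores.tolist(), matches.tolist())
```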
-
-
-
-
-
-
-
-## Evaluation
-
-### Metrics
-
-#### Binary Classification
-
-* Dataset: `sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2`
-* Evaluated with [BinaryClassificationEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.BinaryClassificationEvaluator)
-
-| Metric | Value |
-|:--------------------------|:-----------|
-| cosine_accuracy | 0.9843 |
-| cosine_accuracy_threshold | 0.7421 |
-| cosine_f1 | 0.9761 |
-| cosine_f1_threshold | 0.7421 |
-| cosine_precision | 0.9703 |
-| cosine_recall | 0.9819 |
-| **cosine_ap** | **0.9956** |
-| cosine_mcc | 0.9644 |
-
-
-
-
-
-## Training Details
-
-### Training Dataset
-
-#### Unnamed Dataset
-
-* Size: 2,130,621 training samples
-* Columns: sentence1, sentence2, and label
-* Approximate statistics based on the first 1000 samples:
- | | sentence1 | sentence2 | label |
- |:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------|
- | type | string | string | float |
- | details | min: 3 tokens<br>mean: 9.32 tokens<br>max: 57 tokens | min: 3 tokens<br>mean: 9.16 tokens<br>max: 54 tokens | min: 0.0<br>mean: 0.34<br>max: 1.0 |
-* Samples:
- | sentence1 | sentence2 | label |
- |:----------------------------------|:------------------------------------|:-----------------|
- | 캐스린 설리번 | Kathryn D. Sullivanová | 1.0 |
- | ଶିବରାଜ ଅଧାଲରାଓ ପାଟିଲ | Aleksander Lubocki | 0.0 |
- | Пырванов, Георги | アナトーリー・セルジュコフ | 0.0 |
-* Loss: [ContrastiveLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
- ```json
- {
- "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
- "margin": 0.5,
- "size_average": true
- }
- ```
-
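The ContrastiveLoss parameters listed above map directly onto the loss constructor in `sentence-transformers`. A minimal sketch of that wiring only (model id and variable names are illustrative; the trainer setup is omitted):

```python
from sentence_transformers import SentenceTransformer, losses

model = SentenceTransformer("sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2")

# Mirrors the parameter block above; expects (sentence1, sentence2, label) training columns.
train_loss = losses.ContrastiveLoss(
    model=model,
    distance_metric=losses.SiameseDistanceMetric.COSINE_DISTANCE,
    margin=0.5,
    size_average=True,
)
```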
-### Evaluation Dataset
-
-#### Unnamed Dataset
-
-* Size: 2,663,276 evaluation samples
-* Columns: sentence1, sentence2, and label
-* Approximate statistics based on the first 1000 samples:
- | | sentence1 | sentence2 | label |
- |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------|
- | type | string | string | float |
- | details | min: 3 tokens<br>mean: 9.34 tokens<br>max: 102 tokens | min: 4 tokens<br>mean: 9.11 tokens<br>max: 100 tokens | min: 0.0<br>mean: 0.33<br>max: 1.0 |
-* Samples:
- | sentence1 | sentence2 | label |
- |:--------------------------------------|:---------------------------------------|:-----------------|
- | Ева Херман | I Xuan Karlos | 0.0 |
- | Кличков Андрій Євгенович | Андрэй Яўгенавіч Клычкоў | 1.0 |
- | Кинах А. | Senator John Hickenlooper | 0.0 |
-* Loss: [ContrastiveLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
- ```json
- {
- "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
- "margin": 0.5,
- "size_average": true
- }
- ```
-
-### Training Hyperparameters
-#### Non-Default Hyperparameters
-
-- `eval_strategy`: steps
-- `per_device_train_batch_size`: 1000
-- `per_device_eval_batch_size`: 1000
-- `gradient_accumulation_steps`: 4
-- `learning_rate`: 3e-05
-- `weight_decay`: 0.01
-- `num_train_epochs`: 8
-- `warmup_ratio`: 0.1
-- `fp16_opt_level`: O0
-- `load_best_model_at_end`: True
-- `optim`: adafactor
-
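Assuming the standard `sentence-transformers` trainer, the non-default values above would be passed through `SentenceTransformerTrainingArguments`. A sketch mirroring only those values (the output directory is a placeholder; everything else stays at its default):

```python
from sentence_transformers.training_args import SentenceTransformerTrainingArguments

args = SentenceTransformerTrainingArguments(
    output_dir="output",                 # placeholder path
    eval_strategy="steps",
    per_device_train_batch_size=1000,
    per_device_eval_batch_size=1000,
    gradient_accumulation_steps=4,
    learning_rate=3e-5,
    weight_decay=0.01,
    num_train_epochs=8,
    warmup_ratio=0.1,
    fp16_opt_level="O0",
    load_best_model_at_end=True,
    optim="adafactor",
)
```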
-#### All Hyperparameters
-Click to expand
-
-- `overwrite_output_dir`: False
-- `do_predict`: False
-- `eval_strategy`: steps
-- `prediction_loss_only`: True
-- `per_device_train_batch_size`: 1000
-- `per_device_eval_batch_size`: 1000
-- `per_gpu_train_batch_size`: None
-- `per_gpu_eval_batch_size`: None
-- `gradient_accumulation_steps`: 4
-- `eval_accumulation_steps`: None
-- `torch_empty_cache_steps`: None
-- `learning_rate`: 3e-05
-- `weight_decay`: 0.01
-- `adam_beta1`: 0.9
-- `adam_beta2`: 0.999
-- `adam_epsilon`: 1e-08
-- `max_grad_norm`: 1.0
-- `num_train_epochs`: 8
-- `max_steps`: -1
-- `lr_scheduler_type`: linear
-- `lr_scheduler_kwargs`: {}
-- `warmup_ratio`: 0.1
-- `warmup_steps`: 0
-- `log_level`: passive
-- `log_level_replica`: warning
-- `log_on_each_node`: True
-- `logging_nan_inf_filter`: True
-- `save_safetensors`: True
-- `save_on_each_node`: False
-- `save_only_model`: False
-- `restore_callback_states_from_checkpoint`: False
-- `no_cuda`: False
-- `use_cpu`: False
-- `use_mps_device`: False
-- `seed`: 42
-- `data_seed`: None
-- `jit_mode_eval`: False
-- `use_ipex`: False
-- `bf16`: False
-- `fp16`: False
-- `fp16_opt_level`: O0
-- `half_precision_backend`: auto
-- `bf16_full_eval`: False
-- `fp16_full_eval`: False
-- `tf32`: None
-- `local_rank`: 0
-- `ddp_backend`: None
-- `tpu_num_cores`: None
-- `tpu_metrics_debug`: False
-- `debug`: []
-- `dataloader_drop_last`: False
-- `dataloader_num_workers`: 0
-- `dataloader_prefetch_factor`: None
-- `past_index`: -1
-- `disable_tqdm`: False
-- `remove_unused_columns`: True
-- `label_names`: None
-- `load_best_model_at_end`: True
-- `ignore_data_skip`: False
-- `fsdp`: []
-- `fsdp_min_num_params`: 0
-- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
-- `tp_size`: 0
-- `fsdp_transformer_layer_cls_to_wrap`: None
-- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
-- `deepspeed`: None
-- `label_smoothing_factor`: 0.0
-- `optim`: adafactor
-- `optim_args`: None
-- `adafactor`: False
-- `group_by_length`: False
-- `length_column_name`: length
-- `ddp_find_unused_parameters`: None
-- `ddp_bucket_cap_mb`: None
-- `ddp_broadcast_buffers`: False
-- `dataloader_pin_memory`: True
-- `dataloader_persistent_workers`: False
-- `skip_memory_metrics`: True
-- `use_legacy_prediction_loop`: False
-- `push_to_hub`: False
-- `resume_from_checkpoint`: None
-- `hub_model_id`: None
-- `hub_strategy`: every_save
-- `hub_private_repo`: None
-- `hub_always_push`: False
-- `gradient_checkpointing`: False
-- `gradient_checkpointing_kwargs`: None
-- `include_inputs_for_metrics`: False
-- `include_for_metrics`: []
-- `eval_do_concat_batches`: True
-- `fp16_backend`: auto
-- `push_to_hub_model_id`: None
-- `push_to_hub_organization`: None
-- `mp_parameters`:
-- `auto_find_batch_size`: False
-- `full_determinism`: False
-- `torchdynamo`: None
-- `ray_scope`: last
-- `ddp_timeout`: 1800
-- `torch_compile`: False
-- `torch_compile_backend`: None
-- `torch_compile_mode`: None
-- `include_tokens_per_second`: False
-- `include_num_input_tokens_seen`: False
-- `neftune_noise_alpha`: None
-- `optim_target_modules`: None
-- `batch_eval_metrics`: False
-- `eval_on_start`: False
-- `use_liger_kernel`: False
-- `eval_use_gather_object`: False
-- `average_tokens_across_devices`: False
-- `prompts`: None
-- `batch_sampler`: batch_sampler
-- `multi_dataset_batch_sampler`: proportional
-
-
-
-### Training Logs
-| Epoch | Step | Training Loss | Validation Loss | sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap |
-|:------:|:----:|:-------------:|:---------------:|:---------------------------------------------------------------------:|
-| -1 | -1 | - | - | 0.7140 |
-| 0.1877 | 100 | - | 0.0125 | 0.8849 |
-| 0.3754 | 200 | - | 0.0090 | 0.9369 |
-| 0.5631 | 300 | - | 0.0068 | 0.9630 |
-| 0.7508 | 400 | - | 0.0052 | 0.9774 |
-| 0.9385 | 500 | 0.0409 | 0.0040 | 0.9845 |
-| 1.1276 | 600 | - | 0.0033 | 0.9887 |
-| 1.3153 | 700 | - | 0.0028 | 0.9911 |
-| 1.5031 | 800 | - | 0.0026 | 0.9927 |
-| 1.6908 | 900 | - | 0.0022 | 0.9938 |
-| 1.8785 | 1000 | 0.0131 | 0.0022 | 0.9944 |
-| 2.0676 | 1100 | - | 0.0019 | 0.9950 |
-| 2.2553 | 1200 | - | 0.0017 | 0.9956 |
-
-
-### Framework Versions
-- Python: 3.12.9
-- Sentence Transformers: 3.4.1
-- Transformers: 4.51.3
-- PyTorch: 2.7.0+cu126
-- Accelerate: 1.6.0
-- Datasets: 3.6.0
-- Tokenizers: 0.21.1
-
-## Citation
-
-### BibTeX
-
-#### Sentence Transformers
-```bibtex
-@inproceedings{reimers-2019-sentence-bert,
- title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
- author = "Reimers, Nils and Gurevych, Iryna",
- booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
- month = "11",
- year = "2019",
- publisher = "Association for Computational Linguistics",
- url = "https://arxiv.org/abs/1908.10084",
-}
-```
-
-#### ContrastiveLoss
-```bibtex
-@inproceedings{hadsell2006dimensionality,
- author={Hadsell, R. and Chopra, S. and LeCun, Y.},
- booktitle={2006 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'06)},
- title={Dimensionality Reduction by Learning an Invariant Mapping},
- year={2006},
- volume={2},
- number={},
- pages={1735-1742},
- doi={10.1109/CVPR.2006.100}
-}
-```
-
-
-
-
-
-
\ No newline at end of file
diff --git a/checkpoint-1200/config.json b/checkpoint-1200/config.json
deleted file mode 100644
index 26e48501fdf44110239e00ad4d438aee8679504a..0000000000000000000000000000000000000000
--- a/checkpoint-1200/config.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
- "architectures": [
- "BertModel"
- ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
- "gradient_checkpointing": false,
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 384,
- "initializer_range": 0.02,
- "intermediate_size": 1536,
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 0,
- "position_embedding_type": "absolute",
- "torch_dtype": "float32",
- "transformers_version": "4.51.3",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 250037
-}
diff --git a/checkpoint-1200/config_sentence_transformers.json b/checkpoint-1200/config_sentence_transformers.json
deleted file mode 100644
index dcf436801f55bd22a257de2aad7eef5cfd06efaa..0000000000000000000000000000000000000000
--- a/checkpoint-1200/config_sentence_transformers.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "__version__": {
- "sentence_transformers": "3.4.1",
- "transformers": "4.51.3",
- "pytorch": "2.7.0+cu126"
- },
- "prompts": {},
- "default_prompt_name": null,
- "similarity_fn_name": "cosine"
-}
\ No newline at end of file
diff --git a/checkpoint-1200/model.safetensors b/checkpoint-1200/model.safetensors
deleted file mode 100644
index 758df5d10ff7af9db5b1be5837fa1db20cc72d2f..0000000000000000000000000000000000000000
--- a/checkpoint-1200/model.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2a16798609ad3be64f1c33cafbc6d8595006225a97722265fbba67e2dfaf916a
-size 470637416
diff --git a/checkpoint-1200/modules.json b/checkpoint-1200/modules.json
deleted file mode 100644
index f7640f94e81bb7f4f04daf1668850b38763a13d9..0000000000000000000000000000000000000000
--- a/checkpoint-1200/modules.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "idx": 0,
- "name": "0",
- "path": "",
- "type": "sentence_transformers.models.Transformer"
- },
- {
- "idx": 1,
- "name": "1",
- "path": "1_Pooling",
- "type": "sentence_transformers.models.Pooling"
- }
-]
\ No newline at end of file
diff --git a/checkpoint-1200/optimizer.pt b/checkpoint-1200/optimizer.pt
deleted file mode 100644
index 688a4c38248062de6965ef846f0ce9ad3a2b236b..0000000000000000000000000000000000000000
--- a/checkpoint-1200/optimizer.pt
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6e6c0d2369b6fe2e14855dd6ee01f523f4a5901a968149aedc664f3defacc964
-size 1715019
diff --git a/checkpoint-1200/rng_state.pth b/checkpoint-1200/rng_state.pth
deleted file mode 100644
index f7883f1663e6f9392cc038e07a44a0505edba1b0..0000000000000000000000000000000000000000
--- a/checkpoint-1200/rng_state.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:51a5e12d95d9820ac91d074df8188d98ce5f4fc76cb3ec8a63d860d96a200697
-size 14645
diff --git a/checkpoint-1200/scheduler.pt b/checkpoint-1200/scheduler.pt
deleted file mode 100644
index 71a8a43af78ecac02cddf7ac3bc51a2133d1a52d..0000000000000000000000000000000000000000
--- a/checkpoint-1200/scheduler.pt
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:efdbea4967733a900bbe36cb7fab0e417825ab1560e9b509550180a0f55ecc51
-size 1465
diff --git a/checkpoint-1200/sentence_bert_config.json b/checkpoint-1200/sentence_bert_config.json
deleted file mode 100644
index 5fd10429389515d3e5cccdeda08cae5fea1ae82e..0000000000000000000000000000000000000000
--- a/checkpoint-1200/sentence_bert_config.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "max_seq_length": 128,
- "do_lower_case": false
-}
\ No newline at end of file
diff --git a/checkpoint-1200/special_tokens_map.json b/checkpoint-1200/special_tokens_map.json
deleted file mode 100644
index b1879d702821e753ffe4245048eee415d54a9385..0000000000000000000000000000000000000000
--- a/checkpoint-1200/special_tokens_map.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "bos_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "cls_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "eos_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "mask_token": {
- "content": "",
- "lstrip": true,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "pad_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "sep_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "unk_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- }
-}
diff --git a/checkpoint-1200/tokenizer.json b/checkpoint-1200/tokenizer.json
deleted file mode 100644
index e3420945e193cc0791136cdc6e5cd69801c838af..0000000000000000000000000000000000000000
--- a/checkpoint-1200/tokenizer.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:cad551d5600a84242d0973327029452a1e3672ba6313c2a3c3d69c4310e12719
-size 17082987
diff --git a/checkpoint-1200/tokenizer_config.json b/checkpoint-1200/tokenizer_config.json
deleted file mode 100644
index facf4436a8f11c26085c16a14f4e576853927a9e..0000000000000000000000000000000000000000
--- a/checkpoint-1200/tokenizer_config.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "added_tokens_decoder": {
- "0": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "1": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "2": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "3": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "250001": {
- "content": "",
- "lstrip": true,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- }
- },
- "bos_token": "",
- "clean_up_tokenization_spaces": false,
- "cls_token": "",
- "do_lower_case": true,
- "eos_token": "",
- "extra_special_tokens": {},
- "mask_token": "",
- "max_length": 128,
- "model_max_length": 128,
- "pad_to_multiple_of": null,
- "pad_token": "",
- "pad_token_type_id": 0,
- "padding_side": "right",
- "sep_token": "",
- "stride": 0,
- "strip_accents": null,
- "tokenize_chinese_chars": true,
- "tokenizer_class": "BertTokenizer",
- "truncation_side": "right",
- "truncation_strategy": "longest_first",
- "unk_token": ""
-}
diff --git a/checkpoint-1200/trainer_state.json b/checkpoint-1200/trainer_state.json
deleted file mode 100644
index b35e26591e541dc6afd36c4085cc569cbc91bc99..0000000000000000000000000000000000000000
--- a/checkpoint-1200/trainer_state.json
+++ /dev/null
@@ -1,249 +0,0 @@
-{
- "best_global_step": 1200,
- "best_metric": 0.0017435119953006506,
- "best_model_checkpoint": "data/fine-tuned-sbert-sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2-original-adafactor/checkpoint-1200",
- "epoch": 2.255279211637729,
- "eval_steps": 100,
- "global_step": 1200,
- "is_hyper_param_search": false,
- "is_local_process_zero": true,
- "is_world_process_zero": true,
- "log_history": [
- {
- "epoch": 0.18770530267480057,
- "eval_loss": 0.012530049309134483,
- "eval_runtime": 812.6802,
- "eval_samples_per_second": 3277.151,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.8778235859541618,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7128396034240723,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.8848748516159781,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.812583495899967,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.6880456209182739,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.7185793630359445,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.7900823930955021,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.8364038065429271,
- "eval_steps_per_second": 3.278,
- "step": 100
- },
- {
- "epoch": 0.37541060534960113,
- "eval_loss": 0.009013425558805466,
- "eval_runtime": 792.9843,
- "eval_samples_per_second": 3358.548,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9164113424048541,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7378441095352173,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9368603114664952,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.8729798695775446,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7272344827651978,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.8103205315460159,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.8605654745268148,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.8857576838544123,
- "eval_steps_per_second": 3.359,
- "step": 200
- },
- {
- "epoch": 0.5631159080244017,
- "eval_loss": 0.006819029338657856,
- "eval_runtime": 809.9704,
- "eval_samples_per_second": 3288.115,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9398298338890391,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7449667453765869,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9629957356284182,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9088032597499417,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7449667453765869,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.864029341509194,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.8990159430733201,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9188060251084542,
- "eval_steps_per_second": 3.289,
- "step": 300
- },
- {
- "epoch": 0.7508212106992023,
- "eval_loss": 0.005150709766894579,
- "eval_runtime": 797.9199,
- "eval_samples_per_second": 3337.773,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9560016220600163,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7553268671035767,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9774059659768239,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9333702119012406,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7449506521224976,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9005457325671423,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.916037892637527,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9513710688929036,
- "eval_steps_per_second": 3.339,
- "step": 400
- },
- {
- "epoch": 0.9385265133740028,
- "grad_norm": 0.17396493256092072,
- "learning_rate": 2.9428198433420364e-05,
- "loss": 0.0409,
- "step": 500
- },
- {
- "epoch": 0.9385265133740028,
- "eval_loss": 0.003973629325628281,
- "eval_runtime": 809.4532,
- "eval_samples_per_second": 3290.216,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9655950557207654,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7622435092926025,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9845099503823473,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9477742208778024,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7535413503646851,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9221773981286795,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9367750202319935,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9590347859107281,
- "eval_steps_per_second": 3.291,
- "step": 500
- },
- {
- "epoch": 1.1276396058188645,
- "eval_loss": 0.0032712339889258146,
- "eval_runtime": 793.7573,
- "eval_samples_per_second": 3355.277,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9712722657775374,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7610360383987427,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9887055977101925,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9564087809158087,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7610177993774414,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9350876149915242,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9471753898932449,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9658239646502422,
- "eval_steps_per_second": 3.356,
- "step": 600
- },
- {
- "epoch": 1.3153449084936648,
- "eval_loss": 0.0028166945558041334,
- "eval_runtime": 815.1943,
- "eval_samples_per_second": 3267.044,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9751246583160614,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7577522993087769,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9911117019106511,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9621558129059113,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7424367666244507,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.943665667488554,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9536134909690983,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9708525597505264,
- "eval_steps_per_second": 3.268,
- "step": 700
- },
- {
- "epoch": 1.5030502111684654,
- "eval_loss": 0.0026242006570100784,
- "eval_runtime": 805.7115,
- "eval_samples_per_second": 3305.496,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9782673995974888,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7254683971405029,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9927214598054878,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9669240257663667,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7145971059799194,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9507846488068235,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9597660102710608,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9741896137072368,
- "eval_steps_per_second": 3.306,
- "step": 800
- },
- {
- "epoch": 1.690755513843266,
- "eval_loss": 0.002248650649562478,
- "eval_runtime": 818.5338,
- "eval_samples_per_second": 3253.715,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9801973506353069,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7349117994308472,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9938133122786723,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9698356230196407,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7348856329917908,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9551340483533577,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9641228578901284,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9756164919507957,
- "eval_steps_per_second": 3.255,
- "step": 900
- },
- {
- "epoch": 1.8784608165180665,
- "grad_norm": 0.07541557401418686,
- "learning_rate": 2.5511749347258486e-05,
- "loss": 0.0131,
- "step": 1000
- },
- {
- "epoch": 1.8784608165180665,
- "eval_loss": 0.002240537665784359,
- "eval_runtime": 803.6286,
- "eval_samples_per_second": 3314.063,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9817931272716349,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7197962999343872,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9944127523785896,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9722373310278887,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7091608047485352,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9587183163648803,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9675121928984912,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9770088489465266,
- "eval_steps_per_second": 3.315,
- "step": 1000
- },
- {
- "epoch": 2.0675739089629284,
- "eval_loss": 0.0018734760815277696,
- "eval_runtime": 807.0812,
- "eval_samples_per_second": 3299.886,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9828594815415578,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7552986741065979,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9950493119597241,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.973889221813201,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7401974201202393,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9611601510291333,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9661201195760486,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9817842882294052,
- "eval_steps_per_second": 3.301,
- "step": 1100
- },
- {
- "epoch": 2.255279211637729,
- "eval_loss": 0.0017435119953006506,
- "eval_runtime": 802.6162,
- "eval_samples_per_second": 3318.244,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9843050674356433,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.742120623588562,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9955554741842152,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9760932477723254,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.742120623588562,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.964449493634366,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9703216856372878,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9819338803033267,
- "eval_steps_per_second": 3.319,
- "step": 1200
- }
- ],
- "logging_steps": 500,
- "max_steps": 4256,
- "num_input_tokens_seen": 0,
- "num_train_epochs": 8,
- "save_steps": 100,
- "stateful_callbacks": {
- "EarlyStoppingCallback": {
- "args": {
- "early_stopping_patience": 1,
- "early_stopping_threshold": 0.0
- },
- "attributes": {
- "early_stopping_patience_counter": 0
- }
- },
- "TrainerControl": {
- "args": {
- "should_epoch_stop": false,
- "should_evaluate": false,
- "should_log": false,
- "should_save": true,
- "should_training_stop": false
- },
- "attributes": {}
- }
- },
- "total_flos": 0.0,
- "train_batch_size": 1000,
- "trial_name": null,
- "trial_params": null
-}
diff --git a/checkpoint-1200/training_args.bin b/checkpoint-1200/training_args.bin
deleted file mode 100644
index f6aba0195c0abacac26202ea40cbb6012662a9ff..0000000000000000000000000000000000000000
--- a/checkpoint-1200/training_args.bin
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9339753774865faea550d7da93688221ca0f43171c16e3034645a2149992c8a6
-size 6033
diff --git a/checkpoint-1200/unigram.json b/checkpoint-1200/unigram.json
deleted file mode 100644
index 2faa9ec874108d53a017ff2c7ab98d155fb21a82..0000000000000000000000000000000000000000
--- a/checkpoint-1200/unigram.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:da145b5e7700ae40f16691ec32a0b1fdc1ee3298db22a31ea55f57a966c4a65d
-size 14763260
diff --git a/checkpoint-1300/1_Pooling/config.json b/checkpoint-1300/1_Pooling/config.json
deleted file mode 100644
index a97f8d140b6aee43dfac9fc4521b2842657c5608..0000000000000000000000000000000000000000
--- a/checkpoint-1300/1_Pooling/config.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "word_embedding_dimension": 384,
- "pooling_mode_cls_token": false,
- "pooling_mode_mean_tokens": true,
- "pooling_mode_max_tokens": false,
- "pooling_mode_mean_sqrt_len_tokens": false,
- "pooling_mode_weightedmean_tokens": false,
- "pooling_mode_lasttoken": false,
- "include_prompt": true
-}
\ No newline at end of file
diff --git a/checkpoint-1300/README.md b/checkpoint-1300/README.md
deleted file mode 100644
index 5fd2a478fc5e1e56ffeef38e9c5b0835aa37ad72..0000000000000000000000000000000000000000
--- a/checkpoint-1300/README.md
+++ /dev/null
@@ -1,469 +0,0 @@
----
-language:
-- en
-license: apache-2.0
-tags:
-- sentence-transformers
-- sentence-similarity
-- feature-extraction
-- generated_from_trainer
-- dataset_size:2130621
-- loss:ContrastiveLoss
-base_model: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2
-widget:
-- source_sentence: Kim Chol-sam
- sentences:
- - Stankevich Sergey Nikolayevich
- - Kim Chin-So’k
- - Julen Lopetegui Agote
-- source_sentence: دينا بنت عبد الحميد
- sentences:
- - Alexia van Amsberg
- - Anthony Nicholas Colin Maitland Biddulph, 5th Baron Biddulph
- - Dina bint Abdul-Hamíd
-- source_sentence: Մուհամեդ բեն Նաիֆ Ալ Սաուդ
- sentences:
- - Karpov Anatoly Evgenyevich
- - GNPower Mariveles Coal Plant [former]
- - Muhammed bin Nayef bin Abdul Aziz Al Saud
-- source_sentence: Edward Gnehm
- sentences:
- - Шауэрте, Хартмут
- - Ханзада Филипп, Эдинбург герцогі
- - AFX
-- source_sentence: Schori i Lidingö
- sentences:
- - Yordan Canev
- - ကားပေါ့ အန်နာတိုလီ
- - BYSTROV, Mikhail Ivanovich
-pipeline_tag: sentence-similarity
-library_name: sentence-transformers
-metrics:
-- cosine_accuracy
-- cosine_accuracy_threshold
-- cosine_f1
-- cosine_f1_threshold
-- cosine_precision
-- cosine_recall
-- cosine_ap
-- cosine_mcc
-model-index:
-- name: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2-name-matcher-original
- results:
- - task:
- type: binary-classification
- name: Binary Classification
- dataset:
- name: sentence transformers paraphrase multilingual MiniLM L12 v2
- type: sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2
- metrics:
- - type: cosine_accuracy
- value: 0.9853564026313418
- name: Cosine Accuracy
- - type: cosine_accuracy_threshold
- value: 0.6976222991943359
- name: Cosine Accuracy Threshold
- - type: cosine_f1
- value: 0.9776227541137591
- name: Cosine F1
- - type: cosine_f1_threshold
- value: 0.6851664781570435
- name: Cosine F1 Threshold
- - type: cosine_precision
- value: 0.9732136748238192
- name: Cosine Precision
- - type: cosine_recall
- value: 0.9820719652946388
- name: Cosine Recall
- - type: cosine_ap
- value: 0.9958172202316342
- name: Cosine Ap
- - type: cosine_mcc
- value: 0.9667334329094465
- name: Cosine Mcc
----
-
-# sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2-name-matcher-original
-
-This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2). It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
-
-## Model Details
-
-### Model Description
-- **Model Type:** Sentence Transformer
-- **Base model:** [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2)
-- **Maximum Sequence Length:** 128 tokens
-- **Output Dimensionality:** 384 dimensions
-- **Similarity Function:** Cosine Similarity
-
-- **Language:** en
-- **License:** apache-2.0
-
-### Model Sources
-
-- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
-- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
-- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
-
-### Full Model Architecture
-
-```
-SentenceTransformer(
- (0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
- (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
-)
-```
-
-## Usage
-
-### Direct Usage (Sentence Transformers)
-
-First install the Sentence Transformers library:
-
-```bash
-pip install -U sentence-transformers
-```
-
-Then you can load this model and run inference.
-```python
-from sentence_transformers import SentenceTransformer
-
-# Download from the 🤗 Hub
-model = SentenceTransformer("sentence_transformers_model_id")
-# Run inference
-sentences = [
- 'Schori i Lidingö',
- 'Yordan Canev',
- 'ကားပေါ့ အန်နာတိုလီ',
-]
-embeddings = model.encode(sentences)
-print(embeddings.shape)
-# [3, 384]
-
-# Get the similarity scores for the embeddings
-similarities = model.similarity(embeddings, embeddings)
-print(similarities.shape)
-# [3, 3]
-```
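Beyond the shape check above, a common use of these embeddings is picking the closest candidate for a query name. A sketch with illustrative strings and the same placeholder model id:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("sentence_transformers_model_id")  # placeholder id, as above

query = "Schori i Lidingö"
candidates = ["Yordan Canev", "ကားပေါ့ အန်နာတိုလီ", "BYSTROV, Mikhail Ivanovich"]

scores = model.similarity(model.encode([query]), model.encode(candidates))  # shape [1, 3]
best = int(scores.argmax())
print(candidates[best], float(scores[0, best]))
```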
-
-
-
-
-
-
-
-## Evaluation
-
-### Metrics
-
-#### Binary Classification
-
-* Dataset: `sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2`
-* Evaluated with [BinaryClassificationEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.BinaryClassificationEvaluator)
-
-| Metric | Value |
-|:--------------------------|:-----------|
-| cosine_accuracy | 0.9854 |
-| cosine_accuracy_threshold | 0.6976 |
-| cosine_f1 | 0.9776 |
-| cosine_f1_threshold | 0.6852 |
-| cosine_precision | 0.9732 |
-| cosine_recall | 0.9821 |
-| **cosine_ap** | **0.9958** |
-| cosine_mcc | 0.9667 |
-
-
-
-
-
-## Training Details
-
-### Training Dataset
-
-#### Unnamed Dataset
-
-* Size: 2,130,621 training samples
-* Columns: sentence1, sentence2, and label
-* Approximate statistics based on the first 1000 samples:
- | | sentence1 | sentence2 | label |
- |:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------|
- | type | string | string | float |
- | details | min: 3 tokens<br>mean: 9.32 tokens<br>max: 57 tokens | min: 3 tokens<br>mean: 9.16 tokens<br>max: 54 tokens | min: 0.0<br>mean: 0.34<br>max: 1.0 |
-* Samples:
- | sentence1 | sentence2 | label |
- |:----------------------------------|:------------------------------------|:-----------------|
- | 캐스린 설리번 | Kathryn D. Sullivanová | 1.0 |
- | ଶିବରାଜ ଅଧାଲରାଓ ପାଟିଲ | Aleksander Lubocki | 0.0 |
- | Пырванов, Георги | アナトーリー・セルジュコフ | 0.0 |
-* Loss: [ContrastiveLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
- ```json
- {
- "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
- "margin": 0.5,
- "size_average": true
- }
- ```
-
-### Evaluation Dataset
-
-#### Unnamed Dataset
-
-* Size: 2,663,276 evaluation samples
-* Columns: sentence1, sentence2, and label
-* Approximate statistics based on the first 1000 samples:
- | | sentence1 | sentence2 | label |
- |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------|
- | type | string | string | float |
- | details | <ul><li>min: 3 tokens</li><li>mean: 9.34 tokens</li><li>max: 102 tokens</li></ul> | <ul><li>min: 4 tokens</li><li>mean: 9.11 tokens</li><li>max: 100 tokens</li></ul> | <ul><li>min: 0.0</li><li>mean: 0.33</li><li>max: 1.0</li></ul> |
-* Samples:
- | sentence1 | sentence2 | label |
- |:--------------------------------------|:---------------------------------------|:-----------------|
- | <code>Ева Херман</code> | <code>I Xuan Karlos</code> | <code>0.0</code> |
- | <code>Кличков Андрій Євгенович</code> | <code>Андрэй Яўгенавіч Клычкоў</code> | <code>1.0</code> |
- | <code>Кинах А.</code> | <code>Senator John Hickenlooper</code> | <code>0.0</code> |
-* Loss: [<code>ContrastiveLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
- ```json
- {
- "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
- "margin": 0.5,
- "size_average": true
- }
- ```
-
-### Training Hyperparameters
-#### Non-Default Hyperparameters
-
-- `eval_strategy`: steps
-- `per_device_train_batch_size`: 1000
-- `per_device_eval_batch_size`: 1000
-- `gradient_accumulation_steps`: 4
-- `learning_rate`: 3e-05
-- `weight_decay`: 0.01
-- `num_train_epochs`: 8
-- `warmup_ratio`: 0.1
-- `fp16_opt_level`: O0
-- `load_best_model_at_end`: True
-- `optim`: adafactor
-
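-These non-default values map onto the Sentence Transformers v3 training API roughly as sketched below; the two-row toy datasets stand in for the real 2.1M-pair data and only a subset of the arguments is shown, so this illustrates the configuration rather than the exact training script:
-
-```python
-from datasets import Dataset
-from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer, SentenceTransformerTrainingArguments
-from sentence_transformers.losses import ContrastiveLoss
-
-model = SentenceTransformer("sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2")
-
-# Tiny stand-in datasets with the sentence1/sentence2/label columns described above.
-train_dataset = Dataset.from_dict({
-    "sentence1": ["캐스린 설리번", "ଶିବରାଜ ଅଧାଲରାଓ ପାଟିଲ"],
-    "sentence2": ["Kathryn D. Sullivanová", "Aleksander Lubocki"],
-    "label": [1.0, 0.0],
-})
-eval_dataset = Dataset.from_dict({
-    "sentence1": ["Кличков Андрій Євгенович", "Кинах А."],
-    "sentence2": ["Андрэй Яўгенавіч Клычкоў", "Senator John Hickenlooper"],
-    "label": [1.0, 0.0],
-})
-
-args = SentenceTransformerTrainingArguments(
-    output_dir="output",
-    eval_strategy="steps",
-    per_device_train_batch_size=1000,
-    per_device_eval_batch_size=1000,
-    gradient_accumulation_steps=4,
-    learning_rate=3e-5,
-    weight_decay=0.01,
-    num_train_epochs=8,
-    warmup_ratio=0.1,
-    load_best_model_at_end=True,
-    optim="adafactor",
-)
-
-trainer = SentenceTransformerTrainer(
-    model=model,
-    args=args,
-    train_dataset=train_dataset,
-    eval_dataset=eval_dataset,
-    loss=ContrastiveLoss(model, margin=0.5),
-)
-trainer.train()
-```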
-#### All Hyperparameters
-<details><summary>Click to expand</summary>
-
-- `overwrite_output_dir`: False
-- `do_predict`: False
-- `eval_strategy`: steps
-- `prediction_loss_only`: True
-- `per_device_train_batch_size`: 1000
-- `per_device_eval_batch_size`: 1000
-- `per_gpu_train_batch_size`: None
-- `per_gpu_eval_batch_size`: None
-- `gradient_accumulation_steps`: 4
-- `eval_accumulation_steps`: None
-- `torch_empty_cache_steps`: None
-- `learning_rate`: 3e-05
-- `weight_decay`: 0.01
-- `adam_beta1`: 0.9
-- `adam_beta2`: 0.999
-- `adam_epsilon`: 1e-08
-- `max_grad_norm`: 1.0
-- `num_train_epochs`: 8
-- `max_steps`: -1
-- `lr_scheduler_type`: linear
-- `lr_scheduler_kwargs`: {}
-- `warmup_ratio`: 0.1
-- `warmup_steps`: 0
-- `log_level`: passive
-- `log_level_replica`: warning
-- `log_on_each_node`: True
-- `logging_nan_inf_filter`: True
-- `save_safetensors`: True
-- `save_on_each_node`: False
-- `save_only_model`: False
-- `restore_callback_states_from_checkpoint`: False
-- `no_cuda`: False
-- `use_cpu`: False
-- `use_mps_device`: False
-- `seed`: 42
-- `data_seed`: None
-- `jit_mode_eval`: False
-- `use_ipex`: False
-- `bf16`: False
-- `fp16`: False
-- `fp16_opt_level`: O0
-- `half_precision_backend`: auto
-- `bf16_full_eval`: False
-- `fp16_full_eval`: False
-- `tf32`: None
-- `local_rank`: 0
-- `ddp_backend`: None
-- `tpu_num_cores`: None
-- `tpu_metrics_debug`: False
-- `debug`: []
-- `dataloader_drop_last`: False
-- `dataloader_num_workers`: 0
-- `dataloader_prefetch_factor`: None
-- `past_index`: -1
-- `disable_tqdm`: False
-- `remove_unused_columns`: True
-- `label_names`: None
-- `load_best_model_at_end`: True
-- `ignore_data_skip`: False
-- `fsdp`: []
-- `fsdp_min_num_params`: 0
-- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
-- `tp_size`: 0
-- `fsdp_transformer_layer_cls_to_wrap`: None
-- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
-- `deepspeed`: None
-- `label_smoothing_factor`: 0.0
-- `optim`: adafactor
-- `optim_args`: None
-- `adafactor`: False
-- `group_by_length`: False
-- `length_column_name`: length
-- `ddp_find_unused_parameters`: None
-- `ddp_bucket_cap_mb`: None
-- `ddp_broadcast_buffers`: False
-- `dataloader_pin_memory`: True
-- `dataloader_persistent_workers`: False
-- `skip_memory_metrics`: True
-- `use_legacy_prediction_loop`: False
-- `push_to_hub`: False
-- `resume_from_checkpoint`: None
-- `hub_model_id`: None
-- `hub_strategy`: every_save
-- `hub_private_repo`: None
-- `hub_always_push`: False
-- `gradient_checkpointing`: False
-- `gradient_checkpointing_kwargs`: None
-- `include_inputs_for_metrics`: False
-- `include_for_metrics`: []
-- `eval_do_concat_batches`: True
-- `fp16_backend`: auto
-- `push_to_hub_model_id`: None
-- `push_to_hub_organization`: None
-- `mp_parameters`:
-- `auto_find_batch_size`: False
-- `full_determinism`: False
-- `torchdynamo`: None
-- `ray_scope`: last
-- `ddp_timeout`: 1800
-- `torch_compile`: False
-- `torch_compile_backend`: None
-- `torch_compile_mode`: None
-- `include_tokens_per_second`: False
-- `include_num_input_tokens_seen`: False
-- `neftune_noise_alpha`: None
-- `optim_target_modules`: None
-- `batch_eval_metrics`: False
-- `eval_on_start`: False
-- `use_liger_kernel`: False
-- `eval_use_gather_object`: False
-- `average_tokens_across_devices`: False
-- `prompts`: None
-- `batch_sampler`: batch_sampler
-- `multi_dataset_batch_sampler`: proportional
-</details>
-
-
-### Training Logs
-| Epoch | Step | Training Loss | Validation Loss | sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap |
-|:------:|:----:|:-------------:|:---------------:|:---------------------------------------------------------------------:|
-| -1 | -1 | - | - | 0.7140 |
-| 0.1877 | 100 | - | 0.0125 | 0.8849 |
-| 0.3754 | 200 | - | 0.0090 | 0.9369 |
-| 0.5631 | 300 | - | 0.0068 | 0.9630 |
-| 0.7508 | 400 | - | 0.0052 | 0.9774 |
-| 0.9385 | 500 | 0.0409 | 0.0040 | 0.9845 |
-| 1.1276 | 600 | - | 0.0033 | 0.9887 |
-| 1.3153 | 700 | - | 0.0028 | 0.9911 |
-| 1.5031 | 800 | - | 0.0026 | 0.9927 |
-| 1.6908 | 900 | - | 0.0022 | 0.9938 |
-| 1.8785 | 1000 | 0.0131 | 0.0022 | 0.9944 |
-| 2.0676 | 1100 | - | 0.0019 | 0.9950 |
-| 2.2553 | 1200 | - | 0.0017 | 0.9956 |
-| 2.4430 | 1300 | - | 0.0019 | 0.9958 |
-
-
-### Framework Versions
-- Python: 3.12.9
-- Sentence Transformers: 3.4.1
-- Transformers: 4.51.3
-- PyTorch: 2.7.0+cu126
-- Accelerate: 1.6.0
-- Datasets: 3.6.0
-- Tokenizers: 0.21.1
-
-## Citation
-
-### BibTeX
-
-#### Sentence Transformers
-```bibtex
-@inproceedings{reimers-2019-sentence-bert,
- title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
- author = "Reimers, Nils and Gurevych, Iryna",
- booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
- month = "11",
- year = "2019",
- publisher = "Association for Computational Linguistics",
- url = "https://arxiv.org/abs/1908.10084",
-}
-```
-
-#### ContrastiveLoss
-```bibtex
-@inproceedings{hadsell2006dimensionality,
- author={Hadsell, R. and Chopra, S. and LeCun, Y.},
- booktitle={2006 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'06)},
- title={Dimensionality Reduction by Learning an Invariant Mapping},
- year={2006},
- volume={2},
- number={},
- pages={1735-1742},
- doi={10.1109/CVPR.2006.100}
-}
-```
-
-
-
-
-
-
\ No newline at end of file
diff --git a/checkpoint-1300/config.json b/checkpoint-1300/config.json
deleted file mode 100644
index 26e48501fdf44110239e00ad4d438aee8679504a..0000000000000000000000000000000000000000
--- a/checkpoint-1300/config.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
- "architectures": [
- "BertModel"
- ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
- "gradient_checkpointing": false,
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 384,
- "initializer_range": 0.02,
- "intermediate_size": 1536,
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 0,
- "position_embedding_type": "absolute",
- "torch_dtype": "float32",
- "transformers_version": "4.51.3",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 250037
-}
diff --git a/checkpoint-1300/config_sentence_transformers.json b/checkpoint-1300/config_sentence_transformers.json
deleted file mode 100644
index dcf436801f55bd22a257de2aad7eef5cfd06efaa..0000000000000000000000000000000000000000
--- a/checkpoint-1300/config_sentence_transformers.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "__version__": {
- "sentence_transformers": "3.4.1",
- "transformers": "4.51.3",
- "pytorch": "2.7.0+cu126"
- },
- "prompts": {},
- "default_prompt_name": null,
- "similarity_fn_name": "cosine"
-}
\ No newline at end of file
diff --git a/checkpoint-1300/model.safetensors b/checkpoint-1300/model.safetensors
deleted file mode 100644
index a8aa559c334c3fc66e03a9ddd27372987d07c981..0000000000000000000000000000000000000000
--- a/checkpoint-1300/model.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:bea639f895e7708a55ae3e3cfe23138278e63e6378e5bf609d6db6d629b9b438
-size 470637416
diff --git a/checkpoint-1300/modules.json b/checkpoint-1300/modules.json
deleted file mode 100644
index f7640f94e81bb7f4f04daf1668850b38763a13d9..0000000000000000000000000000000000000000
--- a/checkpoint-1300/modules.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "idx": 0,
- "name": "0",
- "path": "",
- "type": "sentence_transformers.models.Transformer"
- },
- {
- "idx": 1,
- "name": "1",
- "path": "1_Pooling",
- "type": "sentence_transformers.models.Pooling"
- }
-]
\ No newline at end of file
diff --git a/checkpoint-1300/optimizer.pt b/checkpoint-1300/optimizer.pt
deleted file mode 100644
index 84fa920bf7210db539d9da0c26ab38415d0baa31..0000000000000000000000000000000000000000
--- a/checkpoint-1300/optimizer.pt
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:d1aa28c8815cf88ba694f02b5e15468bbaea3ce5e26506bd5c5ecd2c992ce7ce
-size 1715019
diff --git a/checkpoint-1300/rng_state.pth b/checkpoint-1300/rng_state.pth
deleted file mode 100644
index 312caed491c1e3e115c4dce10fd326d5b3636499..0000000000000000000000000000000000000000
--- a/checkpoint-1300/rng_state.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:05feb30fd5220f89a53d409e9778994ba1c22d885cba182f441469ca0d0b6f67
-size 14645
diff --git a/checkpoint-1300/scheduler.pt b/checkpoint-1300/scheduler.pt
deleted file mode 100644
index 799bc40dae8447a987d82cf80ca6b9dba41b96b1..0000000000000000000000000000000000000000
--- a/checkpoint-1300/scheduler.pt
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:79d567b69e284ba581e3f8e7237823c230a46309045fc28a621b856fd794bfd8
-size 1465
diff --git a/checkpoint-1300/sentence_bert_config.json b/checkpoint-1300/sentence_bert_config.json
deleted file mode 100644
index 5fd10429389515d3e5cccdeda08cae5fea1ae82e..0000000000000000000000000000000000000000
--- a/checkpoint-1300/sentence_bert_config.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "max_seq_length": 128,
- "do_lower_case": false
-}
\ No newline at end of file
diff --git a/checkpoint-1300/special_tokens_map.json b/checkpoint-1300/special_tokens_map.json
deleted file mode 100644
index b1879d702821e753ffe4245048eee415d54a9385..0000000000000000000000000000000000000000
--- a/checkpoint-1300/special_tokens_map.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "cls_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "mask_token": {
-    "content": "<mask>",
-    "lstrip": true,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<pad>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "sep_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
diff --git a/checkpoint-1300/tokenizer.json b/checkpoint-1300/tokenizer.json
deleted file mode 100644
index e3420945e193cc0791136cdc6e5cd69801c838af..0000000000000000000000000000000000000000
--- a/checkpoint-1300/tokenizer.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:cad551d5600a84242d0973327029452a1e3672ba6313c2a3c3d69c4310e12719
-size 17082987
diff --git a/checkpoint-1300/tokenizer_config.json b/checkpoint-1300/tokenizer_config.json
deleted file mode 100644
index facf4436a8f11c26085c16a14f4e576853927a9e..0000000000000000000000000000000000000000
--- a/checkpoint-1300/tokenizer_config.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
-  "added_tokens_decoder": {
-    "0": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<pad>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "3": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "250001": {
-      "content": "<mask>",
-      "lstrip": true,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "bos_token": "<s>",
-  "clean_up_tokenization_spaces": false,
-  "cls_token": "<s>",
-  "do_lower_case": true,
-  "eos_token": "</s>",
-  "extra_special_tokens": {},
-  "mask_token": "<mask>",
-  "max_length": 128,
-  "model_max_length": 128,
-  "pad_to_multiple_of": null,
-  "pad_token": "<pad>",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
-  "sep_token": "</s>",
-  "stride": 0,
-  "strip_accents": null,
-  "tokenize_chinese_chars": true,
-  "tokenizer_class": "BertTokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
-  "unk_token": "<unk>"
-}
diff --git a/checkpoint-1300/trainer_state.json b/checkpoint-1300/trainer_state.json
deleted file mode 100644
index ce6499d979c81a036c1c5a39c2575fc32c9f615d..0000000000000000000000000000000000000000
--- a/checkpoint-1300/trainer_state.json
+++ /dev/null
@@ -1,265 +0,0 @@
-{
- "best_global_step": 1200,
- "best_metric": 0.0017435119953006506,
- "best_model_checkpoint": "data/fine-tuned-sbert-sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2-original-adafactor/checkpoint-1200",
- "epoch": 2.4429845143125295,
- "eval_steps": 100,
- "global_step": 1300,
- "is_hyper_param_search": false,
- "is_local_process_zero": true,
- "is_world_process_zero": true,
- "log_history": [
- {
- "epoch": 0.18770530267480057,
- "eval_loss": 0.012530049309134483,
- "eval_runtime": 812.6802,
- "eval_samples_per_second": 3277.151,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.8778235859541618,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7128396034240723,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.8848748516159781,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.812583495899967,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.6880456209182739,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.7185793630359445,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.7900823930955021,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.8364038065429271,
- "eval_steps_per_second": 3.278,
- "step": 100
- },
- {
- "epoch": 0.37541060534960113,
- "eval_loss": 0.009013425558805466,
- "eval_runtime": 792.9843,
- "eval_samples_per_second": 3358.548,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9164113424048541,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7378441095352173,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9368603114664952,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.8729798695775446,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7272344827651978,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.8103205315460159,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.8605654745268148,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.8857576838544123,
- "eval_steps_per_second": 3.359,
- "step": 200
- },
- {
- "epoch": 0.5631159080244017,
- "eval_loss": 0.006819029338657856,
- "eval_runtime": 809.9704,
- "eval_samples_per_second": 3288.115,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9398298338890391,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7449667453765869,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9629957356284182,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9088032597499417,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7449667453765869,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.864029341509194,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.8990159430733201,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9188060251084542,
- "eval_steps_per_second": 3.289,
- "step": 300
- },
- {
- "epoch": 0.7508212106992023,
- "eval_loss": 0.005150709766894579,
- "eval_runtime": 797.9199,
- "eval_samples_per_second": 3337.773,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9560016220600163,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7553268671035767,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9774059659768239,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9333702119012406,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7449506521224976,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9005457325671423,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.916037892637527,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9513710688929036,
- "eval_steps_per_second": 3.339,
- "step": 400
- },
- {
- "epoch": 0.9385265133740028,
- "grad_norm": 0.17396493256092072,
- "learning_rate": 2.9428198433420364e-05,
- "loss": 0.0409,
- "step": 500
- },
- {
- "epoch": 0.9385265133740028,
- "eval_loss": 0.003973629325628281,
- "eval_runtime": 809.4532,
- "eval_samples_per_second": 3290.216,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9655950557207654,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7622435092926025,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9845099503823473,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9477742208778024,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7535413503646851,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9221773981286795,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9367750202319935,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9590347859107281,
- "eval_steps_per_second": 3.291,
- "step": 500
- },
- {
- "epoch": 1.1276396058188645,
- "eval_loss": 0.0032712339889258146,
- "eval_runtime": 793.7573,
- "eval_samples_per_second": 3355.277,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9712722657775374,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7610360383987427,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9887055977101925,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9564087809158087,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7610177993774414,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9350876149915242,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9471753898932449,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9658239646502422,
- "eval_steps_per_second": 3.356,
- "step": 600
- },
- {
- "epoch": 1.3153449084936648,
- "eval_loss": 0.0028166945558041334,
- "eval_runtime": 815.1943,
- "eval_samples_per_second": 3267.044,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9751246583160614,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7577522993087769,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9911117019106511,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9621558129059113,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7424367666244507,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.943665667488554,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9536134909690983,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9708525597505264,
- "eval_steps_per_second": 3.268,
- "step": 700
- },
- {
- "epoch": 1.5030502111684654,
- "eval_loss": 0.0026242006570100784,
- "eval_runtime": 805.7115,
- "eval_samples_per_second": 3305.496,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9782673995974888,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7254683971405029,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9927214598054878,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9669240257663667,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7145971059799194,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9507846488068235,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9597660102710608,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9741896137072368,
- "eval_steps_per_second": 3.306,
- "step": 800
- },
- {
- "epoch": 1.690755513843266,
- "eval_loss": 0.002248650649562478,
- "eval_runtime": 818.5338,
- "eval_samples_per_second": 3253.715,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9801973506353069,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7349117994308472,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9938133122786723,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9698356230196407,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7348856329917908,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9551340483533577,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9641228578901284,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9756164919507957,
- "eval_steps_per_second": 3.255,
- "step": 900
- },
- {
- "epoch": 1.8784608165180665,
- "grad_norm": 0.07541557401418686,
- "learning_rate": 2.5511749347258486e-05,
- "loss": 0.0131,
- "step": 1000
- },
- {
- "epoch": 1.8784608165180665,
- "eval_loss": 0.002240537665784359,
- "eval_runtime": 803.6286,
- "eval_samples_per_second": 3314.063,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9817931272716349,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7197962999343872,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9944127523785896,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9722373310278887,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7091608047485352,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9587183163648803,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9675121928984912,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9770088489465266,
- "eval_steps_per_second": 3.315,
- "step": 1000
- },
- {
- "epoch": 2.0675739089629284,
- "eval_loss": 0.0018734760815277696,
- "eval_runtime": 807.0812,
- "eval_samples_per_second": 3299.886,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9828594815415578,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7552986741065979,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9950493119597241,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.973889221813201,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7401974201202393,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9611601510291333,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9661201195760486,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9817842882294052,
- "eval_steps_per_second": 3.301,
- "step": 1100
- },
- {
- "epoch": 2.255279211637729,
- "eval_loss": 0.0017435119953006506,
- "eval_runtime": 802.6162,
- "eval_samples_per_second": 3318.244,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9843050674356433,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.742120623588562,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9955554741842152,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9760932477723254,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.742120623588562,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.964449493634366,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9703216856372878,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9819338803033267,
- "eval_steps_per_second": 3.319,
- "step": 1200
- },
- {
- "epoch": 2.4429845143125295,
- "eval_loss": 0.001928201992996037,
- "eval_runtime": 818.5215,
- "eval_samples_per_second": 3253.764,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9853564026313418,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.6976222991943359,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9958172202316342,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9776227541137591,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.6851664781570435,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9667334329094465,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9732136748238192,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9820719652946388,
- "eval_steps_per_second": 3.255,
- "step": 1300
- }
- ],
- "logging_steps": 500,
- "max_steps": 4256,
- "num_input_tokens_seen": 0,
- "num_train_epochs": 8,
- "save_steps": 100,
- "stateful_callbacks": {
- "EarlyStoppingCallback": {
- "args": {
- "early_stopping_patience": 1,
- "early_stopping_threshold": 0.0
- },
- "attributes": {
- "early_stopping_patience_counter": 1
- }
- },
- "TrainerControl": {
- "args": {
- "should_epoch_stop": false,
- "should_evaluate": false,
- "should_log": false,
- "should_save": true,
- "should_training_stop": true
- },
- "attributes": {}
- }
- },
- "total_flos": 0.0,
- "train_batch_size": 1000,
- "trial_name": null,
- "trial_params": null
-}
diff --git a/checkpoint-1300/training_args.bin b/checkpoint-1300/training_args.bin
deleted file mode 100644
index f6aba0195c0abacac26202ea40cbb6012662a9ff..0000000000000000000000000000000000000000
--- a/checkpoint-1300/training_args.bin
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9339753774865faea550d7da93688221ca0f43171c16e3034645a2149992c8a6
-size 6033
diff --git a/checkpoint-1300/unigram.json b/checkpoint-1300/unigram.json
deleted file mode 100644
index 2faa9ec874108d53a017ff2c7ab98d155fb21a82..0000000000000000000000000000000000000000
--- a/checkpoint-1300/unigram.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:da145b5e7700ae40f16691ec32a0b1fdc1ee3298db22a31ea55f57a966c4a65d
-size 14763260
diff --git a/checkpoint-900/1_Pooling/config.json b/checkpoint-900/1_Pooling/config.json
deleted file mode 100644
index a97f8d140b6aee43dfac9fc4521b2842657c5608..0000000000000000000000000000000000000000
--- a/checkpoint-900/1_Pooling/config.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "word_embedding_dimension": 384,
- "pooling_mode_cls_token": false,
- "pooling_mode_mean_tokens": true,
- "pooling_mode_max_tokens": false,
- "pooling_mode_mean_sqrt_len_tokens": false,
- "pooling_mode_weightedmean_tokens": false,
- "pooling_mode_lasttoken": false,
- "include_prompt": true
-}
\ No newline at end of file
diff --git a/checkpoint-900/README.md b/checkpoint-900/README.md
deleted file mode 100644
index 683350c1ce91442d3fa01de820e3af30b94b218f..0000000000000000000000000000000000000000
--- a/checkpoint-900/README.md
+++ /dev/null
@@ -1,465 +0,0 @@
----
-language:
-- en
-license: apache-2.0
-tags:
-- sentence-transformers
-- sentence-similarity
-- feature-extraction
-- generated_from_trainer
-- dataset_size:2130621
-- loss:ContrastiveLoss
-base_model: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2
-widget:
-- source_sentence: Kim Chol-sam
- sentences:
- - Stankevich Sergey Nikolayevich
- - Kim Chin-So’k
- - Julen Lopetegui Agote
-- source_sentence: دينا بنت عبد الحميد
- sentences:
- - Alexia van Amsberg
- - Anthony Nicholas Colin Maitland Biddulph, 5th Baron Biddulph
- - Dina bint Abdul-Hamíd
-- source_sentence: Մուհամեդ բեն Նաիֆ Ալ Սաուդ
- sentences:
- - Karpov Anatoly Evgenyevich
- - GNPower Mariveles Coal Plant [former]
- - Muhammed bin Nayef bin Abdul Aziz Al Saud
-- source_sentence: Edward Gnehm
- sentences:
- - Шауэрте, Хартмут
- - Ханзада Филипп, Эдинбург герцогі
- - AFX
-- source_sentence: Schori i Lidingö
- sentences:
- - Yordan Canev
- - ကားပေါ့ အန်နာတိုလီ
- - BYSTROV, Mikhail Ivanovich
-pipeline_tag: sentence-similarity
-library_name: sentence-transformers
-metrics:
-- cosine_accuracy
-- cosine_accuracy_threshold
-- cosine_f1
-- cosine_f1_threshold
-- cosine_precision
-- cosine_recall
-- cosine_ap
-- cosine_mcc
-model-index:
-- name: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2-name-matcher-original
- results:
- - task:
- type: binary-classification
- name: Binary Classification
- dataset:
- name: sentence transformers paraphrase multilingual MiniLM L12 v2
- type: sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2
- metrics:
- - type: cosine_accuracy
- value: 0.9801973506353069
- name: Cosine Accuracy
- - type: cosine_accuracy_threshold
- value: 0.7349117994308472
- name: Cosine Accuracy Threshold
- - type: cosine_f1
- value: 0.9698356230196407
- name: Cosine F1
- - type: cosine_f1_threshold
- value: 0.7348856329917908
- name: Cosine F1 Threshold
- - type: cosine_precision
- value: 0.9641228578901284
- name: Cosine Precision
- - type: cosine_recall
- value: 0.9756164919507957
- name: Cosine Recall
- - type: cosine_ap
- value: 0.9938133122786723
- name: Cosine Ap
- - type: cosine_mcc
- value: 0.9551340483533577
- name: Cosine Mcc
----
-
-# sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2-name-matcher-original
-
-This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2). It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
-
-## Model Details
-
-### Model Description
-- **Model Type:** Sentence Transformer
-- **Base model:** [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2)
-- **Maximum Sequence Length:** 128 tokens
-- **Output Dimensionality:** 384 dimensions
-- **Similarity Function:** Cosine Similarity
-
-- **Language:** en
-- **License:** apache-2.0
-
-### Model Sources
-
-- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
-- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
-- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
-
-### Full Model Architecture
-
-```
-SentenceTransformer(
- (0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
- (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
-)
-```
-
-## Usage
-
-### Direct Usage (Sentence Transformers)
-
-First install the Sentence Transformers library:
-
-```bash
-pip install -U sentence-transformers
-```
-
-Then you can load this model and run inference.
-```python
-from sentence_transformers import SentenceTransformer
-
-# Download from the 🤗 Hub
-model = SentenceTransformer("sentence_transformers_model_id")
-# Run inference
-sentences = [
- 'Schori i Lidingö',
- 'Yordan Canev',
- 'ကားပေါ့ အန်နာတိုလီ',
-]
-embeddings = model.encode(sentences)
-print(embeddings.shape)
-# [3, 384]
-
-# Get the similarity scores for the embeddings
-similarities = model.similarity(embeddings, embeddings)
-print(similarities.shape)
-# [3, 3]
-```
-
-
-
-
-
-
-
-## Evaluation
-
-### Metrics
-
-#### Binary Classification
-
-* Dataset: `sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2`
-* Evaluated with [<code>BinaryClassificationEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.BinaryClassificationEvaluator)
-
-| Metric | Value |
-|:--------------------------|:-----------|
-| cosine_accuracy | 0.9802 |
-| cosine_accuracy_threshold | 0.7349 |
-| cosine_f1 | 0.9698 |
-| cosine_f1_threshold | 0.7349 |
-| cosine_precision | 0.9641 |
-| cosine_recall | 0.9756 |
-| **cosine_ap** | **0.9938** |
-| cosine_mcc | 0.9551 |
-
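-The same evaluation can be reproduced on any labelled pair set with the evaluator class linked above; a minimal sketch using the example pairs from the evaluation dataset section below and the placeholder model id from the usage section:
-
-```python
-from sentence_transformers import SentenceTransformer
-from sentence_transformers.evaluation import BinaryClassificationEvaluator
-
-model = SentenceTransformer("sentence_transformers_model_id")
-
-# Labelled pairs: 1 = same entity, 0 = different entities.
-evaluator = BinaryClassificationEvaluator(
-    sentences1=["Ева Херман", "Кличков Андрій Євгенович", "Кинах А."],
-    sentences2=["I Xuan Karlos", "Андрэй Яўгенавіч Клычкоў", "Senator John Hickenlooper"],
-    labels=[0, 1, 0],
-    name="name-matching-dev",
-)
-print(evaluator(model))  # dict with accuracy, F1, precision, recall, AP and MCC per similarity function
-```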
-
-
-
-
-## Training Details
-
-### Training Dataset
-
-#### Unnamed Dataset
-
-* Size: 2,130,621 training samples
-* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
-* Approximate statistics based on the first 1000 samples:
- | | sentence1 | sentence2 | label |
- |:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------|
- | type | string | string | float |
- | details | <ul><li>min: 3 tokens</li><li>mean: 9.32 tokens</li><li>max: 57 tokens</li></ul> | <ul><li>min: 3 tokens</li><li>mean: 9.16 tokens</li><li>max: 54 tokens</li></ul> | <ul><li>min: 0.0</li><li>mean: 0.34</li><li>max: 1.0</li></ul> |
-* Samples:
- | sentence1 | sentence2 | label |
- |:----------------------------------|:------------------------------------|:-----------------|
- | <code>캐스린 설리번</code> | <code>Kathryn D. Sullivanová</code> | <code>1.0</code> |
- | <code>ଶିବରାଜ ଅଧାଲରାଓ ପାଟିଲ</code> | <code>Aleksander Lubocki</code> | <code>0.0</code> |
- | <code>Пырванов, Георги</code> | <code>アナトーリー・セルジュコフ</code> | <code>0.0</code> |
-* Loss: [<code>ContrastiveLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
- ```json
- {
- "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
- "margin": 0.5,
- "size_average": true
- }
- ```
-
-### Evaluation Dataset
-
-#### Unnamed Dataset
-
-* Size: 2,663,276 evaluation samples
-* Columns: <code>sentence1</code>, <code>sentence2</code>, and <code>label</code>
-* Approximate statistics based on the first 1000 samples:
- | | sentence1 | sentence2 | label |
- |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:---------------------------------------------------------------|
- | type | string | string | float |
- | details | <ul><li>min: 3 tokens</li><li>mean: 9.34 tokens</li><li>max: 102 tokens</li></ul> | <ul><li>min: 4 tokens</li><li>mean: 9.11 tokens</li><li>max: 100 tokens</li></ul> | <ul><li>min: 0.0</li><li>mean: 0.33</li><li>max: 1.0</li></ul> |
-* Samples:
- | sentence1 | sentence2 | label |
- |:--------------------------------------|:---------------------------------------|:-----------------|
- | <code>Ева Херман</code> | <code>I Xuan Karlos</code> | <code>0.0</code> |
- | <code>Кличков Андрій Євгенович</code> | <code>Андрэй Яўгенавіч Клычкоў</code> | <code>1.0</code> |
- | <code>Кинах А.</code> | <code>Senator John Hickenlooper</code> | <code>0.0</code> |
-* Loss: [<code>ContrastiveLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#contrastiveloss) with these parameters:
- ```json
- {
- "distance_metric": "SiameseDistanceMetric.COSINE_DISTANCE",
- "margin": 0.5,
- "size_average": true
- }
- ```
-
-### Training Hyperparameters
-#### Non-Default Hyperparameters
-
-- `eval_strategy`: steps
-- `per_device_train_batch_size`: 1000
-- `per_device_eval_batch_size`: 1000
-- `gradient_accumulation_steps`: 4
-- `learning_rate`: 3e-05
-- `weight_decay`: 0.01
-- `num_train_epochs`: 8
-- `warmup_ratio`: 0.1
-- `fp16_opt_level`: O0
-- `load_best_model_at_end`: True
-- `optim`: adafactor
-
-#### All Hyperparameters
-<details><summary>Click to expand</summary>
-
-- `overwrite_output_dir`: False
-- `do_predict`: False
-- `eval_strategy`: steps
-- `prediction_loss_only`: True
-- `per_device_train_batch_size`: 1000
-- `per_device_eval_batch_size`: 1000
-- `per_gpu_train_batch_size`: None
-- `per_gpu_eval_batch_size`: None
-- `gradient_accumulation_steps`: 4
-- `eval_accumulation_steps`: None
-- `torch_empty_cache_steps`: None
-- `learning_rate`: 3e-05
-- `weight_decay`: 0.01
-- `adam_beta1`: 0.9
-- `adam_beta2`: 0.999
-- `adam_epsilon`: 1e-08
-- `max_grad_norm`: 1.0
-- `num_train_epochs`: 8
-- `max_steps`: -1
-- `lr_scheduler_type`: linear
-- `lr_scheduler_kwargs`: {}
-- `warmup_ratio`: 0.1
-- `warmup_steps`: 0
-- `log_level`: passive
-- `log_level_replica`: warning
-- `log_on_each_node`: True
-- `logging_nan_inf_filter`: True
-- `save_safetensors`: True
-- `save_on_each_node`: False
-- `save_only_model`: False
-- `restore_callback_states_from_checkpoint`: False
-- `no_cuda`: False
-- `use_cpu`: False
-- `use_mps_device`: False
-- `seed`: 42
-- `data_seed`: None
-- `jit_mode_eval`: False
-- `use_ipex`: False
-- `bf16`: False
-- `fp16`: False
-- `fp16_opt_level`: O0
-- `half_precision_backend`: auto
-- `bf16_full_eval`: False
-- `fp16_full_eval`: False
-- `tf32`: None
-- `local_rank`: 0
-- `ddp_backend`: None
-- `tpu_num_cores`: None
-- `tpu_metrics_debug`: False
-- `debug`: []
-- `dataloader_drop_last`: False
-- `dataloader_num_workers`: 0
-- `dataloader_prefetch_factor`: None
-- `past_index`: -1
-- `disable_tqdm`: False
-- `remove_unused_columns`: True
-- `label_names`: None
-- `load_best_model_at_end`: True
-- `ignore_data_skip`: False
-- `fsdp`: []
-- `fsdp_min_num_params`: 0
-- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
-- `tp_size`: 0
-- `fsdp_transformer_layer_cls_to_wrap`: None
-- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
-- `deepspeed`: None
-- `label_smoothing_factor`: 0.0
-- `optim`: adafactor
-- `optim_args`: None
-- `adafactor`: False
-- `group_by_length`: False
-- `length_column_name`: length
-- `ddp_find_unused_parameters`: None
-- `ddp_bucket_cap_mb`: None
-- `ddp_broadcast_buffers`: False
-- `dataloader_pin_memory`: True
-- `dataloader_persistent_workers`: False
-- `skip_memory_metrics`: True
-- `use_legacy_prediction_loop`: False
-- `push_to_hub`: False
-- `resume_from_checkpoint`: None
-- `hub_model_id`: None
-- `hub_strategy`: every_save
-- `hub_private_repo`: None
-- `hub_always_push`: False
-- `gradient_checkpointing`: False
-- `gradient_checkpointing_kwargs`: None
-- `include_inputs_for_metrics`: False
-- `include_for_metrics`: []
-- `eval_do_concat_batches`: True
-- `fp16_backend`: auto
-- `push_to_hub_model_id`: None
-- `push_to_hub_organization`: None
-- `mp_parameters`:
-- `auto_find_batch_size`: False
-- `full_determinism`: False
-- `torchdynamo`: None
-- `ray_scope`: last
-- `ddp_timeout`: 1800
-- `torch_compile`: False
-- `torch_compile_backend`: None
-- `torch_compile_mode`: None
-- `include_tokens_per_second`: False
-- `include_num_input_tokens_seen`: False
-- `neftune_noise_alpha`: None
-- `optim_target_modules`: None
-- `batch_eval_metrics`: False
-- `eval_on_start`: False
-- `use_liger_kernel`: False
-- `eval_use_gather_object`: False
-- `average_tokens_across_devices`: False
-- `prompts`: None
-- `batch_sampler`: batch_sampler
-- `multi_dataset_batch_sampler`: proportional
-
-</details>
-
-### Training Logs
-| Epoch | Step | Training Loss | Validation Loss | sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap |
-|:------:|:----:|:-------------:|:---------------:|:---------------------------------------------------------------------:|
-| -1 | -1 | - | - | 0.7140 |
-| 0.1877 | 100 | - | 0.0125 | 0.8849 |
-| 0.3754 | 200 | - | 0.0090 | 0.9369 |
-| 0.5631 | 300 | - | 0.0068 | 0.9630 |
-| 0.7508 | 400 | - | 0.0052 | 0.9774 |
-| 0.9385 | 500 | 0.0409 | 0.0040 | 0.9845 |
-| 1.1276 | 600 | - | 0.0033 | 0.9887 |
-| 1.3153 | 700 | - | 0.0028 | 0.9911 |
-| 1.5031 | 800 | - | 0.0026 | 0.9927 |
-| 1.6908 | 900 | - | 0.0022 | 0.9938 |
-
-
-### Framework Versions
-- Python: 3.12.9
-- Sentence Transformers: 3.4.1
-- Transformers: 4.51.3
-- PyTorch: 2.7.0+cu126
-- Accelerate: 1.6.0
-- Datasets: 3.6.0
-- Tokenizers: 0.21.1
-
-## Citation
-
-### BibTeX
-
-#### Sentence Transformers
-```bibtex
-@inproceedings{reimers-2019-sentence-bert,
- title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
- author = "Reimers, Nils and Gurevych, Iryna",
- booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
- month = "11",
- year = "2019",
- publisher = "Association for Computational Linguistics",
- url = "https://arxiv.org/abs/1908.10084",
-}
-```
-
-#### ContrastiveLoss
-```bibtex
-@inproceedings{hadsell2006dimensionality,
- author={Hadsell, R. and Chopra, S. and LeCun, Y.},
- booktitle={2006 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'06)},
- title={Dimensionality Reduction by Learning an Invariant Mapping},
- year={2006},
- volume={2},
- number={},
- pages={1735-1742},
- doi={10.1109/CVPR.2006.100}
-}
-```
-
-
-
-
-
-
\ No newline at end of file
diff --git a/checkpoint-900/config.json b/checkpoint-900/config.json
deleted file mode 100644
index 26e48501fdf44110239e00ad4d438aee8679504a..0000000000000000000000000000000000000000
--- a/checkpoint-900/config.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
- "architectures": [
- "BertModel"
- ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
- "gradient_checkpointing": false,
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 384,
- "initializer_range": 0.02,
- "intermediate_size": 1536,
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 0,
- "position_embedding_type": "absolute",
- "torch_dtype": "float32",
- "transformers_version": "4.51.3",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 250037
-}
diff --git a/checkpoint-900/config_sentence_transformers.json b/checkpoint-900/config_sentence_transformers.json
deleted file mode 100644
index dcf436801f55bd22a257de2aad7eef5cfd06efaa..0000000000000000000000000000000000000000
--- a/checkpoint-900/config_sentence_transformers.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "__version__": {
- "sentence_transformers": "3.4.1",
- "transformers": "4.51.3",
- "pytorch": "2.7.0+cu126"
- },
- "prompts": {},
- "default_prompt_name": null,
- "similarity_fn_name": "cosine"
-}
\ No newline at end of file
diff --git a/checkpoint-900/model.safetensors b/checkpoint-900/model.safetensors
deleted file mode 100644
index 795e6aaa670415820ca709514edbe530bdea1aaa..0000000000000000000000000000000000000000
--- a/checkpoint-900/model.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7bd22f29179cb59eb49ab1e22762c421371c4a0057f02dd94030fc6b968320a5
-size 470637416
diff --git a/checkpoint-900/modules.json b/checkpoint-900/modules.json
deleted file mode 100644
index f7640f94e81bb7f4f04daf1668850b38763a13d9..0000000000000000000000000000000000000000
--- a/checkpoint-900/modules.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "idx": 0,
- "name": "0",
- "path": "",
- "type": "sentence_transformers.models.Transformer"
- },
- {
- "idx": 1,
- "name": "1",
- "path": "1_Pooling",
- "type": "sentence_transformers.models.Pooling"
- }
-]
\ No newline at end of file
diff --git a/checkpoint-900/optimizer.pt b/checkpoint-900/optimizer.pt
deleted file mode 100644
index 1977f7f965df8dcac656820a8f691ac8ff03ec37..0000000000000000000000000000000000000000
--- a/checkpoint-900/optimizer.pt
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:c8991a774d54025a5dcc46dbfaa413ba269663f199753722d2a3b79cefa8b292
-size 1715019
diff --git a/checkpoint-900/rng_state.pth b/checkpoint-900/rng_state.pth
deleted file mode 100644
index c6a5ab15ecd4a2fef9af49c645654df65ebde454..0000000000000000000000000000000000000000
--- a/checkpoint-900/rng_state.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:80b85d95a42799e9a94a2b86a10328a7da2fb159a0243740b6c87e8c3319eabf
-size 14645
diff --git a/checkpoint-900/scheduler.pt b/checkpoint-900/scheduler.pt
deleted file mode 100644
index 1efe206ea47391412c8839ac9f5454484a614dcb..0000000000000000000000000000000000000000
--- a/checkpoint-900/scheduler.pt
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:4c98de6545907d56f9ea5ea2c9583eab89319b7449506f9ce652f5af5699468d
-size 1465
diff --git a/checkpoint-900/sentence_bert_config.json b/checkpoint-900/sentence_bert_config.json
deleted file mode 100644
index 5fd10429389515d3e5cccdeda08cae5fea1ae82e..0000000000000000000000000000000000000000
--- a/checkpoint-900/sentence_bert_config.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "max_seq_length": 128,
- "do_lower_case": false
-}
\ No newline at end of file
diff --git a/checkpoint-900/special_tokens_map.json b/checkpoint-900/special_tokens_map.json
deleted file mode 100644
index b1879d702821e753ffe4245048eee415d54a9385..0000000000000000000000000000000000000000
--- a/checkpoint-900/special_tokens_map.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "cls_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "mask_token": {
-    "content": "<mask>",
-    "lstrip": true,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<pad>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "sep_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
diff --git a/checkpoint-900/tokenizer.json b/checkpoint-900/tokenizer.json
deleted file mode 100644
index e3420945e193cc0791136cdc6e5cd69801c838af..0000000000000000000000000000000000000000
--- a/checkpoint-900/tokenizer.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:cad551d5600a84242d0973327029452a1e3672ba6313c2a3c3d69c4310e12719
-size 17082987
diff --git a/checkpoint-900/tokenizer_config.json b/checkpoint-900/tokenizer_config.json
deleted file mode 100644
index facf4436a8f11c26085c16a14f4e576853927a9e..0000000000000000000000000000000000000000
--- a/checkpoint-900/tokenizer_config.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
-  "added_tokens_decoder": {
-    "0": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<pad>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "3": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "250001": {
-      "content": "<mask>",
-      "lstrip": true,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "bos_token": "<s>",
-  "clean_up_tokenization_spaces": false,
-  "cls_token": "<s>",
-  "do_lower_case": true,
-  "eos_token": "</s>",
-  "extra_special_tokens": {},
-  "mask_token": "<mask>",
-  "max_length": 128,
-  "model_max_length": 128,
-  "pad_to_multiple_of": null,
-  "pad_token": "<pad>",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
-  "sep_token": "</s>",
-  "stride": 0,
-  "strip_accents": null,
-  "tokenize_chinese_chars": true,
-  "tokenizer_class": "BertTokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
-  "unk_token": "<unk>"
-}
diff --git a/checkpoint-900/trainer_state.json b/checkpoint-900/trainer_state.json
deleted file mode 100644
index c7cdd0b7791a686e5a68fa0cb88f11e29aff85b4..0000000000000000000000000000000000000000
--- a/checkpoint-900/trainer_state.json
+++ /dev/null
@@ -1,194 +0,0 @@
-{
- "best_global_step": 900,
- "best_metric": 0.002248650649562478,
- "best_model_checkpoint": "data/fine-tuned-sbert-sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2-original-adafactor/checkpoint-900",
- "epoch": 1.690755513843266,
- "eval_steps": 100,
- "global_step": 900,
- "is_hyper_param_search": false,
- "is_local_process_zero": true,
- "is_world_process_zero": true,
- "log_history": [
- {
- "epoch": 0.18770530267480057,
- "eval_loss": 0.012530049309134483,
- "eval_runtime": 812.6802,
- "eval_samples_per_second": 3277.151,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.8778235859541618,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7128396034240723,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.8848748516159781,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.812583495899967,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.6880456209182739,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.7185793630359445,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.7900823930955021,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.8364038065429271,
- "eval_steps_per_second": 3.278,
- "step": 100
- },
- {
- "epoch": 0.37541060534960113,
- "eval_loss": 0.009013425558805466,
- "eval_runtime": 792.9843,
- "eval_samples_per_second": 3358.548,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9164113424048541,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7378441095352173,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9368603114664952,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.8729798695775446,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7272344827651978,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.8103205315460159,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.8605654745268148,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.8857576838544123,
- "eval_steps_per_second": 3.359,
- "step": 200
- },
- {
- "epoch": 0.5631159080244017,
- "eval_loss": 0.006819029338657856,
- "eval_runtime": 809.9704,
- "eval_samples_per_second": 3288.115,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9398298338890391,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7449667453765869,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9629957356284182,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9088032597499417,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7449667453765869,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.864029341509194,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.8990159430733201,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9188060251084542,
- "eval_steps_per_second": 3.289,
- "step": 300
- },
- {
- "epoch": 0.7508212106992023,
- "eval_loss": 0.005150709766894579,
- "eval_runtime": 797.9199,
- "eval_samples_per_second": 3337.773,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9560016220600163,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7553268671035767,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9774059659768239,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9333702119012406,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7449506521224976,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9005457325671423,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.916037892637527,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9513710688929036,
- "eval_steps_per_second": 3.339,
- "step": 400
- },
- {
- "epoch": 0.9385265133740028,
- "grad_norm": 0.17396493256092072,
- "learning_rate": 2.9428198433420364e-05,
- "loss": 0.0409,
- "step": 500
- },
- {
- "epoch": 0.9385265133740028,
- "eval_loss": 0.003973629325628281,
- "eval_runtime": 809.4532,
- "eval_samples_per_second": 3290.216,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9655950557207654,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7622435092926025,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9845099503823473,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9477742208778024,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7535413503646851,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9221773981286795,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9367750202319935,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9590347859107281,
- "eval_steps_per_second": 3.291,
- "step": 500
- },
- {
- "epoch": 1.1276396058188645,
- "eval_loss": 0.0032712339889258146,
- "eval_runtime": 793.7573,
- "eval_samples_per_second": 3355.277,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9712722657775374,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7610360383987427,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9887055977101925,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9564087809158087,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7610177993774414,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9350876149915242,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9471753898932449,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9658239646502422,
- "eval_steps_per_second": 3.356,
- "step": 600
- },
- {
- "epoch": 1.3153449084936648,
- "eval_loss": 0.0028166945558041334,
- "eval_runtime": 815.1943,
- "eval_samples_per_second": 3267.044,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9751246583160614,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7577522993087769,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9911117019106511,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9621558129059113,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7424367666244507,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.943665667488554,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9536134909690983,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9708525597505264,
- "eval_steps_per_second": 3.268,
- "step": 700
- },
- {
- "epoch": 1.5030502111684654,
- "eval_loss": 0.0026242006570100784,
- "eval_runtime": 805.7115,
- "eval_samples_per_second": 3305.496,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9782673995974888,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7254683971405029,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9927214598054878,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9669240257663667,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7145971059799194,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9507846488068235,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9597660102710608,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9741896137072368,
- "eval_steps_per_second": 3.306,
- "step": 800
- },
- {
- "epoch": 1.690755513843266,
- "eval_loss": 0.002248650649562478,
- "eval_runtime": 818.5338,
- "eval_samples_per_second": 3253.715,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy": 0.9801973506353069,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_accuracy_threshold": 0.7349117994308472,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_ap": 0.9938133122786723,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1": 0.9698356230196407,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_f1_threshold": 0.7348856329917908,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_mcc": 0.9551340483533577,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_precision": 0.9641228578901284,
- "eval_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_cosine_recall": 0.9756164919507957,
- "eval_steps_per_second": 3.255,
- "step": 900
- }
- ],
- "logging_steps": 500,
- "max_steps": 4256,
- "num_input_tokens_seen": 0,
- "num_train_epochs": 8,
- "save_steps": 100,
- "stateful_callbacks": {
- "EarlyStoppingCallback": {
- "args": {
- "early_stopping_patience": 1,
- "early_stopping_threshold": 0.0
- },
- "attributes": {
- "early_stopping_patience_counter": 0
- }
- },
- "TrainerControl": {
- "args": {
- "should_epoch_stop": false,
- "should_evaluate": false,
- "should_log": false,
- "should_save": true,
- "should_training_stop": false
- },
- "attributes": {}
- }
- },
- "total_flos": 0.0,
- "train_batch_size": 1000,
- "trial_name": null,
- "trial_params": null
-}
diff --git a/checkpoint-900/training_args.bin b/checkpoint-900/training_args.bin
deleted file mode 100644
index f6aba0195c0abacac26202ea40cbb6012662a9ff..0000000000000000000000000000000000000000
--- a/checkpoint-900/training_args.bin
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9339753774865faea550d7da93688221ca0f43171c16e3034645a2149992c8a6
-size 6033
diff --git a/checkpoint-900/unigram.json b/checkpoint-900/unigram.json
deleted file mode 100644
index 2faa9ec874108d53a017ff2c7ab98d155fb21a82..0000000000000000000000000000000000000000
--- a/checkpoint-900/unigram.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:da145b5e7700ae40f16691ec32a0b1fdc1ee3298db22a31ea55f57a966c4a65d
-size 14763260
diff --git a/config.json b/config.json
deleted file mode 100644
index 26e48501fdf44110239e00ad4d438aee8679504a..0000000000000000000000000000000000000000
--- a/config.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
- "architectures": [
- "BertModel"
- ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
- "gradient_checkpointing": false,
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 384,
- "initializer_range": 0.02,
- "intermediate_size": 1536,
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 0,
- "position_embedding_type": "absolute",
- "torch_dtype": "float32",
- "transformers_version": "4.51.3",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 250037
-}
diff --git a/eval/binary_classification_evaluation_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_results.csv b/eval/binary_classification_evaluation_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_results.csv
deleted file mode 100644
index 6ea0a1b02bf5de42c5914dcff7b9a0505585b736..0000000000000000000000000000000000000000
--- a/eval/binary_classification_evaluation_sentence-transformers-paraphrase-multilingual-MiniLM-L12-v2_results.csv
+++ /dev/null
@@ -1,19 +0,0 @@
-epoch,steps,cosine_accuracy,cosine_accuracy_threshold,cosine_f1,cosine_precision,cosine_recall,cosine_f1_threshold,cosine_ap,cosine_mcc
-1.8598130841121496,100,0.9346675177411482,0.67952216,0.9003093846682708,0.8945690538540362,0.9061238611463499,0.6791203,0.9564959700255526,0.8517640890817079
-3.710280373831776,200,0.9564450118274321,0.69247603,0.9339482974604258,0.9223934315600045,0.9457963326029293,0.6891077,0.9756899897636082,0.9016239916822852
-5.5607476635514015,300,0.9622273119813765,0.7210211,0.9425234975790372,0.9312246735704638,0.9540998731403529,0.7118325,0.9799459083669457,0.9144248443043316
-5.897196261682243,318,0.9622273119813765,0.7210211,0.9425234975790372,0.9312246735704638,0.9540998731403529,0.7118325,0.9799459083669457,0.9144248443043316
-0.18770530267480057,100,0.8778235859541618,0.7128396,0.812583495899967,0.7900823930955021,0.8364038065429271,0.6880456,0.8848748516159781,0.7185793630359445
-0.37541060534960113,200,0.9164113424048541,0.7378441,0.8729798695775446,0.8605654745268148,0.8857576838544123,0.7272345,0.9368603114664952,0.8103205315460159
-0.5631159080244017,300,0.9398298338890391,0.74496675,0.9088032597499417,0.8990159430733201,0.9188060251084542,0.74496675,0.9629957356284182,0.864029341509194
-0.7508212106992023,400,0.9560016220600163,0.75532687,0.9333702119012406,0.916037892637527,0.9513710688929036,0.74495065,0.9774059659768239,0.9005457325671423
-0.9385265133740028,500,0.9655950557207654,0.7622435,0.9477742208778024,0.9367750202319935,0.9590347859107281,0.75354135,0.9845099503823473,0.9221773981286795
-1.1276396058188645,600,0.9712722657775374,0.76103604,0.9564087809158087,0.9471753898932449,0.9658239646502422,0.7610178,0.9887055977101925,0.9350876149915242
-1.3153449084936648,700,0.9751246583160614,0.7577523,0.9621558129059113,0.9536134909690983,0.9708525597505264,0.74243677,0.9911117019106511,0.943665667488554
-1.5030502111684654,800,0.9782673995974888,0.7254684,0.9669240257663667,0.9597660102710608,0.9741896137072368,0.7145971,0.9927214598054878,0.9507846488068235
-1.690755513843266,900,0.9801973506353069,0.7349118,0.9698356230196407,0.9641228578901284,0.9756164919507957,0.73488563,0.9938133122786723,0.9551340483533577
-1.8784608165180665,1000,0.9817931272716349,0.7197963,0.9722373310278887,0.9675121928984912,0.9770088489465266,0.7091608,0.9944127523785896,0.9587183163648803
-2.0675739089629284,1100,0.9828594815415578,0.7552987,0.973889221813201,0.9661201195760486,0.9817842882294052,0.7401974,0.9950493119597241,0.9611601510291333
-2.255279211637729,1200,0.9843050674356433,0.7421206,0.9760932477723254,0.9703216856372878,0.9819338803033267,0.7421206,0.9955554741842152,0.964449493634366
-2.4429845143125295,1300,0.9853564026313418,0.6976223,0.9776227541137591,0.9732136748238192,0.9820719652946388,0.6851665,0.9958172202316342,0.9667334329094465
-2.4429845143125295,1300,0.9843050674356433,0.7421206,0.9760932477723254,0.9703216856372878,0.9819338803033267,0.7421206,0.9955554741842152,0.964449493634366
diff --git a/model.safetensors b/model.safetensors
deleted file mode 100644
index 758df5d10ff7af9db5b1be5837fa1db20cc72d2f..0000000000000000000000000000000000000000
--- a/model.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2a16798609ad3be64f1c33cafbc6d8595006225a97722265fbba67e2dfaf916a
-size 470637416
diff --git a/modules.json b/modules.json
deleted file mode 100644
index f7640f94e81bb7f4f04daf1668850b38763a13d9..0000000000000000000000000000000000000000
--- a/modules.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "idx": 0,
- "name": "0",
- "path": "",
- "type": "sentence_transformers.models.Transformer"
- },
- {
- "idx": 1,
- "name": "1",
- "path": "1_Pooling",
- "type": "sentence_transformers.models.Pooling"
- }
-]
\ No newline at end of file
diff --git a/sentence_bert_config.json b/sentence_bert_config.json
deleted file mode 100644
index 5fd10429389515d3e5cccdeda08cae5fea1ae82e..0000000000000000000000000000000000000000
--- a/sentence_bert_config.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "max_seq_length": 128,
- "do_lower_case": false
-}
\ No newline at end of file
diff --git a/special_tokens_map.json b/special_tokens_map.json
deleted file mode 100644
index b1879d702821e753ffe4245048eee415d54a9385..0000000000000000000000000000000000000000
--- a/special_tokens_map.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "bos_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "cls_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "eos_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "mask_token": {
- "content": "",
- "lstrip": true,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "pad_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "sep_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "unk_token": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- }
-}
diff --git a/tokenizer.json b/tokenizer.json
deleted file mode 100644
index e3420945e193cc0791136cdc6e5cd69801c838af..0000000000000000000000000000000000000000
--- a/tokenizer.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:cad551d5600a84242d0973327029452a1e3672ba6313c2a3c3d69c4310e12719
-size 17082987
diff --git a/tokenizer_config.json b/tokenizer_config.json
deleted file mode 100644
index facf4436a8f11c26085c16a14f4e576853927a9e..0000000000000000000000000000000000000000
--- a/tokenizer_config.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "added_tokens_decoder": {
- "0": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "1": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "2": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "3": {
- "content": "",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "250001": {
- "content": "",
- "lstrip": true,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- }
- },
- "bos_token": "",
- "clean_up_tokenization_spaces": false,
- "cls_token": "",
- "do_lower_case": true,
- "eos_token": "",
- "extra_special_tokens": {},
- "mask_token": "",
- "max_length": 128,
- "model_max_length": 128,
- "pad_to_multiple_of": null,
- "pad_token": "",
- "pad_token_type_id": 0,
- "padding_side": "right",
- "sep_token": "",
- "stride": 0,
- "strip_accents": null,
- "tokenize_chinese_chars": true,
- "tokenizer_class": "BertTokenizer",
- "truncation_side": "right",
- "truncation_strategy": "longest_first",
- "unk_token": ""
-}
diff --git a/training_args.bin b/training_args.bin
deleted file mode 100644
index f6aba0195c0abacac26202ea40cbb6012662a9ff..0000000000000000000000000000000000000000
--- a/training_args.bin
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9339753774865faea550d7da93688221ca0f43171c16e3034645a2149992c8a6
-size 6033
diff --git a/unigram.json b/unigram.json
deleted file mode 100644
index 2faa9ec874108d53a017ff2c7ab98d155fb21a82..0000000000000000000000000000000000000000
--- a/unigram.json
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:da145b5e7700ae40f16691ec32a0b1fdc1ee3298db22a31ea55f57a966c4a65d
-size 14763260