Dataset schema (from the viewer header):

- model_id — string, lengths 7 to 105
- model_card — string, lengths 1 to 130k
- model_labels — list, lengths 2 to 80k
JCRios/ces-ViTModel
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# ces-ViTModel

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.1427
- Accuracy: 0.9624

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 4

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.06 | 3.85 | 500 | 0.1427 | 0.9624 |

### Framework versions

- Transformers 4.30.2
- Pytorch 2.1.0+cu121
- Datasets 2.16.1
- Tokenizers 0.13.3
[ "angular_leaf_spot", "bean_rust", "healthy" ]
Kotiks/swin-tiny-patch4-window7-224-finetuned-eurosat-kornia
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# swin-tiny-patch4-window7-224-finetuned-eurosat-kornia

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.5886
- Accuracy: 0.5909

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 64
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 5

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 3 | 0.6243 | 0.6818 |
| No log | 2.0 | 6 | 0.5460 | 0.7273 |
| No log | 3.0 | 9 | 0.5540 | 0.7273 |
| 0.6502 | 4.0 | 12 | 0.5747 | 0.6818 |
| 0.6502 | 5.0 | 15 | 0.5886 | 0.5909 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "clethrionomys", "sylvaemus" ]
Taekor/food_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. -->

# Taekor/food_classifier

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Train Loss: 2.8017
- Validation Loss: 1.6806
- Train Accuracy: 0.797
- Epoch: 0

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 4000, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01}
- training_precision: float32

### Training results

| Train Loss | Validation Loss | Train Accuracy | Epoch |
|:----------:|:---------------:|:--------------:|:-----:|
| 2.8017 | 1.6806 | 0.797 | 0 |

### Framework versions

- Transformers 4.35.2
- TensorFlow 2.15.0
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
Pa-satith/cloud_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. -->

# Pa-satith/cloud_classifier

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Train Loss: 2.2214
- Validation Loss: 2.0617
- Train Accuracy: 0.368
- Epoch: 0

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 999, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01}
- training_precision: float32

### Training results

| Train Loss | Validation Loss | Train Accuracy | Epoch |
|:----------:|:---------------:|:--------------:|:-----:|
| 2.2214 | 2.0617 | 0.368 | 0 |

### Framework versions

- Transformers 4.35.2
- TensorFlow 2.15.0
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "altocumulus", "altostratus", "cirroculumulus", "cirrostratus", "cirrus", "cumulonimbus", "cumulus", "nimbostratus", "stratocumulus", "stratus" ]
Coelhomatias/vit-cxr4
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-cxr4

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.3774
- Precision: 0.8587
- Recall: 0.9317
- F1: 0.8937
- Accuracy: 0.8924

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 3e-05
- train_batch_size: 96
- eval_batch_size: 64
- seed: 17
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 6
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
| 0.3151 | 0.31 | 100 | 0.3317 | 0.8152 | 0.9143 | 0.8619 | 0.8552 |
| 0.319 | 0.63 | 200 | 0.3048 | 0.8670 | 0.8514 | 0.8591 | 0.8620 |
| 0.2926 | 0.94 | 300 | 0.2867 | 0.8580 | 0.8662 | 0.8621 | 0.8631 |
| 0.1884 | 1.25 | 400 | 0.2635 | 0.8468 | 0.9381 | 0.8901 | 0.8856 |
| 0.234 | 1.57 | 500 | 0.2639 | 0.8232 | 0.9677 | 0.8896 | 0.8814 |
| 0.2349 | 1.88 | 600 | 0.2478 | 0.8530 | 0.9328 | 0.8911 | 0.8874 |
| 0.1476 | 2.19 | 700 | 0.2560 | 0.8584 | 0.9297 | 0.8926 | 0.8895 |
| 0.1289 | 2.51 | 800 | 0.2698 | 0.8809 | 0.8916 | 0.8862 | 0.8869 |
| 0.1579 | 2.82 | 900 | 0.2614 | 0.8879 | 0.8715 | 0.8796 | 0.8822 |
| 0.0745 | 3.13 | 1000 | 0.2783 | 0.8854 | 0.8905 | 0.8880 | 0.8889 |
| 0.0697 | 3.45 | 1100 | 0.2844 | 0.8893 | 0.8879 | 0.8886 | 0.8900 |
| 0.0602 | 3.76 | 1200 | 0.3213 | 0.8797 | 0.8932 | 0.8864 | 0.8869 |
| 0.0246 | 4.08 | 1300 | 0.3393 | 0.8753 | 0.9096 | 0.8921 | 0.8913 |
| 0.0301 | 4.39 | 1400 | 0.3593 | 0.8644 | 0.9307 | 0.8964 | 0.8937 |
| 0.0348 | 4.7 | 1500 | 0.3804 | 0.8653 | 0.9344 | 0.8986 | 0.8957 |
| 0.011 | 5.02 | 1600 | 0.3897 | 0.8622 | 0.9365 | 0.8978 | 0.8947 |
| 0.0077 | 5.33 | 1700 | 0.4088 | 0.8754 | 0.9180 | 0.8962 | 0.8950 |
| 0.0064 | 5.64 | 1800 | 0.4281 | 0.8780 | 0.9170 | 0.8971 | 0.8960 |
| 0.0031 | 5.96 | 1900 | 0.4289 | 0.8736 | 0.9207 | 0.8965 | 0.8950 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "negative", "positive" ]
tbass134/beans
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# beans

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0558
- Accuracy: 0.9774

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 4

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.0685 | 1.54 | 100 | 0.0671 | 0.9850 |
| 0.0048 | 3.08 | 200 | 0.0558 | 0.9774 |

### Framework versions

- Transformers 4.36.2
- Pytorch 2.1.2
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "angular_leaf_spot", "bean_rust", "healthy" ]
tbass134/notebooks
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# google/vit-base-patch16-224-in21k

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0405
- Accuracy: 0.9774

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 4

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.1266 | 1.54 | 100 | 0.1121 | 0.9624 |
| 0.0167 | 3.08 | 200 | 0.0405 | 0.9774 |

### Framework versions

- Transformers 4.36.2
- Pytorch 2.1.2
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "angular_leaf_spot", "bean_rust", "healthy" ]
Aruno/gemini-beauty
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# gemini-beauty

This model is a fine-tuned version of [](https://huggingface.co/) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 1.1226
- Accuracy: 0.5158

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 8

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 1.3724 | 1.0 | 148 | 1.2028 | 0.4586 |
| 1.3217 | 2.0 | 296 | 1.1831 | 0.4812 |
| 1.2649 | 3.0 | 444 | 1.1674 | 0.4981 |
| 1.2456 | 4.0 | 592 | 1.1236 | 0.5146 |
| 1.2176 | 5.0 | 740 | 1.1384 | 0.5040 |
| 1.2069 | 6.0 | 888 | 1.1165 | 0.5207 |
| 1.2083 | 7.0 | 1036 | 1.1663 | 0.4985 |
| 1.1663 | 8.0 | 1184 | 1.1226 | 0.5158 |

### Framework versions

- Transformers 4.36.2
- Pytorch 2.1.2+cu121
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "attractive", "normal", "ugly", "very_attractive", "very_ugly" ]
deniseb/vit-base-patch16-224-finetuned-flower
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-base-patch16-224-finetuned-flower

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset.

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 5

### Training results

### Framework versions

- Transformers 4.24.0
- Pytorch 2.1.0+cu121
- Datasets 2.7.1
- Tokenizers 0.13.3
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
pittawat/vit-base-uppercase-english-characters
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-base-uppercase-english-characters

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the pittawat/uppercase-english-characters dataset.
It achieves the following results on the evaluation set:
- Loss: 0.3160
- Accuracy: 0.9573

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 32
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 4
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.5944 | 1.35 | 100 | 0.5538 | 0.9487 |
| 0.2241 | 2.7 | 200 | 0.3160 | 0.9573 |

### Framework versions

- Transformers 4.26.1
- Pytorch 1.13.0
- Datasets 2.1.0
- Tokenizers 0.13.2
[ "a", "b", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "c", "u", "v", "w", "x", "y", "z", "d", "e", "f", "g", "h", "i", "j" ]
augrit/food_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. -->

# augrit/food_classifier

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Train Loss: 2.3004
- Validation Loss: 1.3738
- Train Accuracy: 0.843
- Epoch: 0

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 20000, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01}
- training_precision: float32

### Training results

| Train Loss | Validation Loss | Train Accuracy | Epoch |
|:----------:|:---------------:|:--------------:|:-----:|
| 2.3004 | 1.3738 | 0.843 | 0 |

### Framework versions

- Transformers 4.35.2
- TensorFlow 2.15.0
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
julianz1/swin-tiny-patch4-window7-224-finetuned-food101
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# swin-tiny-patch4-window7-224-finetuned-food101

This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.8486
- Accuracy: 0.7760

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 1.4777 | 1.0 | 503 | 1.1436 | 0.7062 |
| 1.2418 | 2.0 | 1006 | 0.9227 | 0.7571 |
| 1.0657 | 3.0 | 1509 | 0.8486 | 0.7760 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "apple_pie", "baby_back_ribs", "baklava", "beef_carpaccio", "beef_tartare", "beet_salad", "beignets", "bibimbap", "bread_pudding", "breakfast_burrito", "bruschetta", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare", "waffles" ]
VolodymyrChapman/vit-base-patch16-224-finetuned-flower
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-base-patch16-224-finetuned-flower

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset.

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 80
- eval_batch_size: 80
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 5

### Training results

### Framework versions

- Transformers 4.24.0
- Pytorch 2.1.0+cu121
- Datasets 2.7.1
- Tokenizers 0.13.3
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
alirzb/WS800_BEiT_42820085
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# WS800_BEiT_42820085

This model is a fine-tuned version of [microsoft/beit-base-patch16-224](https://huggingface.co/microsoft/beit-base-patch16-224) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0451
- Accuracy: 1.0

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 64
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 10

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 0.96 | 6 | 0.6592 | 0.85 |
| No log | 1.92 | 12 | 0.4828 | 0.9875 |
| No log | 2.88 | 18 | 0.3227 | 0.9875 |
| No log | 4.0 | 25 | 0.2001 | 0.9875 |
| No log | 4.96 | 31 | 0.1378 | 0.9875 |
| No log | 5.92 | 37 | 0.0838 | 1.0 |
| No log | 6.88 | 43 | 0.0635 | 1.0 |
| 0.297 | 8.0 | 50 | 0.0510 | 1.0 |
| 0.297 | 8.96 | 56 | 0.0462 | 1.0 |
| 0.297 | 9.6 | 60 | 0.0451 | 1.0 |

### Framework versions

- Transformers 4.36.2
- Pytorch 2.1.2+cu118
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "none_seizures", "seizures" ]
alirzb/WS800_ViT_42820348
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# WS800_ViT_42820348

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.1485
- Accuracy: 1.0

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 64
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 10

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 0.96 | 6 | 0.6689 | 0.925 |
| No log | 1.92 | 12 | 0.5584 | 0.975 |
| No log | 2.88 | 18 | 0.4389 | 0.9875 |
| No log | 4.0 | 25 | 0.3280 | 0.9875 |
| No log | 4.96 | 31 | 0.2582 | 1.0 |
| No log | 5.92 | 37 | 0.2105 | 1.0 |
| No log | 6.88 | 43 | 0.1805 | 1.0 |
| 0.3864 | 8.0 | 50 | 0.1601 | 1.0 |
| 0.3864 | 8.96 | 56 | 0.1504 | 1.0 |
| 0.3864 | 9.6 | 60 | 0.1485 | 1.0 |

### Framework versions

- Transformers 4.36.2
- Pytorch 2.1.2+cu118
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "none_seizures", "seizures" ]
alirzb/WS800_DeiT_42820350
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# WS800_DeiT_42820350

This model is a fine-tuned version of [facebook/deit-base-distilled-patch16-224](https://huggingface.co/facebook/deit-base-distilled-patch16-224) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0000
- Accuracy: 1.0

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 64
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 10

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 0.96 | 6 | 0.1229 | 0.9625 |
| No log | 1.92 | 12 | 0.1149 | 0.95 |
| No log | 2.88 | 18 | 0.0406 | 0.9875 |
| No log | 4.0 | 25 | 0.0004 | 1.0 |
| No log | 4.96 | 31 | 0.0001 | 1.0 |
| No log | 5.92 | 37 | 0.0001 | 1.0 |
| No log | 6.88 | 43 | 0.0000 | 1.0 |
| 0.0759 | 8.0 | 50 | 0.0000 | 1.0 |
| 0.0759 | 8.96 | 56 | 0.0000 | 1.0 |
| 0.0759 | 9.6 | 60 | 0.0000 | 1.0 |

### Framework versions

- Transformers 4.36.2
- Pytorch 2.1.2+cu118
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "none_seizures", "seizures" ]
alirzb/WS800_SwinT_42820409
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# WS800_SwinT_42820409

This model is a fine-tuned version of [microsoft/swin-base-patch4-window7-224](https://huggingface.co/microsoft/swin-base-patch4-window7-224) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0091
- Accuracy: 1.0

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 64
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 10

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 0.96 | 6 | 0.6706 | 0.9625 |
| No log | 1.92 | 12 | 0.4819 | 1.0 |
| No log | 2.88 | 18 | 0.2188 | 1.0 |
| No log | 4.0 | 25 | 0.0947 | 1.0 |
| No log | 4.96 | 31 | 0.0464 | 1.0 |
| No log | 5.92 | 37 | 0.0263 | 1.0 |
| No log | 6.88 | 43 | 0.0163 | 1.0 |
| 0.2515 | 8.0 | 50 | 0.0112 | 1.0 |
| 0.2515 | 8.96 | 56 | 0.0103 | 1.0 |
| 0.2515 | 9.6 | 60 | 0.0091 | 1.0 |

### Framework versions

- Transformers 4.36.2
- Pytorch 2.1.2+cu118
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "none_seizures", "seizures" ]
julianz1/axis-inference-v0
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# axis-inference-v0

This model is a fine-tuned version of [facebook/convnextv2-tiny-1k-224](https://huggingface.co/facebook/convnextv2-tiny-1k-224) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.7092
- Accuracy: 0.5243

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 4

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 5.6101 | 0.94 | 12 | 0.9202 | 0.4701 |
| 0.8441 | 1.96 | 25 | 0.7214 | 0.5410 |
| 0.7249 | 2.98 | 38 | 0.7014 | 0.5131 |
| 0.6997 | 3.76 | 48 | 0.7092 | 0.5243 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "tench, tinca tinca", "goldfish, carassius auratus", "great white shark, white shark, man-eater, man-eating shark, carcharodon carcharias", "tiger shark, galeocerdo cuvieri", "hammerhead, hammerhead shark", "electric ray, crampfish, numbfish, torpedo", "stingray", "cock", "hen", "ostrich, struthio camelus", "brambling, fringilla montifringilla", "goldfinch, carduelis carduelis", "house finch, linnet, carpodacus mexicanus", "junco, snowbird", "indigo bunting, indigo finch, indigo bird, passerina cyanea", "robin, american robin, turdus migratorius", "bulbul", "jay", "magpie", "chickadee", "water ouzel, dipper", "kite", "bald eagle, american eagle, haliaeetus leucocephalus", "vulture", "great grey owl, great gray owl, strix nebulosa", "european fire salamander, salamandra salamandra", "common newt, triturus vulgaris", "eft", "spotted salamander, ambystoma maculatum", "axolotl, mud puppy, ambystoma mexicanum", "bullfrog, rana catesbeiana", "tree frog, tree-frog", "tailed frog, bell toad, ribbed toad, tailed toad, ascaphus trui", "loggerhead, loggerhead turtle, caretta caretta", "leatherback turtle, leatherback, leathery turtle, dermochelys coriacea", "mud turtle", "terrapin", "box turtle, box tortoise", "banded gecko", "common iguana, iguana, iguana iguana", "american chameleon, anole, anolis carolinensis", "whiptail, whiptail lizard", "agama", "frilled lizard, chlamydosaurus kingi", "alligator lizard", "gila monster, heloderma suspectum", "green lizard, lacerta viridis", "african chameleon, chamaeleo chamaeleon", "komodo dragon, komodo lizard, dragon lizard, giant lizard, varanus komodoensis", "african crocodile, nile crocodile, crocodylus niloticus", "american alligator, alligator mississipiensis", "triceratops", "thunder snake, worm snake, carphophis amoenus", "ringneck snake, ring-necked snake, ring snake", "hognose snake, puff adder, sand viper", "green snake, grass snake", "king snake, kingsnake", "garter snake, grass snake", "water snake", "vine snake", "night snake, hypsiglena torquata", "boa constrictor, constrictor constrictor", "rock python, rock snake, python sebae", "indian cobra, naja naja", "green mamba", "sea snake", "horned viper, cerastes, sand viper, horned asp, cerastes cornutus", "diamondback, diamondback rattlesnake, crotalus adamanteus", "sidewinder, horned rattlesnake, crotalus cerastes", "trilobite", "harvestman, daddy longlegs, phalangium opilio", "scorpion", "black and gold garden spider, argiope aurantia", "barn spider, araneus cavaticus", "garden spider, aranea diademata", "black widow, latrodectus mactans", "tarantula", "wolf spider, hunting spider", "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse, partridge, bonasa umbellus", "prairie chicken, prairie grouse, prairie fowl", "peacock", "quail", "partridge", "african grey, african gray, psittacus erithacus", "macaw", "sulphur-crested cockatoo, kakatoe galerita, cacatua galerita", "lorikeet", "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "drake", "red-breasted merganser, mergus serrator", "goose", "black swan, cygnus atratus", "tusker", "echidna, spiny anteater, anteater", "platypus, duckbill, duckbilled platypus, duck-billed platypus, ornithorhynchus anatinus", "wallaby, brush kangaroo", "koala, koala bear, kangaroo bear, native bear, phascolarctos cinereus", "wombat", "jellyfish", "sea anemone, anemone", "brain coral", "flatworm, platyhelminth", "nematode, nematode worm, roundworm", "conch", "snail", "slug", "sea slug, nudibranch", "chiton, coat-of-mail shell, sea 
cradle, polyplacophore", "chambered nautilus, pearly nautilus, nautilus", "dungeness crab, cancer magister", "rock crab, cancer irroratus", "fiddler crab", "king crab, alaska crab, alaskan king crab, alaska king crab, paralithodes camtschatica", "american lobster, northern lobster, maine lobster, homarus americanus", "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish", "crayfish, crawfish, crawdad, crawdaddy", "hermit crab", "isopod", "white stork, ciconia ciconia", "black stork, ciconia nigra", "spoonbill", "flamingo", "little blue heron, egretta caerulea", "american egret, great white heron, egretta albus", "bittern", "crane", "limpkin, aramus pictus", "european gallinule, porphyrio porphyrio", "american coot, marsh hen, mud hen, water hen, fulica americana", "bustard", "ruddy turnstone, arenaria interpres", "red-backed sandpiper, dunlin, erolia alpina", "redshank, tringa totanus", "dowitcher", "oystercatcher, oyster catcher", "pelican", "king penguin, aptenodytes patagonica", "albatross, mollymawk", "grey whale, gray whale, devilfish, eschrichtius gibbosus, eschrichtius robustus", "killer whale, killer, orca, grampus, sea wolf, orcinus orca", "dugong, dugong dugon", "sea lion", "chihuahua", "japanese spaniel", "maltese dog, maltese terrier, maltese", "pekinese, pekingese, peke", "shih-tzu", "blenheim spaniel", "papillon", "toy terrier", "rhodesian ridgeback", "afghan hound, afghan", "basset, basset hound", "beagle", "bloodhound, sleuthhound", "bluetick", "black-and-tan coonhound", "walker hound, walker foxhound", "english foxhound", "redbone", "borzoi, russian wolfhound", "irish wolfhound", "italian greyhound", "whippet", "ibizan hound, ibizan podenco", "norwegian elkhound, elkhound", "otterhound, otter hound", "saluki, gazelle hound", "scottish deerhound, deerhound", "weimaraner", "staffordshire bullterrier, staffordshire bull terrier", "american staffordshire terrier, staffordshire terrier, american pit bull terrier, pit bull terrier", "bedlington terrier", "border terrier", "kerry blue terrier", "irish terrier", "norfolk terrier", "norwich terrier", "yorkshire terrier", "wire-haired fox terrier", "lakeland terrier", "sealyham terrier, sealyham", "airedale, airedale terrier", "cairn, cairn terrier", "australian terrier", "dandie dinmont, dandie dinmont terrier", "boston bull, boston terrier", "miniature schnauzer", "giant schnauzer", "standard schnauzer", "scotch terrier, scottish terrier, scottie", "tibetan terrier, chrysanthemum dog", "silky terrier, sydney silky", "soft-coated wheaten terrier", "west highland white terrier", "lhasa, lhasa apso", "flat-coated retriever", "curly-coated retriever", "golden retriever", "labrador retriever", "chesapeake bay retriever", "german short-haired pointer", "vizsla, hungarian pointer", "english setter", "irish setter, red setter", "gordon setter", "brittany spaniel", "clumber, clumber spaniel", "english springer, english springer spaniel", "welsh springer spaniel", "cocker spaniel, english cocker spaniel, cocker", "sussex spaniel", "irish water spaniel", "kuvasz", "schipperke", "groenendael", "malinois", "briard", "kelpie", "komondor", "old english sheepdog, bobtail", "shetland sheepdog, shetland sheep dog, shetland", "collie", "border collie", "bouvier des flandres, bouviers des flandres", "rottweiler", "german shepherd, german shepherd dog, german police dog, alsatian", "doberman, doberman pinscher", "miniature pinscher", "greater swiss mountain dog", "bernese mountain dog", "appenzeller", "entlebucher", "boxer", "bull 
mastiff", "tibetan mastiff", "french bulldog", "great dane", "saint bernard, st bernard", "eskimo dog, husky", "malamute, malemute, alaskan malamute", "siberian husky", "dalmatian, coach dog, carriage dog", "affenpinscher, monkey pinscher, monkey dog", "basenji", "pug, pug-dog", "leonberg", "newfoundland, newfoundland dog", "great pyrenees", "samoyed, samoyede", "pomeranian", "chow, chow chow", "keeshond", "brabancon griffon", "pembroke, pembroke welsh corgi", "cardigan, cardigan welsh corgi", "toy poodle", "miniature poodle", "standard poodle", "mexican hairless", "timber wolf, grey wolf, gray wolf, canis lupus", "white wolf, arctic wolf, canis lupus tundrarum", "red wolf, maned wolf, canis rufus, canis niger", "coyote, prairie wolf, brush wolf, canis latrans", "dingo, warrigal, warragal, canis dingo", "dhole, cuon alpinus", "african hunting dog, hyena dog, cape hunting dog, lycaon pictus", "hyena, hyaena", "red fox, vulpes vulpes", "kit fox, vulpes macrotis", "arctic fox, white fox, alopex lagopus", "grey fox, gray fox, urocyon cinereoargenteus", "tabby, tabby cat", "tiger cat", "persian cat", "siamese cat, siamese", "egyptian cat", "cougar, puma, catamount, mountain lion, painter, panther, felis concolor", "lynx, catamount", "leopard, panthera pardus", "snow leopard, ounce, panthera uncia", "jaguar, panther, panthera onca, felis onca", "lion, king of beasts, panthera leo", "tiger, panthera tigris", "cheetah, chetah, acinonyx jubatus", "brown bear, bruin, ursus arctos", "american black bear, black bear, ursus americanus, euarctos americanus", "ice bear, polar bear, ursus maritimus, thalarctos maritimus", "sloth bear, melursus ursinus, ursus ursinus", "mongoose", "meerkat, mierkat", "tiger beetle", "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle", "ground beetle, carabid beetle", "long-horned beetle, longicorn, longicorn beetle", "leaf beetle, chrysomelid", "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant, emmet, pismire", "grasshopper, hopper", "cricket", "walking stick, walkingstick, stick insect", "cockroach, roach", "mantis, mantid", "cicada, cicala", "leafhopper", "lacewing, lacewing fly", "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", "damselfly", "admiral", "ringlet, ringlet butterfly", "monarch, monarch butterfly, milkweed butterfly, danaus plexippus", "cabbage butterfly", "sulphur butterfly, sulfur butterfly", "lycaenid, lycaenid butterfly", "starfish, sea star", "sea urchin", "sea cucumber, holothurian", "wood rabbit, cottontail, cottontail rabbit", "hare", "angora, angora rabbit", "hamster", "porcupine, hedgehog", "fox squirrel, eastern fox squirrel, sciurus niger", "marmot", "beaver", "guinea pig, cavia cobaya", "sorrel", "zebra", "hog, pig, grunter, squealer, sus scrofa", "wild boar, boar, sus scrofa", "warthog", "hippopotamus, hippo, river horse, hippopotamus amphibius", "ox", "water buffalo, water ox, asiatic buffalo, bubalus bubalis", "bison", "ram, tup", "bighorn, bighorn sheep, cimarron, rocky mountain bighorn, rocky mountain sheep, ovis canadensis", "ibex, capra ibex", "hartebeest", "impala, aepyceros melampus", "gazelle", "arabian camel, dromedary, camelus dromedarius", "llama", "weasel", "mink", "polecat, fitch, foulmart, foumart, mustela putorius", "black-footed ferret, ferret, mustela nigripes", "otter", "skunk, polecat, wood pussy", "badger", "armadillo", "three-toed sloth, ai, bradypus tridactylus", "orangutan, orang, orangutang, pongo pygmaeus", "gorilla, 
gorilla gorilla", "chimpanzee, chimp, pan troglodytes", "gibbon, hylobates lar", "siamang, hylobates syndactylus, symphalangus syndactylus", "guenon, guenon monkey", "patas, hussar monkey, erythrocebus patas", "baboon", "macaque", "langur", "colobus, colobus monkey", "proboscis monkey, nasalis larvatus", "marmoset", "capuchin, ringtail, cebus capucinus", "howler monkey, howler", "titi, titi monkey", "spider monkey, ateles geoffroyi", "squirrel monkey, saimiri sciureus", "madagascar cat, ring-tailed lemur, lemur catta", "indri, indris, indri indri, indri brevicaudatus", "indian elephant, elephas maximus", "african elephant, loxodonta africana", "lesser panda, red panda, panda, bear cat, cat bear, ailurus fulgens", "giant panda, panda, panda bear, coon bear, ailuropoda melanoleuca", "barracouta, snoek", "eel", "coho, cohoe, coho salmon, blue jack, silver salmon, oncorhynchus kisutch", "rock beauty, holocanthus tricolor", "anemone fish", "sturgeon", "gar, garfish, garpike, billfish, lepisosteus osseus", "lionfish", "puffer, pufferfish, blowfish, globefish", "abacus", "abaya", "academic gown, academic robe, judge's robe", "accordion, piano accordion, squeeze box", "acoustic guitar", "aircraft carrier, carrier, flattop, attack aircraft carrier", "airliner", "airship, dirigible", "altar", "ambulance", "amphibian, amphibious vehicle", "analog clock", "apiary, bee house", "apron", "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin", "assault rifle, assault gun", "backpack, back pack, knapsack, packsack, rucksack, haversack", "bakery, bakeshop, bakehouse", "balance beam, beam", "balloon", "ballpoint, ballpoint pen, ballpen, biro", "band aid", "banjo", "bannister, banister, balustrade, balusters, handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel, cask", "barrow, garden cart, lawn cart, wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "bathing cap, swimming cap", "bath towel", "bathtub, bathing tub, bath, tub", "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon", "beacon, lighthouse, beacon light, pharos", "beaker", "bearskin, busby, shako", "beer bottle", "beer glass", "bell cote, bell cot", "bib", "bicycle-built-for-two, tandem bicycle, tandem", "bikini, two-piece", "binder, ring-binder", "binoculars, field glasses, opera glasses", "birdhouse", "boathouse", "bobsled, bobsleigh, bob", "bolo tie, bolo, bola tie, bola", "bonnet, poke bonnet", "bookcase", "bookshop, bookstore, bookstall", "bottlecap", "bow", "bow tie, bow-tie, bowtie", "brass, memorial tablet, plaque", "brassiere, bra, bandeau", "breakwater, groin, groyne, mole, bulwark, seawall, jetty", "breastplate, aegis, egis", "broom", "bucket, pail", "buckle", "bulletproof vest", "bullet train, bullet", "butcher shop, meat market", "cab, hack, taxi, taxicab", "caldron, cauldron", "candle, taper, wax light", "cannon", "canoe", "can opener, tin opener", "cardigan", "car mirror", "carousel, carrousel, merry-go-round, roundabout, whirligig", "carpenter's kit, tool kit", "carton", "car wheel", "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, atm", "cassette", "cassette player", "castle", "catamaran", "cd player", "cello, violoncello", "cellular telephone, cellular phone, cellphone, cell, mobile phone", "chain", "chainlink fence", "chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour", "chain saw, chainsaw", "chest", "chiffonier, 
commode", "chime, bell, gong", "china cabinet, china closet", "christmas stocking", "church, church building", "cinema, movie theater, movie theatre, movie house, picture palace", "cleaver, meat cleaver, chopper", "cliff dwelling", "cloak", "clog, geta, patten, sabot", "cocktail shaker", "coffee mug", "coffeepot", "coil, spiral, volute, whorl, helix", "combination lock", "computer keyboard, keypad", "confectionery, confectionary, candy store", "container ship, containership, container vessel", "convertible", "corkscrew, bottle screw", "cornet, horn, trumpet, trump", "cowboy boot", "cowboy hat, ten-gallon hat", "cradle", "crane", "crash helmet", "crate", "crib, cot", "crock pot", "croquet ball", "crutch", "cuirass", "dam, dike, dyke", "desk", "desktop computer", "dial telephone, dial phone", "diaper, nappy, napkin", "digital clock", "digital watch", "dining table, board", "dishrag, dishcloth", "dishwasher, dish washer, dishwashing machine", "disk brake, disc brake", "dock, dockage, docking facility", "dogsled, dog sled, dog sleigh", "dome", "doormat, welcome mat", "drilling platform, offshore rig", "drum, membranophone, tympan", "drumstick", "dumbbell", "dutch oven", "electric fan, blower", "electric guitar", "electric locomotive", "entertainment center", "envelope", "espresso maker", "face powder", "feather boa, boa", "file, file cabinet, filing cabinet", "fireboat", "fire engine, fire truck", "fire screen, fireguard", "flagpole, flagstaff", "flute, transverse flute", "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster", "freight car", "french horn, horn", "frying pan, frypan, skillet", "fur coat", "garbage truck, dustcart", "gasmask, respirator, gas helmet", "gas pump, gasoline pump, petrol pump, island dispenser", "goblet", "go-kart", "golf ball", "golfcart, golf cart", "gondola", "gong, tam-tam", "gown", "grand piano, grand", "greenhouse, nursery, glasshouse", "grille, radiator grille", "grocery store, grocery, food market, market", "guillotine", "hair slide", "hair spray", "half track", "hammer", "hamper", "hand blower, blow dryer, blow drier, hair dryer, hair drier", "hand-held computer, hand-held microcomputer", "handkerchief, hankie, hanky, hankey", "hard disc, hard disk, fixed disk", "harmonica, mouth organ, harp, mouth harp", "harp", "harvester, reaper", "hatchet", "holster", "home theater, home theatre", "honeycomb", "hook, claw", "hoopskirt, crinoline", "horizontal bar, high bar", "horse cart, horse-cart", "hourglass", "ipod", "iron, smoothing iron", "jack-o'-lantern", "jean, blue jean, denim", "jeep, landrover", "jersey, t-shirt, tee shirt", "jigsaw puzzle", "jinrikisha, ricksha, rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat, laboratory coat", "ladle", "lampshade, lamp shade", "laptop, laptop computer", "lawn mower, mower", "lens cap, lens cover", "letter opener, paper knife, paperknife", "library", "lifeboat", "lighter, light, igniter, ignitor", "limousine, limo", "liner, ocean liner", "lipstick, lip rouge", "loafer", "lotion", "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system", "loupe, jeweler's loupe", "lumbermill, sawmill", "magnetic compass", "mailbag, postbag", "mailbox, letter box", "maillot", "maillot, tank suit", "manhole cover", "maraca", "marimba, xylophone", "mask", "matchstick", "maypole", "maze, labyrinth", "measuring cup", "medicine chest, medicine cabinet", "megalith, megalithic structure", "microphone, mike", "microwave, microwave oven", "military uniform", "milk can", "minibus", 
"miniskirt, mini", "minivan", "missile", "mitten", "mixing bowl", "mobile home, manufactured home", "model t", "modem", "monastery", "monitor", "moped", "mortar", "mortarboard", "mosque", "mosquito net", "motor scooter, scooter", "mountain bike, all-terrain bike, off-roader", "mountain tent", "mouse, computer mouse", "mousetrap", "moving van", "muzzle", "nail", "neck brace", "necklace", "nipple", "notebook, notebook computer", "obelisk", "oboe, hautboy, hautbois", "ocarina, sweet potato", "odometer, hodometer, mileometer, milometer", "oil filter", "organ, pipe organ", "oscilloscope, scope, cathode-ray oscilloscope, cro", "overskirt", "oxcart", "oxygen mask", "packet", "paddle, boat paddle", "paddlewheel, paddle wheel", "padlock", "paintbrush", "pajama, pyjama, pj's, jammies", "palace", "panpipe, pandean pipe, syrinx", "paper towel", "parachute, chute", "parallel bars, bars", "park bench", "parking meter", "passenger car, coach, carriage", "patio, terrace", "pay-phone, pay-station", "pedestal, plinth, footstall", "pencil box, pencil case", "pencil sharpener", "perfume, essence", "petri dish", "photocopier", "pick, plectrum, plectron", "pickelhaube", "picket fence, paling", "pickup, pickup truck", "pier", "piggy bank, penny bank", "pill bottle", "pillow", "ping-pong ball", "pinwheel", "pirate, pirate ship", "pitcher, ewer", "plane, carpenter's plane, woodworking plane", "planetarium", "plastic bag", "plate rack", "plow, plough", "plunger, plumber's helper", "polaroid camera, polaroid land camera", "pole", "police van, police wagon, paddy wagon, patrol wagon, wagon, black maria", "poncho", "pool table, billiard table, snooker table", "pop bottle, soda bottle", "pot, flowerpot", "potter's wheel", "power drill", "prayer rug, prayer mat", "printer", "prison, prison house", "projectile, missile", "projector", "puck, hockey puck", "punching bag, punch bag, punching ball, punchball", "purse", "quill, quill pen", "quilt, comforter, comfort, puff", "racer, race car, racing car", "racket, racquet", "radiator", "radio, wireless", "radio telescope, radio reflector", "rain barrel", "recreational vehicle, rv, r.v.", "reel", "reflex camera", "refrigerator, icebox", "remote control, remote", "restaurant, eating house, eating place, eatery", "revolver, six-gun, six-shooter", "rifle", "rocking chair, rocker", "rotisserie", "rubber eraser, rubber, pencil eraser", "rugby ball", "rule, ruler", "running shoe", "safe", "safety pin", "saltshaker, salt shaker", "sandal", "sarong", "sax, saxophone", "scabbard", "scale, weighing machine", "school bus", "schooner", "scoreboard", "screen, crt screen", "screw", "screwdriver", "seat belt, seatbelt", "sewing machine", "shield, buckler", "shoe shop, shoe-shop, shoe store", "shoji", "shopping basket", "shopping cart", "shovel", "shower cap", "shower curtain", "ski", "ski mask", "sleeping bag", "slide rule, slipstick", "sliding door", "slot, one-armed bandit", "snorkel", "snowmobile", "snowplow, snowplough", "soap dispenser", "soccer ball", "sock", "solar dish, solar collector, solar furnace", "sombrero", "soup bowl", "space bar", "space heater", "space shuttle", "spatula", "speedboat", "spider web, spider's web", "spindle", "sports car, sport car", "spotlight, spot", "stage", "steam locomotive", "steel arch bridge", "steel drum", "stethoscope", "stole", "stone wall", "stopwatch, stop watch", "stove", "strainer", "streetcar, tram, tramcar, trolley, trolley car", "stretcher", "studio couch, day bed", "stupa, tope", "submarine, pigboat, sub, u-boat", "suit, suit of clothes", 
"sundial", "sunglass", "sunglasses, dark glasses, shades", "sunscreen, sunblock, sun blocker", "suspension bridge", "swab, swob, mop", "sweatshirt", "swimming trunks, bathing trunks", "swing", "switch, electric switch, electrical switch", "syringe", "table lamp", "tank, army tank, armored combat vehicle, armoured combat vehicle", "tape player", "teapot", "teddy, teddy bear", "television, television system", "tennis ball", "thatch, thatched roof", "theater curtain, theatre curtain", "thimble", "thresher, thrasher, threshing machine", "throne", "tile roof", "toaster", "tobacco shop, tobacconist shop, tobacconist", "toilet seat", "torch", "totem pole", "tow truck, tow car, wrecker", "toyshop", "tractor", "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi", "tray", "trench coat", "tricycle, trike, velocipede", "trimaran", "tripod", "triumphal arch", "trolleybus, trolley coach, trackless trolley", "trombone", "tub, vat", "turnstile", "typewriter keyboard", "umbrella", "unicycle, monocycle", "upright, upright piano", "vacuum, vacuum cleaner", "vase", "vault", "velvet", "vending machine", "vestment", "viaduct", "violin, fiddle", "volleyball", "waffle iron", "wall clock", "wallet, billfold, notecase, pocketbook", "wardrobe, closet, press", "warplane, military plane", "washbasin, handbasin, washbowl, lavabo, wash-hand basin", "washer, automatic washer, washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle", "wig", "window screen", "window shade", "windsor tie", "wine bottle", "wing", "wok", "wooden spoon", "wool, woolen, woollen", "worm fence, snake fence, snake-rail fence, virginia fence", "wreck", "yawl", "yurt", "web site, website, internet site, site", "comic book", "crossword puzzle, crossword", "street sign", "traffic light, traffic signal, stoplight", "book jacket, dust cover, dust jacket, dust wrapper", "menu", "plate", "guacamole", "consomme", "hot pot, hotpot", "trifle", "ice cream, icecream", "ice lolly, lolly, lollipop, popsicle", "french loaf", "bagel, beigel", "pretzel", "cheeseburger", "hotdog, hot dog, red hot", "mashed potato", "head cabbage", "broccoli", "cauliflower", "zucchini, courgette", "spaghetti squash", "acorn squash", "butternut squash", "cucumber, cuke", "artichoke, globe artichoke", "bell pepper", "cardoon", "mushroom", "granny smith", "strawberry", "orange", "lemon", "fig", "pineapple, ananas", "banana", "jackfruit, jak, jack", "custard apple", "pomegranate", "hay", "carbonara", "chocolate sauce, chocolate syrup", "dough", "meat loaf, meatloaf", "pizza, pizza pie", "potpie", "burrito", "red wine", "espresso", "cup", "eggnog", "alp", "bubble", "cliff, drop, drop-off", "coral reef", "geyser", "lakeside, lakeshore", "promontory, headland, head, foreland", "sandbar, sand bar", "seashore, coast, seacoast, sea-coast", "valley, vale", "volcano", "ballplayer, baseball player", "groom, bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper, yellow lady-slipper, cypripedium calceolus, cypripedium parviflorum", "corn", "acorn", "hip, rose hip, rosehip", "buckeye, horse chestnut, conker", "coral fungus", "agaric", "gyromitra", "stinkhorn, carrion fungus", "earthstar", "hen-of-the-woods, hen of the woods, polyporus frondosus, grifola frondosa", "bolete", "ear, spike, capitulum", "toilet tissue, toilet paper, bathroom tissue" ]
mejdik/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# swin-tiny-patch4-window7-224-finetuned-eurosat

This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0860
- Accuracy: 0.9748

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.2841 | 1.0 | 190 | 0.1861 | 0.9515 |
| 0.1951 | 2.0 | 380 | 0.1127 | 0.9652 |
| 0.1413 | 3.0 | 570 | 0.0860 | 0.9748 |

### Framework versions

- Transformers 4.36.2
- Pytorch 2.1.2+cu121
- Datasets 2.16.1
- Tokenizers 0.15.0
[ "annualcrop", "forest", "herbaceousvegetation", "highway", "industrial", "pasture", "permanentcrop", "residential", "river", "sealake" ]
ahmed-ai/skin_lesions_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. --> # ahmed-ai/skin_lesions_classifier <h2 style="color:red; font-size: 3rem">Important Warning</h2> <p style="font-weight: bold; font-size: 1.5rem;">This model is currently undergoing development; as such, it should not be used for clinical diagnosis or relied upon for medical decision-making at this stage.</p> This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Train Loss: 0.8374 - Validation Loss: 0.7696 - Train Accuracy: 0.7102 - Epoch: 4 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 109580, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01} - training_precision: float32 ### Training results | Train Loss | Validation Loss | Train Accuracy | Epoch | |:----------:|:---------------:|:--------------:|:-----:| | 1.3152 | 1.0475 | 0.6511 | 0 | | 1.0540 | 0.8775 | 0.6918 | 1 | | 0.9540 | 0.8533 | 0.6814 | 2 | | 0.8859 | 0.7491 | 0.7201 | 3 | | 0.8374 | 0.7696 | 0.7102 | 4 | ### Framework versions - Transformers 4.36.2 - TensorFlow 2.15.0 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "actinic keratoses", "basal cell carcinoma", "melanoma", "monkeypox", "squamous cell carcinoma", "vascular lesions", "dermatofibroma", "benign keratosis-like lesions", "chickenpox", "cowpox", "dermatofibroma", "hfmd", "healthy", "measles", "melanocytic nevi" ]
Coelhomatias/vit-cxr4-384
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-cxr4-384 This model is a fine-tuned version of [google/vit-base-patch16-384](https://huggingface.co/google/vit-base-patch16-384) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.2413 - Precision: 0.8525 - Recall: 0.9419 - F1: 0.8950 - Accuracy: 0.8926 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 24 - eval_batch_size: 24 - seed: 17 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 2 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:| | 0.3694 | 0.08 | 100 | 0.3851 | 0.8076 | 0.8435 | 0.8251 | 0.8234 | | 0.3084 | 0.16 | 200 | 0.4478 | 0.7556 | 0.9889 | 0.8566 | 0.8364 | | 0.3177 | 0.24 | 300 | 0.3073 | 0.8062 | 0.9572 | 0.8752 | 0.8652 | | 0.3413 | 0.31 | 400 | 0.2936 | 0.8172 | 0.9434 | 0.8758 | 0.8678 | | 0.2612 | 0.39 | 500 | 0.2936 | 0.8402 | 0.9122 | 0.8747 | 0.8709 | | 0.3607 | 0.47 | 600 | 0.2717 | 0.8210 | 0.9603 | 0.8852 | 0.8769 | | 0.274 | 0.55 | 700 | 0.2875 | 0.8373 | 0.9196 | 0.8765 | 0.8720 | | 0.3127 | 0.63 | 800 | 0.2664 | 0.8156 | 0.9683 | 0.8854 | 0.8761 | | 0.2875 | 0.71 | 900 | 0.2643 | 0.8369 | 0.9334 | 0.8825 | 0.8772 | | 0.2652 | 0.78 | 1000 | 0.2659 | 0.8134 | 0.9683 | 0.8841 | 0.8746 | | 0.2661 | 0.86 | 1100 | 0.2591 | 0.8334 | 0.9445 | 0.8855 | 0.8793 | | 0.3019 | 0.94 | 1200 | 0.2729 | 0.8851 | 0.8599 | 0.8723 | 0.8756 | | 0.229 | 1.02 | 1300 | 0.2548 | 0.8357 | 0.9603 | 0.8937 | 0.8871 | | 0.1841 | 1.1 | 1400 | 0.2438 | 0.8586 | 0.9217 | 0.8891 | 0.8863 | | 0.2257 | 1.18 | 1500 | 0.2365 | 0.8629 | 0.9254 | 0.8931 | 0.8905 | | 0.2217 | 1.25 | 1600 | 0.2509 | 0.8888 | 0.8662 | 0.8773 | 0.8803 | | 0.2619 | 1.33 | 1700 | 0.2588 | 0.8373 | 0.9582 | 0.8937 | 0.8874 | | 0.2222 | 1.41 | 1800 | 0.2521 | 0.8644 | 0.9238 | 0.8931 | 0.8908 | | 0.2044 | 1.49 | 1900 | 0.2598 | 0.8409 | 0.9588 | 0.8960 | 0.8900 | | 0.2238 | 1.57 | 2000 | 0.2641 | 0.9117 | 0.8302 | 0.8691 | 0.8764 | | 0.249 | 1.65 | 2100 | 0.2368 | 0.8464 | 0.9561 | 0.8979 | 0.8926 | | 0.1773 | 1.72 | 2200 | 0.2233 | 0.8682 | 0.9265 | 0.8964 | 0.8942 | | 0.1447 | 1.8 | 2300 | 0.2269 | 0.8760 | 0.9191 | 0.8970 | 0.8957 | | 0.245 | 1.88 | 2400 | 0.2355 | 0.8578 | 0.9445 | 0.8991 | 0.8952 | | 0.1685 | 1.96 | 2500 | 0.2312 | 0.8615 | 0.9344 | 0.8965 | 0.8934 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "negative", "positive" ]
calledice666/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0944 - Accuracy: 0.9684 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.4761 | 1.0 | 351 | 0.1498 | 0.9514 | | 0.3594 | 2.0 | 703 | 0.1006 | 0.966 | | 0.3228 | 2.99 | 1053 | 0.0944 | 0.9684 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "airplane", "automobile", "bird", "cat", "deer", "dog", "frog", "horse", "ship", "truck" ]
PeteA2Z/kelp_binary_model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # kelp_binary_model This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.6605 - Accuracy: 0.6256 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 1 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.6681 | 0.28 | 10 | 0.6698 | 0.6264 | | 0.6628 | 0.57 | 20 | 0.6595 | 0.6300 | | 0.6634 | 0.85 | 30 | 0.6605 | 0.6256 | ### Framework versions - Transformers 4.31.0 - Pytorch 2.1.2 - Datasets 2.12.0 - Tokenizers 0.13.2
[ "ja_kelp", "nee_kelp" ]
jaydip-tss/my_awesome_food_model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # my_awesome_food_model This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.8266 - Accuracy: 0.922 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.1714 | 0.99 | 62 | 1.1657 | 0.896 | | 0.8623 | 2.0 | 125 | 0.9087 | 0.915 | | 0.7595 | 2.98 | 186 | 0.8252 | 0.925 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
ahmed-ai/mit-b1-skin-lesion-classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. --> # ahmed-ai/mit-b1-skin-lesion-classifier <h2 style="color:red; font-size: 3rem">Important Warning</h2> <p style="font-weight: bold; font-size: 1.5rem;">This model is currently undergoing development; as such, it should not be used for clinical diagnosis or relied upon for medical decision-making at this stage.</p> This model is a fine-tuned version of [nvidia/mit-b1](https://huggingface.co/nvidia/mit-b1) on an unknown dataset. It achieves the following results on the evaluation set: - Train Loss: 0.4567 - Validation Loss: 0.6271 - Train Accuracy: 0.7610 - Epoch: 9 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 191770, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01} - training_precision: float32 ### Training results | Train Loss | Validation Loss | Train Accuracy | Epoch | |:----------:|:---------------:|:--------------:|:-----:| | 1.1152 | 0.8715 | 0.6858 | 0 | | 0.8784 | 0.7400 | 0.7216 | 1 | | 0.7774 | 0.7492 | 0.7273 | 2 | | 0.7114 | 0.7229 | 0.7213 | 3 | | 0.6492 | 0.6735 | 0.7393 | 4 | | 0.6005 | 0.6498 | 0.7507 | 5 | | 0.5560 | 0.6181 | 0.7596 | 6 | | 0.5179 | 0.6473 | 0.7582 | 7 | | 0.4913 | 0.6100 | 0.7711 | 8 | | 0.4567 | 0.6271 | 0.7610 | 9 | ### Framework versions - Transformers 4.36.2 - TensorFlow 2.13.0 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "actinic keratoses", "basal cell carcinoma", "melanoma", "monkeypox", "squamous cell carcinoma", "vascular lesions", "dermatofibroma", "benign keratosis-like lesions", "chickenpox", "cowpox", "dermatofibroma", "hfmd", "healthy", "measles", "melanocytic nevi" ]
amber90-dev/vit-base-patch16-224-finetuned-flower
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-finetuned-flower This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results ### Framework versions - Transformers 4.24.0 - Pytorch 2.1.0+cu121 - Datasets 2.7.1 - Tokenizers 0.13.3
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
ggergues/swin-tiny-patch4-window7-224-finetuned-eurosat-kornia
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat-kornia This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2+cpu - Datasets 2.16.1 - Tokenizers 0.15.0
[ "annualcrop", "forest", "herbaceousvegetation", "highway", "industrial", "pasture", "permanentcrop", "residential", "river", "sealake" ]
wahidww/swin-tiny-patch4-window7-224-finetuned-mobile-eye-tracking-dataset-v2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-mobile-eye-tracking-dataset-v2 This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.2851 - Accuracy: 0.9094 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-------:|:----:|:---------------:|:--------:| | 2.0421 | 0.9941 | 127 | 1.9090 | 0.3566 | | 1.2943 | 1.9922 | 254 | 1.2209 | 0.5679 | | 0.8757 | 2.9902 | 381 | 0.8373 | 0.7108 | | 0.7362 | 3.9961 | 509 | 0.6260 | 0.7967 | | 0.6011 | 4.9941 | 636 | 0.5550 | 0.8049 | | 0.5267 | 5.9922 | 763 | 0.4797 | 0.8339 | | 0.4969 | 6.9902 | 890 | 0.4508 | 0.8583 | | 0.4418 | 7.9961 | 1018 | 0.4243 | 0.8571 | | 0.4564 | 8.9941 | 1145 | 0.4035 | 0.8618 | | 0.4199 | 9.9922 | 1272 | 0.3678 | 0.8804 | | 0.3929 | 10.9902 | 1399 | 0.3479 | 0.8897 | | 0.3578 | 11.9961 | 1527 | 0.3355 | 0.8931 | | 0.3405 | 12.9941 | 1654 | 0.3173 | 0.8955 | | 0.3399 | 13.9922 | 1781 | 0.3177 | 0.9048 | | 0.3219 | 14.9902 | 1908 | 0.3354 | 0.8943 | | 0.3296 | 15.9961 | 2036 | 0.3175 | 0.8978 | | 0.3259 | 16.9941 | 2163 | 0.2908 | 0.9048 | | 0.3201 | 17.9922 | 2290 | 0.2883 | 0.9082 | | 0.2922 | 18.9902 | 2417 | 0.2945 | 0.9013 | | 0.3204 | 19.9961 | 2545 | 0.2961 | 0.9013 | | 0.2926 | 20.9941 | 2672 | 0.2934 | 0.9048 | | 0.3083 | 21.9922 | 2799 | 0.2960 | 0.9024 | | 0.3174 | 22.9902 | 2926 | 0.2876 | 0.9059 | | 0.2693 | 23.9961 | 3054 | 0.2896 | 0.9036 | | 0.2807 | 24.9941 | 3181 | 0.2852 | 0.8990 | | 0.289 | 25.9922 | 3308 | 0.2847 | 0.9048 | | 0.2829 | 26.9902 | 3435 | 0.2888 | 0.9048 | | 0.2749 | 27.9961 | 3563 | 0.2898 | 0.9059 | | 0.2726 | 28.9941 | 3690 | 0.2780 | 0.9094 | | 0.2744 | 29.9374 | 3810 | 0.2851 | 0.9094 | ### Framework versions - Transformers 4.46.2 - Pytorch 2.5.1+cu121 - Datasets 3.1.0 - Tokenizers 0.20.3
[ "central serous chorioretinopathy - color fundus", "disc edema", "glaucoma", "macular scar", "pterygium", "retinal detachment", "retinitis pigmentosa", "cataract", "diabetic_retinopathy", "normal" ]
yuramoomin/my_awesome_food_model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # my_awesome_food_model This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.6263 - Accuracy: 0.898 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 2.684 | 0.99 | 62 | 2.5207 | 0.82 | | 1.845 | 2.0 | 125 | 1.7768 | 0.897 | | 1.6058 | 2.98 | 186 | 1.6263 | 0.898 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
wahidww/vit-base-patch16-224-in21k-finetuned-mobile-eye-tracking-dataset-v2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-in21k-finetuned-mobile-eye-tracking-dataset-v2 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.2615 - Accuracy: 0.9231 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 15 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 2 | 0.6641 | 0.6154 | | No log | 2.0 | 4 | 0.6343 | 0.6154 | | No log | 3.0 | 6 | 0.5990 | 0.6154 | | No log | 4.0 | 8 | 0.5438 | 0.8462 | | No log | 5.0 | 10 | 0.5108 | 0.9231 | | No log | 6.0 | 12 | 0.4413 | 0.8462 | | No log | 7.0 | 14 | 0.3947 | 0.8462 | | No log | 8.0 | 16 | 0.3568 | 0.9231 | | No log | 9.0 | 18 | 0.3297 | 0.9231 | | 0.4923 | 10.0 | 20 | 0.3110 | 0.9231 | | 0.4923 | 11.0 | 22 | 0.2988 | 0.9231 | | 0.4923 | 12.0 | 24 | 0.2836 | 0.9231 | | 0.4923 | 13.0 | 26 | 0.2702 | 0.9231 | | 0.4923 | 14.0 | 28 | 0.2636 | 0.9231 | | 0.4923 | 15.0 | 30 | 0.2615 | 0.9231 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "central serous chorioretinopathy - color fundus", "disc edema", "glaucoma", "macular scar", "pterygium", "retinal detachment", "retinitis pigmentosa", "cataract", "diabetic_retinopathy", "normal" ]
adhisetiawan/ViT-flowers-species
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. --> # adhisetiawan/ViT-flowers-species This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Train Loss: 0.0831 - Validation Loss: 0.1388 - Train Accuracy: 0.9605 - Epoch: 4 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 14680, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01} - training_precision: float32 ### Training results | Train Loss | Validation Loss | Train Accuracy | Epoch | |:----------:|:---------------:|:--------------:|:-----:| | 0.7563 | 0.3186 | 0.9482 | 0 | | 0.2194 | 0.2133 | 0.9496 | 1 | | 0.1417 | 0.1802 | 0.9550 | 2 | | 0.0973 | 0.1482 | 0.9605 | 3 | | 0.0831 | 0.1388 | 0.9605 | 4 | ### Framework versions - Transformers 4.35.2 - TensorFlow 2.15.0 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
DazMashaly/swin_larger
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin_larger This model was trained from scratch on the zindi dataset. It achieves the following results on the evaluation set: - eval_loss: 0.7009 - eval_accuracy: 0.7617 - eval_runtime: 222.198 - eval_samples_per_second: 17.43 - eval_steps_per_second: 0.549 - epoch: 1.0 - step: 173 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 50 ### Framework versions - Transformers 4.36.0 - Pytorch 2.0.0 - Datasets 2.1.0 - Tokenizers 0.15.0
[ "dr", "g", "nd", "wd", "other" ]
DazMashaly/swin_cont
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin_cont This model was trained from scratch on the zindi dataset. It achieves the following results on the evaluation set: - eval_loss: 0.4766 - eval_accuracy: 0.7545 - eval_runtime: 236.8539 - eval_samples_per_second: 16.352 - eval_steps_per_second: 0.515 - epoch: 2.0 - step: 347 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 50 ### Framework versions - Transformers 4.36.0 - Pytorch 2.0.0 - Datasets 2.1.0 - Tokenizers 0.15.0
[ "dr", "g", "nd", "wd", "other" ]
varcoder/resnet-101-finetuned-CivilEng11k
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # resnet-101-finetuned-CivilEng11k This model is a fine-tuned version of [microsoft/resnet-101](https://huggingface.co/microsoft/resnet-101) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.5490 - Accuracy: 0.8542 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0003 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 10 - total_train_batch_size: 320 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 0.81 | 3 | 1.0724 | 0.5729 | | No log | 1.89 | 7 | 0.9717 | 0.6542 | | 1.0293 | 2.97 | 11 | 0.8594 | 0.6678 | | 1.0293 | 3.78 | 14 | 0.7830 | 0.7017 | | 1.0293 | 4.86 | 18 | 0.6764 | 0.7593 | | 0.78 | 5.95 | 22 | 0.6072 | 0.7831 | | 0.78 | 6.76 | 25 | 0.5745 | 0.8339 | | 0.78 | 7.84 | 29 | 0.5489 | 0.8508 | | 0.6037 | 8.11 | 30 | 0.5490 | 0.8542 | ### Framework versions - Transformers 4.30.2 - Pytorch 1.13.1+cpu - Datasets 2.13.1 - Tokenizers 0.13.3
[ "crazing", "live_knot", "steel_defect" ]
yuramoomin/my_awesome_cat_horse_model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # my_awesome_cat_horse_model This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.1749 - Accuracy: 0.956 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 64 - eval_batch_size: 64 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 256 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.6028 | 0.95 | 15 | 0.3283 | 0.961 | | 0.2124 | 1.97 | 31 | 0.1926 | 0.953 | | 0.1665 | 2.86 | 45 | 0.1749 | 0.956 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "horse", "cat" ]
yuramoomin/my_awesome_cat_horse_model_updated
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # my_awesome_cat_horse_model_updated This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.1033 - Accuracy: 0.967 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 64 - eval_batch_size: 64 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 256 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.1431 | 0.95 | 15 | 0.1245 | 0.959 | | 0.0769 | 1.97 | 31 | 0.0962 | 0.971 | | 0.0751 | 2.86 | 45 | 0.1033 | 0.967 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "horse", "cat" ]
ytechnology/platzi-vit_model-alexander-ferreras
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # platzi-vit_model-alexander-ferreras This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0488 - Accuracy: 0.9850 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.1398 | 3.85 | 500 | 0.0488 | 0.9850 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "angular_leaf_spot", "bean_rust", "healthy" ]
alirzb/WS800_BEiT_42895082
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # WS800_BEiT_42895082 This model is a fine-tuned version of [microsoft/beit-base-patch16-224](https://huggingface.co/microsoft/beit-base-patch16-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0845 - Accuracy: 0.975 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 5 | 0.6815 | 0.9375 | | No log | 2.0 | 10 | 0.6041 | 0.95 | | No log | 3.0 | 15 | 0.4946 | 0.9125 | | No log | 4.0 | 20 | 0.3233 | 0.975 | | No log | 5.0 | 25 | 0.2158 | 0.9875 | | No log | 6.0 | 30 | 0.1514 | 0.9875 | | No log | 7.0 | 35 | 0.1109 | 0.9875 | | No log | 8.0 | 40 | 0.0909 | 0.9875 | | No log | 9.0 | 45 | 0.0886 | 0.975 | | 0.3029 | 10.0 | 50 | 0.1020 | 0.975 | | 0.3029 | 11.0 | 55 | 0.1155 | 0.975 | | 0.3029 | 12.0 | 60 | 0.1197 | 0.975 | | 0.3029 | 13.0 | 65 | 0.1247 | 0.975 | | 0.3029 | 14.0 | 70 | 0.1021 | 0.975 | | 0.3029 | 15.0 | 75 | 0.2528 | 0.95 | | 0.3029 | 16.0 | 80 | 0.1866 | 0.9625 | | 0.3029 | 17.0 | 85 | 0.2019 | 0.9625 | | 0.3029 | 18.0 | 90 | 0.1456 | 0.9625 | | 0.3029 | 19.0 | 95 | 0.1014 | 0.975 | | 0.0143 | 20.0 | 100 | 0.0845 | 0.975 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2+cu118 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "none_seizures", "seizures" ]
alirzb/WS800_DeiT_42895082
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # WS800_DeiT_42895082 This model is a fine-tuned version of [facebook/deit-base-distilled-patch16-224](https://huggingface.co/facebook/deit-base-distilled-patch16-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.2587 - Accuracy: 0.975 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 5 | 0.6389 | 0.5125 | | No log | 2.0 | 10 | 0.1840 | 0.9375 | | No log | 3.0 | 15 | 0.1272 | 0.9625 | | No log | 4.0 | 20 | 0.1958 | 0.9625 | | No log | 5.0 | 25 | 0.1635 | 0.975 | | No log | 6.0 | 30 | 0.2280 | 0.975 | | No log | 7.0 | 35 | 0.2664 | 0.9625 | | No log | 8.0 | 40 | 0.2636 | 0.9625 | | No log | 9.0 | 45 | 0.2582 | 0.975 | | 0.1252 | 10.0 | 50 | 0.2571 | 0.975 | | 0.1252 | 11.0 | 55 | 0.2571 | 0.975 | | 0.1252 | 12.0 | 60 | 0.2572 | 0.975 | | 0.1252 | 13.0 | 65 | 0.2574 | 0.975 | | 0.1252 | 14.0 | 70 | 0.2577 | 0.975 | | 0.1252 | 15.0 | 75 | 0.2580 | 0.975 | | 0.1252 | 16.0 | 80 | 0.2582 | 0.975 | | 0.1252 | 17.0 | 85 | 0.2584 | 0.975 | | 0.1252 | 18.0 | 90 | 0.2586 | 0.975 | | 0.1252 | 19.0 | 95 | 0.2587 | 0.975 | | 0.0 | 20.0 | 100 | 0.2587 | 0.975 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2+cu118 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "none_seizures", "seizures" ]
alirzb/WS800_SwinT_42895082
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # WS800_SwinT_42895082 This model is a fine-tuned version of [microsoft/swin-base-patch4-window7-224](https://huggingface.co/microsoft/swin-base-patch4-window7-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0762 - Accuracy: 0.9875 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 5 | 0.6874 | 0.925 | | No log | 2.0 | 10 | 0.6195 | 0.95 | | No log | 3.0 | 15 | 0.4087 | 0.9625 | | No log | 4.0 | 20 | 0.2299 | 0.9875 | | No log | 5.0 | 25 | 0.1265 | 0.9875 | | No log | 6.0 | 30 | 0.0764 | 0.9875 | | No log | 7.0 | 35 | 0.0752 | 0.9875 | | No log | 8.0 | 40 | 0.0656 | 0.9875 | | No log | 9.0 | 45 | 0.0668 | 0.9875 | | 0.2735 | 10.0 | 50 | 0.1085 | 0.975 | | 0.2735 | 11.0 | 55 | 0.1147 | 0.9625 | | 0.2735 | 12.0 | 60 | 0.0731 | 0.9875 | | 0.2735 | 13.0 | 65 | 0.1228 | 0.9625 | | 0.2735 | 14.0 | 70 | 0.0732 | 0.9875 | | 0.2735 | 15.0 | 75 | 0.0663 | 0.9875 | | 0.2735 | 16.0 | 80 | 0.0674 | 0.9875 | | 0.2735 | 17.0 | 85 | 0.0728 | 0.9875 | | 0.2735 | 18.0 | 90 | 0.0750 | 0.9875 | | 0.2735 | 19.0 | 95 | 0.0759 | 0.9875 | | 0.0111 | 20.0 | 100 | 0.0762 | 0.9875 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2+cu118 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "none_seizures", "seizures" ]
alirzb/WS800_ViT_42895082
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # WS800_ViT_42895082 This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0776 - Accuracy: 0.9875 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 5 | 0.6859 | 0.925 | | No log | 2.0 | 10 | 0.6328 | 0.975 | | No log | 3.0 | 15 | 0.5301 | 0.975 | | No log | 4.0 | 20 | 0.4404 | 0.9625 | | No log | 5.0 | 25 | 0.3480 | 0.975 | | No log | 6.0 | 30 | 0.2758 | 0.975 | | No log | 7.0 | 35 | 0.2179 | 0.9875 | | No log | 8.0 | 40 | 0.1789 | 0.9875 | | No log | 9.0 | 45 | 0.1505 | 0.9875 | | 0.3788 | 10.0 | 50 | 0.1296 | 0.9875 | | 0.3788 | 11.0 | 55 | 0.1145 | 0.9875 | | 0.3788 | 12.0 | 60 | 0.1034 | 0.9875 | | 0.3788 | 13.0 | 65 | 0.0954 | 0.9875 | | 0.3788 | 14.0 | 70 | 0.0895 | 0.9875 | | 0.3788 | 15.0 | 75 | 0.0853 | 0.9875 | | 0.3788 | 16.0 | 80 | 0.0822 | 0.9875 | | 0.3788 | 17.0 | 85 | 0.0801 | 0.9875 | | 0.3788 | 18.0 | 90 | 0.0787 | 0.9875 | | 0.3788 | 19.0 | 95 | 0.0779 | 0.9875 | | 0.0547 | 20.0 | 100 | 0.0776 | 0.9875 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2+cu118 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "none_seizures", "seizures" ]
ChrisGuarino/cat_ds
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # cat_ds This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0137 - Accuracy: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 16 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.4194 | 1.0 | 13 | 0.0737 | 1.0 | | 0.063 | 2.0 | 26 | 0.0266 | 1.0 | | 0.051 | 3.0 | 39 | 0.0151 | 1.0 | | 0.014 | 4.0 | 52 | 0.0137 | 1.0 | ### Framework versions - Transformers 4.37.1 - Pytorch 2.1.0.post103 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "prim", "rupe" ]
Ricardo-H/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.1229 - Accuracy: 0.9618 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 64 - eval_batch_size: 64 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 256 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.5796 | 1.0 | 176 | 0.2204 | 0.9462 | | 0.3995 | 2.0 | 352 | 0.1403 | 0.9582 | | 0.3781 | 3.0 | 528 | 0.1229 | 0.9618 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "airplane", "automobile", "bird", "cat", "deer", "dog", "frog", "horse", "ship", "truck" ]
Ricardo-H/swin-tiny-patch4-window7-224-finetuned-cifar10
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-cifar10 This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0760 - Accuracy: 0.9758 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 64 - eval_batch_size: 64 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 256 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.314 | 1.0 | 176 | 0.1211 | 0.9612 | | 0.2992 | 2.0 | 352 | 0.1186 | 0.9622 | | 0.3544 | 3.0 | 528 | 0.0989 | 0.968 | | 0.3068 | 4.0 | 704 | 0.0872 | 0.9724 | | 0.3421 | 5.0 | 880 | 0.0858 | 0.972 | | 0.2915 | 6.0 | 1056 | 0.0824 | 0.9724 | | 0.3051 | 7.0 | 1232 | 0.0822 | 0.974 | | 0.2849 | 8.0 | 1408 | 0.0770 | 0.975 | | 0.2661 | 9.0 | 1584 | 0.0773 | 0.9756 | | 0.2504 | 10.0 | 1760 | 0.0760 | 0.9758 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "airplane", "automobile", "bird", "cat", "deer", "dog", "frog", "horse", "ship", "truck" ]
sruthis/alzheimer_model_aug_deit5
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # alzheimer_model_aug_deit5 This model is a fine-tuned version of [facebook/deit-base-distilled-patch16-224](https://huggingface.co/facebook/deit-base-distilled-patch16-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0472 - Accuracy: 0.9939 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 8 - seed: 1234 - gradient_accumulation_steps: 10 - total_train_batch_size: 160 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 0.97 | 12 | 0.5252 | 0.8947 | | No log | 1.94 | 24 | 0.1506 | 0.9636 | | No log | 2.98 | 37 | 0.0787 | 0.9858 | | No log | 3.95 | 49 | 0.0587 | 0.9919 | | No log | 4.84 | 60 | 0.0472 | 0.9939 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "2-4d_upper", "bacterial_spot_upper", "cold_injury_upper", "early_blight_upper", "healthy_upperside", "little_leaf_upperside", "nutritional_disorder_upper", "spider_mite_upper", "tsw_upper", "tylc_upper" ]
ggergues/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. --> # ggergues/swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on an unknown dataset. It achieves the following results on the evaluation set: - Train Loss: 0.5115 - Validation Loss: 0.1813 - Validation Accuracy: 0.9437 - Epoch: 2 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': 5e-05, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False, 'weight_decay_rate': 0.01} - training_precision: float32 ### Training results | Train Loss | Validation Loss | Validation Accuracy | Epoch | |:----------:|:---------------:|:-------------------:|:-----:| | 0.7934 | 0.2019 | 0.9333 | 0 | | 0.5631 | 0.2128 | 0.9307 | 1 | | 0.5115 | 0.1813 | 0.9437 | 2 | ### Framework versions - Transformers 4.36.2 - TensorFlow 2.15.0 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "annualcrop", "forest", "herbaceousvegetation", "highway", "industrial", "pasture", "permanentcrop", "residential", "river", "sealake" ]
gianlab/swin-tiny-patch4-window7-224-finetuned-crop-classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-crop-classification This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.6957 - Accuracy: 0.7234 ## Model description This model was created by importing images of crop damage and following the image classification tutorial at https://colab.research.google.com/github/huggingface/notebooks/blob/main/examples/image_classification.ipynb, which produced this notebook: https://colab.research.google.com/drive/1qEskI6O-Jjv7UCanfQmUmzz8qUyg7FS3?usp=sharing The model classifies the following damage types: | Damage | Definition | |-----------------|---------------------| | DR | Drought | | G | Good (growth) | | ND | Nutrient Deficient | | WD | Weed | | other | Disease, Pest, Wind | Crop example: ![Screenshot](crop.png) ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 2 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.7819 | 1.0 | 183 | 0.7262 | 0.7016 | | 0.7104 | 1.99 | 366 | 0.6957 | 0.7234 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "dr", "g", "nd", "wd", "other" ]
IanTseng/vis_items_with_hand_classfier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. --> # vis_items_with_hand_classfier This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Train Loss: 0.0886 - Validation Loss: 0.0126 - Train Accuracy: 0.9981 - Epoch: 4 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 32405, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01} - training_precision: float32 ### Training results | Train Loss | Validation Loss | Train Accuracy | Epoch | |:----------:|:---------------:|:--------------:|:-----:| | 0.3574 | 0.0613 | 0.9932 | 0 | | 0.1445 | 0.0334 | 0.9932 | 1 | | 0.1196 | 0.0282 | 0.9963 | 2 | | 0.0986 | 0.0208 | 0.9963 | 3 | | 0.0886 | 0.0126 | 0.9981 | 4 | ### Framework versions - Transformers 4.35.2 - TensorFlow 2.15.0 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "0", "1", "2", "3", "4", "5", "6" ]
nicolasdupuisroy/vit-letter-identification-v3
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-letter-identification-v3 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.3440 - Accuracy: 0.7615 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 80 - eval_batch_size: 80 - seed: 1337 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 250.0 ### Training results | Training Loss | Epoch | Step | Accuracy | Validation Loss | |:-------------:|:-----:|:----:|:--------:|:---------------:| | No log | 1.0 | 7 | 0.0154 | 3.9449 | | 3.9333 | 2.0 | 14 | 0.0231 | 3.9367 | | 3.8939 | 3.0 | 21 | 0.0308 | 3.9280 | | 3.8939 | 4.0 | 28 | 0.0462 | 3.9167 | | 3.8562 | 5.0 | 35 | 0.0692 | 3.9033 | | 3.8008 | 6.0 | 42 | 0.0769 | 3.8874 | | 3.8008 | 7.0 | 49 | 0.1077 | 3.8670 | | 3.7555 | 8.0 | 56 | 0.1 | 3.8495 | | 3.6917 | 9.0 | 63 | 0.1154 | 3.8305 | | 3.6372 | 10.0 | 70 | 0.1385 | 3.8138 | | 3.6372 | 11.0 | 77 | 0.1231 | 3.7966 | | 3.5846 | 12.0 | 84 | 0.1538 | 3.7767 | | 3.5047 | 13.0 | 91 | 0.2308 | 3.7516 | | 3.5047 | 14.0 | 98 | 0.2385 | 3.7279 | | 3.4547 | 15.0 | 105 | 0.2385 | 3.7031 | | 3.3796 | 16.0 | 112 | 0.2692 | 3.6725 | | 3.3796 | 17.0 | 119 | 0.2769 | 3.6462 | | 3.3283 | 18.0 | 126 | 0.2923 | 3.6226 | | 3.2728 | 19.0 | 133 | 0.2846 | 3.6022 | | 3.2229 | 20.0 | 140 | 0.2769 | 3.5930 | | 3.2229 | 21.0 | 147 | 0.3308 | 3.5748 | | 3.1514 | 22.0 | 154 | 0.3385 | 3.5404 | | 3.1179 | 23.0 | 161 | 0.3385 | 3.5146 | | 3.1179 | 24.0 | 168 | 0.3462 | 3.4916 | | 3.0559 | 25.0 | 175 | 0.3385 | 3.4733 | | 3.0051 | 26.0 | 182 | 0.3615 | 3.4540 | | 3.0051 | 27.0 | 189 | 0.3692 | 3.4499 | | 2.9775 | 28.0 | 196 | 0.3769 | 3.4355 | | 2.9277 | 29.0 | 203 | 0.3846 | 3.4166 | | 2.9066 | 30.0 | 210 | 0.4 | 3.4007 | | 2.9066 | 31.0 | 217 | 0.3692 | 3.3826 | | 2.8464 | 32.0 | 224 | 0.4077 | 3.3698 | | 2.8044 | 33.0 | 231 | 0.4077 | 3.3509 | | 2.8044 | 34.0 | 238 | 0.3769 | 3.3243 | | 2.7699 | 35.0 | 245 | 0.3923 | 3.3201 | | 2.7251 | 36.0 | 252 | 0.4 | 3.3013 | | 2.7251 | 37.0 | 259 | 0.4231 | 3.2936 | | 2.6915 | 38.0 | 266 | 0.4538 | 3.2827 | | 2.6527 | 39.0 | 273 | 0.4615 | 3.2627 | | 2.6541 | 40.0 | 280 | 0.4615 | 3.2581 | | 2.6541 | 41.0 | 287 | 0.4231 | 3.2342 | | 2.5968 | 42.0 | 294 | 0.4385 | 3.2211 | | 2.573 | 43.0 | 301 | 0.4077 | 3.2122 | | 2.573 | 44.0 | 308 | 0.4615 | 3.2259 | | 2.554 | 45.0 | 315 | 0.4308 | 3.2271 | | 2.5222 | 46.0 | 322 | 0.4462 | 3.2208 | | 2.5222 | 47.0 | 329 | 0.4462 | 3.2139 | | 2.5085 | 48.0 | 336 | 0.4538 | 3.2040 | | 2.4593 | 49.0 | 343 | 0.4923 | 3.2053 | | 2.4585 | 50.0 | 350 | 0.4769 | 3.1822 | | 2.4585 | 51.0 | 357 | 0.4692 | 3.1697 | | 2.4228 | 52.0 | 364 | 0.4692 | 3.1589 | | 2.3954 | 53.0 | 371 | 0.4769 | 3.1375 | | 2.3954 | 54.0 | 378 | 0.4538 | 3.1092 | | 2.3641 | 55.0 | 385 | 0.4769 | 3.0999 | | 2.3651 | 56.0 | 392 | 0.4615 | 3.0860 | | 2.3651 | 57.0 | 399 | 0.4615 | 3.0813 | | 2.3182 | 58.0 | 406 | 0.4923 | 3.0692 | | 2.3029 | 59.0 | 413 | 0.4846 | 3.0610 | | 2.2988 | 60.0 | 420 | 0.4615 | 3.0627 | | 2.2988 | 61.0 | 427 | 0.4692 | 3.0520 | | 2.2865 | 62.0 | 434 | 0.4538 | 3.0395 | | 2.2623 | 63.0 | 441 | 0.4615 | 3.0357 | | 2.2623 | 64.0 | 448 | 0.4615 | 3.0333 | | 2.2252 | 65.0 | 455 | 0.4769 | 3.0229 | | 2.2339 | 66.0 | 462 | 0.4769 | 3.0203 | | 2.2339 | 67.0 | 469 | 0.4923 | 3.0076 | | 2.2017 | 68.0 | 476 | 0.4846 | 2.9876 | | 2.1972 | 69.0 | 483 | 0.4923 | 2.9716 | | 2.1964 | 70.0 | 490 | 0.5 | 2.9632 | | 2.1964 | 71.0 | 497 | 0.4923 | 2.9597 | | 2.1775 | 72.0 | 504 | 0.5 | 2.9581 | | 2.1619 | 73.0 | 511 | 0.5077 | 2.9516 | | 2.1619 | 74.0 | 518 | 0.5154 | 2.9356 | | 2.1633 | 75.0 | 525 | 0.5077 | 2.9286 | | 2.1207 | 76.0 | 532 | 0.5154 | 2.9266 | | 2.1207 | 77.0 | 539 | 0.5231 | 2.9205 | | 2.1353 | 78.0 | 546 | 0.5154 | 2.9131 | | 2.1075 | 79.0 | 553 | 0.5231 | 2.9075 | | 2.1025 | 80.0 | 560 | 0.5231 | 2.9073 | | 2.1025 | 81.0 | 567 | 0.5154 | 2.9174 | | 2.1031 | 82.0 | 574 | 0.5308 | 2.9131 | | 2.0932 | 83.0 | 581 | 0.5308 | 2.9092 | | 2.0932 | 84.0 | 588 | 0.5308 | 2.8978 | | 2.0861 | 85.0 | 595 | 0.5308 | 2.8871 | | 2.0478 | 86.0 | 602 | 0.5385 | 2.8829 | | 2.0478 | 87.0 | 609 | 0.5462 | 2.8804 | | 2.0815 | 88.0 | 616 | 0.5462 | 2.8725 | | 2.0756 | 89.0 | 623 | 0.5462 | 2.8694 | | 2.065 | 90.0 | 630 | 0.5462 | 2.8665 | | 2.065 | 91.0 | 637 | 0.5462 | 2.8615 | | 2.0572 | 92.0 | 644 | 0.5462 | 2.8599 | | 2.0358 | 93.0 | 651 | 0.5462 | 2.8620 | | 2.0358 | 94.0 | 658 | 0.5462 | 2.8629 | | 2.0663 | 95.0 | 665 | 0.5538 | 2.8625 | | 2.0353 | 96.0 | 672 | 0.5538 | 2.8628 | | 2.0353 | 97.0 | 679 | 0.5538 | 2.8629 | | 2.0506 | 98.0 | 686 | 0.5538 | 2.8622 | | 2.0494 | 99.0 | 693 | 0.5538 | 2.8622 | | 2.0566 | 100.0 | 700 | 0.5538 | 2.8622 | ### Framework versions - Transformers 4.37.0.dev0 - Pytorch 2.1.0+cu121 - Datasets 2.4.0 - Tokenizers 0.15.0
[ "a_", "b_", "k_", "l_", "m_", "n_", "o_", "p_", "q_", "r_", "s_", "t_", "c_", "u_", "v_", "w_", "x_", "y_", "z_", "a", "b", "c", "d", "d_", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "e_", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "f_", "y", "z", "g_", "h_", "i_", "j_" ]
Devarshi/Swin_transformer_dent_detection
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # Swin_transformer_dent_detection This model is a fine-tuned version of [microsoft/swin-base-patch4-window7-224-in22k](https://huggingface.co/microsoft/swin-base-patch4-window7-224-in22k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.6854 - Accuracy: 0.55 - F1: 0.55 - Recall: 0.55 - Precision: 0.55 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | Recall | Precision | |:-------------:|:-----:|:----:|:---------------:|:--------:|:----:|:------:|:---------:| | No log | 0.8 | 1 | 0.6854 | 0.55 | 0.55 | 0.55 | 0.55 | | No log | 1.8 | 2 | 0.9226 | 0.55 | 0.55 | 0.55 | 0.55 | | No log | 2.8 | 3 | 0.8610 | 0.55 | 0.55 | 0.55 | 0.55 | ### Framework versions - Transformers 4.23.1 - Pytorch 1.13.0 - Datasets 2.6.1 - Tokenizers 0.13.1
[ "dent", "perfect" ]
sanjeev77/vit-fire-detection
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-fire-detection This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0211 - Precision: 0.9947 - Recall: 0.9947 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 100 - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | |:-------------:|:-----:|:----:|:---------------:|:---------:|:------:| | 0.106 | 1.0 | 190 | 0.0632 | 0.9836 | 0.9828 | | 0.0279 | 2.0 | 380 | 0.0257 | 0.9947 | 0.9947 | | 0.0303 | 3.0 | 570 | 0.0431 | 0.9832 | 0.9828 | | 0.0155 | 4.0 | 760 | 0.0253 | 0.9934 | 0.9934 | | 0.0131 | 5.0 | 950 | 0.0243 | 0.9934 | 0.9934 | | 0.0104 | 6.0 | 1140 | 0.0216 | 0.9921 | 0.9921 | | 0.0133 | 7.0 | 1330 | 0.0210 | 0.9934 | 0.9934 | | 0.0071 | 8.0 | 1520 | 0.0286 | 0.9921 | 0.9921 | | 0.001 | 9.0 | 1710 | 0.0285 | 0.9921 | 0.9921 | | 0.0086 | 10.0 | 1900 | 0.0211 | 0.9947 | 0.9947 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "fire", "normal", "smoke" ]
ksuyash/finetuned-indian-food
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # finetuned-indian-food This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the indian_food_images dataset. It achieves the following results on the evaluation set: - Loss: 0.0144 - Accuracy: 0.9981 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 16 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.9849 | 0.26 | 100 | 0.8445 | 0.8721 | | 0.4628 | 0.51 | 200 | 0.4435 | 0.9201 | | 0.4738 | 0.77 | 300 | 0.3339 | 0.9336 | | 0.3603 | 1.02 | 400 | 0.2924 | 0.9328 | | 0.1792 | 1.28 | 500 | 0.1862 | 0.9560 | | 0.2304 | 1.53 | 600 | 0.1352 | 0.9711 | | 0.1512 | 1.79 | 700 | 0.1244 | 0.9689 | | 0.1805 | 2.04 | 800 | 0.0843 | 0.9805 | | 0.1672 | 2.3 | 900 | 0.0576 | 0.9879 | | 0.0154 | 2.55 | 1000 | 0.0498 | 0.9900 | | 0.0357 | 2.81 | 1100 | 0.0359 | 0.9933 | | 0.0241 | 3.06 | 1200 | 0.0290 | 0.9951 | | 0.0133 | 3.32 | 1300 | 0.0228 | 0.9967 | | 0.0088 | 3.57 | 1400 | 0.0193 | 0.9970 | | 0.0511 | 3.83 | 1500 | 0.0144 | 0.9981 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "burger", "butter_naan", "kaathi_rolls", "kadai_paneer", "kulfi", "masala_dosa", "momos", "paani_puri", "pakode", "pav_bhaji", "pizza", "samosa", "chai", "chapati", "chole_bhature", "dal_makhani", "dhokla", "fried_rice", "idli", "jalebi" ]
JPeace18/autotrain-caridentifier
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 6.425095081329346 f1_macro: 0.005258975403754648 f1_micro: 0.029495571813631112 f1_weighted: 0.010640418098909527 precision_macro: 0.006409892728092866 precision_micro: 0.029495571813631112 precision_weighted: 0.01244378584000894 recall_macro: 0.014090852423880612 recall_micro: 0.029495571813631112 recall_weighted: 0.029495571813631112 accuracy: 0.029495571813631112
[ "acura_ilx_2013", "acura_ilx_2014", "acura_ilx_2015", "acura_ilx_2016", "acura_ilx_2017", "acura_ilx_2019", "acura_ilx_2020", "acura_mdx_2011", "acura_mdx_2012", "acura_mdx_2013", "acura_mdx_2014", "acura_mdx_2016", "acura_mdx_2017", "acura_mdx_2019", "acura_mdx_2020", "acura_nsx_2001", "acura_nsx_2002", "acura_nsx_2003", "acura_nsx_2004", "acura_nsx_2005", "acura_nsx_2016", "acura_nsx_2017", "acura_nsx_2019", "acura_nsx_2020", "acura_rdx_2011", "acura_rdx_2012", "acura_rdx_2013", "acura_rdx_2014", "acura_rdx_2015", "acura_rdx_2016", "acura_rdx_2017", "acura_rdx_2019", "acura_rdx_2020", "acura_rlx_2014", "acura_rlx_2015", "acura_rlx_2016", "acura_rlx_2017", "acura_rlx_2019", "acura_rlx_2020", "acura_tlx_2015", "acura_tlx_2016", "acura_tlx_2017", "acura_tlx_2019", "acura_tlx_2020", "alfa romeo_4c spider_2020", "alfa romeo_4c_2015", "alfa romeo_4c_2016", "alfa romeo_4c_2017", "alfa romeo_4c_2019", "alfa romeo_giulia_2011", "alfa romeo_giulia_2017", "alfa romeo_giulia_2020", "alfa romeo_stelvio_2019", "alfa romeo_stelvio_2020", "aston martin_db11_2017", "aston martin_db11_2019", "aston martin_dbs_2008", "aston martin_dbs_2009", "aston martin_dbs_2010", "aston martin_dbs_2011", "aston martin_dbs_2012", "aston martin_dbs_2019", "aston martin_vanquish_2003", "aston martin_vanquish_2004", "aston martin_vanquish_2005", "aston martin_vanquish_2006", "aston martin_vanquish_2014", "aston martin_vanquish_2015", "aston martin_vanquish_2016", "aston martin_vanquish_2017", "aston martin_vanquish_2019", "aston martin_vantage_2011", "aston martin_vantage_2012", "aston martin_vantage_2013", "aston martin_vantage_2014", "aston martin_vantage_2015", "aston martin_vantage_2016", "aston martin_vantage_2017", "aston martin_vantage_2019", "audi_a3_2010", "audi_a3_2011", "audi_a3_2012", "audi_a3_2013", "audi_a3_2015", "audi_a3_2016", "audi_a3_2017", "audi_a3_2019", "audi_a3_2020", "audi_a4_2011", "audi_a4_2012", "audi_a4_2013", "audi_a4_2014", "audi_a4_2015", "audi_a4_2016", "audi_a4_2017", "audi_a4_2019", "audi_a5_2010", "audi_a5_2011", "audi_a5_2012", "audi_a5_2013", "audi_a5_2014", "audi_a5_2015", "audi_a5_2016", "audi_a5_2017", "audi_a5_2019", "audi_a6_2011", "audi_a6_2012", "audi_a6_2013", "audi_a6_2014", "audi_a6_2015", "audi_a6_2016", "audi_a6_2017", "audi_a6_2019", "audi_a6_2020", "audi_a7_2012", "audi_a7_2013", "audi_a7_2014", "audi_a7_2015", "audi_a7_2016", "audi_a7_2017", "audi_a7_2019", "audi_a7_2020", "audi_a8_2011", "audi_a8_2012", "audi_a8_2013", "audi_a8_2014", "audi_a8_2015", "audi_a8_2016", "audi_a8_2017", "audi_a8_2019", "audi_a8_2020", "audi_q3_2015", "audi_q3_2016", "audi_q3_2017", "audi_q3_2019", "audi_q3_2020", "audi_q5_2011", "audi_q5_2012", "audi_q5_2013", "audi_q5_2014", "audi_q5_2015", "audi_q5_2016", "audi_q5_2017", "audi_q5_2019", "audi_q5_2020", "audi_q7_2010", "audi_q7_2011", "audi_q7_2012", "audi_q7_2013", "audi_q7_2014", "audi_q7_2015", "audi_q7_2017", "audi_q7_2019", "audi_q7_2020", "audi_q8_2019", "audi_q8_2020", "audi_r8_2010", "audi_r8_2011", "audi_r8_2012", "audi_r8_2014", "audi_r8_2015", "audi_r8_2017", "audi_r8_2020", "audi_tt_2011", "audi_tt_2012", "audi_tt_2013", "audi_tt_2014", "audi_tt_2015", "audi_tt_2016", "audi_tt_2017", "audi_tt_2019", "audi_tt_2020", "audi_e-tron_2019", "bmw_2-series_2014", "bmw_2-series_2015", "bmw_2-series_2016", "bmw_2-series_2017", "bmw_2-series_2019", "bmw_2-series_2020", "bmw_3-series_2011", "bmw_3-series_2012", "bmw_3-series_2013", "bmw_3-series_2014", "bmw_3-series_2015", "bmw_3-series_2016", "bmw_3-series_2017", "bmw_3-series_2019", 
"bmw_3-series_2020", "bmw_4-series_2014", "bmw_4-series_2015", "bmw_4-series_2016", "bmw_4-series_2017", "bmw_4-series_2019", "bmw_4-series_2020", "bmw_5-series_2011", "bmw_5-series_2012", "bmw_5-series_2013", "bmw_5-series_2014", "bmw_5-series_2015", "bmw_5-series_2016", "bmw_5-series_2017", "bmw_5-series_2019", "bmw_5-series_2020", "bmw_6-series_2010", "bmw_6-series_2011", "bmw_6-series_2012", "bmw_6-series_2013", "bmw_6-series_2014", "bmw_6-series_2015", "bmw_6-series_2016", "bmw_6-series_2017", "bmw_6-series_2019", "bmw_7-series_2011", "bmw_7-series_2012", "bmw_7-series_2013", "bmw_7-series_2014", "bmw_7-series_2015", "bmw_7-series_2016", "bmw_7-series_2017", "bmw_7-series_2019", "bmw_7-series_2020", "bmw_8-series_1996", "bmw_8-series_2019", "bmw_8-series_2020", "bmw_x1_2013", "bmw_x1_2014", "bmw_x1_2015", "bmw_x1_2016", "bmw_x1_2017", "bmw_x1_2019", "bmw_x1_2020", "bmw_x2_2020", "bmw_x3_2011", "bmw_x3_2012", "bmw_x3_2013", "bmw_x3_2014", "bmw_x3_2015", "bmw_x3_2016", "bmw_x3_2017", "bmw_x3_2019", "bmw_x3_2020", "bmw_x4_2015", "bmw_x4_2016", "bmw_x4_2017", "bmw_x4_2019", "bmw_x4_2020", "bmw_x5_2011", "bmw_x5_2012", "bmw_x5_2013", "bmw_x5_2014", "bmw_x5_2015", "bmw_x5_2016", "bmw_x5_2017", "bmw_x5_2019", "bmw_x5_2020", "bmw_x6_2011", "bmw_x6_2012", "bmw_x6_2013", "bmw_x6_2014", "bmw_x6_2015", "bmw_x6_2016", "bmw_x6_2017", "bmw_x6_2019", "bmw_x6_2020", "bmw_x7_2019", "bmw_x7_2020", "bmw_z4_2009", "bmw_z4_2010", "bmw_z4_2011", "bmw_z4_2012", "bmw_z4_2014", "bmw_z4_2015", "bmw_z4_2019", "bmw_z4_2020", "bmw_i3_2014", "bmw_i3_2015", "bmw_i3_2016", "bmw_i3_2017", "bmw_i3_2019", "bmw_i8_2014", "bmw_i8_2015", "bmw_i8_2016", "bmw_i8_2020", "bentley_bentayga_2017", "bentley_bentayga_2020", "bentley_continental gt_2011", "bentley_continental gt_2012", "bentley_continental gt_2013", "bentley_continental gt_2014", "bentley_continental gt_2015", "bentley_continental gt_2016", "bentley_continental gt_2017", "bentley_continental gt_2019", "bentley_continental gt_2020", "bentley_flying spur_2011", "bentley_flying spur_2012", "bentley_flying spur_2013", "bentley_flying spur_2014", "bentley_flying spur_2015", "bentley_flying spur_2016", "bentley_flying spur_2017", "bentley_flying spur_2019", "bentley_flying spur_2020", "bentley_mulsanne_2011", "bentley_mulsanne_2012", "bentley_mulsanne_2014", "bentley_mulsanne_2015", "bentley_mulsanne_2016", "bentley_mulsanne_2020", "buick_cascada_2016", "buick_cascada_2017", "buick_cascada_2019", "buick_enclave_2011", "buick_enclave_2012", "buick_enclave_2013", "buick_enclave_2014", "buick_enclave_2015", "buick_enclave_2016", "buick_enclave_2017", "buick_enclave_2019", "buick_enclave_2020", "buick_encore_2013", "buick_encore_2014", "buick_encore_2015", "buick_encore_2016", "buick_encore_2017", "buick_encore_2019", "buick_encore_2020", "buick_envision_2016", "buick_envision_2017", "buick_envision_2019", "buick_envision_2020", "buick_lacrosse_2010", "buick_lacrosse_2011", "buick_lacrosse_2012", "buick_lacrosse_2013", "buick_lacrosse_2014", "buick_lacrosse_2015", "buick_lacrosse_2016", "buick_lacrosse_2017", "buick_lacrosse_2019", "buick_regal_2011", "buick_regal_2012", "buick_regal_2013", "buick_regal_2014", "buick_regal_2015", "buick_regal_2016", "buick_regal_2017", "buick_regal_2019", "buick_regal_2020", "cadillac_ats_2013", "cadillac_ats_2014", "cadillac_ats_2015", "cadillac_ats_2016", "cadillac_ats_2017", "cadillac_ats_2019", "cadillac_ct4_2020", "cadillac_ct5_2020", "cadillac_ct6_2016", "cadillac_ct6_2017", "cadillac_ct6_2019", "cadillac_ct6_2020", 
"cadillac_cts_2010", "cadillac_cts_2011", "cadillac_cts_2012", "cadillac_cts_2013", "cadillac_cts_2014", "cadillac_cts_2015", "cadillac_cts_2016", "cadillac_cts_2017", "cadillac_cts_2019", "cadillac_escalade_2011", "cadillac_escalade_2012", "cadillac_escalade_2013", "cadillac_escalade_2014", "cadillac_escalade_2015", "cadillac_escalade_2016", "cadillac_escalade_2017", "cadillac_escalade_2019", "cadillac_escalade_2020", "cadillac_xt4_2019", "cadillac_xt4_2020", "cadillac_xt5_2017", "cadillac_xt5_2019", "cadillac_xt5_2020", "cadillac_xt6_2020", "cadillac_xts_2012", "cadillac_xts_2013", "cadillac_xts_2014", "cadillac_xts_2015", "cadillac_xts_2016", "cadillac_xts_2017", "cadillac_xts_2019", "chevrolet_blazer_1998", "chevrolet_blazer_1999", "chevrolet_blazer_2000", "chevrolet_blazer_2001", "chevrolet_blazer_2002", "chevrolet_blazer_2003", "chevrolet_blazer_2004", "chevrolet_blazer_2019", "chevrolet_blazer_2020", "chevrolet_bolt ev_2017", "chevrolet_bolt ev_2019", "chevrolet_camaro_2011", "chevrolet_camaro_2012", "chevrolet_camaro_2013", "chevrolet_camaro_2014", "chevrolet_camaro_2015", "chevrolet_camaro_2016", "chevrolet_camaro_2017", "chevrolet_camaro_2019", "chevrolet_camaro_2020", "chevrolet_colorado_2009", "chevrolet_colorado_2010", "chevrolet_colorado_2011", "chevrolet_colorado_2012", "chevrolet_colorado_2015", "chevrolet_colorado_2016", "chevrolet_colorado_2017", "chevrolet_colorado_2019", "chevrolet_colorado_2020", "chevrolet_corvette_2011", "chevrolet_corvette_2012", "chevrolet_corvette_2013", "chevrolet_corvette_2014", "chevrolet_corvette_2015", "chevrolet_corvette_2016", "chevrolet_corvette_2017", "chevrolet_corvette_2019", "chevrolet_corvette_2020", "chevrolet_cruze_2011", "chevrolet_cruze_2012", "chevrolet_cruze_2013", "chevrolet_cruze_2014", "chevrolet_cruze_2015", "chevrolet_cruze_2016", "chevrolet_cruze_2017", "chevrolet_cruze_2019", "chevrolet_equinox_2011", "chevrolet_equinox_2012", "chevrolet_equinox_2013", "chevrolet_equinox_2014", "chevrolet_equinox_2015", "chevrolet_equinox_2016", "chevrolet_equinox_2017", "chevrolet_equinox_2019", "chevrolet_equinox_2020", "chevrolet_impala_2011", "chevrolet_impala_2012", "chevrolet_impala_2013", "chevrolet_impala_2014", "chevrolet_impala_2015", "chevrolet_impala_2016", "chevrolet_impala_2017", "chevrolet_impala_2019", "chevrolet_impala_2020", "chevrolet_malibu_2011", "chevrolet_malibu_2012", "chevrolet_malibu_2013", "chevrolet_malibu_2014", "chevrolet_malibu_2015", "chevrolet_malibu_2016", "chevrolet_malibu_2017", "chevrolet_malibu_2019", "chevrolet_malibu_2020", "chevrolet_silverado 1500_2011", "chevrolet_silverado 1500_2012", "chevrolet_silverado 1500_2013", "chevrolet_silverado 1500_2014", "chevrolet_silverado 1500_2015", "chevrolet_silverado 1500_2016", "chevrolet_silverado 1500_2017", "chevrolet_silverado 1500_2019", "chevrolet_silverado 1500_2020", "chevrolet_silverado 2500hd_2011", "chevrolet_silverado 2500hd_2012", "chevrolet_silverado 2500hd_2013", "chevrolet_silverado 2500hd_2014", "chevrolet_silverado 2500hd_2015", "chevrolet_silverado 2500hd_2016", "chevrolet_silverado 2500hd_2017", "chevrolet_silverado 2500hd_2019", "chevrolet_silverado 2500hd_2020", "chevrolet_sonic_2012", "chevrolet_sonic_2013", "chevrolet_sonic_2014", "chevrolet_sonic_2015", "chevrolet_sonic_2016", "chevrolet_sonic_2017", "chevrolet_sonic_2019", "chevrolet_sonic_2020", "chevrolet_spark_2013", "chevrolet_spark_2014", "chevrolet_spark_2015", "chevrolet_spark_2016", "chevrolet_spark_2017", "chevrolet_spark_2019", "chevrolet_spark_2020", 
"chevrolet_suburban_2011", "chevrolet_suburban_2012", "chevrolet_suburban_2013", "chevrolet_suburban_2014", "chevrolet_suburban_2015", "chevrolet_suburban_2016", "chevrolet_suburban_2017", "chevrolet_suburban_2019", "chevrolet_tahoe_2011", "chevrolet_tahoe_2012", "chevrolet_tahoe_2013", "chevrolet_tahoe_2014", "chevrolet_tahoe_2015", "chevrolet_tahoe_2016", "chevrolet_tahoe_2017", "chevrolet_tahoe_2019", "chevrolet_trailblazer_2002", "chevrolet_trailblazer_2003", "chevrolet_trailblazer_2004", "chevrolet_trailblazer_2005", "chevrolet_trailblazer_2006", "chevrolet_trailblazer_2007", "chevrolet_trailblazer_2008", "chevrolet_traverse_2011", "chevrolet_traverse_2012", "chevrolet_traverse_2013", "chevrolet_traverse_2014", "chevrolet_traverse_2015", "chevrolet_traverse_2016", "chevrolet_traverse_2017", "chevrolet_traverse_2019", "chevrolet_traverse_2020", "chevrolet_trax_2015", "chevrolet_trax_2016", "chevrolet_trax_2017", "chevrolet_trax_2019", "chevrolet_trax_2020", "chevrolet_volt_2011", "chevrolet_volt_2012", "chevrolet_volt_2013", "chevrolet_volt_2014", "chevrolet_volt_2015", "chevrolet_volt_2016", "chevrolet_volt_2017", "chevrolet_volt_2019", "chrysler_300_2011", "chrysler_300_2012", "chrysler_300_2013", "chrysler_300_2014", "chrysler_300_2015", "chrysler_300_2016", "chrysler_300_2017", "chrysler_300_2019", "chrysler_300_2020", "chrysler_pacifica_2004", "chrysler_pacifica_2005", "chrysler_pacifica_2006", "chrysler_pacifica_2007", "chrysler_pacifica_2008", "chrysler_pacifica_2017", "chrysler_pacifica_2019", "chrysler_pacifica_2020", "dodge_challenger_2011", "dodge_challenger_2012", "dodge_challenger_2013", "dodge_challenger_2014", "dodge_challenger_2015", "dodge_challenger_2016", "dodge_challenger_2017", "dodge_challenger_2019", "dodge_challenger_2020", "dodge_charger_2011", "dodge_charger_2012", "dodge_charger_2013", "dodge_charger_2014", "dodge_charger_2015", "dodge_charger_2016", "dodge_charger_2017", "dodge_charger_2019", "dodge_charger_2020", "dodge_durango_2011", "dodge_durango_2012", "dodge_durango_2013", "dodge_durango_2014", "dodge_durango_2015", "dodge_durango_2016", "dodge_durango_2017", "dodge_durango_2019", "dodge_durango_2020", "dodge_grand caravan_2011", "dodge_grand caravan_2012", "dodge_grand caravan_2013", "dodge_grand caravan_2014", "dodge_grand caravan_2015", "dodge_grand caravan_2016", "dodge_grand caravan_2017", "dodge_grand caravan_2019", "dodge_grand caravan_2020", "dodge_journey_2011", "dodge_journey_2012", "dodge_journey_2013", "dodge_journey_2014", "dodge_journey_2015", "dodge_journey_2016", "dodge_journey_2017", "dodge_journey_2019", "dodge_journey_2020", "fiat_124 spider_2017", "fiat_124 spider_2019", "fiat_124 spider_2020", "fiat_500l_2014", "fiat_500l_2015", "fiat_500l_2016", "fiat_500l_2017", "fiat_500l_2019", "fiat_500x_2016", "fiat_500x_2017", "fiat_500x_2019", "fiat_500x_2020", "fiat_500_2011", "fiat_500_2012", "fiat_500_2013", "fiat_500_2014", "fiat_500_2015", "fiat_500_2016", "fiat_500_2017", "fiat_500_2019", "fiat_500e_2013", "fiat_500e_2014", "fiat_500e_2015", "fiat_500e_2016", "fiat_500e_2017", "fiat_500e_2019", "ferrari_488 gtb_2016", "ferrari_488 gtb_2019", "ferrari_gtc4lusso_2017", "ferrari_gtc4lusso_2020", "ferrari_portofino_2019", "ford_ecosport_2019", "ford_ecosport_2020", "ford_edge_2011", "ford_edge_2012", "ford_edge_2013", "ford_edge_2014", "ford_edge_2015", "ford_edge_2016", "ford_edge_2017", "ford_edge_2019", "ford_edge_2020", "ford_escape_2011", "ford_escape_2012", "ford_escape_2013", "ford_escape_2014", "ford_escape_2015", 
"ford_escape_2016", "ford_escape_2017", "ford_escape_2019", "ford_escape_2020", "ford_expedition_2011", "ford_expedition_2012", "ford_expedition_2013", "ford_expedition_2014", "ford_expedition_2015", "ford_expedition_2016", "ford_expedition_2017", "ford_expedition_2019", "ford_expedition_2020", "ford_explorer_2011", "ford_explorer_2012", "ford_explorer_2013", "ford_explorer_2014", "ford_explorer_2015", "ford_explorer_2016", "ford_explorer_2017", "ford_explorer_2019", "ford_explorer_2020", "ford_f-150_2011", "ford_f-150_2012", "ford_f-150_2013", "ford_f-150_2014", "ford_f-150_2015", "ford_f-150_2016", "ford_f-150_2017", "ford_f-150_2019", "ford_fiesta_2011", "ford_fiesta_2012", "ford_fiesta_2013", "ford_fiesta_2014", "ford_fiesta_2015", "ford_fiesta_2016", "ford_fiesta_2017", "ford_fiesta_2019", "ford_flex_2010", "ford_flex_2011", "ford_flex_2012", "ford_flex_2013", "ford_flex_2014", "ford_flex_2015", "ford_flex_2016", "ford_flex_2017", "ford_flex_2019", "ford_fusion_2011", "ford_fusion_2012", "ford_fusion_2013", "ford_fusion_2014", "ford_fusion_2015", "ford_fusion_2016", "ford_fusion_2017", "ford_fusion_2019", "ford_fusion_2020", "ford_mustang_2011", "ford_mustang_2012", "ford_mustang_2013", "ford_mustang_2014", "ford_mustang_2015", "ford_mustang_2016", "ford_mustang_2017", "ford_mustang_2019", "ford_mustang_2020", "ford_ranger_2004", "ford_ranger_2005", "ford_ranger_2006", "ford_ranger_2007", "ford_ranger_2008", "ford_ranger_2009", "ford_ranger_2010", "ford_ranger_2019", "ford_super duty f-250_2011", "ford_super duty f-250_2012", "ford_super duty f-250_2013", "ford_super duty f-250_2014", "ford_super duty f-250_2015", "ford_super duty f-250_2016", "ford_super duty f-250_2017", "ford_super duty f-250_2019", "ford_super duty f-250_2020", "ford_taurus_2010", "ford_taurus_2011", "ford_taurus_2012", "ford_taurus_2013", "ford_taurus_2014", "ford_taurus_2015", "ford_taurus_2016", "ford_taurus_2017", "ford_taurus_2019", "ford_transit connect wagon_2011", "ford_transit connect wagon_2012", "ford_transit connect wagon_2013", "ford_transit connect wagon_2014", "ford_transit connect wagon_2015", "ford_transit connect wagon_2016", "ford_transit connect wagon_2017", "ford_transit connect wagon_2019", "ford_transit connect wagon_2020", "gmc_acadia_2011", "gmc_acadia_2012", "gmc_acadia_2013", "gmc_acadia_2014", "gmc_acadia_2015", "gmc_acadia_2016", "gmc_acadia_2017", "gmc_acadia_2019", "gmc_acadia_2020", "gmc_canyon_2009", "gmc_canyon_2010", "gmc_canyon_2011", "gmc_canyon_2012", "gmc_canyon_2015", "gmc_canyon_2016", "gmc_canyon_2017", "gmc_canyon_2019", "gmc_canyon_2020", "gmc_sierra 1500_2011", "gmc_sierra 1500_2012", "gmc_sierra 1500_2013", "gmc_sierra 1500_2014", "gmc_sierra 1500_2015", "gmc_sierra 1500_2016", "gmc_sierra 1500_2017", "gmc_sierra 1500_2019", "gmc_sierra 1500_2020", "gmc_sierra 2500hd_2011", "gmc_sierra 2500hd_2012", "gmc_sierra 2500hd_2013", "gmc_sierra 2500hd_2014", "gmc_sierra 2500hd_2015", "gmc_sierra 2500hd_2016", "gmc_sierra 2500hd_2017", "gmc_sierra 2500hd_2019", "gmc_sierra 2500hd_2020", "gmc_terrain_2011", "gmc_terrain_2012", "gmc_terrain_2013", "gmc_terrain_2014", "gmc_terrain_2015", "gmc_terrain_2016", "gmc_terrain_2017", "gmc_terrain_2019", "gmc_terrain_2020", "gmc_yukon_2011", "gmc_yukon_2012", "gmc_yukon_2013", "gmc_yukon_2014", "gmc_yukon_2015", "gmc_yukon_2016", "gmc_yukon_2017", "gmc_yukon_2019", "gmc_yukon_2020", "genesis_g70_2020", "genesis_g80_2017", "genesis_g80_2020", "genesis_g90_2017", "genesis_g90_2019", "genesis_g90_2020", "honda_accord_2011", 
"honda_accord_2012", "honda_accord_2013", "honda_accord_2014", "honda_accord_2015", "honda_accord_2016", "honda_accord_2017", "honda_accord_2019", "honda_accord_2020", "honda_cr-v_2011", "honda_cr-v_2012", "honda_cr-v_2013", "honda_cr-v_2014", "honda_cr-v_2015", "honda_cr-v_2016", "honda_cr-v_2017", "honda_cr-v_2019", "honda_cr-v_2020", "honda_civic_2011", "honda_civic_2012", "honda_civic_2013", "honda_civic_2014", "honda_civic_2015", "honda_civic_2016", "honda_civic_2017", "honda_civic_2019", "honda_civic_2020", "honda_clarity_2003", "honda_clarity_2005", "honda_clarity_2008", "honda_clarity_2009", "honda_clarity_2017", "honda_clarity_2019", "honda_fit_2011", "honda_fit_2012", "honda_fit_2013", "honda_fit_2014", "honda_fit_2015", "honda_fit_2016", "honda_fit_2017", "honda_fit_2019", "honda_hr-v_2016", "honda_hr-v_2017", "honda_hr-v_2019", "honda_hr-v_2020", "honda_insight_2005", "honda_insight_2006", "honda_insight_2009", "honda_insight_2010", "honda_insight_2011", "honda_insight_2012", "honda_insight_2013", "honda_insight_2019", "honda_insight_2020", "honda_odyssey_2011", "honda_odyssey_2012", "honda_odyssey_2013", "honda_odyssey_2014", "honda_odyssey_2015", "honda_odyssey_2016", "honda_odyssey_2017", "honda_odyssey_2019", "honda_odyssey_2020", "honda_passport_1995", "honda_passport_1997", "honda_passport_1998", "honda_passport_1999", "honda_passport_2000", "honda_passport_2001", "honda_passport_2019", "honda_passport_2020", "honda_pilot_2011", "honda_pilot_2012", "honda_pilot_2013", "honda_pilot_2014", "honda_pilot_2015", "honda_pilot_2016", "honda_pilot_2017", "honda_pilot_2019", "honda_pilot_2020", "honda_ridgeline_2008", "honda_ridgeline_2009", "honda_ridgeline_2010", "honda_ridgeline_2011", "honda_ridgeline_2012", "honda_ridgeline_2013", "honda_ridgeline_2014", "honda_ridgeline_2017", "honda_ridgeline_2019", "hyundai_accent_2011", "hyundai_accent_2012", "hyundai_accent_2013", "hyundai_accent_2014", "hyundai_accent_2015", "hyundai_accent_2016", "hyundai_accent_2017", "hyundai_accent_2019", "hyundai_accent_2020", "hyundai_elantra_2011", "hyundai_elantra_2012", "hyundai_elantra_2013", "hyundai_elantra_2014", "hyundai_elantra_2015", "hyundai_elantra_2016", "hyundai_elantra_2017", "hyundai_elantra_2019", "hyundai_elantra_2020", "hyundai_ioniq_2017", "hyundai_ioniq_2019", "hyundai_ioniq_2020", "hyundai_kona electric_2019", "hyundai_kona electric_2020", "hyundai_kona_2019", "hyundai_kona_2020", "hyundai_nexo_2019", "hyundai_nexo_2020", "hyundai_palisade_2020", "hyundai_santa fe_2011", "hyundai_santa fe_2012", "hyundai_santa fe_2013", "hyundai_santa fe_2014", "hyundai_santa fe_2015", "hyundai_santa fe_2016", "hyundai_santa fe_2017", "hyundai_santa fe_2019", "hyundai_santa fe_2020", "hyundai_sonata_2011", "hyundai_sonata_2012", "hyundai_sonata_2013", "hyundai_sonata_2014", "hyundai_sonata_2015", "hyundai_sonata_2016", "hyundai_sonata_2017", "hyundai_sonata_2019", "hyundai_sonata_2020", "hyundai_tucson_2011", "hyundai_tucson_2012", "hyundai_tucson_2013", "hyundai_tucson_2014", "hyundai_tucson_2015", "hyundai_tucson_2016", "hyundai_tucson_2017", "hyundai_tucson_2019", "hyundai_tucson_2020", "hyundai_veloster_2012", "hyundai_veloster_2013", "hyundai_veloster_2014", "hyundai_veloster_2015", "hyundai_veloster_2016", "hyundai_veloster_2019", "hyundai_veloster_2020", "hyundai_venue_2020", "infiniti_q50_2014", "infiniti_q50_2015", "infiniti_q50_2016", "infiniti_q50_2017", "infiniti_q50_2019", "infiniti_q50_2020", "infiniti_q60_2014", "infiniti_q60_2015", "infiniti_q60_2016", "infiniti_q60_2017", 
"infiniti_q60_2019", "infiniti_q60_2020", "infiniti_q70_2014", "infiniti_q70_2015", "infiniti_q70_2016", "infiniti_q70_2017", "infiniti_q70_2019", "infiniti_qx30_2017", "infiniti_qx30_2019", "infiniti_qx50_2014", "infiniti_qx50_2015", "infiniti_qx50_2016", "infiniti_qx50_2019", "infiniti_qx50_2020", "infiniti_qx60_2014", "infiniti_qx60_2015", "infiniti_qx60_2016", "infiniti_qx60_2017", "infiniti_qx60_2019", "infiniti_qx60_2020", "infiniti_qx80_2014", "infiniti_qx80_2015", "infiniti_qx80_2016", "infiniti_qx80_2017", "infiniti_qx80_2020", "jaguar_e-pace_2020", "jaguar_f-pace_2017", "jaguar_f-pace_2019", "jaguar_f-pace_2020", "jaguar_f-type_2014", "jaguar_f-type_2015", "jaguar_f-type_2016", "jaguar_f-type_2017", "jaguar_f-type_2020", "jaguar_i-pace_2019", "jaguar_i-pace_2020", "jaguar_xe_2017", "jaguar_xe_2019", "jaguar_xe_2020", "jaguar_xf_2011", "jaguar_xf_2012", "jaguar_xf_2013", "jaguar_xf_2014", "jaguar_xf_2015", "jaguar_xf_2016", "jaguar_xf_2017", "jaguar_xf_2019", "jaguar_xf_2020", "jaguar_xj_2010", "jaguar_xj_2011", "jaguar_xj_2012", "jaguar_xj_2013", "jaguar_xj_2014", "jaguar_xj_2015", "jaguar_xj_2017", "jaguar_xj_2019", "jeep_cherokee_1999", "jeep_cherokee_2000", "jeep_cherokee_2001", "jeep_cherokee_2014", "jeep_cherokee_2015", "jeep_cherokee_2016", "jeep_cherokee_2017", "jeep_cherokee_2019", "jeep_compass_2011", "jeep_compass_2012", "jeep_compass_2013", "jeep_compass_2014", "jeep_compass_2015", "jeep_compass_2017", "jeep_compass_2019", "jeep_compass_2020", "jeep_gladiator_2020", "jeep_grand cherokee_2011", "jeep_grand cherokee_2012", "jeep_grand cherokee_2013", "jeep_grand cherokee_2014", "jeep_grand cherokee_2015", "jeep_grand cherokee_2016", "jeep_grand cherokee_2017", "jeep_grand cherokee_2019", "jeep_grand cherokee_2020", "jeep_renegade_2015", "jeep_renegade_2016", "jeep_renegade_2017", "jeep_renegade_2019", "jeep_renegade_2020", "jeep_wrangler_2011", "jeep_wrangler_2012", "jeep_wrangler_2013", "jeep_wrangler_2014", "jeep_wrangler_2015", "jeep_wrangler_2016", "jeep_wrangler_2017", "jeep_wrangler_2019", "jeep_wrangler_2020", "kia_cadenza_2014", "kia_cadenza_2015", "kia_cadenza_2016", "kia_cadenza_2017", "kia_cadenza_2019", "kia_forte_2011", "kia_forte_2012", "kia_forte_2013", "kia_forte_2014", "kia_forte_2015", "kia_forte_2016", "kia_forte_2017", "kia_forte_2019", "kia_forte_2020", "kia_k900_2015", "kia_k900_2016", "kia_k900_2017", "kia_k900_2019", "kia_niro_2017", "kia_niro_2019", "kia_niro_2020", "kia_optima_2011", "kia_optima_2012", "kia_optima_2013", "kia_optima_2014", "kia_optima_2015", "kia_optima_2016", "kia_optima_2017", "kia_optima_2019", "kia_rio_2011", "kia_rio_2012", "kia_rio_2013", "kia_rio_2014", "kia_rio_2015", "kia_rio_2016", "kia_rio_2017", "kia_rio_2019", "kia_rio_2020", "kia_sedona_2010", "kia_sedona_2011", "kia_sedona_2012", "kia_sedona_2014", "kia_sedona_2015", "kia_sedona_2016", "kia_sedona_2017", "kia_sedona_2019", "kia_sedona_2020", "kia_sorento_2011", "kia_sorento_2012", "kia_sorento_2013", "kia_sorento_2014", "kia_sorento_2015", "kia_sorento_2016", "kia_sorento_2017", "kia_sorento_2019", "kia_sorento_2020", "kia_soul ev_2015", "kia_soul ev_2016", "kia_soul ev_2017", "kia_soul_2011", "kia_soul_2012", "kia_soul_2013", "kia_soul_2014", "kia_soul_2015", "kia_soul_2016", "kia_soul_2017", "kia_soul_2019", "kia_soul_2020", "kia_sportage_2011", "kia_sportage_2012", "kia_sportage_2013", "kia_sportage_2014", "kia_sportage_2015", "kia_sportage_2016", "kia_sportage_2017", "kia_sportage_2019", "kia_sportage_2020", "kia_stinger_2019", "kia_stinger_2020", 
"kia_telluride_2020", "lamborghini_aventador_2013", "lamborghini_aventador_2014", "lamborghini_aventador_2015", "lamborghini_aventador_2016", "lamborghini_aventador_2019", "lamborghini_huracan_2015", "lamborghini_huracan_2016", "lamborghini_huracan_2017", "lamborghini_huracan_2019", "lamborghini_urus_2019", "land rover_defender_2020", "land rover_discovery sport_2016", "land rover_discovery sport_2017", "land rover_discovery sport_2019", "land rover_discovery sport_2020", "land rover_discovery_1999", "land rover_discovery_2000", "land rover_discovery_2001", "land rover_discovery_2002", "land rover_discovery_2003", "land rover_discovery_2004", "land rover_discovery_2017", "land rover_discovery_2019", "land rover_discovery_2020", "land rover_range rover evoque_2012", "land rover_range rover evoque_2013", "land rover_range rover evoque_2014", "land rover_range rover evoque_2015", "land rover_range rover evoque_2016", "land rover_range rover evoque_2019", "land rover_range rover evoque_2020", "land rover_range rover sport_2011", "land rover_range rover sport_2012", "land rover_range rover sport_2013", "land rover_range rover sport_2014", "land rover_range rover sport_2015", "land rover_range rover sport_2016", "land rover_range rover sport_2017", "land rover_range rover sport_2019", "land rover_range rover sport_2020", "land rover_range rover velar_2019", "land rover_range rover velar_2020", "land rover_range rover_2011", "land rover_range rover_2012", "land rover_range rover_2013", "land rover_range rover_2014", "land rover_range rover_2015", "land rover_range rover_2016", "land rover_range rover_2017", "land rover_range rover_2019", "land rover_range rover_2020", "lexus_es_2011", "lexus_es_2012", "lexus_es_2013", "lexus_es_2014", "lexus_es_2015", "lexus_es_2016", "lexus_es_2017", "lexus_es_2019", "lexus_es_2020", "lexus_gs_2010", "lexus_gs_2011", "lexus_gs_2013", "lexus_gs_2014", "lexus_gs_2015", "lexus_gs_2016", "lexus_gs_2017", "lexus_gs_2020", "lexus_gx_2011", "lexus_gx_2012", "lexus_gx_2013", "lexus_gx_2014", "lexus_gx_2015", "lexus_gx_2016", "lexus_gx_2017", "lexus_gx_2019", "lexus_gx_2020", "lexus_is_2011", "lexus_is_2012", "lexus_is_2013", "lexus_is_2014", "lexus_is_2015", "lexus_is_2016", "lexus_is_2017", "lexus_is_2019", "lexus_is_2020", "lexus_lc_2019", "lexus_ls_2011", "lexus_ls_2012", "lexus_ls_2013", "lexus_ls_2014", "lexus_ls_2015", "lexus_ls_2016", "lexus_ls_2017", "lexus_ls_2019", "lexus_ls_2020", "lexus_lx_2010", "lexus_lx_2011", "lexus_lx_2013", "lexus_lx_2014", "lexus_lx_2015", "lexus_lx_2016", "lexus_lx_2017", "lexus_lx_2019", "lexus_nx_2015", "lexus_nx_2016", "lexus_nx_2017", "lexus_nx_2019", "lexus_nx_2020", "lexus_rc_2015", "lexus_rc_2016", "lexus_rc_2017", "lexus_rc_2019", "lexus_rc_2020", "lexus_rx_2011", "lexus_rx_2012", "lexus_rx_2013", "lexus_rx_2014", "lexus_rx_2015", "lexus_rx_2016", "lexus_rx_2017", "lexus_rx_2019", "lexus_rx_2020", "lexus_ux_2019", "lexus_ux_2020", "lincoln_aviator_2003", "lincoln_aviator_2004", "lincoln_aviator_2005", "lincoln_aviator_2019", "lincoln_aviator_2020", "lincoln_continental_1997", "lincoln_continental_1998", "lincoln_continental_1999", "lincoln_continental_2000", "lincoln_continental_2001", "lincoln_continental_2002", "lincoln_continental_2017", "lincoln_continental_2019", "lincoln_continental_2020", "lincoln_corsair_2020", "lincoln_mkc_2015", "lincoln_mkc_2016", "lincoln_mkc_2017", "lincoln_mkc_2019", "lincoln_mkt_2010", "lincoln_mkt_2011", "lincoln_mkt_2012", "lincoln_mkt_2013", "lincoln_mkt_2014", "lincoln_mkt_2015", 
"lincoln_mkt_2016", "lincoln_mkt_2017", "lincoln_mkt_2019", "lincoln_mkz_2011", "lincoln_mkz_2012", "lincoln_mkz_2013", "lincoln_mkz_2014", "lincoln_mkz_2015", "lincoln_mkz_2016", "lincoln_mkz_2017", "lincoln_mkz_2019", "lincoln_mkz_2020", "lincoln_nautilus_2019", "lincoln_nautilus_2020", "lincoln_navigator_2011", "lincoln_navigator_2012", "lincoln_navigator_2013", "lincoln_navigator_2014", "lincoln_navigator_2015", "lincoln_navigator_2016", "lincoln_navigator_2017", "lincoln_navigator_2019", "lincoln_navigator_2020", "mini_clubman_2010", "mini_clubman_2011", "mini_clubman_2012", "mini_clubman_2013", "mini_clubman_2014", "mini_clubman_2016", "mini_clubman_2017", "mini_clubman_2019", "mini_clubman_2020", "mini_cooper countryman_2011", "mini_cooper countryman_2012", "mini_cooper countryman_2013", "mini_cooper countryman_2014", "mini_cooper countryman_2015", "mini_cooper countryman_2016", "mini_cooper countryman_2017", "mini_cooper countryman_2019", "mini_cooper countryman_2020", "mini_cooper_2011", "mini_cooper_2012", "mini_cooper_2013", "mini_cooper_2014", "mini_cooper_2015", "mini_cooper_2016", "mini_cooper_2017", "mini_cooper_2019", "mini_cooper_2020", "maserati_ghibli_2014", "maserati_ghibli_2015", "maserati_ghibli_2016", "maserati_ghibli_2020", "maserati_granturismo_2010", "maserati_granturismo_2011", "maserati_granturismo_2012", "maserati_granturismo_2013", "maserati_granturismo_2014", "maserati_granturismo_2015", "maserati_granturismo_2016", "maserati_granturismo_2017", "maserati_granturismo_2019", "maserati_levante_2013", "maserati_levante_2017", "maserati_levante_2019", "maserati_levante_2020", "maserati_quattroporte_2011", "maserati_quattroporte_2012", "maserati_quattroporte_2013", "maserati_quattroporte_2014", "maserati_quattroporte_2015", "maserati_quattroporte_2016", "maserati_quattroporte_2017", "maserati_quattroporte_2019", "mazda_cx-30_2020", "mazda_cx-3_2016", "mazda_cx-3_2017", "mazda_cx-3_2019", "mazda_cx-3_2020", "mazda_cx-5_2013", "mazda_cx-5_2014", "mazda_cx-5_2015", "mazda_cx-5_2016", "mazda_cx-5_2017", "mazda_cx-5_2019", "mazda_cx-9_2011", "mazda_cx-9_2012", "mazda_cx-9_2013", "mazda_cx-9_2014", "mazda_cx-9_2015", "mazda_cx-9_2016", "mazda_cx-9_2017", "mazda_cx-9_2019", "mazda_cx-9_2020", "mazda_mazda3_2011", "mazda_mazda3_2012", "mazda_mazda3_2013", "mazda_mazda3_2014", "mazda_mazda3_2015", "mazda_mazda3_2016", "mazda_mazda3_2017", "mazda_mazda3_2019", "mazda_mazda3_2020", "mazda_mazda6_2011", "mazda_mazda6_2012", "mazda_mazda6_2013", "mazda_mazda6_2014", "mazda_mazda6_2015", "mazda_mazda6_2016", "mazda_mazda6_2017", "mazda_mazda6_2019", "mazda_mx-5 miata_2010", "mazda_mx-5 miata_2011", "mazda_mx-5 miata_2012", "mazda_mx-5 miata_2013", "mazda_mx-5 miata_2014", "mazda_mx-5 miata_2015", "mazda_mx-5 miata_2016", "mazda_mx-5 miata_2017", "mazda_mx-5 miata_2019", "mazda_mazda3 hatchback_2020", "mclaren_570gt_2017", "mclaren_570s_2016", "mclaren_570s_2017", "mclaren_570s_2019", "mclaren_720s_2017", "mclaren_720s_2020", "mercedes-benz_a class_2019", "mercedes-benz_a class_2020", "mercedes-benz_amg gt_2016", "mercedes-benz_amg gt_2017", "mercedes-benz_amg gt_2019", "mercedes-benz_amg gt_2020", "mercedes-benz_c class_2011", "mercedes-benz_c class_2012", "mercedes-benz_c class_2013", "mercedes-benz_c class_2014", "mercedes-benz_c class_2015", "mercedes-benz_c class_2016", "mercedes-benz_c class_2017", "mercedes-benz_c class_2019", "mercedes-benz_c class_2020", "mercedes-benz_cla class_2014", "mercedes-benz_cla class_2015", "mercedes-benz_cla class_2016", "mercedes-benz_cla 
class_2017", "mercedes-benz_cla class_2019", "mercedes-benz_cla class_2020", "mercedes-benz_cls class_2011", "mercedes-benz_cls class_2012", "mercedes-benz_cls class_2013", "mercedes-benz_cls class_2014", "mercedes-benz_cls class_2015", "mercedes-benz_cls class_2016", "mercedes-benz_cls class_2017", "mercedes-benz_cls class_2019", "mercedes-benz_cls class_2020", "mercedes-benz_e class_2011", "mercedes-benz_e class_2012", "mercedes-benz_e class_2013", "mercedes-benz_e class_2014", "mercedes-benz_e class_2015", "mercedes-benz_e class_2016", "mercedes-benz_e class_2017", "mercedes-benz_e class_2019", "mercedes-benz_e class_2020", "mercedes-benz_eqc_2020", "mercedes-benz_g class_2011", "mercedes-benz_g class_2012", "mercedes-benz_g class_2013", "mercedes-benz_g class_2014", "mercedes-benz_g class_2015", "mercedes-benz_g class_2016", "mercedes-benz_g class_2017", "mercedes-benz_g class_2019", "mercedes-benz_g class_2020", "mercedes-benz_gla class_2015", "mercedes-benz_gla class_2016", "mercedes-benz_gla class_2017", "mercedes-benz_gla class_2019", "mercedes-benz_gla class_2020", "mercedes-benz_glb class_2020", "mercedes-benz_glc class_2016", "mercedes-benz_glc class_2017", "mercedes-benz_glc class_2019", "mercedes-benz_glc class_2020", "mercedes-benz_gle class_2016", "mercedes-benz_gle class_2017", "mercedes-benz_gle class_2019", "mercedes-benz_gle class_2020", "mercedes-benz_gls class_2017", "mercedes-benz_gls class_2019", "mercedes-benz_gls class_2020", "mercedes-benz_metris_2016", "mercedes-benz_metris_2017", "mercedes-benz_metris_2019", "mercedes-benz_metris_2020", "mercedes-benz_s class_2011", "mercedes-benz_s class_2012", "mercedes-benz_s class_2013", "mercedes-benz_s class_2014", "mercedes-benz_s class_2015", "mercedes-benz_s class_2016", "mercedes-benz_s class_2017", "mercedes-benz_s class_2019", "mercedes-benz_s class_2020", "mercedes-benz_sl class_2011", "mercedes-benz_sl class_2012", "mercedes-benz_sl class_2013", "mercedes-benz_sl class_2014", "mercedes-benz_sl class_2015", "mercedes-benz_sl class_2016", "mercedes-benz_sl class_2017", "mercedes-benz_sl class_2019", "mercedes-benz_sl class_2020", "mercedes-benz_slc class_2017", "mercedes-benz_slc class_2019", "mercedes-benz_slc class_2020", "mitsubishi_eclipse cross_2019", "mitsubishi_eclipse cross_2020", "mitsubishi_mirage_1999", "mitsubishi_mirage_2000", "mitsubishi_mirage_2001", "mitsubishi_mirage_2002", "mitsubishi_mirage_2014", "mitsubishi_mirage_2015", "mitsubishi_mirage_2017", "mitsubishi_mirage_2019", "mitsubishi_outlander sport_2011", "mitsubishi_outlander sport_2012", "mitsubishi_outlander sport_2013", "mitsubishi_outlander sport_2014", "mitsubishi_outlander sport_2015", "mitsubishi_outlander sport_2016", "mitsubishi_outlander sport_2017", "mitsubishi_outlander sport_2019", "mitsubishi_outlander sport_2020", "mitsubishi_outlander_2011", "mitsubishi_outlander_2012", "mitsubishi_outlander_2013", "mitsubishi_outlander_2014", "mitsubishi_outlander_2015", "mitsubishi_outlander_2016", "mitsubishi_outlander_2017", "mitsubishi_outlander_2019", "mitsubishi_outlander_2020", "nissan_370z_2011", "nissan_370z_2012", "nissan_370z_2013", "nissan_370z_2014", "nissan_370z_2015", "nissan_370z_2016", "nissan_370z_2017", "nissan_370z_2019", "nissan_370z_2020", "nissan_altima_2011", "nissan_altima_2012", "nissan_altima_2013", "nissan_altima_2014", "nissan_altima_2015", "nissan_altima_2016", "nissan_altima_2017", "nissan_altima_2019", "nissan_altima_2020", "nissan_armada_2010", "nissan_armada_2011", "nissan_armada_2012", "nissan_armada_2013", 
"nissan_armada_2014", "nissan_armada_2015", "nissan_armada_2017", "nissan_armada_2019", "nissan_armada_2020", "nissan_frontier_2010", "nissan_frontier_2011", "nissan_frontier_2012", "nissan_frontier_2013", "nissan_frontier_2014", "nissan_frontier_2015", "nissan_frontier_2016", "nissan_frontier_2017", "nissan_frontier_2019", "nissan_gt-r_2011", "nissan_gt-r_2012", "nissan_gt-r_2013", "nissan_gt-r_2014", "nissan_gt-r_2015", "nissan_gt-r_2016", "nissan_gt-r_2017", "nissan_gt-r_2019", "nissan_gt-r_2020", "nissan_kicks_2019", "nissan_kicks_2020", "nissan_leaf_2011", "nissan_leaf_2012", "nissan_leaf_2013", "nissan_leaf_2014", "nissan_leaf_2015", "nissan_leaf_2016", "nissan_leaf_2017", "nissan_leaf_2019", "nissan_maxima_2010", "nissan_maxima_2011", "nissan_maxima_2012", "nissan_maxima_2013", "nissan_maxima_2014", "nissan_maxima_2016", "nissan_maxima_2017", "nissan_maxima_2019", "nissan_murano_2011", "nissan_murano_2012", "nissan_murano_2013", "nissan_murano_2014", "nissan_murano_2015", "nissan_murano_2016", "nissan_murano_2017", "nissan_murano_2019", "nissan_murano_2020", "nissan_nv200_2013", "nissan_nv200_2014", "nissan_nv200_2015", "nissan_nv200_2016", "nissan_nv200_2017", "nissan_nv200_2019", "nissan_nv200_2020", "nissan_pathfinder_2011", "nissan_pathfinder_2012", "nissan_pathfinder_2013", "nissan_pathfinder_2014", "nissan_pathfinder_2015", "nissan_pathfinder_2016", "nissan_pathfinder_2017", "nissan_pathfinder_2019", "nissan_pathfinder_2020", "nissan_rogue sport_2017", "nissan_rogue sport_2019", "nissan_rogue sport_2020", "nissan_rogue_2011", "nissan_rogue_2012", "nissan_rogue_2013", "nissan_rogue_2014", "nissan_rogue_2015", "nissan_rogue_2016", "nissan_rogue_2017", "nissan_rogue_2019", "nissan_rogue_2020", "nissan_sentra_2011", "nissan_sentra_2012", "nissan_sentra_2013", "nissan_sentra_2014", "nissan_sentra_2015", "nissan_sentra_2016", "nissan_sentra_2017", "nissan_sentra_2019", "nissan_sentra_2020", "nissan_titan_2011", "nissan_titan_2012", "nissan_titan_2013", "nissan_titan_2014", "nissan_titan_2015", "nissan_titan_2016", "nissan_titan_2017", "nissan_titan_2019", "nissan_titan_2020", "nissan_versa_2011", "nissan_versa_2012", "nissan_versa_2013", "nissan_versa_2014", "nissan_versa_2015", "nissan_versa_2016", "nissan_versa_2017", "nissan_versa_2019", "nissan_versa_2020", "porsche_718 spyder_2020", "porsche_718_2017", "porsche_718_2019", "porsche_718_2020", "porsche_911_2011", "porsche_911_2012", "porsche_911_2013", "porsche_911_2014", "porsche_911_2015", "porsche_911_2016", "porsche_911_2017", "porsche_911_2019", "porsche_911_2020", "porsche_cayenne_2011", "porsche_cayenne_2012", "porsche_cayenne_2013", "porsche_cayenne_2014", "porsche_cayenne_2015", "porsche_cayenne_2016", "porsche_cayenne_2017", "porsche_cayenne_2019", "porsche_cayenne_2020", "porsche_macan_2015", "porsche_macan_2016", "porsche_macan_2017", "porsche_macan_2019", "porsche_panamera_2011", "porsche_panamera_2012", "porsche_panamera_2013", "porsche_panamera_2014", "porsche_panamera_2015", "porsche_panamera_2016", "porsche_panamera_2017", "porsche_panamera_2019", "porsche_panamera_2020", "porsche_taycan_2020", "ram_1500_2011", "ram_1500_2012", "ram_1500_2013", "ram_1500_2014", "ram_1500_2015", "ram_1500_2016", "ram_1500_2017", "ram_1500_2019", "ram_1500_2020", "ram_2500_2011", "ram_2500_2012", "ram_2500_2013", "ram_2500_2014", "ram_2500_2015", "ram_2500_2016", "ram_2500_2017", "ram_2500_2019", "ram_2500_2020", "rolls-royce_cullinan_2020", "rolls-royce_dawn_2016", "rolls-royce_dawn_2017", "rolls-royce_dawn_2020", 
"rolls-royce_ghost_2011", "rolls-royce_ghost_2012", "rolls-royce_ghost_2014", "rolls-royce_ghost_2015", "rolls-royce_ghost_2016", "rolls-royce_ghost_2017", "rolls-royce_ghost_2019", "rolls-royce_phantom_2011", "rolls-royce_phantom_2012", "rolls-royce_phantom_2014", "rolls-royce_phantom_2015", "rolls-royce_phantom_2017", "rolls-royce_phantom_2019", "rolls-royce_phantom_2020", "rolls-royce_wraith_2014", "rolls-royce_wraith_2015", "rolls-royce_wraith_2016", "rolls-royce_wraith_2017", "rolls-royce_wraith_2020", "subaru_ascent_2019", "subaru_ascent_2020", "subaru_brz_2013", "subaru_brz_2014", "subaru_brz_2015", "subaru_brz_2016", "subaru_brz_2017", "subaru_brz_2019", "subaru_brz_2020", "subaru_crosstrek_2013", "subaru_crosstrek_2014", "subaru_crosstrek_2015", "subaru_crosstrek_2016", "subaru_crosstrek_2017", "subaru_crosstrek_2019", "subaru_crosstrek_2020", "subaru_forester_2011", "subaru_forester_2013", "subaru_forester_2014", "subaru_forester_2015", "subaru_forester_2016", "subaru_forester_2017", "subaru_forester_2019", "subaru_forester_2020", "subaru_impreza_2011", "subaru_impreza_2012", "subaru_impreza_2013", "subaru_impreza_2014", "subaru_impreza_2015", "subaru_impreza_2016", "subaru_impreza_2017", "subaru_impreza_2019", "subaru_impreza_2020", "subaru_legacy_2011", "subaru_legacy_2012", "subaru_legacy_2013", "subaru_legacy_2014", "subaru_legacy_2015", "subaru_legacy_2016", "subaru_legacy_2017", "subaru_legacy_2019", "subaru_legacy_2020", "subaru_outback_2011", "subaru_outback_2012", "subaru_outback_2013", "subaru_outback_2014", "subaru_outback_2015", "subaru_outback_2016", "subaru_outback_2017", "subaru_outback_2019", "subaru_outback_2020", "subaru_wrx_2011", "subaru_wrx_2012", "subaru_wrx_2013", "subaru_wrx_2014", "subaru_wrx_2015", "subaru_wrx_2016", "subaru_wrx_2017", "subaru_wrx_2019", "subaru_wrx_2020", "tesla_model 3_2017", "tesla_model 3_2019", "tesla_model s_2012", "tesla_model s_2013", "tesla_model s_2014", "tesla_model s_2015", "tesla_model s_2016", "tesla_model s_2017", "tesla_model s_2019", "tesla_model x_2016", "tesla_model x_2017", "tesla_model x_2019", "tesla_model y_2020", "toyota_4runner_2011", "toyota_4runner_2012", "toyota_4runner_2013", "toyota_4runner_2014", "toyota_4runner_2015", "toyota_4runner_2016", "toyota_4runner_2017", "toyota_4runner_2019", "toyota_4runner_2020", "toyota_86_2017", "toyota_86_2019", "toyota_avalon_2011", "toyota_avalon_2012", "toyota_avalon_2013", "toyota_avalon_2014", "toyota_avalon_2015", "toyota_avalon_2016", "toyota_avalon_2017", "toyota_avalon_2019", "toyota_c-hr_2019", "toyota_c-hr_2020", "toyota_camry_2011", "toyota_camry_2012", "toyota_camry_2013", "toyota_camry_2014", "toyota_camry_2015", "toyota_camry_2016", "toyota_camry_2017", "toyota_camry_2019", "toyota_camry_2020", "toyota_corolla_2011", "toyota_corolla_2012", "toyota_corolla_2013", "toyota_corolla_2014", "toyota_corolla_2015", "toyota_corolla_2016", "toyota_corolla_2017", "toyota_corolla_2019", "toyota_corolla_2020", "toyota_highlander_2011", "toyota_highlander_2012", "toyota_highlander_2013", "toyota_highlander_2014", "toyota_highlander_2015", "toyota_highlander_2016", "toyota_highlander_2017", "toyota_highlander_2019", "toyota_highlander_2020", "toyota_land cruiser_2010", "toyota_land cruiser_2011", "toyota_land cruiser_2013", "toyota_land cruiser_2014", "toyota_land cruiser_2015", "toyota_land cruiser_2016", "toyota_land cruiser_2017", "toyota_land cruiser_2019", "toyota_land cruiser_2020", "toyota_mirai_2016", "toyota_mirai_2017", "toyota_mirai_2019", "toyota_prius c_2012", 
"toyota_prius c_2013", "toyota_prius c_2014", "toyota_prius c_2015", "toyota_prius c_2016", "toyota_prius c_2017", "toyota_prius c_2019", "toyota_prius_2011", "toyota_prius_2012", "toyota_prius_2013", "toyota_prius_2014", "toyota_prius_2015", "toyota_prius_2016", "toyota_prius_2017", "toyota_prius_2019", "toyota_prius_2020", "toyota_rav4_2011", "toyota_rav4_2012", "toyota_rav4_2013", "toyota_rav4_2014", "toyota_rav4_2015", "toyota_rav4_2016", "toyota_rav4_2017", "toyota_rav4_2019", "toyota_rav4_2020", "toyota_sequoia_2011", "toyota_sequoia_2012", "toyota_sequoia_2013", "toyota_sequoia_2014", "toyota_sequoia_2015", "toyota_sequoia_2016", "toyota_sequoia_2017", "toyota_sequoia_2019", "toyota_sequoia_2020", "toyota_sienna_2011", "toyota_sienna_2012", "toyota_sienna_2013", "toyota_sienna_2014", "toyota_sienna_2015", "toyota_sienna_2016", "toyota_sienna_2017", "toyota_sienna_2019", "toyota_sienna_2020", "toyota_supra_1990", "toyota_supra_1994", "toyota_supra_1997", "toyota_supra_2020", "toyota_tacoma_2011", "toyota_tacoma_2012", "toyota_tacoma_2013", "toyota_tacoma_2014", "toyota_tacoma_2015", "toyota_tacoma_2016", "toyota_tacoma_2017", "toyota_tacoma_2019", "toyota_tacoma_2020", "toyota_tundra_2011", "toyota_tundra_2012", "toyota_tundra_2013", "toyota_tundra_2014", "toyota_tundra_2015", "toyota_tundra_2016", "toyota_tundra_2017", "toyota_tundra_2019", "toyota_yaris hatchback_2020", "toyota_yaris_2011", "toyota_yaris_2012", "toyota_yaris_2013", "toyota_yaris_2014", "toyota_yaris_2015", "toyota_yaris_2016", "toyota_yaris_2017", "toyota_yaris_2019", "toyota_yaris_2020", "volkswagen_arteon_2019", "volkswagen_arteon_2020", "volkswagen_atlas_2019", "volkswagen_atlas_2020", "volkswagen_beetle_2010", "volkswagen_beetle_2011", "volkswagen_beetle_2012", "volkswagen_beetle_2013", "volkswagen_beetle_2014", "volkswagen_beetle_2015", "volkswagen_beetle_2016", "volkswagen_beetle_2017", "volkswagen_beetle_2019", "volkswagen_golf_2011", "volkswagen_golf_2012", "volkswagen_golf_2013", "volkswagen_golf_2014", "volkswagen_golf_2015", "volkswagen_golf_2016", "volkswagen_golf_2017", "volkswagen_golf_2019", "volkswagen_golf_2020", "volkswagen_jetta_2011", "volkswagen_jetta_2012", "volkswagen_jetta_2013", "volkswagen_jetta_2014", "volkswagen_jetta_2015", "volkswagen_jetta_2016", "volkswagen_jetta_2017", "volkswagen_jetta_2019", "volkswagen_jetta_2020", "volkswagen_passat_2010", "volkswagen_passat_2012", "volkswagen_passat_2013", "volkswagen_passat_2014", "volkswagen_passat_2015", "volkswagen_passat_2016", "volkswagen_passat_2017", "volkswagen_passat_2019", "volkswagen_passat_2020", "volkswagen_tiguan_2011", "volkswagen_tiguan_2012", "volkswagen_tiguan_2013", "volkswagen_tiguan_2014", "volkswagen_tiguan_2015", "volkswagen_tiguan_2016", "volkswagen_tiguan_2017", "volkswagen_tiguan_2019", "volkswagen_tiguan_2020", "volkswagen_e-golf_2015", "volkswagen_e-golf_2016", "volkswagen_e-golf_2017", "volkswagen_e-golf_2019", "volvo_s60_2011", "volvo_s60_2012", "volvo_s60_2013", "volvo_s60_2014", "volvo_s60_2015", "volvo_s60_2016", "volvo_s60_2017", "volvo_s60_2019", "volvo_s60_2020", "volvo_s90_1998", "volvo_s90_2017", "volvo_s90_2019", "volvo_s90_2020", "volvo_v60_2015", "volvo_v60_2016", "volvo_v60_2017", "volvo_v60_2019", "volvo_v60_2020", "volvo_v90_1998", "volvo_v90_2017", "volvo_v90_2019", "volvo_v90_2020", "volvo_xc40_2019", "volvo_xc40_2020", "volvo_xc60_2011", "volvo_xc60_2012", "volvo_xc60_2013", "volvo_xc60_2014", "volvo_xc60_2015", "volvo_xc60_2016", "volvo_xc60_2017", "volvo_xc60_2019", "volvo_xc60_2020", 
"volvo_xc90_2010", "volvo_xc90_2011", "volvo_xc90_2012", "volvo_xc90_2013", "volvo_xc90_2014", "volvo_xc90_2016", "volvo_xc90_2017", "volvo_xc90_2019", "volvo_xc90_2020", "smart_fortwo_2010", "smart_fortwo_2011", "smart_fortwo_2012", "smart_fortwo_2013", "smart_fortwo_2014", "smart_fortwo_2015", "smart_fortwo_2016", "smart_fortwo_2017", "smart_fortwo_2019" ]
Kur-der130/salida_1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # salida_1 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0595 - Accuracy: 0.9850 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.1332 | 3.85 | 500 | 0.0595 | 0.9850 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.2+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "angular_leaf_spot", "bean_rust", "healthy" ]
TtT609/food_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. --> # TtT609/food_classifier This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Train Loss: 2.7456 - Validation Loss: 1.5988 - Train Accuracy: 0.839 - Epoch: 0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 4000, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01} - training_precision: float32 ### Training results | Train Loss | Validation Loss | Train Accuracy | Epoch | |:----------:|:---------------:|:--------------:|:-----:| | 2.7456 | 1.5988 | 0.839 | 0 | ### Framework versions - Transformers 4.41.2 - TensorFlow 2.15.0 - Datasets 2.19.2 - Tokenizers 0.19.1
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
mohdadeeb/DR-ViT
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. -->

# DR-ViT

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset.
It achieves the following results on the evaluation set:
- Train Loss: 0.7068
- Train Accuracy: 0.7214
- Train Top-3-accuracy: 0.9677
- Validation Loss: 0.6596
- Validation Accuracy: 0.7345
- Validation Top-3-accuracy: 0.9782
- Epoch: 1

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 4400, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01}
- training_precision: mixed_float16

### Training results

| Train Loss | Train Accuracy | Train Top-3-accuracy | Validation Loss | Validation Accuracy | Validation Top-3-accuracy | Epoch |
|:----------:|:--------------:|:--------------------:|:---------------:|:-------------------:|:-------------------------:|:-----:|
| 0.8883     | 0.6645         | 0.9255               | 0.7075          | 0.7200              | 0.9655                    | 0     |
| 0.7068     | 0.7214         | 0.9677               | 0.6596          | 0.7345              | 0.9782                    | 1     |

### Framework versions

- Transformers 4.35.2
- TensorFlow 2.15.0
- Datasets 2.16.1
- Tokenizers 0.15.0
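The top-3 accuracy reported above counts a prediction as correct when the true class is among the model's three highest-scoring outputs. A toy Keras sketch of that metric (illustrative scores, not real model outputs; five classes matching the label set below):

```python
import tensorflow as tf

top3 = tf.keras.metrics.TopKCategoricalAccuracy(k=3, name="top-3-accuracy")

# Two samples over 5 classes: the first true class (1) is ranked first,
# the second true class (4) falls outside the top 3.
y_true = tf.one_hot([1, 4], depth=5)
y_pred = tf.constant([[0.1, 0.5, 0.2, 0.1, 0.1],
                      [0.3, 0.2, 0.3, 0.1, 0.1]])
top3.update_state(y_true, y_pred)
print(top3.result().numpy())  # 0.5
```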
[ "proliferate dr", "healthy", "mild dr", "severe dr", "moderate dr" ]
JPeace18/CarIdentifier2
# Model Trained Using AutoTrain

- Problem type: Image Classification

## Validation Metrics

- loss: 5.987339019775391
- f1_macro: 0.014272821191130344
- f1_micro: 0.049443212901385813
- f1_weighted: 0.024064270485240995
- precision_macro: 0.021826631000654732
- precision_micro: 0.049443212901385813
- precision_weighted: 0.03411589816873522
- recall_macro: 0.02794951571811764
- recall_micro: 0.049443212901385813
- recall_weighted: 0.049443212901385813
- accuracy: 0.049443212901385813
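Note that on a single-label multiclass problem, micro-averaged F1 equals accuracy, which is why `f1_micro`, `precision_micro`, `recall_micro`, and `accuracy` above are identical; macro averaging weights all classes equally, so the many poorly predicted classes drag it down. A toy sklearn sketch of the difference (labels are illustrative, not the real data):

```python
from sklearn.metrics import f1_score

y_true = [0, 0, 0, 1, 2]
y_pred = [0, 0, 0, 0, 0]  # a degenerate classifier that always predicts 0

print(f1_score(y_true, y_pred, average="micro", zero_division=0))     # 0.6 (== accuracy)
print(f1_score(y_true, y_pred, average="macro", zero_division=0))     # 0.25
print(f1_score(y_true, y_pred, average="weighted", zero_division=0))  # 0.45
```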
[ "acura_ilx_2013", "acura_ilx_2014", "acura_ilx_2015", "acura_ilx_2016", "acura_ilx_2017", "acura_ilx_2019", "acura_ilx_2020", "acura_mdx_2011", "acura_mdx_2012", "acura_mdx_2013", "acura_mdx_2014", "acura_mdx_2016", "acura_mdx_2017", "acura_mdx_2019", "acura_mdx_2020", "acura_nsx_2001", "acura_nsx_2002", "acura_nsx_2003", "acura_nsx_2004", "acura_nsx_2005", "acura_nsx_2016", "acura_nsx_2017", "acura_nsx_2019", "acura_nsx_2020", "acura_rdx_2011", "acura_rdx_2012", "acura_rdx_2013", "acura_rdx_2014", "acura_rdx_2015", "acura_rdx_2016", "acura_rdx_2017", "acura_rdx_2019", "acura_rdx_2020", "acura_rlx_2014", "acura_rlx_2015", "acura_rlx_2016", "acura_rlx_2017", "acura_rlx_2019", "acura_rlx_2020", "acura_tlx_2015", "acura_tlx_2016", "acura_tlx_2017", "acura_tlx_2019", "acura_tlx_2020", "alfa romeo_4c spider_2020", "alfa romeo_4c_2015", "alfa romeo_4c_2016", "alfa romeo_4c_2017", "alfa romeo_4c_2019", "alfa romeo_giulia_2011", "alfa romeo_giulia_2017", "alfa romeo_giulia_2020", "alfa romeo_stelvio_2019", "alfa romeo_stelvio_2020", "aston martin_db11_2017", "aston martin_db11_2019", "aston martin_dbs_2008", "aston martin_dbs_2009", "aston martin_dbs_2010", "aston martin_dbs_2011", "aston martin_dbs_2012", "aston martin_dbs_2019", "aston martin_vanquish_2003", "aston martin_vanquish_2004", "aston martin_vanquish_2005", "aston martin_vanquish_2006", "aston martin_vanquish_2014", "aston martin_vanquish_2015", "aston martin_vanquish_2016", "aston martin_vanquish_2017", "aston martin_vanquish_2019", "aston martin_vantage_2011", "aston martin_vantage_2012", "aston martin_vantage_2013", "aston martin_vantage_2014", "aston martin_vantage_2015", "aston martin_vantage_2016", "aston martin_vantage_2017", "aston martin_vantage_2019", "audi_a3_2010", "audi_a3_2011", "audi_a3_2012", "audi_a3_2013", "audi_a3_2015", "audi_a3_2016", "audi_a3_2017", "audi_a3_2019", "audi_a3_2020", "audi_a4_2011", "audi_a4_2012", "audi_a4_2013", "audi_a4_2014", "audi_a4_2015", "audi_a4_2016", "audi_a4_2017", "audi_a4_2019", "audi_a5_2010", "audi_a5_2011", "audi_a5_2012", "audi_a5_2013", "audi_a5_2014", "audi_a5_2015", "audi_a5_2016", "audi_a5_2017", "audi_a5_2019", "audi_a6_2011", "audi_a6_2012", "audi_a6_2013", "audi_a6_2014", "audi_a6_2015", "audi_a6_2016", "audi_a6_2017", "audi_a6_2019", "audi_a6_2020", "audi_a7_2012", "audi_a7_2013", "audi_a7_2014", "audi_a7_2015", "audi_a7_2016", "audi_a7_2017", "audi_a7_2019", "audi_a7_2020", "audi_a8_2011", "audi_a8_2012", "audi_a8_2013", "audi_a8_2014", "audi_a8_2015", "audi_a8_2016", "audi_a8_2017", "audi_a8_2019", "audi_a8_2020", "audi_q3_2015", "audi_q3_2016", "audi_q3_2017", "audi_q3_2019", "audi_q3_2020", "audi_q5_2011", "audi_q5_2012", "audi_q5_2013", "audi_q5_2014", "audi_q5_2015", "audi_q5_2016", "audi_q5_2017", "audi_q5_2019", "audi_q5_2020", "audi_q7_2010", "audi_q7_2011", "audi_q7_2012", "audi_q7_2013", "audi_q7_2014", "audi_q7_2015", "audi_q7_2017", "audi_q7_2019", "audi_q7_2020", "audi_q8_2019", "audi_q8_2020", "audi_r8_2010", "audi_r8_2011", "audi_r8_2012", "audi_r8_2014", "audi_r8_2015", "audi_r8_2017", "audi_r8_2020", "audi_tt_2011", "audi_tt_2012", "audi_tt_2013", "audi_tt_2014", "audi_tt_2015", "audi_tt_2016", "audi_tt_2017", "audi_tt_2019", "audi_tt_2020", "audi_e-tron_2019", "bmw_2-series_2014", "bmw_2-series_2015", "bmw_2-series_2016", "bmw_2-series_2017", "bmw_2-series_2019", "bmw_2-series_2020", "bmw_3-series_2011", "bmw_3-series_2012", "bmw_3-series_2013", "bmw_3-series_2014", "bmw_3-series_2015", "bmw_3-series_2016", "bmw_3-series_2017", "bmw_3-series_2019", 
"bmw_3-series_2020", "bmw_4-series_2014", "bmw_4-series_2015", "bmw_4-series_2016", "bmw_4-series_2017", "bmw_4-series_2019", "bmw_4-series_2020", "bmw_5-series_2011", "bmw_5-series_2012", "bmw_5-series_2013", "bmw_5-series_2014", "bmw_5-series_2015", "bmw_5-series_2016", "bmw_5-series_2017", "bmw_5-series_2019", "bmw_5-series_2020", "bmw_6-series_2010", "bmw_6-series_2011", "bmw_6-series_2012", "bmw_6-series_2013", "bmw_6-series_2014", "bmw_6-series_2015", "bmw_6-series_2016", "bmw_6-series_2017", "bmw_6-series_2019", "bmw_7-series_2011", "bmw_7-series_2012", "bmw_7-series_2013", "bmw_7-series_2014", "bmw_7-series_2015", "bmw_7-series_2016", "bmw_7-series_2017", "bmw_7-series_2019", "bmw_7-series_2020", "bmw_8-series_1996", "bmw_8-series_2019", "bmw_8-series_2020", "bmw_x1_2013", "bmw_x1_2014", "bmw_x1_2015", "bmw_x1_2016", "bmw_x1_2017", "bmw_x1_2019", "bmw_x1_2020", "bmw_x2_2020", "bmw_x3_2011", "bmw_x3_2012", "bmw_x3_2013", "bmw_x3_2014", "bmw_x3_2015", "bmw_x3_2016", "bmw_x3_2017", "bmw_x3_2019", "bmw_x3_2020", "bmw_x4_2015", "bmw_x4_2016", "bmw_x4_2017", "bmw_x4_2019", "bmw_x4_2020", "bmw_x5_2011", "bmw_x5_2012", "bmw_x5_2013", "bmw_x5_2014", "bmw_x5_2015", "bmw_x5_2016", "bmw_x5_2017", "bmw_x5_2019", "bmw_x5_2020", "bmw_x6_2011", "bmw_x6_2012", "bmw_x6_2013", "bmw_x6_2014", "bmw_x6_2015", "bmw_x6_2016", "bmw_x6_2017", "bmw_x6_2019", "bmw_x6_2020", "bmw_x7_2019", "bmw_x7_2020", "bmw_z4_2009", "bmw_z4_2010", "bmw_z4_2011", "bmw_z4_2012", "bmw_z4_2014", "bmw_z4_2015", "bmw_z4_2019", "bmw_z4_2020", "bmw_i3_2014", "bmw_i3_2015", "bmw_i3_2016", "bmw_i3_2017", "bmw_i3_2019", "bmw_i8_2014", "bmw_i8_2015", "bmw_i8_2016", "bmw_i8_2020", "bentley_bentayga_2017", "bentley_bentayga_2020", "bentley_continental gt_2011", "bentley_continental gt_2012", "bentley_continental gt_2013", "bentley_continental gt_2014", "bentley_continental gt_2015", "bentley_continental gt_2016", "bentley_continental gt_2017", "bentley_continental gt_2019", "bentley_continental gt_2020", "bentley_flying spur_2011", "bentley_flying spur_2012", "bentley_flying spur_2013", "bentley_flying spur_2014", "bentley_flying spur_2015", "bentley_flying spur_2016", "bentley_flying spur_2017", "bentley_flying spur_2019", "bentley_flying spur_2020", "bentley_mulsanne_2011", "bentley_mulsanne_2012", "bentley_mulsanne_2014", "bentley_mulsanne_2015", "bentley_mulsanne_2016", "bentley_mulsanne_2020", "buick_cascada_2016", "buick_cascada_2017", "buick_cascada_2019", "buick_enclave_2011", "buick_enclave_2012", "buick_enclave_2013", "buick_enclave_2014", "buick_enclave_2015", "buick_enclave_2016", "buick_enclave_2017", "buick_enclave_2019", "buick_enclave_2020", "buick_encore_2013", "buick_encore_2014", "buick_encore_2015", "buick_encore_2016", "buick_encore_2017", "buick_encore_2019", "buick_encore_2020", "buick_envision_2016", "buick_envision_2017", "buick_envision_2019", "buick_envision_2020", "buick_lacrosse_2010", "buick_lacrosse_2011", "buick_lacrosse_2012", "buick_lacrosse_2013", "buick_lacrosse_2014", "buick_lacrosse_2015", "buick_lacrosse_2016", "buick_lacrosse_2017", "buick_lacrosse_2019", "buick_regal_2011", "buick_regal_2012", "buick_regal_2013", "buick_regal_2014", "buick_regal_2015", "buick_regal_2016", "buick_regal_2017", "buick_regal_2019", "buick_regal_2020", "cadillac_ats_2013", "cadillac_ats_2014", "cadillac_ats_2015", "cadillac_ats_2016", "cadillac_ats_2017", "cadillac_ats_2019", "cadillac_ct4_2020", "cadillac_ct5_2020", "cadillac_ct6_2016", "cadillac_ct6_2017", "cadillac_ct6_2019", "cadillac_ct6_2020", 
"cadillac_cts_2010", "cadillac_cts_2011", "cadillac_cts_2012", "cadillac_cts_2013", "cadillac_cts_2014", "cadillac_cts_2015", "cadillac_cts_2016", "cadillac_cts_2017", "cadillac_cts_2019", "cadillac_escalade_2011", "cadillac_escalade_2012", "cadillac_escalade_2013", "cadillac_escalade_2014", "cadillac_escalade_2015", "cadillac_escalade_2016", "cadillac_escalade_2017", "cadillac_escalade_2019", "cadillac_escalade_2020", "cadillac_xt4_2019", "cadillac_xt4_2020", "cadillac_xt5_2017", "cadillac_xt5_2019", "cadillac_xt5_2020", "cadillac_xt6_2020", "cadillac_xts_2012", "cadillac_xts_2013", "cadillac_xts_2014", "cadillac_xts_2015", "cadillac_xts_2016", "cadillac_xts_2017", "cadillac_xts_2019", "chevrolet_blazer_1998", "chevrolet_blazer_1999", "chevrolet_blazer_2000", "chevrolet_blazer_2001", "chevrolet_blazer_2002", "chevrolet_blazer_2003", "chevrolet_blazer_2004", "chevrolet_blazer_2019", "chevrolet_blazer_2020", "chevrolet_bolt ev_2017", "chevrolet_bolt ev_2019", "chevrolet_camaro_2011", "chevrolet_camaro_2012", "chevrolet_camaro_2013", "chevrolet_camaro_2014", "chevrolet_camaro_2015", "chevrolet_camaro_2016", "chevrolet_camaro_2017", "chevrolet_camaro_2019", "chevrolet_camaro_2020", "chevrolet_colorado_2009", "chevrolet_colorado_2010", "chevrolet_colorado_2011", "chevrolet_colorado_2012", "chevrolet_colorado_2015", "chevrolet_colorado_2016", "chevrolet_colorado_2017", "chevrolet_colorado_2019", "chevrolet_colorado_2020", "chevrolet_corvette_2011", "chevrolet_corvette_2012", "chevrolet_corvette_2013", "chevrolet_corvette_2014", "chevrolet_corvette_2015", "chevrolet_corvette_2016", "chevrolet_corvette_2017", "chevrolet_corvette_2019", "chevrolet_corvette_2020", "chevrolet_cruze_2011", "chevrolet_cruze_2012", "chevrolet_cruze_2013", "chevrolet_cruze_2014", "chevrolet_cruze_2015", "chevrolet_cruze_2016", "chevrolet_cruze_2017", "chevrolet_cruze_2019", "chevrolet_equinox_2011", "chevrolet_equinox_2012", "chevrolet_equinox_2013", "chevrolet_equinox_2014", "chevrolet_equinox_2015", "chevrolet_equinox_2016", "chevrolet_equinox_2017", "chevrolet_equinox_2019", "chevrolet_equinox_2020", "chevrolet_impala_2011", "chevrolet_impala_2012", "chevrolet_impala_2013", "chevrolet_impala_2014", "chevrolet_impala_2015", "chevrolet_impala_2016", "chevrolet_impala_2017", "chevrolet_impala_2019", "chevrolet_impala_2020", "chevrolet_malibu_2011", "chevrolet_malibu_2012", "chevrolet_malibu_2013", "chevrolet_malibu_2014", "chevrolet_malibu_2015", "chevrolet_malibu_2016", "chevrolet_malibu_2017", "chevrolet_malibu_2019", "chevrolet_malibu_2020", "chevrolet_silverado 1500_2011", "chevrolet_silverado 1500_2012", "chevrolet_silverado 1500_2013", "chevrolet_silverado 1500_2014", "chevrolet_silverado 1500_2015", "chevrolet_silverado 1500_2016", "chevrolet_silverado 1500_2017", "chevrolet_silverado 1500_2019", "chevrolet_silverado 1500_2020", "chevrolet_silverado 2500hd_2011", "chevrolet_silverado 2500hd_2012", "chevrolet_silverado 2500hd_2013", "chevrolet_silverado 2500hd_2014", "chevrolet_silverado 2500hd_2015", "chevrolet_silverado 2500hd_2016", "chevrolet_silverado 2500hd_2017", "chevrolet_silverado 2500hd_2019", "chevrolet_silverado 2500hd_2020", "chevrolet_sonic_2012", "chevrolet_sonic_2013", "chevrolet_sonic_2014", "chevrolet_sonic_2015", "chevrolet_sonic_2016", "chevrolet_sonic_2017", "chevrolet_sonic_2019", "chevrolet_sonic_2020", "chevrolet_spark_2013", "chevrolet_spark_2014", "chevrolet_spark_2015", "chevrolet_spark_2016", "chevrolet_spark_2017", "chevrolet_spark_2019", "chevrolet_spark_2020", 
"chevrolet_suburban_2011", "chevrolet_suburban_2012", "chevrolet_suburban_2013", "chevrolet_suburban_2014", "chevrolet_suburban_2015", "chevrolet_suburban_2016", "chevrolet_suburban_2017", "chevrolet_suburban_2019", "chevrolet_tahoe_2011", "chevrolet_tahoe_2012", "chevrolet_tahoe_2013", "chevrolet_tahoe_2014", "chevrolet_tahoe_2015", "chevrolet_tahoe_2016", "chevrolet_tahoe_2017", "chevrolet_tahoe_2019", "chevrolet_trailblazer_2002", "chevrolet_trailblazer_2003", "chevrolet_trailblazer_2004", "chevrolet_trailblazer_2005", "chevrolet_trailblazer_2006", "chevrolet_trailblazer_2007", "chevrolet_trailblazer_2008", "chevrolet_traverse_2011", "chevrolet_traverse_2012", "chevrolet_traverse_2013", "chevrolet_traverse_2014", "chevrolet_traverse_2015", "chevrolet_traverse_2016", "chevrolet_traverse_2017", "chevrolet_traverse_2019", "chevrolet_traverse_2020", "chevrolet_trax_2015", "chevrolet_trax_2016", "chevrolet_trax_2017", "chevrolet_trax_2019", "chevrolet_trax_2020", "chevrolet_volt_2011", "chevrolet_volt_2012", "chevrolet_volt_2013", "chevrolet_volt_2014", "chevrolet_volt_2015", "chevrolet_volt_2016", "chevrolet_volt_2017", "chevrolet_volt_2019", "chrysler_300_2011", "chrysler_300_2012", "chrysler_300_2013", "chrysler_300_2014", "chrysler_300_2015", "chrysler_300_2016", "chrysler_300_2017", "chrysler_300_2019", "chrysler_300_2020", "chrysler_pacifica_2004", "chrysler_pacifica_2005", "chrysler_pacifica_2006", "chrysler_pacifica_2007", "chrysler_pacifica_2008", "chrysler_pacifica_2017", "chrysler_pacifica_2019", "chrysler_pacifica_2020", "dodge_challenger_2011", "dodge_challenger_2012", "dodge_challenger_2013", "dodge_challenger_2014", "dodge_challenger_2015", "dodge_challenger_2016", "dodge_challenger_2017", "dodge_challenger_2019", "dodge_challenger_2020", "dodge_charger_2011", "dodge_charger_2012", "dodge_charger_2013", "dodge_charger_2014", "dodge_charger_2015", "dodge_charger_2016", "dodge_charger_2017", "dodge_charger_2019", "dodge_charger_2020", "dodge_durango_2011", "dodge_durango_2012", "dodge_durango_2013", "dodge_durango_2014", "dodge_durango_2015", "dodge_durango_2016", "dodge_durango_2017", "dodge_durango_2019", "dodge_durango_2020", "dodge_grand caravan_2011", "dodge_grand caravan_2012", "dodge_grand caravan_2013", "dodge_grand caravan_2014", "dodge_grand caravan_2015", "dodge_grand caravan_2016", "dodge_grand caravan_2017", "dodge_grand caravan_2019", "dodge_grand caravan_2020", "dodge_journey_2011", "dodge_journey_2012", "dodge_journey_2013", "dodge_journey_2014", "dodge_journey_2015", "dodge_journey_2016", "dodge_journey_2017", "dodge_journey_2019", "dodge_journey_2020", "fiat_124 spider_2017", "fiat_124 spider_2019", "fiat_124 spider_2020", "fiat_500l_2014", "fiat_500l_2015", "fiat_500l_2016", "fiat_500l_2017", "fiat_500l_2019", "fiat_500x_2016", "fiat_500x_2017", "fiat_500x_2019", "fiat_500x_2020", "fiat_500_2011", "fiat_500_2012", "fiat_500_2013", "fiat_500_2014", "fiat_500_2015", "fiat_500_2016", "fiat_500_2017", "fiat_500_2019", "fiat_500e_2013", "fiat_500e_2014", "fiat_500e_2015", "fiat_500e_2016", "fiat_500e_2017", "fiat_500e_2019", "ferrari_488 gtb_2016", "ferrari_488 gtb_2019", "ferrari_gtc4lusso_2017", "ferrari_gtc4lusso_2020", "ferrari_portofino_2019", "ford_ecosport_2019", "ford_ecosport_2020", "ford_edge_2011", "ford_edge_2012", "ford_edge_2013", "ford_edge_2014", "ford_edge_2015", "ford_edge_2016", "ford_edge_2017", "ford_edge_2019", "ford_edge_2020", "ford_escape_2011", "ford_escape_2012", "ford_escape_2013", "ford_escape_2014", "ford_escape_2015", 
"ford_escape_2016", "ford_escape_2017", "ford_escape_2019", "ford_escape_2020", "ford_expedition_2011", "ford_expedition_2012", "ford_expedition_2013", "ford_expedition_2014", "ford_expedition_2015", "ford_expedition_2016", "ford_expedition_2017", "ford_expedition_2019", "ford_expedition_2020", "ford_explorer_2011", "ford_explorer_2012", "ford_explorer_2013", "ford_explorer_2014", "ford_explorer_2015", "ford_explorer_2016", "ford_explorer_2017", "ford_explorer_2019", "ford_explorer_2020", "ford_f-150_2011", "ford_f-150_2012", "ford_f-150_2013", "ford_f-150_2014", "ford_f-150_2015", "ford_f-150_2016", "ford_f-150_2017", "ford_f-150_2019", "ford_fiesta_2011", "ford_fiesta_2012", "ford_fiesta_2013", "ford_fiesta_2014", "ford_fiesta_2015", "ford_fiesta_2016", "ford_fiesta_2017", "ford_fiesta_2019", "ford_flex_2010", "ford_flex_2011", "ford_flex_2012", "ford_flex_2013", "ford_flex_2014", "ford_flex_2015", "ford_flex_2016", "ford_flex_2017", "ford_flex_2019", "ford_fusion_2011", "ford_fusion_2012", "ford_fusion_2013", "ford_fusion_2014", "ford_fusion_2015", "ford_fusion_2016", "ford_fusion_2017", "ford_fusion_2019", "ford_fusion_2020", "ford_mustang_2011", "ford_mustang_2012", "ford_mustang_2013", "ford_mustang_2014", "ford_mustang_2015", "ford_mustang_2016", "ford_mustang_2017", "ford_mustang_2019", "ford_mustang_2020", "ford_ranger_2004", "ford_ranger_2005", "ford_ranger_2006", "ford_ranger_2007", "ford_ranger_2008", "ford_ranger_2009", "ford_ranger_2010", "ford_ranger_2019", "ford_super duty f-250_2011", "ford_super duty f-250_2012", "ford_super duty f-250_2013", "ford_super duty f-250_2014", "ford_super duty f-250_2015", "ford_super duty f-250_2016", "ford_super duty f-250_2017", "ford_super duty f-250_2019", "ford_super duty f-250_2020", "ford_taurus_2010", "ford_taurus_2011", "ford_taurus_2012", "ford_taurus_2013", "ford_taurus_2014", "ford_taurus_2015", "ford_taurus_2016", "ford_taurus_2017", "ford_taurus_2019", "ford_transit connect wagon_2011", "ford_transit connect wagon_2012", "ford_transit connect wagon_2013", "ford_transit connect wagon_2014", "ford_transit connect wagon_2015", "ford_transit connect wagon_2016", "ford_transit connect wagon_2017", "ford_transit connect wagon_2019", "ford_transit connect wagon_2020", "gmc_acadia_2011", "gmc_acadia_2012", "gmc_acadia_2013", "gmc_acadia_2014", "gmc_acadia_2015", "gmc_acadia_2016", "gmc_acadia_2017", "gmc_acadia_2019", "gmc_acadia_2020", "gmc_canyon_2009", "gmc_canyon_2010", "gmc_canyon_2011", "gmc_canyon_2012", "gmc_canyon_2015", "gmc_canyon_2016", "gmc_canyon_2017", "gmc_canyon_2019", "gmc_canyon_2020", "gmc_sierra 1500_2011", "gmc_sierra 1500_2012", "gmc_sierra 1500_2013", "gmc_sierra 1500_2014", "gmc_sierra 1500_2015", "gmc_sierra 1500_2016", "gmc_sierra 1500_2017", "gmc_sierra 1500_2019", "gmc_sierra 1500_2020", "gmc_sierra 2500hd_2011", "gmc_sierra 2500hd_2012", "gmc_sierra 2500hd_2013", "gmc_sierra 2500hd_2014", "gmc_sierra 2500hd_2015", "gmc_sierra 2500hd_2016", "gmc_sierra 2500hd_2017", "gmc_sierra 2500hd_2019", "gmc_sierra 2500hd_2020", "gmc_terrain_2011", "gmc_terrain_2012", "gmc_terrain_2013", "gmc_terrain_2014", "gmc_terrain_2015", "gmc_terrain_2016", "gmc_terrain_2017", "gmc_terrain_2019", "gmc_terrain_2020", "gmc_yukon_2011", "gmc_yukon_2012", "gmc_yukon_2013", "gmc_yukon_2014", "gmc_yukon_2015", "gmc_yukon_2016", "gmc_yukon_2017", "gmc_yukon_2019", "gmc_yukon_2020", "genesis_g70_2020", "genesis_g80_2017", "genesis_g80_2020", "genesis_g90_2017", "genesis_g90_2019", "genesis_g90_2020", "honda_accord_2011", 
"honda_accord_2012", "honda_accord_2013", "honda_accord_2014", "honda_accord_2015", "honda_accord_2016", "honda_accord_2017", "honda_accord_2019", "honda_accord_2020", "honda_cr-v_2011", "honda_cr-v_2012", "honda_cr-v_2013", "honda_cr-v_2014", "honda_cr-v_2015", "honda_cr-v_2016", "honda_cr-v_2017", "honda_cr-v_2019", "honda_cr-v_2020", "honda_civic_2011", "honda_civic_2012", "honda_civic_2013", "honda_civic_2014", "honda_civic_2015", "honda_civic_2016", "honda_civic_2017", "honda_civic_2019", "honda_civic_2020", "honda_clarity_2003", "honda_clarity_2005", "honda_clarity_2008", "honda_clarity_2009", "honda_clarity_2017", "honda_clarity_2019", "honda_fit_2011", "honda_fit_2012", "honda_fit_2013", "honda_fit_2014", "honda_fit_2015", "honda_fit_2016", "honda_fit_2017", "honda_fit_2019", "honda_hr-v_2016", "honda_hr-v_2017", "honda_hr-v_2019", "honda_hr-v_2020", "honda_insight_2005", "honda_insight_2006", "honda_insight_2009", "honda_insight_2010", "honda_insight_2011", "honda_insight_2012", "honda_insight_2013", "honda_insight_2019", "honda_insight_2020", "honda_odyssey_2011", "honda_odyssey_2012", "honda_odyssey_2013", "honda_odyssey_2014", "honda_odyssey_2015", "honda_odyssey_2016", "honda_odyssey_2017", "honda_odyssey_2019", "honda_odyssey_2020", "honda_passport_1995", "honda_passport_1997", "honda_passport_1998", "honda_passport_1999", "honda_passport_2000", "honda_passport_2001", "honda_passport_2019", "honda_passport_2020", "honda_pilot_2011", "honda_pilot_2012", "honda_pilot_2013", "honda_pilot_2014", "honda_pilot_2015", "honda_pilot_2016", "honda_pilot_2017", "honda_pilot_2019", "honda_pilot_2020", "honda_ridgeline_2008", "honda_ridgeline_2009", "honda_ridgeline_2010", "honda_ridgeline_2011", "honda_ridgeline_2012", "honda_ridgeline_2013", "honda_ridgeline_2014", "honda_ridgeline_2017", "honda_ridgeline_2019", "hyundai_accent_2011", "hyundai_accent_2012", "hyundai_accent_2013", "hyundai_accent_2014", "hyundai_accent_2015", "hyundai_accent_2016", "hyundai_accent_2017", "hyundai_accent_2019", "hyundai_accent_2020", "hyundai_elantra_2011", "hyundai_elantra_2012", "hyundai_elantra_2013", "hyundai_elantra_2014", "hyundai_elantra_2015", "hyundai_elantra_2016", "hyundai_elantra_2017", "hyundai_elantra_2019", "hyundai_elantra_2020", "hyundai_ioniq_2017", "hyundai_ioniq_2019", "hyundai_ioniq_2020", "hyundai_kona electric_2019", "hyundai_kona electric_2020", "hyundai_kona_2019", "hyundai_kona_2020", "hyundai_nexo_2019", "hyundai_nexo_2020", "hyundai_palisade_2020", "hyundai_santa fe_2011", "hyundai_santa fe_2012", "hyundai_santa fe_2013", "hyundai_santa fe_2014", "hyundai_santa fe_2015", "hyundai_santa fe_2016", "hyundai_santa fe_2017", "hyundai_santa fe_2019", "hyundai_santa fe_2020", "hyundai_sonata_2011", "hyundai_sonata_2012", "hyundai_sonata_2013", "hyundai_sonata_2014", "hyundai_sonata_2015", "hyundai_sonata_2016", "hyundai_sonata_2017", "hyundai_sonata_2019", "hyundai_sonata_2020", "hyundai_tucson_2011", "hyundai_tucson_2012", "hyundai_tucson_2013", "hyundai_tucson_2014", "hyundai_tucson_2015", "hyundai_tucson_2016", "hyundai_tucson_2017", "hyundai_tucson_2019", "hyundai_tucson_2020", "hyundai_veloster_2012", "hyundai_veloster_2013", "hyundai_veloster_2014", "hyundai_veloster_2015", "hyundai_veloster_2016", "hyundai_veloster_2019", "hyundai_veloster_2020", "hyundai_venue_2020", "infiniti_q50_2014", "infiniti_q50_2015", "infiniti_q50_2016", "infiniti_q50_2017", "infiniti_q50_2019", "infiniti_q50_2020", "infiniti_q60_2014", "infiniti_q60_2015", "infiniti_q60_2016", "infiniti_q60_2017", 
"infiniti_q60_2019", "infiniti_q60_2020", "infiniti_q70_2014", "infiniti_q70_2015", "infiniti_q70_2016", "infiniti_q70_2017", "infiniti_q70_2019", "infiniti_qx30_2017", "infiniti_qx30_2019", "infiniti_qx50_2014", "infiniti_qx50_2015", "infiniti_qx50_2016", "infiniti_qx50_2019", "infiniti_qx50_2020", "infiniti_qx60_2014", "infiniti_qx60_2015", "infiniti_qx60_2016", "infiniti_qx60_2017", "infiniti_qx60_2019", "infiniti_qx60_2020", "infiniti_qx80_2014", "infiniti_qx80_2015", "infiniti_qx80_2016", "infiniti_qx80_2017", "infiniti_qx80_2020", "jaguar_e-pace_2020", "jaguar_f-pace_2017", "jaguar_f-pace_2019", "jaguar_f-pace_2020", "jaguar_f-type_2014", "jaguar_f-type_2015", "jaguar_f-type_2016", "jaguar_f-type_2017", "jaguar_f-type_2020", "jaguar_i-pace_2019", "jaguar_i-pace_2020", "jaguar_xe_2017", "jaguar_xe_2019", "jaguar_xe_2020", "jaguar_xf_2011", "jaguar_xf_2012", "jaguar_xf_2013", "jaguar_xf_2014", "jaguar_xf_2015", "jaguar_xf_2016", "jaguar_xf_2017", "jaguar_xf_2019", "jaguar_xf_2020", "jaguar_xj_2010", "jaguar_xj_2011", "jaguar_xj_2012", "jaguar_xj_2013", "jaguar_xj_2014", "jaguar_xj_2015", "jaguar_xj_2017", "jaguar_xj_2019", "jeep_cherokee_1999", "jeep_cherokee_2000", "jeep_cherokee_2001", "jeep_cherokee_2014", "jeep_cherokee_2015", "jeep_cherokee_2016", "jeep_cherokee_2017", "jeep_cherokee_2019", "jeep_compass_2011", "jeep_compass_2012", "jeep_compass_2013", "jeep_compass_2014", "jeep_compass_2015", "jeep_compass_2017", "jeep_compass_2019", "jeep_compass_2020", "jeep_gladiator_2020", "jeep_grand cherokee_2011", "jeep_grand cherokee_2012", "jeep_grand cherokee_2013", "jeep_grand cherokee_2014", "jeep_grand cherokee_2015", "jeep_grand cherokee_2016", "jeep_grand cherokee_2017", "jeep_grand cherokee_2019", "jeep_grand cherokee_2020", "jeep_renegade_2015", "jeep_renegade_2016", "jeep_renegade_2017", "jeep_renegade_2019", "jeep_renegade_2020", "jeep_wrangler_2011", "jeep_wrangler_2012", "jeep_wrangler_2013", "jeep_wrangler_2014", "jeep_wrangler_2015", "jeep_wrangler_2016", "jeep_wrangler_2017", "jeep_wrangler_2019", "jeep_wrangler_2020", "kia_cadenza_2014", "kia_cadenza_2015", "kia_cadenza_2016", "kia_cadenza_2017", "kia_cadenza_2019", "kia_forte_2011", "kia_forte_2012", "kia_forte_2013", "kia_forte_2014", "kia_forte_2015", "kia_forte_2016", "kia_forte_2017", "kia_forte_2019", "kia_forte_2020", "kia_k900_2015", "kia_k900_2016", "kia_k900_2017", "kia_k900_2019", "kia_niro_2017", "kia_niro_2019", "kia_niro_2020", "kia_optima_2011", "kia_optima_2012", "kia_optima_2013", "kia_optima_2014", "kia_optima_2015", "kia_optima_2016", "kia_optima_2017", "kia_optima_2019", "kia_rio_2011", "kia_rio_2012", "kia_rio_2013", "kia_rio_2014", "kia_rio_2015", "kia_rio_2016", "kia_rio_2017", "kia_rio_2019", "kia_rio_2020", "kia_sedona_2010", "kia_sedona_2011", "kia_sedona_2012", "kia_sedona_2014", "kia_sedona_2015", "kia_sedona_2016", "kia_sedona_2017", "kia_sedona_2019", "kia_sedona_2020", "kia_sorento_2011", "kia_sorento_2012", "kia_sorento_2013", "kia_sorento_2014", "kia_sorento_2015", "kia_sorento_2016", "kia_sorento_2017", "kia_sorento_2019", "kia_sorento_2020", "kia_soul ev_2015", "kia_soul ev_2016", "kia_soul ev_2017", "kia_soul_2011", "kia_soul_2012", "kia_soul_2013", "kia_soul_2014", "kia_soul_2015", "kia_soul_2016", "kia_soul_2017", "kia_soul_2019", "kia_soul_2020", "kia_sportage_2011", "kia_sportage_2012", "kia_sportage_2013", "kia_sportage_2014", "kia_sportage_2015", "kia_sportage_2016", "kia_sportage_2017", "kia_sportage_2019", "kia_sportage_2020", "kia_stinger_2019", "kia_stinger_2020", 
"kia_telluride_2020", "lamborghini_aventador_2013", "lamborghini_aventador_2014", "lamborghini_aventador_2015", "lamborghini_aventador_2016", "lamborghini_aventador_2019", "lamborghini_huracan_2015", "lamborghini_huracan_2016", "lamborghini_huracan_2017", "lamborghini_huracan_2019", "lamborghini_urus_2019", "land rover_defender_2020", "land rover_discovery sport_2016", "land rover_discovery sport_2017", "land rover_discovery sport_2019", "land rover_discovery sport_2020", "land rover_discovery_1999", "land rover_discovery_2000", "land rover_discovery_2001", "land rover_discovery_2002", "land rover_discovery_2003", "land rover_discovery_2004", "land rover_discovery_2017", "land rover_discovery_2019", "land rover_discovery_2020", "land rover_range rover evoque_2012", "land rover_range rover evoque_2013", "land rover_range rover evoque_2014", "land rover_range rover evoque_2015", "land rover_range rover evoque_2016", "land rover_range rover evoque_2019", "land rover_range rover evoque_2020", "land rover_range rover sport_2011", "land rover_range rover sport_2012", "land rover_range rover sport_2013", "land rover_range rover sport_2014", "land rover_range rover sport_2015", "land rover_range rover sport_2016", "land rover_range rover sport_2017", "land rover_range rover sport_2019", "land rover_range rover sport_2020", "land rover_range rover velar_2019", "land rover_range rover velar_2020", "land rover_range rover_2011", "land rover_range rover_2012", "land rover_range rover_2013", "land rover_range rover_2014", "land rover_range rover_2015", "land rover_range rover_2016", "land rover_range rover_2017", "land rover_range rover_2019", "land rover_range rover_2020", "lexus_es_2011", "lexus_es_2012", "lexus_es_2013", "lexus_es_2014", "lexus_es_2015", "lexus_es_2016", "lexus_es_2017", "lexus_es_2019", "lexus_es_2020", "lexus_gs_2010", "lexus_gs_2011", "lexus_gs_2013", "lexus_gs_2014", "lexus_gs_2015", "lexus_gs_2016", "lexus_gs_2017", "lexus_gs_2020", "lexus_gx_2011", "lexus_gx_2012", "lexus_gx_2013", "lexus_gx_2014", "lexus_gx_2015", "lexus_gx_2016", "lexus_gx_2017", "lexus_gx_2019", "lexus_gx_2020", "lexus_is_2011", "lexus_is_2012", "lexus_is_2013", "lexus_is_2014", "lexus_is_2015", "lexus_is_2016", "lexus_is_2017", "lexus_is_2019", "lexus_is_2020", "lexus_lc_2019", "lexus_ls_2011", "lexus_ls_2012", "lexus_ls_2013", "lexus_ls_2014", "lexus_ls_2015", "lexus_ls_2016", "lexus_ls_2017", "lexus_ls_2019", "lexus_ls_2020", "lexus_lx_2010", "lexus_lx_2011", "lexus_lx_2013", "lexus_lx_2014", "lexus_lx_2015", "lexus_lx_2016", "lexus_lx_2017", "lexus_lx_2019", "lexus_nx_2015", "lexus_nx_2016", "lexus_nx_2017", "lexus_nx_2019", "lexus_nx_2020", "lexus_rc_2015", "lexus_rc_2016", "lexus_rc_2017", "lexus_rc_2019", "lexus_rc_2020", "lexus_rx_2011", "lexus_rx_2012", "lexus_rx_2013", "lexus_rx_2014", "lexus_rx_2015", "lexus_rx_2016", "lexus_rx_2017", "lexus_rx_2019", "lexus_rx_2020", "lexus_ux_2019", "lexus_ux_2020", "lincoln_aviator_2003", "lincoln_aviator_2004", "lincoln_aviator_2005", "lincoln_aviator_2019", "lincoln_aviator_2020", "lincoln_continental_1997", "lincoln_continental_1998", "lincoln_continental_1999", "lincoln_continental_2000", "lincoln_continental_2001", "lincoln_continental_2002", "lincoln_continental_2017", "lincoln_continental_2019", "lincoln_continental_2020", "lincoln_corsair_2020", "lincoln_mkc_2015", "lincoln_mkc_2016", "lincoln_mkc_2017", "lincoln_mkc_2019", "lincoln_mkt_2010", "lincoln_mkt_2011", "lincoln_mkt_2012", "lincoln_mkt_2013", "lincoln_mkt_2014", "lincoln_mkt_2015", 
"lincoln_mkt_2016", "lincoln_mkt_2017", "lincoln_mkt_2019", "lincoln_mkz_2011", "lincoln_mkz_2012", "lincoln_mkz_2013", "lincoln_mkz_2014", "lincoln_mkz_2015", "lincoln_mkz_2016", "lincoln_mkz_2017", "lincoln_mkz_2019", "lincoln_mkz_2020", "lincoln_nautilus_2019", "lincoln_nautilus_2020", "lincoln_navigator_2011", "lincoln_navigator_2012", "lincoln_navigator_2013", "lincoln_navigator_2014", "lincoln_navigator_2015", "lincoln_navigator_2016", "lincoln_navigator_2017", "lincoln_navigator_2019", "lincoln_navigator_2020", "mini_clubman_2010", "mini_clubman_2011", "mini_clubman_2012", "mini_clubman_2013", "mini_clubman_2014", "mini_clubman_2016", "mini_clubman_2017", "mini_clubman_2019", "mini_clubman_2020", "mini_cooper countryman_2011", "mini_cooper countryman_2012", "mini_cooper countryman_2013", "mini_cooper countryman_2014", "mini_cooper countryman_2015", "mini_cooper countryman_2016", "mini_cooper countryman_2017", "mini_cooper countryman_2019", "mini_cooper countryman_2020", "mini_cooper_2011", "mini_cooper_2012", "mini_cooper_2013", "mini_cooper_2014", "mini_cooper_2015", "mini_cooper_2016", "mini_cooper_2017", "mini_cooper_2019", "mini_cooper_2020", "maserati_ghibli_2014", "maserati_ghibli_2015", "maserati_ghibli_2016", "maserati_ghibli_2020", "maserati_granturismo_2010", "maserati_granturismo_2011", "maserati_granturismo_2012", "maserati_granturismo_2013", "maserati_granturismo_2014", "maserati_granturismo_2015", "maserati_granturismo_2016", "maserati_granturismo_2017", "maserati_granturismo_2019", "maserati_levante_2013", "maserati_levante_2017", "maserati_levante_2019", "maserati_levante_2020", "maserati_quattroporte_2011", "maserati_quattroporte_2012", "maserati_quattroporte_2013", "maserati_quattroporte_2014", "maserati_quattroporte_2015", "maserati_quattroporte_2016", "maserati_quattroporte_2017", "maserati_quattroporte_2019", "mazda_cx-30_2020", "mazda_cx-3_2016", "mazda_cx-3_2017", "mazda_cx-3_2019", "mazda_cx-3_2020", "mazda_cx-5_2013", "mazda_cx-5_2014", "mazda_cx-5_2015", "mazda_cx-5_2016", "mazda_cx-5_2017", "mazda_cx-5_2019", "mazda_cx-9_2011", "mazda_cx-9_2012", "mazda_cx-9_2013", "mazda_cx-9_2014", "mazda_cx-9_2015", "mazda_cx-9_2016", "mazda_cx-9_2017", "mazda_cx-9_2019", "mazda_cx-9_2020", "mazda_mazda3_2011", "mazda_mazda3_2012", "mazda_mazda3_2013", "mazda_mazda3_2014", "mazda_mazda3_2015", "mazda_mazda3_2016", "mazda_mazda3_2017", "mazda_mazda3_2019", "mazda_mazda3_2020", "mazda_mazda6_2011", "mazda_mazda6_2012", "mazda_mazda6_2013", "mazda_mazda6_2014", "mazda_mazda6_2015", "mazda_mazda6_2016", "mazda_mazda6_2017", "mazda_mazda6_2019", "mazda_mx-5 miata_2010", "mazda_mx-5 miata_2011", "mazda_mx-5 miata_2012", "mazda_mx-5 miata_2013", "mazda_mx-5 miata_2014", "mazda_mx-5 miata_2015", "mazda_mx-5 miata_2016", "mazda_mx-5 miata_2017", "mazda_mx-5 miata_2019", "mazda_mazda3 hatchback_2020", "mclaren_570gt_2017", "mclaren_570s_2016", "mclaren_570s_2017", "mclaren_570s_2019", "mclaren_720s_2017", "mclaren_720s_2020", "mercedes-benz_a class_2019", "mercedes-benz_a class_2020", "mercedes-benz_amg gt_2016", "mercedes-benz_amg gt_2017", "mercedes-benz_amg gt_2019", "mercedes-benz_amg gt_2020", "mercedes-benz_c class_2011", "mercedes-benz_c class_2012", "mercedes-benz_c class_2013", "mercedes-benz_c class_2014", "mercedes-benz_c class_2015", "mercedes-benz_c class_2016", "mercedes-benz_c class_2017", "mercedes-benz_c class_2019", "mercedes-benz_c class_2020", "mercedes-benz_cla class_2014", "mercedes-benz_cla class_2015", "mercedes-benz_cla class_2016", "mercedes-benz_cla 
class_2017", "mercedes-benz_cla class_2019", "mercedes-benz_cla class_2020", "mercedes-benz_cls class_2011", "mercedes-benz_cls class_2012", "mercedes-benz_cls class_2013", "mercedes-benz_cls class_2014", "mercedes-benz_cls class_2015", "mercedes-benz_cls class_2016", "mercedes-benz_cls class_2017", "mercedes-benz_cls class_2019", "mercedes-benz_cls class_2020", "mercedes-benz_e class_2011", "mercedes-benz_e class_2012", "mercedes-benz_e class_2013", "mercedes-benz_e class_2014", "mercedes-benz_e class_2015", "mercedes-benz_e class_2016", "mercedes-benz_e class_2017", "mercedes-benz_e class_2019", "mercedes-benz_e class_2020", "mercedes-benz_eqc_2020", "mercedes-benz_g class_2011", "mercedes-benz_g class_2012", "mercedes-benz_g class_2013", "mercedes-benz_g class_2014", "mercedes-benz_g class_2015", "mercedes-benz_g class_2016", "mercedes-benz_g class_2017", "mercedes-benz_g class_2019", "mercedes-benz_g class_2020", "mercedes-benz_gla class_2015", "mercedes-benz_gla class_2016", "mercedes-benz_gla class_2017", "mercedes-benz_gla class_2019", "mercedes-benz_gla class_2020", "mercedes-benz_glb class_2020", "mercedes-benz_glc class_2016", "mercedes-benz_glc class_2017", "mercedes-benz_glc class_2019", "mercedes-benz_glc class_2020", "mercedes-benz_gle class_2016", "mercedes-benz_gle class_2017", "mercedes-benz_gle class_2019", "mercedes-benz_gle class_2020", "mercedes-benz_gls class_2017", "mercedes-benz_gls class_2019", "mercedes-benz_gls class_2020", "mercedes-benz_metris_2016", "mercedes-benz_metris_2017", "mercedes-benz_metris_2019", "mercedes-benz_metris_2020", "mercedes-benz_s class_2011", "mercedes-benz_s class_2012", "mercedes-benz_s class_2013", "mercedes-benz_s class_2014", "mercedes-benz_s class_2015", "mercedes-benz_s class_2016", "mercedes-benz_s class_2017", "mercedes-benz_s class_2019", "mercedes-benz_s class_2020", "mercedes-benz_sl class_2011", "mercedes-benz_sl class_2012", "mercedes-benz_sl class_2013", "mercedes-benz_sl class_2014", "mercedes-benz_sl class_2015", "mercedes-benz_sl class_2016", "mercedes-benz_sl class_2017", "mercedes-benz_sl class_2019", "mercedes-benz_sl class_2020", "mercedes-benz_slc class_2017", "mercedes-benz_slc class_2019", "mercedes-benz_slc class_2020", "mitsubishi_eclipse cross_2019", "mitsubishi_eclipse cross_2020", "mitsubishi_mirage_1999", "mitsubishi_mirage_2000", "mitsubishi_mirage_2001", "mitsubishi_mirage_2002", "mitsubishi_mirage_2014", "mitsubishi_mirage_2015", "mitsubishi_mirage_2017", "mitsubishi_mirage_2019", "mitsubishi_outlander sport_2011", "mitsubishi_outlander sport_2012", "mitsubishi_outlander sport_2013", "mitsubishi_outlander sport_2014", "mitsubishi_outlander sport_2015", "mitsubishi_outlander sport_2016", "mitsubishi_outlander sport_2017", "mitsubishi_outlander sport_2019", "mitsubishi_outlander sport_2020", "mitsubishi_outlander_2011", "mitsubishi_outlander_2012", "mitsubishi_outlander_2013", "mitsubishi_outlander_2014", "mitsubishi_outlander_2015", "mitsubishi_outlander_2016", "mitsubishi_outlander_2017", "mitsubishi_outlander_2019", "mitsubishi_outlander_2020", "nissan_370z_2011", "nissan_370z_2012", "nissan_370z_2013", "nissan_370z_2014", "nissan_370z_2015", "nissan_370z_2016", "nissan_370z_2017", "nissan_370z_2019", "nissan_370z_2020", "nissan_altima_2011", "nissan_altima_2012", "nissan_altima_2013", "nissan_altima_2014", "nissan_altima_2015", "nissan_altima_2016", "nissan_altima_2017", "nissan_altima_2019", "nissan_altima_2020", "nissan_armada_2010", "nissan_armada_2011", "nissan_armada_2012", "nissan_armada_2013", 
"nissan_armada_2014", "nissan_armada_2015", "nissan_armada_2017", "nissan_armada_2019", "nissan_armada_2020", "nissan_frontier_2010", "nissan_frontier_2011", "nissan_frontier_2012", "nissan_frontier_2013", "nissan_frontier_2014", "nissan_frontier_2015", "nissan_frontier_2016", "nissan_frontier_2017", "nissan_frontier_2019", "nissan_gt-r_2011", "nissan_gt-r_2012", "nissan_gt-r_2013", "nissan_gt-r_2014", "nissan_gt-r_2015", "nissan_gt-r_2016", "nissan_gt-r_2017", "nissan_gt-r_2019", "nissan_gt-r_2020", "nissan_kicks_2019", "nissan_kicks_2020", "nissan_leaf_2011", "nissan_leaf_2012", "nissan_leaf_2013", "nissan_leaf_2014", "nissan_leaf_2015", "nissan_leaf_2016", "nissan_leaf_2017", "nissan_leaf_2019", "nissan_maxima_2010", "nissan_maxima_2011", "nissan_maxima_2012", "nissan_maxima_2013", "nissan_maxima_2014", "nissan_maxima_2016", "nissan_maxima_2017", "nissan_maxima_2019", "nissan_murano_2011", "nissan_murano_2012", "nissan_murano_2013", "nissan_murano_2014", "nissan_murano_2015", "nissan_murano_2016", "nissan_murano_2017", "nissan_murano_2019", "nissan_murano_2020", "nissan_nv200_2013", "nissan_nv200_2014", "nissan_nv200_2015", "nissan_nv200_2016", "nissan_nv200_2017", "nissan_nv200_2019", "nissan_nv200_2020", "nissan_pathfinder_2011", "nissan_pathfinder_2012", "nissan_pathfinder_2013", "nissan_pathfinder_2014", "nissan_pathfinder_2015", "nissan_pathfinder_2016", "nissan_pathfinder_2017", "nissan_pathfinder_2019", "nissan_pathfinder_2020", "nissan_rogue sport_2017", "nissan_rogue sport_2019", "nissan_rogue sport_2020", "nissan_rogue_2011", "nissan_rogue_2012", "nissan_rogue_2013", "nissan_rogue_2014", "nissan_rogue_2015", "nissan_rogue_2016", "nissan_rogue_2017", "nissan_rogue_2019", "nissan_rogue_2020", "nissan_sentra_2011", "nissan_sentra_2012", "nissan_sentra_2013", "nissan_sentra_2014", "nissan_sentra_2015", "nissan_sentra_2016", "nissan_sentra_2017", "nissan_sentra_2019", "nissan_sentra_2020", "nissan_titan_2011", "nissan_titan_2012", "nissan_titan_2013", "nissan_titan_2014", "nissan_titan_2015", "nissan_titan_2016", "nissan_titan_2017", "nissan_titan_2019", "nissan_titan_2020", "nissan_versa_2011", "nissan_versa_2012", "nissan_versa_2013", "nissan_versa_2014", "nissan_versa_2015", "nissan_versa_2016", "nissan_versa_2017", "nissan_versa_2019", "nissan_versa_2020", "porsche_718 spyder_2020", "porsche_718_2017", "porsche_718_2019", "porsche_718_2020", "porsche_911_2011", "porsche_911_2012", "porsche_911_2013", "porsche_911_2014", "porsche_911_2015", "porsche_911_2016", "porsche_911_2017", "porsche_911_2019", "porsche_911_2020", "porsche_cayenne_2011", "porsche_cayenne_2012", "porsche_cayenne_2013", "porsche_cayenne_2014", "porsche_cayenne_2015", "porsche_cayenne_2016", "porsche_cayenne_2017", "porsche_cayenne_2019", "porsche_cayenne_2020", "porsche_macan_2015", "porsche_macan_2016", "porsche_macan_2017", "porsche_macan_2019", "porsche_panamera_2011", "porsche_panamera_2012", "porsche_panamera_2013", "porsche_panamera_2014", "porsche_panamera_2015", "porsche_panamera_2016", "porsche_panamera_2017", "porsche_panamera_2019", "porsche_panamera_2020", "porsche_taycan_2020", "ram_1500_2011", "ram_1500_2012", "ram_1500_2013", "ram_1500_2014", "ram_1500_2015", "ram_1500_2016", "ram_1500_2017", "ram_1500_2019", "ram_1500_2020", "ram_2500_2011", "ram_2500_2012", "ram_2500_2013", "ram_2500_2014", "ram_2500_2015", "ram_2500_2016", "ram_2500_2017", "ram_2500_2019", "ram_2500_2020", "rolls-royce_cullinan_2020", "rolls-royce_dawn_2016", "rolls-royce_dawn_2017", "rolls-royce_dawn_2020", 
"rolls-royce_ghost_2011", "rolls-royce_ghost_2012", "rolls-royce_ghost_2014", "rolls-royce_ghost_2015", "rolls-royce_ghost_2016", "rolls-royce_ghost_2017", "rolls-royce_ghost_2019", "rolls-royce_phantom_2011", "rolls-royce_phantom_2012", "rolls-royce_phantom_2014", "rolls-royce_phantom_2015", "rolls-royce_phantom_2017", "rolls-royce_phantom_2019", "rolls-royce_phantom_2020", "rolls-royce_wraith_2014", "rolls-royce_wraith_2015", "rolls-royce_wraith_2016", "rolls-royce_wraith_2017", "rolls-royce_wraith_2020", "subaru_ascent_2019", "subaru_ascent_2020", "subaru_brz_2013", "subaru_brz_2014", "subaru_brz_2015", "subaru_brz_2016", "subaru_brz_2017", "subaru_brz_2019", "subaru_brz_2020", "subaru_crosstrek_2013", "subaru_crosstrek_2014", "subaru_crosstrek_2015", "subaru_crosstrek_2016", "subaru_crosstrek_2017", "subaru_crosstrek_2019", "subaru_crosstrek_2020", "subaru_forester_2011", "subaru_forester_2013", "subaru_forester_2014", "subaru_forester_2015", "subaru_forester_2016", "subaru_forester_2017", "subaru_forester_2019", "subaru_forester_2020", "subaru_impreza_2011", "subaru_impreza_2012", "subaru_impreza_2013", "subaru_impreza_2014", "subaru_impreza_2015", "subaru_impreza_2016", "subaru_impreza_2017", "subaru_impreza_2019", "subaru_impreza_2020", "subaru_legacy_2011", "subaru_legacy_2012", "subaru_legacy_2013", "subaru_legacy_2014", "subaru_legacy_2015", "subaru_legacy_2016", "subaru_legacy_2017", "subaru_legacy_2019", "subaru_legacy_2020", "subaru_outback_2011", "subaru_outback_2012", "subaru_outback_2013", "subaru_outback_2014", "subaru_outback_2015", "subaru_outback_2016", "subaru_outback_2017", "subaru_outback_2019", "subaru_outback_2020", "subaru_wrx_2011", "subaru_wrx_2012", "subaru_wrx_2013", "subaru_wrx_2014", "subaru_wrx_2015", "subaru_wrx_2016", "subaru_wrx_2017", "subaru_wrx_2019", "subaru_wrx_2020", "tesla_model 3_2017", "tesla_model 3_2019", "tesla_model s_2012", "tesla_model s_2013", "tesla_model s_2014", "tesla_model s_2015", "tesla_model s_2016", "tesla_model s_2017", "tesla_model s_2019", "tesla_model x_2016", "tesla_model x_2017", "tesla_model x_2019", "tesla_model y_2020", "toyota_4runner_2011", "toyota_4runner_2012", "toyota_4runner_2013", "toyota_4runner_2014", "toyota_4runner_2015", "toyota_4runner_2016", "toyota_4runner_2017", "toyota_4runner_2019", "toyota_4runner_2020", "toyota_86_2017", "toyota_86_2019", "toyota_avalon_2011", "toyota_avalon_2012", "toyota_avalon_2013", "toyota_avalon_2014", "toyota_avalon_2015", "toyota_avalon_2016", "toyota_avalon_2017", "toyota_avalon_2019", "toyota_c-hr_2019", "toyota_c-hr_2020", "toyota_camry_2011", "toyota_camry_2012", "toyota_camry_2013", "toyota_camry_2014", "toyota_camry_2015", "toyota_camry_2016", "toyota_camry_2017", "toyota_camry_2019", "toyota_camry_2020", "toyota_corolla_2011", "toyota_corolla_2012", "toyota_corolla_2013", "toyota_corolla_2014", "toyota_corolla_2015", "toyota_corolla_2016", "toyota_corolla_2017", "toyota_corolla_2019", "toyota_corolla_2020", "toyota_highlander_2011", "toyota_highlander_2012", "toyota_highlander_2013", "toyota_highlander_2014", "toyota_highlander_2015", "toyota_highlander_2016", "toyota_highlander_2017", "toyota_highlander_2019", "toyota_highlander_2020", "toyota_land cruiser_2010", "toyota_land cruiser_2011", "toyota_land cruiser_2013", "toyota_land cruiser_2014", "toyota_land cruiser_2015", "toyota_land cruiser_2016", "toyota_land cruiser_2017", "toyota_land cruiser_2019", "toyota_land cruiser_2020", "toyota_mirai_2016", "toyota_mirai_2017", "toyota_mirai_2019", "toyota_prius c_2012", 
"toyota_prius c_2013", "toyota_prius c_2014", "toyota_prius c_2015", "toyota_prius c_2016", "toyota_prius c_2017", "toyota_prius c_2019", "toyota_prius_2011", "toyota_prius_2012", "toyota_prius_2013", "toyota_prius_2014", "toyota_prius_2015", "toyota_prius_2016", "toyota_prius_2017", "toyota_prius_2019", "toyota_prius_2020", "toyota_rav4_2011", "toyota_rav4_2012", "toyota_rav4_2013", "toyota_rav4_2014", "toyota_rav4_2015", "toyota_rav4_2016", "toyota_rav4_2017", "toyota_rav4_2019", "toyota_rav4_2020", "toyota_sequoia_2011", "toyota_sequoia_2012", "toyota_sequoia_2013", "toyota_sequoia_2014", "toyota_sequoia_2015", "toyota_sequoia_2016", "toyota_sequoia_2017", "toyota_sequoia_2019", "toyota_sequoia_2020", "toyota_sienna_2011", "toyota_sienna_2012", "toyota_sienna_2013", "toyota_sienna_2014", "toyota_sienna_2015", "toyota_sienna_2016", "toyota_sienna_2017", "toyota_sienna_2019", "toyota_sienna_2020", "toyota_supra_1990", "toyota_supra_1994", "toyota_supra_1997", "toyota_supra_2020", "toyota_tacoma_2011", "toyota_tacoma_2012", "toyota_tacoma_2013", "toyota_tacoma_2014", "toyota_tacoma_2015", "toyota_tacoma_2016", "toyota_tacoma_2017", "toyota_tacoma_2019", "toyota_tacoma_2020", "toyota_tundra_2011", "toyota_tundra_2012", "toyota_tundra_2013", "toyota_tundra_2014", "toyota_tundra_2015", "toyota_tundra_2016", "toyota_tundra_2017", "toyota_tundra_2019", "toyota_yaris hatchback_2020", "toyota_yaris_2011", "toyota_yaris_2012", "toyota_yaris_2013", "toyota_yaris_2014", "toyota_yaris_2015", "toyota_yaris_2016", "toyota_yaris_2017", "toyota_yaris_2019", "toyota_yaris_2020", "volkswagen_arteon_2019", "volkswagen_arteon_2020", "volkswagen_atlas_2019", "volkswagen_atlas_2020", "volkswagen_beetle_2010", "volkswagen_beetle_2011", "volkswagen_beetle_2012", "volkswagen_beetle_2013", "volkswagen_beetle_2014", "volkswagen_beetle_2015", "volkswagen_beetle_2016", "volkswagen_beetle_2017", "volkswagen_beetle_2019", "volkswagen_golf_2011", "volkswagen_golf_2012", "volkswagen_golf_2013", "volkswagen_golf_2014", "volkswagen_golf_2015", "volkswagen_golf_2016", "volkswagen_golf_2017", "volkswagen_golf_2019", "volkswagen_golf_2020", "volkswagen_jetta_2011", "volkswagen_jetta_2012", "volkswagen_jetta_2013", "volkswagen_jetta_2014", "volkswagen_jetta_2015", "volkswagen_jetta_2016", "volkswagen_jetta_2017", "volkswagen_jetta_2019", "volkswagen_jetta_2020", "volkswagen_passat_2010", "volkswagen_passat_2012", "volkswagen_passat_2013", "volkswagen_passat_2014", "volkswagen_passat_2015", "volkswagen_passat_2016", "volkswagen_passat_2017", "volkswagen_passat_2019", "volkswagen_passat_2020", "volkswagen_tiguan_2011", "volkswagen_tiguan_2012", "volkswagen_tiguan_2013", "volkswagen_tiguan_2014", "volkswagen_tiguan_2015", "volkswagen_tiguan_2016", "volkswagen_tiguan_2017", "volkswagen_tiguan_2019", "volkswagen_tiguan_2020", "volkswagen_e-golf_2015", "volkswagen_e-golf_2016", "volkswagen_e-golf_2017", "volkswagen_e-golf_2019", "volvo_s60_2011", "volvo_s60_2012", "volvo_s60_2013", "volvo_s60_2014", "volvo_s60_2015", "volvo_s60_2016", "volvo_s60_2017", "volvo_s60_2019", "volvo_s60_2020", "volvo_s90_1998", "volvo_s90_2017", "volvo_s90_2019", "volvo_s90_2020", "volvo_v60_2015", "volvo_v60_2016", "volvo_v60_2017", "volvo_v60_2019", "volvo_v60_2020", "volvo_v90_1998", "volvo_v90_2017", "volvo_v90_2019", "volvo_v90_2020", "volvo_xc40_2019", "volvo_xc40_2020", "volvo_xc60_2011", "volvo_xc60_2012", "volvo_xc60_2013", "volvo_xc60_2014", "volvo_xc60_2015", "volvo_xc60_2016", "volvo_xc60_2017", "volvo_xc60_2019", "volvo_xc60_2020", 
"volvo_xc90_2010", "volvo_xc90_2011", "volvo_xc90_2012", "volvo_xc90_2013", "volvo_xc90_2014", "volvo_xc90_2016", "volvo_xc90_2017", "volvo_xc90_2019", "volvo_xc90_2020", "smart_fortwo_2010", "smart_fortwo_2011", "smart_fortwo_2012", "smart_fortwo_2013", "smart_fortwo_2014", "smart_fortwo_2015", "smart_fortwo_2016", "smart_fortwo_2017", "smart_fortwo_2019" ]
Takekazuchi/Caracam
# Caracam (gen 1)

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 1.9156
- Accuracy: 0.5852

## Model description

First generation of my AI that tells you what car you took a picture of.
More versions coming soon with accuracy ratings of 85% and higher! Trained on 70+ brands and 2700+ cars spanning 1945-2024.
***App coming soon (also called Caracam) to Android and iOS*** (Late March - Early April 2024).
In the future I will take user opinion into account on what brands to add. The app will be updated semi-yearly with user-suggested car brands!
If you wish to support project Caracam, please visit my [Patreon](https://www.patreon.com/Caracam) or my [Cashapp](https://cash.app/$Clippayy)!!

## Intended uses & limitations

***NOT FOR COMMERCIAL USE OUTSIDE OF OFFICIAL CARACAM MOBILE APP***

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step  | Validation Loss | Accuracy |
|:-------------:|:-----:|:-----:|:---------------:|:--------:|
| 4.0308        | 1.0   | 5362  | 3.6948          | 0.2491   |
| 2.694         | 2.0   | 10725 | 2.2586          | 0.5199   |
| 2.4475        | 3.0   | 16086 | 1.9156          | 0.5852   |

### Framework versions

- Transformers 4.36.2
- Pytorch 2.1.2+cpu
- Datasets 2.16.1
- Tokenizers 0.15.0
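A minimal PyTorch inference sketch, assuming the checkpoint is hosted under the `Takekazuchi/Caracam` repo id (the image path is a placeholder):

```python
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

processor = AutoImageProcessor.from_pretrained("Takekazuchi/Caracam")
model = AutoModelForImageClassification.from_pretrained("Takekazuchi/Caracam")

image = Image.open("car_photo.jpg")
inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Map the highest logit back to a car label via the model config.
print(model.config.id2label[logits.argmax(-1).item()])
```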
[ "ac cobra", "acura csx 2005", "acura el 1997", "acura ilx 2012", "acura ilx 2014", "acura ilx 2016", "acura ilx 2018", "acura integra 1986", "acura integra 1994", "acura integra 2022", "acura legend 1986", "acura mdx 2001", "acura mdx 2006", "acura mdx 2010", "acura mdx 2013", "acura mdx 2016", "acura mdx 2021", "acura nsx 1991", "acura nsx 2001", "acura nsx 2016", "acura rdx 2006", "acura rdx 2012", "acura rdx 2015", "acura rdx 2021", "acura rl 2004", "acura rlx 2017", "acura rsx 2002", "acura rsx 2005", "acura slx 1996", "acura slx 1997", "acura tl 1999", "acura tl 2003", "acura tl 2008", "acura tlx 2020", "acura tsx 2008", "acura zdx 2009", "alfa romeo 145 1994", "alfa romeo 146 1995", "alfa romeo 147 2005", "alfa romeo 147 2000", "alfa romeo 155 1992", "alfa romeo 156 1997", "alfa romeo 156 2003", "alfa romeo 156 gta 2001", "alfa romeo 159 2005", "alfa romeo 164 1988", "alfa romeo 166 1996", "alfa romeo 1900 1950", "alfa romeo 2600 1962", "alfa romeo 33 1983", "alfa romeo 33 1990", "alfa romeo 33 stradale 1967", "alfa romeo 4c 2013", "alfa romeo 6 1979", "alfa romeo 6c 2500 1939", "alfa romeo 75 1985", "alfa romeo 8c 2008", "alfa romeo alfasud 1972", "alfa romeo alfetta 1972", "alfa romeo arna 1983", "alfa romeo brera 2005", "alfa romeo gt 2003", "alfa romeo gtv 1995", "alfa romeo giulia 1962", "alfa romeo giulia 2016", "alfa romeo giulia 2022", "alfa romeo giulietta 1954", "alfa romeo giulietta 2010", "alfa romeo mito 2008", "alfa romeo mito 2016", "alfa romeo montreal 1970", "alfa romeo spider 1970", "alfa romeo spider 1983", "alfa romeo spider 1995", "alfa romeo spider 2003", "alfa romeo sprint 1976", "alfa romeo stelvio 2017", "alfa romeo tonale 2022", "aston martin cygnet 2011", "aston martin db11 2017", "aston martin db2 1950", "aston martin db4 1958", "aston martin db5 1963", "aston martin db6 1965", "aston martin db7 1993", "aston martin db7 1999", "aston martin db9 2004", "aston martin db9 2010", "aston martin db9 2013", "aston martin dbs 1967", "aston martin dbs 2008", "aston martin dbs superleggera 2018", "aston martin dbs770 ultimate 2023", "aston martin dbx 2020", "aston martin dbx707 2022", "aston martin lagonda 1976", "aston martin lagonda 1986", "aston martin one-77 2009", "aston martin rapide amr 2017", "aston martin rapide e 2019", "aston martin v12 2009", "aston martin v12 speedster 2020", "aston martin v12 vantage 2014", "aston martin v12 vantage 2022", "aston martin v12 vantage amr 2017", "aston martin v12 vantage v600 2018", "aston martin v12 zagato 2012", "aston martin v8 1973", "aston martin v8 1977", "aston martin v8 1993", "aston martin v8 1998", "aston martin v8 2005", "aston martin v8 2011", "aston martin valhalla 2022", "aston martin vanquish 2001", "aston martin vanquish 2004", "aston martin vanquish 2012", "aston martin vanquish s 2016", "aston martin vantage 2018", "aston martin vantage gt8 2016", "aston martin virage 1986", "aston martin virage 2011", "aston martin vulcan 2015", "audi 100 (c1) 1968", "audi 100 (c2) 1976", "audi 100 (c3) 1982", "audi 100 (c4) 1991", "audi 200 1984", "audi 80 s2 (b4) 1993", "audi 90 (b2) 1984", "audi 90 (b3) 1987", "audi a1 2010", "audi a1 2018", "audi a2 1999", "audi a3 2003", "audi a3 2005", "audi a3 2008", "audi a3 2013", "audi a3 2016", "audi a3 2020", "audi a4 1994", "audi a4 2001", "audi a4 2004", "audi a4 2007", "audi a4 2012", "audi a4 2016", "audi a4 2018", "audi a4 2019", "audi a5 2007", "audi a5 2011", "audi a5 2016", "audi a5 2019", "audi a6 (c4) 1994", "audi a6 1997", "audi a6 2001", "audi a6 2005", "audi 
a6 2008", "audi a6 2011", "audi a6 2014", "audi a6 2018", "audi a8 (d2) 1994", "audi a8 (d3) 2003", "audi a8 (d4) 2010", "audi a8 (d4) 2013", "audi a8 2017", "audi a8 2021", "audi q2 2016", "audi q2 2020", "audi q3 2011", "audi q3 2018", "audi q4 2021", "audi q5 2008", "audi q5 2012", "audi q5 2016", "audi q5 2020", "audi q7 2006", "audi q7 2009", "audi q7 2015", "audi q7 2019", "audi q8 2018", "audi q8 e-tron 2023", "audi quattro 1980", "audi r8 2008", "audi r8 2010", "audi r8 2012", "audi r8 2015", "audi r8 2018", "audi r8 2021", "audi rs 3 2015", "audi rs 3 2021", "audi rs q3 2013", "audi rs q8 2019", "audi rs e-tron gt 2021", "audi rs4 (b8) 2012", "audi rs4 (b9) 2017", "audi rs4 2000", "audi rs4 2005", "audi rs5 2010", "audi rs5 2013", "audi rs5 2019", "audi rs6 2002", "audi rs6 2008", "audi rs6 2013", "audi rs7 2013", "audi rs7 2019", "audi rs7 2022", "audi s1 2014", "audi s3 1999", "audi s3 2006", "audi s3 2008", "audi s3 2013", "audi s3 2016", "audi s3 2020", "audi s4 1997", "audi s4 2003", "audi s4 2005", "audi s4 2008", "audi s4 2012", "audi s4 2016", "audi s4 2019", "audi s5 2007", "audi s5 2012", "audi s5 2016", "audi s5 2020", "audi s6 (c4) 1994", "audi s6 1999", "audi s6 2006", "audi s6 2008", "audi s6 2012", "audi s6 2014", "audi s7 2011", "audi s7 2019", "audi s8 (d4) 2012", "audi s8 1996", "audi s8 1999", "audi s8 2006", "audi s8 2013", "audi s8 2015", "audi s8 2019", "audi s8 2021", "audi sq2 2018", "audi sq5 2013", "audi sq5 2017", "audi sq7 2016", "audi sq7 2019", "audi sq8 2019", "audi tt 1998", "audi tt 2006", "audi tt 2013", "audi tt 2014", "audi tt 2016", "audi tt 2018", "audi tts 2020", "audi v8 1988", "audi e-tron 2019", "bentley arnage r 2003", "bentley arnage t 2002", "bentley batur 2022", "bentley bentayga 2015", "bentley bentayga 2020", "bentley brooklands 1992", "bentley brooklands 2007", "bentley continental flying spur 2005", "bentley continental gt 2002", "bentley continental gt 2017", "bentley continental gt v8 2019", "bentley continental gtc speed 2009", "bentley continental r 1991", "bentley flying spur 2014", "bentley flying spur 2019", "bentley mulliner bacalar 2020", "bentley mulsanne 2009", "bentley mulsanne 2016", "bentley s1 1955", "bentley state limousine 2002", "bentley t1 saloon 1955", "bmw 1 series 2004", "bmw 1 series 2007", "bmw 1 series 2011", "bmw 1 series 2017", "bmw 1 series 2019", "bmw 2 series 2013", "bmw 2 series 2017", "bmw 2800 1968", "bmw 3 series 1975", "bmw 3 series 1988", "bmw 3 series 1999", "bmw 3 series 2001", "bmw 3 series 2003", "bmw 3 series 2008", "bmw 3 series 2012", "bmw 3200 1962", "bmw 4 series 2020", "bmw 5 series 1972", "bmw 5 series 2000", "bmw 5 series 2003", "bmw 5 series 2007", "bmw 5 series 2009", "bmw 5 series 2013", "bmw 5 series 2016", "bmw 5 series 2020", "bmw 501 1952", "bmw 503 1956", "bmw 507 1955", "bmw 6 series 2003", "bmw 6 series 2012", "bmw 7 series 1977", "bmw 7 series 1986", "bmw 7 series 2005", "bmw 7 series 2008", "bmw 7 series 2012", "bmw 7 series 2016", "bmw 7 series 2019", "bmw 7 series 2022", "bmw 8 series 1989", "bmw 8 series 2022", "bmw isetta 1955", "bmw l7 1997", "bmw m1 1978", "bmw m2 2015", "bmw m2 2019", "bmw m2 2022", "bmw m3 (e36) 1992", "bmw m3 2000", "bmw m3 2001", "bmw m3 2014", "bmw m3 2020", "bmw m3 2023", "bmw m3 gts 2010", "bmw m4 2017", "bmw m4 2022", "bmw m5 (e28) 1985", "bmw m5 (e34) 1988", "bmw m5 1998", "bmw m5 2005", "bmw m5 2011", "bmw m5 2013", "bmw m5 2017", "bmw m5 2020", "bmw m6 2005", "bmw m6 2012", "bmw m8 2019", "bmw x1 2009", "bmw x1 2016", "bmw x1 2019", "bmw 
x1 2022", "bmw x2 2018", "bmw x3 2004", "bmw x3 2007", "bmw x3 2010", "bmw x3 2014", "bmw x3 2017", "bmw x3 2021", "bmw x4 2014", "bmw x4 2018", "bmw x4 2021", "bmw x4 m 2019", "bmw x5 2000", "bmw x5 2007", "bmw x5 2010", "bmw x5 2014", "bmw x5 2018", "bmw x5 2023", "bmw x5 m 2023", "bmw x6 2010", "bmw x6 2014", "bmw x6 2019", "bmw x6 2023", "bmw x6 m 2010", "bmw x6 m 2014", "bmw x6 m 2019", "bmw x6 m 2023", "bmw x7 2018", "bmw x7 2022", "bmw xm 2022", "bmw z1 1988", "bmw z3 1996", "bmw z4 2002", "bmw z4 2006", "bmw z4 2009", "bmw z4 2018", "bmw z8 2000", "bmw i3 2013", "bmw i4 2021", "bmw i7 2022", "bmw i8 2014", "bmw ix 2021", "bmw ix3 2020", "bugatti bolide 2022", "bugatti centodieci 2019", "bugatti chiron 2016", "bugatti divo 2018", "bugatti eb 110 gt 1991", "bugatti type 101 1951", "bugatti type 251 1955", "bugatti veyron 2005", "bugatti w16 mistral 2022", "buick cascada 2016", "buick century 1939", "buick enclave 2007", "buick enclave 2012", "buick enclave 2017", "buick enclave 2021", "buick encore 2013", "buick encore 2016", "buick encore gx 2019", "buick envision 2014", "buick envision 2020", "buick gran sport 455 1970", "buick lacrosse 2004", "buick lacrosse 2009", "buick lacrosse 2016", "buick lesabre 1991", "buick lesabre 1999", "buick lucerne 2005", "buick park avenue 1991", "buick park avenue 1997", "buick rainier 2003", "buick reatta 1988", "buick regal 1988", "buick regal 1997", "buick regal 2010", "buick regal 2013", "buick regal 2017", "buick rendezvous 2002", "buick riviera 1963", "buick riviera 1986", "buick roadmaster 1949", "buick skylark 1991", "buick super riviera 1956", "buick terraza 2005", "buick verano 2012", "bac mono", "cadillac ats 2012", "cadillac ats 2014", "cadillac allante 1987", "cadillac bls 2006", "cadillac brougham 1992", "cadillac ct4-v 2019", "cadillac ct5 2019", "cadillac ct6 2016", "cadillac cts 2002", "cadillac cts 2007", "cadillac cts 2013", "cadillac cts-v 2003", "cadillac cts-v 2015", "cadillac catera 1997", "cadillac dts 2005", "cadillac deville 1994", "cadillac deville 1999", "cadillac elr 2014", "cadillac eldorado 1967", "cadillac eldorado 1971", "cadillac eldorado 1991", "cadillac eldorado brougham 1957", "cadillac escalade 2006", "cadillac escalade 2014", "cadillac escalade 2020", "cadillac escalade-v 2022", "cadillac lyriq 2022", "cadillac sts 2007", "cadillac seville 1998", "cadillac seville 1992", "cadillac xlr 2003", "cadillac xlr 2008", "cadillac xt4 2018", "cadillac xt4 2023", "cadillac xt6 2019", "cadillac xts 2013", "cadillac xts 2017", "chevrolet agile 2009", "chevrolet avalanche 2001", "chevrolet avalanche 2006", "chevrolet aveo 2002", "chevrolet aveo 2004", "chevrolet aveo 2008", "chevrolet aveo 2011", "chevrolet blazer 1995", "chevrolet blazer 2022", "chevrolet bolt 2016", "chevrolet bolt ev 2021", "chevrolet camaro 1967", "chevrolet camaro 1970", "chevrolet camaro 1982", "chevrolet camaro 1993", "chevrolet camaro 2009", "chevrolet camaro 2013", "chevrolet camaro 2016", "chevrolet camaro 2018", "chevrolet camaro z28 1974", "chevrolet camaro z28 1997", "chevrolet camaro zl1 2012", "chevrolet captiva 2006", "chevrolet captiva 2011", "chevrolet cavalier 1994", "chevrolet cavalier 2003", "chevrolet celta 2000", "chevrolet chevelle 1963", "chevrolet chevelle 1969", "chevrolet classic 2002", "chevrolet cobalt 2004", "chevrolet cobalt 2008", "chevrolet cobalt 2011", "chevrolet colorado 2003", "chevrolet colorado 2009", "chevrolet colorado 2015", "chevrolet corvette c1 1956", "chevrolet corvette c2 1963", "chevrolet corvette c3 1969", 
"chevrolet corvette c4 1983", "chevrolet corvette c5 1997", "chevrolet corvette c6 2004", "chevrolet corvette c7 2013", "chevrolet corvette c8 2019", "chevrolet cruze 2009", "chevrolet cruze 2016", "chevrolet epica 2006", "chevrolet equinox 2004", "chevrolet equinox 2009", "chevrolet equinox 2017", "chevrolet equinox 2020", "chevrolet equinox ev 2022", "chevrolet evanda 2004", "chevrolet express 1995", "chevrolet express 2002", "chevrolet express 2008", "chevrolet hhr 2005", "chevrolet impala 1958", "chevrolet impala 1966", "chevrolet impala 1999", "chevrolet impala 2005", "chevrolet impala 2013", "chevrolet malibu 1996", "chevrolet malibu 2003", "chevrolet malibu 2008", "chevrolet malibu 2012", "chevrolet malibu 2016", "chevrolet malibu 2018", "chevrolet nomad 1955", "chevrolet nomad 1957", "chevrolet nubira 2002", "chevrolet nubira 2004", "chevrolet onix 2012", "chevrolet orlando 2010", "chevrolet prisma 2013", "chevrolet s-10 1997", "chevrolet s-10 2000", "chevrolet ss 2013", "chevrolet ssr 2003", "chevrolet silverado 2007", "chevrolet silverado 2013", "chevrolet silverado 2018", "chevrolet silverado 2023", "chevrolet sonic 2011", "chevrolet sonic 2016", "chevrolet spark 2005", "chevrolet spark 2013", "chevrolet spark 2016", "chevrolet suburban 1999", "chevrolet suburban 2006", "chevrolet suburban 2014", "chevrolet tahoe 1991", "chevrolet tahoe 2006", "chevrolet tahoe 2008", "chevrolet tahoe 2014", "chevrolet tahoe 2020", "chevrolet tracker 1999", "chevrolet tracker 2013", "chevrolet trailblazer 2000", "chevrolet trailblazer 2008", "chevrolet trailblazer 2012", "chevrolet trailblazer 2020", "chevrolet trailblazer 2023", "chevrolet traverse 2008", "chevrolet traverse 2012", "chevrolet traverse 2017", "chevrolet trax 2017", "chevrolet trax 2022", "chevrolet venture 1996", "chevrolet volt 2011", "chevrolet volt 2016", "citroen 2cv 1949", "citroen ami 2020", "citroen ax 1986", "citroen ax 1991", "citroen axel 1985", "citroen bx 1983", "citroen bx 1989", "citroen berlingo 1996", "citroen berlingo 2002", "citroen berlingo 2008", "citroen berlingo 2012", "citroen berlingo 2015", "citroen berlingo 2018", "citroen c-crosser 2007", "citroen c-zero 2010", "citroen c1 2005", "citroen c1 2012", "citroen c2 2003", "citroen c2 2008", "citroen c3 2002", "citroen c3 2005", "citroen c3 2009", "citroen c3 2013", "citroen c3 2016", "citroen c3 2019", "citroen c4 2004", "citroen c4 2007", "citroen c4 2010", "citroen c4 2020", "citroen c4 aircross 2012", "citroen c4 cactus 2014", "citroen c4 picasso 2016", "citroen c4 x 2022", "citroen c5 2001", "citroen c5 2004", "citroen c5 2008", "citroen c5 2014", "citroen c5 x 2021", "citroen c6 2005", "citroen c8 2002", "citroen c8 2008", "citroen cx 1974", "citroen cx 1982", "citroen ds 3 2009", "citroen ds19 1955", "citroen ds21 1968", "citroen ds23 1973", "citroen ds4 2010", "citroen ds5 2011", "citroen dyane 1969", "citroen e-mehari 2016", "citroen evasion 1994", "citroen evasion 1998", "citroen gsa 1979", "citroen nemo combi 2008", "citroen sm 1970", "citroen saxo 1996", "citroen visa 1978", "citroen xm 1989", "citroen xm 1994", "citroen xantia 1993", "citroen xantia 1998", "citroen xsara 1997", "citroen xsara 2000", "citroen xsara picasso 2004", "citroen zx 1991", "citroen zx 1996", "datsun go 2013", "datsun go 2018", "datsun mi-do 2014", "delage d12", "dodge aries 1981", "dodge avenger 2007", "dodge avenger 2010", "dodge caliber 2006", "dodge caravan 1983", "dodge caravan 1995", "dodge caravan 2001", "dodge caravan 2007", "dodge challenger 1969", "dodge 
challenger 2007", "dodge challenger 2010", "dodge challenger 2015", "dodge charger 1966", "dodge charger 1968", "dodge charger 1971", "dodge charger 1974", "dodge charger 1981", "dodge charger 2005", "dodge charger 2015", "dodge charger srt 2019", "dodge dakota 1996", "dodge dakota 2004", "dodge dart 2012", "dodge durango 1997", "dodge durango 2004", "dodge durango 2013", "dodge durango 2020", "dodge hornet 2022", "dodge journey 2008", "dodge journey 2011", "dodge journey 2014", "dodge magnum 2004", "dodge magnum 2007", "dodge neon 1994", "dodge neon 1999", "dodge neon 2003", "dodge nitro 2006", "dodge polara 1962", "dodge ram 1500 2013", "dodge ram 1500 2015", "dodge ram 1500 2018", "dodge ram 1500 rev 2023", "dodge ram 1500 trx 2020", "dodge stratus 1994", "dodge stratus 2001", "dodge viper 2003", "dodge viper 2007", "dodge viper 2012", "dodge viper rt 1991", "delorean dmc-12", "donkervoort d8", "ferrari 195 1950", "ferrari 212 1951", "ferrari 250 europa 1953", "ferrari 250 gto 1954", "ferrari 275 1964", "ferrari 288 1984", "ferrari 296 gts 2022", "ferrari 308 1982", "ferrari 328 1985", "ferrari 348 1989", "ferrari 360 modena 1999", "ferrari 365 1968", "ferrari 400 1976", "ferrari 412 1985", "ferrari 456 1992", "ferrari 456 m gt 1998", "ferrari 458 2009", "ferrari 458 speciale 2014", "ferrari 488 2015", "ferrari 512 1992", "ferrari 512 m 1994", "ferrari 550 1996", "ferrari 575m 2002", "ferrari 599 2006", "ferrari 612 2004", "ferrari 812 2017", "ferrari california 2014", "ferrari daytona sp3 2021", "ferrari dino 206 1968", "ferrari dino 208 1975", "ferrari dino 246 1969", "ferrari dino 308 1973", "ferrari enzo 2002", "ferrari f12 2012", "ferrari f149 2008", "ferrari f355 1994", "ferrari f40 1987", "ferrari f430 2004", "ferrari f50 1995", "ferrari f60 2014", "ferrari f8 2019", "ferrari ff 2011", "ferrari fxx 2005", "ferrari gtc4lusso 2016", "ferrari laferrari 2013", "ferrari mondial 1983", "ferrari mondial 8 1981", "ferrari mondial t 1989", "ferrari portofino 2017", "ferrari portofino m 2020", "ferrari purosangue 2022", "ferrari roma 2019", "ferrari roma spider 2023", "ferrari sp1 2018", "ferrari testarossa 1984", "fiat 1100 1955", "fiat 1100 s 1947", "fiat 1200 1957", "fiat 124 1966", "fiat 124 1972", "fiat 124 abarth 2018", "fiat 125 1967", "fiat 126 1972", "fiat 127 1971", "fiat 128 1969", "fiat 130 1971", "fiat 132 1972", "fiat 1400 1950", "fiat 1500 1961", "fiat 1800 1959", "fiat 2300 1961", "fiat 500 1957", "fiat 500 1965", "fiat 500 1972", "fiat 500 2007", "fiat 500c abarth 2012", "fiat 500x 2015", "fiat 500e 2013", "fiat 500e 2020", "fiat 600 1955", "fiat 600 2005", "fiat 850 1965", "fiat 8v 1952", "fiat argenta 1983", "fiat argo 2017", "fiat barchetta 1995", "fiat bravo 1995", "fiat bravo 2007", "fiat campagnola 1974", "fiat campagnola a 1955", "fiat cinquecento 1992", "fiat coupe 1994", "fiat croma 1991", "fiat dino 1967", "fiat doblo 2001", "fiat doblo 2005", "fiat doblo 2010", "fiat doblo 2015", "fiat freemont 2011", "fiat freemont cross 2015", "fiat fullback 2016", "fiat idea 2003", "fiat idea 2010", "fiat linea 2006", "fiat marea 1996", "fiat mille 1983", "fiat multipla 1998", "fiat multipla 2004", "fiat panda 1981", "fiat panda 1986", "fiat panda 2003", "fiat panda 2011", "fiat panda city cross 2017", "fiat punto 1994", "fiat punto 1999", "fiat punto 2012", "fiat qubo 2008", "fiat regata 1984", "fiat ritmo 1978", "fiat sedici 2006", "fiat sedici 2009", "fiat seicento 1998", "fiat seicento 2004", "fiat siena 2002", "fiat siena 2005", "fiat stilo 2001", "fiat stilo 2006", 
"fiat tipo 1988", "fiat tipo 2015", "fiat tipo 2020", "fiat topolino 2023", "fiat ulysse 1994", "fiat uno 1983", "fiat uno 1989", "fiat uno 2010", "fiat x1 1972", "ford anglia 100e 1953", "ford anglia 105e 1959", "ford bronco 1966", "ford bronco 1978", "ford bronco 1980", "ford bronco 1987", "ford bronco 1992", "ford bronco 2020", "ford bronco raptor 2022", "ford bullitt 2019", "ford capri 1969", "ford capri 1978", "ford consul 1950", "ford cortina 1962", "ford cortina 1966", "ford cortina 1970", "ford cortina 1976", "ford cougar 1998", "ford crestliner 1949", "ford crown victoria 1998", "ford e-tourneo 2023", "ford ecosport 2004", "ford ecosport 2017", "ford econovan 1999", "ford ecosport 2013", "ford edge 2006", "ford edge 2010", "ford edge 2015", "ford edge 2018", "ford escape 2000", "ford escape 2007", "ford escape 2012", "ford escape 2016", "ford escape 2019", "ford escape 2022", "ford escort 1980", "ford escort 1992", "ford escort 1993", "ford escort 1995", "ford everest 2003", "ford everest 2007", "ford everest 2015", "ford everest 2018", "ford everest 2022", "ford excursion 2000", "ford expedition 1996", "ford expedition 2002", "ford expedition 2007", "ford expedition 2014", "ford expedition 2017", "ford expedition 2021", "ford explorer 2002", "ford explorer 2005", "ford explorer 2010", "ford explorer 2015", "ford explorer 2019", "ford explorer 2023", "ford f-150 1996", "ford f-150 2001", "ford f-150 2004", "ford f-150 2009", "ford f-150 2014", "ford f-150 2018", "ford f-150 2020", "ford f-150 lightning 2022", "ford f-150 raptor 2021", "ford f-150 svt raptor 2017", "ford fiesta 1976", "ford fiesta 1983", "ford fiesta 1989", "ford fiesta 1995", "ford fiesta 1999", "ford fiesta 2002", "ford fiesta 2005", "ford fiesta 2008", "ford fiesta 2011", "ford fiesta 2013", "ford fiesta 2016", "ford fiesta 2021", "ford figo 2010", "ford figo 2012", "ford figo 2015", "ford figo 2018", "ford five hundred 2004", "ford flex 2009", "ford flex 2012", "ford focus 1998", "ford focus 2001", "ford focus 2005", "ford focus 2007", "ford focus 2011", "ford focus 2014", "ford focus 2016", "ford focus 2018", "ford focus 2021", "ford freestar 2003", "ford fusion 2002", "ford fusion 2005", "ford fusion 2008", "ford fusion 2012", "ford fusion 2016", "ford fusion 2018", "ford gt 2004", "ford gt 2017", "ford galaxy 1995", "ford galaxy 2000", "ford galaxy 2006", "ford galaxy 2016", "ford ka 1997", "ford ka 2003", "ford ka 2008", "ford ka 2016", "ford ka 2018", "ford maverick 1993", "ford maverick 1996", "ford maverick 2021", "ford max 2006", "ford max 2010", "ford max 2015", "ford max 2019", "ford mondeo 1993", "ford mondeo 1996", "ford mondeo 2000", "ford mondeo 2003", "ford mondeo 2005", "ford mondeo 2007", "ford mondeo 2010", "ford mondeo 2014", "ford mondeo 2019", "ford mustang 1964", "ford mustang 1968", "ford mustang 1969", "ford mustang 1978", "ford mustang 1981", "ford mustang 1998", "ford mustang 2004", "ford mustang 2009", "ford mustang 2014", "ford mustang 2017", "ford mustang dark horse 2023", "ford mustang gt 2023", "ford mustang gt 350 shelby 1965", "ford mustang mach-e 2020", "ford mustang shelby gt350 2015", "ford mustang shelby gt500 2009", "ford mustang shelby gt500 2012", "ford mustang shelby gt500 2019", "ford orion 1990", "ford pinto 1971", "ford probe 1994", "ford puma 1998", "ford puma 2019", "ford ranger 2000", "ford ranger 2008", "ford ranger 2011", "ford ranger 2015", "ford ranger 2018", "ford ranger 2021", "ford ranger 2023", "ford scorpio 1990", "ford scorpio 1994", "ford sierra 1990", 
"ford taunus 12m 1952", "ford taurus 1995", "ford taurus 1999", "ford taurus 2007", "ford taurus 2009", "ford taurus 2012", "ford thunderbird 1955", "ford thunderbird 1957", "ford thunderbird 1964", "ford thunderbird 1972", "ford thunderbird 1977", "ford thunderbird 1980", "ford thunderbird 1989", "ford thunderbird 2001", "ford tourneo 2003", "ford tourneo 2007", "ford tourneo 2009", "ford tourneo 2013", "ford tourneo 2017", "ford transit 2018", "ford windstar 1998", "ford zodiac 1962", "ford zodiac 1966", "gmc acadia 2007", "gmc acadia 2012", "gmc acadia 2016", "gmc acadia 2019", "gmc canyon 2004", "gmc canyon 2014", "gmc envoy 2001", "gmc envoy 2008", "gmc hummer ev 2021", "gmc jimmy 1997", "gmc safari 1994", "gmc savana 1995", "gmc savana 2003", "gmc savana 2013", "gmc sierra 2007", "gmc sierra 2008", "gmc sierra 2013", "gmc sierra 2018", "gmc sierra 2022", "gmc terrain 2009", "gmc terrain 2017", "gmc terrain 2021", "gmc yukon 1999", "gmc yukon 2008", "gmc yukon 2014", "gmc yukon 2020", "honda 1300 1969", "honda accord 1998", "honda accord 2001", "honda accord 1976", "honda accord 1981", "honda accord 1989", "honda accord 1993", "honda accord 1996", "honda accord 1999", "honda accord 2003", "honda accord 2006", "honda accord 2008", "honda accord 2012", "honda accord 2017", "honda accord 2020", "honda accord 2022", "honda avancier 1999", "honda beat 1991", "honda cr-v 1996", "honda cr-v 2002", "honda cr-v 2007", "honda cr-v 2010", "honda cr-v 2014", "honda cr-v 2016", "honda cr-v 2019", "honda cr-v 2022", "honda cr-z 2010", "honda city 1983", "honda civic 1972", "honda civic 1979", "honda civic 1983", "honda civic 1987", "honda civic 1991", "honda civic 1995", "honda civic 2000", "honda civic 2005", "honda civic 2008", "honda civic 2012", "honda civic 2015", "honda civic 2021", "honda civic type r 2022", "honda concerto 1990", "honda crosstour 2013", "honda element 2003", "honda fcx clarity 2007", "honda fit 2002", "honda fit 2004", "honda fit 2008", "honda fit 2017", "honda fit 2020", "honda fit ev 2012", "honda hr-v 1999", "honda hr-v 2001", "honda hr-v 2014", "honda hr-v 2018", "honda hr-v 2021", "honda insight 1999", "honda insight 2009", "honda insight 2018", "honda jazz 2013", "honda l700 1965", "honda mobilio 2001", "honda mobilio 2004", "honda n360 1967", "honda n600 1969", "honda nsx 1991", "honda nsx 2002", "honda nsx 2016", "honda odyssey 2005", "honda odyssey 2008", "honda odyssey 2011", "honda odyssey 2017", "honda odyssey 2020", "honda passport 2018", "honda pilot 2009", "honda pilot 2016", "honda pilot 2018", "honda pilot 2022", "honda prelude 1979", "honda prelude 1983", "honda prelude 1987", "honda prelude 1992", "honda prelude 1996", "honda ridgeline 2005", "honda ridgeline 2009", "honda ridgeline 2016", "honda ridgeline 2021", "honda s2000 1999", "honda s500 1963", "honda s660 2015", "honda s800 1966", "honda shuttle 1998", "honda stream 2000", "honda stream 2003", "honda thats 2002", "honda e 2020", "honda insight 2012", "hyundai accent 1999", "hyundai accent 2003", "hyundai accent 2006", "hyundai accent 2011", "hyundai accent 2017", "hyundai atos 1998", "hyundai atos 2003", "hyundai atos 2005", "hyundai azera 2012", "hyundai bayon 2021", "hyundai coupe 1996", "hyundai coupe 2001", "hyundai coupe 2004", "hyundai coupe 2007", "hyundai elantra 2000", "hyundai elantra 2003", "hyundai elantra 2006", "hyundai elantra 2010", "hyundai elantra 2016", "hyundai elantra 2020", "hyundai equus 2010", "hyundai excel 1989", "hyundai excel 1998", "hyundai genesis 2008", "hyundai 
genesis 2014", "hyundai getz 2002", "hyundai getz 2005", "hyundai grandeur 2005", "hyundai grandeur 2022", "hyundai ioniq 2016", "hyundai ioniq 2019", "hyundai ioniq 5 2021", "hyundai ioniq 6 2022", "hyundai kona 2017", "hyundai kona 2021", "hyundai kona 2023", "hyundai kona electric 2018", "hyundai lantra 1995", "hyundai matrix 2001", "hyundai nexo 2018", "hyundai palisade 2019", "hyundai palisade 2022", "hyundai pony 1989", "hyundai santa cruz 2021", "hyundai santa fe 2000", "hyundai santa fe 2004", "hyundai santa fe 2009", "hyundai santa fe 2012", "hyundai santa fe 2016", "hyundai santa fe 2020", "hyundai santa fe 2023", "hyundai sonata 1989", "hyundai sonata 1996", "hyundai sonata 2001", "hyundai sonata 2004", "hyundai sonata 2009", "hyundai sonata 2014", "hyundai sonata 2017", "hyundai sonata 2019", "hyundai sonata 2023", "hyundai staria 2021", "hyundai terracan 2001", "hyundai terracan 2004", "hyundai trajet 2000", "hyundai trajet 2004", "hyundai tucson 2004", "hyundai tucson 2009", "hyundai tucson 2016", "hyundai tucson 2018", "hyundai tucson 2020", "hyundai veloster 2011", "hyundai veloster 2018", "hyundai venue 2019", "hyundai veracruz 2009", "hyundai xg 1999", "hyundai xg 2003", "hyundai i10 2008", "hyundai i10 2014", "hyundai i10 2016", "hyundai i10 2019", "hyundai i20 2009", "hyundai i20 2014", "hyundai i20 2018", "hyundai i30 2007", "hyundai i30 2012", "hyundai i30 2018", "hyundai i30 2020", "hyundai i40 2011", "hyundai i40 2018", "hyundai i800 2008", "hyundai ix20 2010", "hyundai ix20 2015", "infiniti ex 2007", "infiniti ex37 2013", "infiniti fx 2002", "infiniti fx 2008", "infiniti g20 1991", "infiniti g20 1999", "infiniti g25 2011", "infiniti g35 2001", "infiniti g35 2006", "infiniti g37 2008", "infiniti i30 1995", "infiniti i30 1999", "infiniti ipl g 2011", "infiniti j30 1993", "infiniti jx 2012", "infiniti m 2010", "infiniti m30 1990", "infiniti m45 2003", "infiniti q30 2015", "infiniti q45 1989", "infiniti q45 1996", "infiniti q45 2001", "infiniti q60 2016", "infiniti q60 2018", "infiniti q70 2013", "infiniti qx 2010", "infiniti qx4 1997", "infiniti qx50 2016", "infiniti qx50 2020", "infiniti qx55 2021", "infiniti qx56 2004", "infiniti qx80 2014", "isuzu 117 coupe 1968", "isuzu amigo 1997", "isuzu ascender 2001", "isuzu axiom 2001", "isuzu d-max double cab 2012", "isuzu hombre 1995", "isuzu mu-7 2004", "isuzu rodeo 2002", "isuzu trooper 1998", "isuzu i-series 2005", "jaguar f-pace 2015", "jaguar f-type 2014", "jaguar f-type 2016", "jaguar f-type 2020", "jaguar i-pace 2018", "jaguar s-type 1999", "jaguar x-type 2004", "jaguar xe 2019", "jaguar xf (x260) 2020", "jaguar xf 2015", "jaguar xfr 2009", "jaguar xfr 2011", "jaguar xj 1979", "jaguar xj 1986", "jaguar xj 1994", "jaguar xj 2007", "jaguar xj 2012", "jaguar xjr 1994", "jaguar xjr 2013", "jaguar xjr15 1990", "jaguar xjr575 2017", "jaguar xk 2006", "jaguar xk8 (x100) 1996", "jaguar xkr 2009", "jeep avenger 2022", "jeep cherokee 1984", "jeep cherokee 1997", "jeep cherokee 2013", "jeep cherokee 2018", "jeep commander 2005", "jeep compass 2006", "jeep compass 2011", "jeep compass 2016", "jeep compass 2021", "jeep gladiator 2020", "jeep grand cherokee 1992", "jeep grand cherokee 2003", "jeep grand cherokee 2005", "jeep grand cherokee 2010", "jeep grand cherokee l 2020", "jeep grand cherokee srt 2013", "jeep grand cherokee srt-8 2006", "jeep liberty 2001", "jeep liberty 2005", "jeep liberty 2007", "jeep patriot 2007", "jeep renegade 2014", "jeep renegade 2018", "jeep trackhawk 2018", "jeep wagoneer 1963", "jeep wagoneer 
2021", "jeep wrangler 1987", "jeep wrangler 1996", "jeep wrangler 2006", "jeep wrangler rubicon 2018", "jeep wrangler unlimited 2012", "kia borrego 2009", "kia carens 2000", "kia carens 2002", "kia carens 2008", "kia carens 2013", "kia carens 2016", "kia carnival 2021", "kia ceed 2012", "kia ceed 2018", "kia ceed 2021", "kia ev6 2021", "kia ev9 2023", "kia forte 2009", "kia forte 2013", "kia forte 2016", "kia forte 2018", "kia forte 2021", "kia joice 1999", "kia k5 2019", "kia k7 2013", "kia k7 2016", "kia k9 2012", "kia niro 2016", "kia niro 2019", "kia opirus 2003", "kia opirus 2007", "kia optima 2001", "kia optima 2003", "kia optima 2006", "kia optima 2008", "kia optima 2016", "kia optima 2018", "kia picanto 2004", "kia picanto 2007", "kia picanto 2011", "kia picanto 2015", "kia picanto 2017", "kia picanto 2023", "kia rio 2005", "kia rio 2011", "kia rio 2020", "kia rondo 2008", "kia sedona 2002", "kia sedona 2005", "kia sedona 2015", "kia sedona 2018", "kia seltos 2019", "kia shuma 1998", "kia shuma 2001", "kia sorento 2002", "kia sorento 2006", "kia sorento 2009", "kia sorento 2014", "kia sorento 2017", "kia sorento 2020", "kia soul 2008", "kia soul 2013", "kia soul 2018", "kia soul 2022", "kia spectra 2004", "kia spectra 2007", "kia sportage 2004", "kia sportage 2008", "kia sportage 2013", "kia sportage 2015", "kia sportage 2018", "kia sportage 2021", "kia stinger 2017", "kia stonic 2017", "kia telluride 2019", "kia venga 2009", "kia venga 2014", "kia ceed 2007", "kia ceed 2015", "ktm x-bow 2008", "ktm x-bow gt-xr 2023", "koenigsegg agera 2010", "koenigsegg cc8s 2002", "koenigsegg ccx 2006", "koenigsegg gemera 2020", "koenigsegg jesko 2019", "koenigsegg one1 2014", "koenigsegg regera 2015", "lamborghini 350 gt 1964", "lamborghini 400 gt 1965", "lamborghini aventador lp 700-4 2011", "lamborghini aventador lp750-4 sv 2015", "lamborghini aventador s roadster 2017", "lamborghini aventador svj 2018", "lamborghini centenario 2016", "lamborghini countach lp 400 1973", "lamborghini diablo 1990", "lamborghini espada 1968", "lamborghini gallardo 2003", "lamborghini gallardo lp 560-4 (2) 2012", "lamborghini gallardo lp 570-4 2010", "lamborghini huracan evo 2019", "lamborghini huracan lp 580-2 (rwd) 2016", "lamborghini huracan lp610-4 spyder 2015", "lamborghini huracan performante 2017", "lamborghini huracan sto 2020", "lamborghini huracan tecnica 2022", "lamborghini islero 1968", "lamborghini jarama 1970", "lamborghini jarama rallye 1972", "lamborghini lm 002 1986", "lamborghini lp780-4 ultimae 2022", "lamborghini miura 1966", "lamborghini murcielago 2001", "lamborghini murcielago lp 670-4 2009", "lamborghini reventon 2008", "lamborghini revuelto 2023", "lamborghini sian 2019", "lamborghini silhouette p300 1976", "lamborghini sterrato 2022", "lamborghini urraco 1972", "lamborghini urus 2018", "lamborghini veneno 2013", "land rover defender 90 1991", "land rover defender 90 2007", "land rover defender 90 2012", "land rover defender 90 2019", "land rover discovery 1990", "land rover discovery 2002", "land rover discovery 2013", "land rover discovery 2017", "land rover freelander 1998", "land rover freelander 2003", "land rover freelander 2006", "land rover range rover 1988", "land rover range rover 1994", "land rover range rover 2002", "land rover range rover 2009", "land rover range rover 2017", "land rover range rover 2021", "land rover sv coupe 2018", "lexus ct 2011", "lexus ct 2014", "lexus es 2002", "lexus es 2006", "lexus es 2012", "lexus es 2016", "lexus gs 1993", "lexus gs 1997", "lexus gs 
2005", "lexus gs 2012", "lexus gs 2015", "lexus gx 2003", "lexus gx 2013", "lexus gx 2019", "lexus hs 2009", "lexus is 1998", "lexus is 2005", "lexus is 2014", "lexus is 2020", "lexus is f 2008", "lexus lbx 2023", "lexus lc 2021", "lexus lfa 2010", "lexus lm 2023", "lexus ls 1990", "lexus ls 1997", "lexus ls 2000", "lexus ls 2006", "lexus ls 2012", "lexus ls 2017", "lexus ls 2020", "lexus lx 1996", "lexus lx 2008", "lexus lx 2015", "lexus lx 2021", "lexus nx 2014", "lexus nx 2021", "lexus rc 2014", "lexus rc 2018", "lexus rx 1998", "lexus rx 2008", "lexus rx 2012", "lexus rx 2016", "lexus rx 2019", "lexus rx 2022", "lexus rz 2022", "lexus sc 1991", "lexus sc 2001", "lexus tx 2023", "lexus ux 2018", "lincoln aviator 2002", "lincoln continental (first gen) 1958", "lincoln continental (fourth gen) 1980", "lincoln continental (second gen) 1961", "lincoln continental (seventh gen) 1995", "lincoln continental (sixth gen) 1988", "lincoln continental (third gen) 1970", "lincoln continental 2016", "lincoln corsair 2019", "lincoln corsair 2022", "lincoln ls 2000", "lincoln mkc 2014", "lincoln mkc 2018", "lincoln mks 2009", "lincoln mkt 2009", "lincoln mkx 2006", "lincoln mkx 2011", "lincoln mkx 2016", "lincoln mkz 2006", "lincoln mkz 2010", "lincoln mkz 2016", "lincoln mark lt 2005", "lincoln mark lt 2009", "lincoln nautilus 2018", "lincoln nautilus 2023", "lincoln navigator 1998", "lincoln navigator 2006", "lincoln navigator 2014", "lincoln navigator 2018", "lincoln town car 1998", "lincoln town car 2003", "lincoln zephyr 2005", "lincoln zephyr 2021", "lincoln zephyr fastback 1936", "lotus 2 eleven 2007", "lotus 3 eleven 430 2018", "lotus elan roadster 1962", "lotus elan roadster 1989", "lotus eletre 2022", "lotus elise 1997", "lotus elise 2001", "lotus elise 2010", "lotus elise cup 2017", "lotus elite 1957", "lotus emira 2021", "lotus esprit 1976", "lotus europa 1965", "lotus europa s 2006", "lotus evija 2020", "lotus evora 2008", "lotus evora 400 2015", "lotus evora gt 2019", "lotus evora gt430 2017", "lotus excel 1982", "lotus exige 2000", "lotus exige 2008", "lotus exige 410 sport 2017", "lotus exige s 2012", "lotus sport 2021", "lexus es 2023", "lucid motors air", "maserati 3200 gt 1998", "maserati bora 1971", "maserati folgore 2022", "maserati ghibli 1967", "maserati ghibli 2013", "maserati ghibli 2017", "maserati ghibli 2020", "maserati grancabrio 2010", "maserati grancabrio 2017", "maserati granturismo 2007", "maserati granturismo mc stradale 2011", "maserati granturismo s 2008", "maserati grecale folgore 2023", "maserati grecale gt 2022", "maserati levante 2016", "maserati mc 12 2004", "maserati mc20 2020", "maserati modena 2022", "maserati quattroporte i 1963", "maserati quattroporte ii 1974", "maserati quattroporte iv 1994", "maserati quattroporte sport gt s 2009", "maserati quattroporte v 2003", "maserati quattroporte vi 2016", "maserati spyder 2001", "maserati trofeo 2022", "mazda 121 1987", "mazda 121 1991", "mazda 2 2002", "mazda 2 2007", "mazda 2 2014", "mazda 2 2019", "mazda 3 2004", "mazda 3 2009", "mazda 3 2013", "mazda 3 2016", "mazda 3 2018", "mazda 323 1986", "mazda 323 1994", "mazda 5 2005", "mazda 5 2008", "mazda 5 2010", "mazda 6 2002", "mazda 6 2005", "mazda 6 2013", "mazda 6 2018", "mazda 626 1988", "mazda 626 1991", "mazda 626 1997", "mazda b series 1999", "mazda bt-50 2006", "mazda bt-50 2011", "mazda bt-50 2015", "mazda bt-50 2018", "mazda biante 2008", "mazda cx-3 2015", "mazda cx-30 2019", "mazda cx-5 2012", "mazda cx-5 2015", "mazda cx-50 2021", "mazda cx-7 2007", 
"mazda cx-7 2009", "mazda cx-8 2017", "mazda cx-9 2007", "mazda cx-9 2013", "mazda cx-9 2016", "mazda flair 2012", "mazda mx-3 1991", "mazda mx-30 2019", "mazda mx-5 1989", "mazda mx-5 1998", "mazda mx-5 2005", "mazda mx-5 2008", "mazda mx-5 2012", "mazda mx-5 2015", "mazda mx-6 1992", "mazda rx-2 1970", "mazda rx-3 1971", "mazda rx-7 (fc) 1985", "mazda rx-7 (fd) 1992", "mazda rx-8 2003", "mazda rx-8 2008", "mazda tribute 2001", "mazda tribute 2007", "mazda verisa 2004", "mazda xedos 1992", "mazda xedos 2001", "mclaren 540c 2015", "mclaren 570s 2016", "mclaren 600lt 2018", "mclaren 620r 2019", "mclaren 650s 2014", "mclaren 675lt 2015", "mclaren 720s 2017", "mclaren 750s 2022", "mclaren 765lt 2020", "mclaren artura 2021", "mclaren elva 2020", "mclaren f1 1993", "mclaren gt 2019", "mclaren mp4-12c 2011", "mclaren p1 2013", "mclaren senna 2018", "mclaren speedtail 2018", "mercedes benz 170 vk 1938", "mercedes benz 190 (w201) 1982", "mercedes benz 190 e 2.5-16 evolution ii 1990", "mercedes benz 300 sel 6.3 (w109) 1967", "mercedes benz 300 sl coupe (w198) 1954", "mercedes benz 300 sl roadster (w198) 1957", "mercedes benz 500 e (w124) 1991", "mercedes benz 600 (w100) 1964", "mercedes benz a-class 2022", "mercedes benz a-class sedan (v177) 2018", "mercedes benz a-klasse (w168) 1997", "mercedes benz a-klasse (w168) 2001", "mercedes benz a-klasse (w169) 2004", "mercedes benz a-klasse (w169) 2008", "mercedes benz a-klasse (w176) 2012", "mercedes benz a-klasse (w176) 2015", "mercedes benz a45 amg (w176) 2013", "mercedes benz b-class (w247) 2018", "mercedes benz b-class 2022", "mercedes benz b-klasse (w245) 2005", "mercedes benz b-klasse (w246) 2012", "mercedes benz b-klasse 2008", "mercedes benz c 36 amg (w202) 1995", "mercedes benz c 43 amg (w202) 1997", "mercedes benz c 450 amg t-modell (s205) 2015", "mercedes benz c 55 amg (w203) 2004", "mercedes benz c 63 amg (w204) 2007", "mercedes benz c 63 amg (w204) 2011", "mercedes benz c-class (w205) 2014", "mercedes benz c-class (w206) 2021", "mercedes benz c-class all-terrain 2021", "mercedes benz c-class t-modell (s204) 2011", "mercedes benz c-class t-modell (s205) 2018", "mercedes benz c-klasse (w202) 1993", "mercedes benz c-klasse (w203) 2000", "mercedes benz c-klasse (w204) 2007", "mercedes benz c-klasse amg (c203) 2002", "mercedes benz c-klasse t-modell (s202) 1996", "mercedes benz ce (c124) 1987", "mercedes benz cl (c216) 2010", "mercedes benz cl 55 amg (c215) 2000", "mercedes benz cl 55 amg (c215) 2002", "mercedes benz cl 63 amg (c216) 2012", "mercedes benz cl 65 amg (c215) 2003", "mercedes benz cl 65 amg (c216) 2011", "mercedes benz cl 65 amg 40th anniversary (c216) 2007", "mercedes benz cl coupe (c140) 1996", "mercedes benz cla (c117) 2013", "mercedes benz cla (c117) 2016", "mercedes benz cla 2023", "mercedes benz cla coupe (c118) 2019", "mercedes benz cla shooting brake (c118) 2019", "mercedes benz clc (w203) 2008", "mercedes benz clk (c208) 1997", "mercedes benz clk (c208) 1999", "mercedes benz clk (c209) 2002", "mercedes benz clk 55 amg (c208) 1999", "mercedes benz clk 55 amg (c209) 2003", "mercedes benz clk cabrio (a208) 1998", "mercedes benz clk dtm amg (a209) 2006", "mercedes benz clk dtm amg (c209) 2004", "mercedes benz clk gtr amg 1998", "mercedes benz cls 2021", "mercedes benz cls class (c218) 2014", "mercedes benz cls class (c257) 2018", "mercedes benz cls shooting brake (x218) 2012", "mercedes benz cls shooting brake (x218) 2014", "mercedes benz cls-klasse (c218) 2010", "mercedes benz cls-klasse (c219) 2004", "mercedes benz cls-klasse 
(c219) 2008", "mercedes benz cabriolet b (w150) 1938", "mercedes benz citan 2021", "mercedes benz coupe (c114) 1969", "mercedes benz coupe (w111) 1961", "mercedes benz e 50 amg (w210) 1996", "mercedes benz e 55 amg (w210) 1997", "mercedes benz e 55 amg (w211) 2002", "mercedes benz e 55 amg t-modell (s211) 2004", "mercedes benz e 63 amg (w211) 2006", "mercedes benz e-class (a238) 2016", "mercedes benz e-class (s213) 2016", "mercedes benz e-class (w213) 2020", "mercedes benz e-klasse (a207) 2009", "mercedes benz e-klasse (w110) 1961", "mercedes benz e-klasse (w114) 1968", "mercedes benz e-klasse (w120) 1953", "mercedes benz e-klasse (w123) 1975", "mercedes benz e-klasse (w124) 1993", "mercedes benz e-klasse (w210) 1995", "mercedes benz e-klasse (w210) 1999", "mercedes benz e-klasse (w212) 2009", "mercedes benz e-klasse (w212) 2013", "mercedes benz e-klasse coupe (c207) 2013", "mercedes benz e-klasse t-modell (s210) 1999", "mercedes benz e-klasse t-modell (s211) 2003", "mercedes benz eqa 2021", "mercedes benz eqc 400 2019", "mercedes benz eqe 2021", "mercedes benz eqs 2022", "mercedes benz eqv 2019", "mercedes benz g 55 amg (w453) 2004", "mercedes benz g 55 amg (w463) 1999", "mercedes benz g 55 amg (w463) 2006", "mercedes benz g-class (w464) 2018", "mercedes benz g-class 4x4 squared (w463) 2015", "mercedes benz g-klasse (w460) 1979", "mercedes benz g-klasse (w463) 1989", "mercedes benz g-klasse (w463) 2007", "mercedes benz g-klasse (w463) 2012", "mercedes benz g-klasse kurz (w463) 2000", "mercedes benz gl 63 amg (x166) 2012", "mercedes benz gl-klasse (x164) 2009", "mercedes benz gl-klasse (x165) 2012", "mercedes benz gla (x156) 2013", "mercedes benz gla (x156) 2017", "mercedes benz gla 2020", "mercedes benz gla 2023", "mercedes benz glc (x253) 2015", "mercedes benz glc (x254) 2022", "mercedes benz glc coupe (c253) 2016", "mercedes benz gle (c292) 2015", "mercedes benz gle 2023", "mercedes benz gle coupe 2019", "mercedes benz glk 2012", "mercedes benz glk-klasse (x204) 2008", "mercedes benz gls (x167) 2019", "mercedes benz gls 2023", "mercedes benz gls-class (x166) 2016", "mercedes benz ml 63 amg (w166) 2011", "mercedes benz ml-klasse (w163) 2001", "mercedes benz ml-klasse (w164) 2008", "mercedes benz maybach 2023", "mercedes benz r-klasse (w251) 2005", "mercedes benz r-klasse (w251) 2010", "mercedes benz s 65 amg (w220) 2004", "mercedes benz s 65 amg (w221) 2006", "mercedes benz s 65 coupe (c217) 2014", "mercedes benz s-class (w223) 2020", "mercedes benz s-class 2017", "mercedes benz s-class cabriolet (a217) 2016", "mercedes benz s-class maybach (x222) 2015", "mercedes benz s-class maybach (x222) 2018", "mercedes benz s-klasse (w108) 1965", "mercedes benz s-klasse (w111) 1959", "mercedes benz s-klasse (w116) 1972", "mercedes benz s-klasse (w126) 1979", "mercedes benz s-klasse (w140) 1991", "mercedes benz s-klasse (w180) 1954", "mercedes benz s-klasse (w221) 2005", "mercedes benz s-klasse landaulet (a140) 1997", "mercedes benz s-class (w222) 2013", "mercedes benz s600 pullman 2015", "mercedes benz s650 pullman maybach 2018", "mercedes benz sl (r107) 1971", "mercedes benz sl (r129) 1989", "mercedes benz sl (r129) 1995", "mercedes benz sl (r129) 1998", "mercedes benz sl (w113) 1963", "mercedes benz sl 55 amg (r230) 2002", "mercedes benz sl 55 amg (r230) 2006", "mercedes benz sl 63 amg (r231) 2016", "mercedes benz sl 65 amg (r230) 2006", "mercedes benz sl 65 amg (r231) 2012", "mercedes benz sl 65 amg 2008", "mercedes benz sl 73 amg (r129) 1999", "mercedes benz sl-klasse (r231) 2012", "mercedes 
benz slc (c107) 1972", "mercedes benz slc-class (r172) 2016", "mercedes benz slk (r170) 1996", "mercedes benz slk (r170) 2000", "mercedes benz slk (r171) 2004", "mercedes benz slk (r171) 2008", "mercedes benz slk (r172) 2011", "mercedes benz slk 55 amg (r172) 2012", "mercedes benz slr mclaren (c199) 2003", "mercedes benz slr mclaren 722 edition (c199) 2006", "mercedes benz slr mclaren roadster (c199) 2007", "mercedes benz slr stirling moss 2009", "mercedes benz sls amg (c197) 2010", "mercedes benz sls amg black series 2013", "mercedes benz sls amg gt 2012", "mercedes benz sls amg roadster (c197) 2011", "mercedes benz t-class 2022", "mercedes benz typ 170 (w136) 1946", "mercedes benz typ 190 (w121) 1955", "mercedes benz typ 220 (w187) 1951", "mercedes benz typ 300 (w186) 1951", "mercedes benz typ 540 k spezial-coupe (w29) 1939", "mercedes benz v-class (w447) 2014", "mercedes benz v-class 2019", "mercedes benz v-klasse (w638) 1996", "mercedes benz vaneo (w414) 2002", "mercedes benz viano 2003", "mercedes benz viano 2010", "mercedes benz vito tourer 2020", "mercedes benz x-class 2017", "mercury cougar 1998", "mercury grand marquis 2002", "mercury grand marquis 2008", "mercury marauder 2003", "mercury mariner 2004", "mercury mariner 2007", "mercury milan 2005", "mercury milan 2009", "mercury montego 2004", "mercury monterey 2004", "mercury mountaineer 2006", "mercury villager 1992", "mercury villager 1998", "mini clubman 2007", "mini clubman 2015", "mini clubvan 2012", "mini countryman 2010", "mini countryman 2016", "mini countryman 2020", "mini hatch 1997", "mini hatch 2014", "mini hatch 2018", "mini john cooper works gp 2019", "mini mini classic 1997", "mini paceman 2013", "mini roadster 2011", "mini s 2021", "mitsubishi 3000 gt 1990", "mitsubishi 3000 gt 1994", "mitsubishi asx 2010", "mitsubishi asx 2016", "mitsubishi asx 2019", "mitsubishi carisma 1995", "mitsubishi colt 1988", "mitsubishi colt 1992", "mitsubishi colt 1996", "mitsubishi colt 2004", "mitsubishi colt 2008", "mitsubishi colt 2023", "mitsubishi colt czc 2006", "mitsubishi eclipse 1990", "mitsubishi eclipse 1995", "mitsubishi eclipse 2000", "mitsubishi eclipse 2005", "mitsubishi eclipse 2009", "mitsubishi eclipse cross 2017", "mitsubishi endeavor 2002", "mitsubishi galant 1988", "mitsubishi galant 1993", "mitsubishi galant 1997", "mitsubishi galant 2004", "mitsubishi galant 2008", "mitsubishi galant vr-4 1997", "mitsubishi grandis 2005", "mitsubishi l200 1995", "mitsubishi l200 2005", "mitsubishi l200 2015", "mitsubishi l300 1986", "mitsubishi lancer 1988", "mitsubishi lancer 1994", "mitsubishi lancer 2000", "mitsubishi lancer 2007", "mitsubishi lancer 2016", "mitsubishi lancer evolution i 1992", "mitsubishi lancer evolution iv 1996", "mitsubishi lancer evolution ix 2005", "mitsubishi lancer evolution vi 1999", "mitsubishi lancer evolution viii 2003", "mitsubishi lancer evolution x 2008", "mitsubishi mirage 2012", "mitsubishi mirage 2019", "mitsubishi mirage g4 2013", "mitsubishi mirage gt 2016", "mitsubishi montero 1998", "mitsubishi outlander 2003", "mitsubishi outlander 2007", "mitsubishi outlander 2012", "mitsubishi outlander 2021", "mitsubishi pajero 1982", "mitsubishi pajero 1992", "mitsubishi pajero 2006", "mitsubishi raider 2005", "mitsubishi sigma 1991", "mitsubishi space runner 1991", "mitsubishi space star 2002", "mitsubishi starion 1982", "mitsubishi i-miev 2009", "mastretta mxt", "nissan 100 nx 1991", "nissan 200 sx 1989", "nissan 200 sx 1994", "nissan 300 zx 1984", "nissan 300 zx 1990", "nissan 350z 2002", "nissan 
350z 2006", "nissan 370z 2012", "nissan 370z nismo 2014", "nissan almera 2000", "nissan almera 1995", "nissan altima 2012", "nissan altima 2022", "nissan altima 2002", "nissan altima 2007", "nissan altima 2018", "nissan ariya 2020", "nissan armada 2003", "nissan armada 2008", "nissan armada 2016", "nissan armada 2020", "nissan bluebird 1986", "nissan cube 2008", "nissan frontier 2005", "nissan frontier 2009", "nissan frontier 2018", "nissan frontier 2021", "nissan gt-r (r35) 2007", "nissan gt-r 2016", "nissan gt-r 2023", "nissan gt-r nismo 2019", "nissan grand livina 2006", "nissan juke 2010", "nissan juke 2019", "nissan kicks 2016", "nissan kicks 2020", "nissan leaf 2010", "nissan leaf 2017", "nissan leaf 2022", "nissan liberty 1999", "nissan maxima 1989", "nissan maxima 1995", "nissan maxima 2000", "nissan maxima 2004", "nissan maxima 2009", "nissan maxima 2016", "nissan maxima 2018", "nissan micra 1982", "nissan micra 1989", "nissan micra 1998", "nissan micra 2003", "nissan micra 2007", "nissan micra 2013", "nissan micra 2017", "nissan micra 2020", "nissan murano 2003", "nissan murano 2008", "nissan murano 2015", "nissan murano 2018", "nissan np300 pickup 2008", "nissan nv200 2009", "nissan navara np300 2015", "nissan note 2005", "nissan note 2008", "nissan note 2013", "nissan note 2017", "nissan pathfinder 2001", "nissan pathfinder 2005", "nissan pathfinder 2012", "nissan pathfinder 2016", "nissan pathfinder 2021", "nissan patrol 1988", "nissan patrol 1998", "nissan patrol 2004", "nissan patrol 2010", "nissan patrol 2014", "nissan pixo 2009", "nissan platina 2006", "nissan prairie 1989", "nissan primera 1990", "nissan primera 1994", "nissan primera 1999", "nissan pulsar 2014", "nissan qashqai 2007", "nissan qashqai 2010", "nissan qashqai 2013", "nissan qashqai 2018", "nissan qashqai 2021", "nissan quest 2004", "nissan quest 2009", "nissan rogue 2007", "nissan rogue 2013", "nissan rogue 2016", "nissan rogue 2020", "nissan sakura 2022", "nissan sentra 2000", "nissan sentra 2004", "nissan sentra 2012", "nissan sentra 2015", "nissan sentra 2019", "nissan serena 1991", "nissan serena 1999", "nissan serena 2005", "nissan serena 2010", "nissan serena 2016", "nissan skyline 2001", "nissan skyline 2006", "nissan skyline gt-r (c110) 1972", "nissan skyline gt-r (pgc-10) 1969", "nissan skyline gt-r (r32) 1989", "nissan skyline gt-r (r33) 1995", "nissan skyline gt-r (r34) 1999", "nissan teana 2003", "nissan teana 2009", "nissan titan 2004", "nissan titan 2009", "nissan titan 2015", "nissan titan 2019", "nissan versa 2006", "nissan versa 2019", "nissan versa 2022", "nissan x-trail 2022", "nissan x-trail (t30) 2001", "nissan x-trail (t31) 2007", "nissan x-trail (t32) 2014", "nissan xterra 2002", "nissan xterra 2009", "nissan z 2021", "opel adam 2013", "opel ampera 2011", "opel ampera-e 2016", "opel antara 2007", "opel antara 2010", "opel astra 1991", "opel astra 1998", "opel astra 2004", "opel astra 2009", "opel astra 2015", "opel astra 2019", "opel astra 2021", "opel calibra 1989", "opel cascada 2013", "opel combo 2001", "opel combo 2011", "opel combo life 2018", "opel corsa 1993", "opel corsa 2000", "opel corsa 2003", "opel corsa 2006", "opel corsa 2014", "opel corsa 2019", "opel crossland 2020", "opel frontera 1992", "opel frontera 1998", "opel gt 1968", "opel gt 2007", "opel insignia 2008", "opel insignia 2017", "opel kadett 1984", "opel manta 1975", "opel meriva 2006", "opel meriva 2010", "opel mokka 2012", "opel mokka x 2016", "opel mokka-e 2020", "opel monterey 1992", "opel monza 1983", "opel 
omega 1986", "opel omega 1994", "opel omega 1999", "opel rekord 1977", "opel rekord 1982", "opel rocks-e 2021", "opel senator 1983", "opel senator 1987", "opel signum 2003", "opel sintra 1997", "opel speedster 2001", "opel tigra 1994", "opel vectra 1988", "opel vectra 1995", "opel vectra 2002", "opel vectra 2005", "opel zafira 2006", "pagani huarya imola 2020", "pagani huayra 2012", "pagani utopia 2022", "pagani zonda 2002", "pontiac aztek 2000", "pontiac bonneville 1958", "pontiac bonneville 1961", "pontiac bonneville 2000", "pontiac firebird 1967", "pontiac firebird 1970", "pontiac firebird 1982", "pontiac firebird 1994", "pontiac firebird 2000", "pontiac g3 2009", "pontiac g5 2004", "pontiac g6 2008", "pontiac g8 2007", "pontiac gto 1965", "pontiac gto 1970", "pontiac gto 2003", "pontiac grand am 1998", "pontiac grand prix 1990", "pontiac grand prix 2003", "pontiac lemans gto 1964", "pontiac montana 2000", "pontiac solstice 2005", "pontiac solstice 2008", "pontiac sunfire 1994", "pontiac sunfire 2001", "pontiac torrent 2005", "pontiac trans sport 1990", "pontiac vibe 2002", "porsche 718 boxster (982) 2016", "porsche 718 cayman (982c) 2016", "porsche 718 cayman 2018", "porsche 718 spyder (982) 2019", "porsche 911 (901) 1964", "porsche 911 (911) 2010", "porsche 911 gt2 (993) 1995", "porsche 911 gt2 (996) 2001", "porsche 911 gt2 (997) 2007", "porsche 911 gt2 rs (991) 2017", "porsche 911 gt2 rs (997) 2010", "porsche 911 gt3 (991) 2013", "porsche 911 gt3 (991.2) 2017", "porsche 911 gt3 (992) 2021", "porsche 911 gt3 (996) 1999", "porsche 911 gt3 (997) 2009", "porsche 911 gt3 rs (991.1) 2016", "porsche 911 gt3 rs (996) 2004", "porsche 911 gt3 rs (997.2) 2009", "porsche 911 turbo (930) 1974", "porsche 911 turbo (964) 1990", "porsche 911 turbo (991) 2013", "porsche 911 turbo (991.2) 2016", "porsche 911 turbo (992) 2020", "porsche 911 turbo (993) 1995", "porsche 911 turbo (996) 2000", "porsche 911 turbo (997) 2006", "porsche 912 (901) 1965", "porsche 918 spyder 2013", "porsche 928 1978", "porsche 928 gt 1989", "porsche 928 gts 1992", "porsche 928 s4 1986", "porsche 944 1981", "porsche 944 s2 1988", "porsche 959 1987", "porsche 968 1991", "porsche boxster (981) 2012", "porsche boxster (986) 1996", "porsche boxster (986) 2002", "porsche boxster (987) 2004", "porsche boxster (987) 2008", "porsche boxster s (986) 1999", "porsche boxster s (987) 2004", "porsche boxster spyder (987) 2009", "porsche carrera gt (980) 2003", "porsche cayenne (955) 2002", "porsche cayenne (957) 2007", "porsche cayenne (958) 2010", "porsche cayenne (958) 2014", "porsche cayenne (po536) 2017", "porsche cayenne 2023", "porsche cayenne e-hybrid 2018", "porsche cayenne gts (957) 2008", "porsche cayenne s transsyberia (957) 2009", "porsche cayenne turbo (955) 2002", "porsche cayenne turbo (957) 2007", "porsche cayenne turbo s (955) 2006", "porsche cayman (981c) 2012", "porsche cayman (987c) 2006", "porsche gts 2021", "porsche macan (95b) 2014", "porsche macan (95b) 2018", "porsche macan (95b) 2021", "porsche macan gts (95b) 2015", "porsche macan t 2022", "porsche panamera (907) 2009", "porsche panamera (970) 2013", "porsche panamera (971) 2016", "porsche panamera (971) 2020", "porsche panamera gts (970) 2011", "porsche panamera turbo (970) 2009", "porsche taycan 2021", "porsche taycan cross turismo 2021", "porsche taycan turbo 2019", "renault 11 1983", "renault 12 1969", "renault 14 1976", "renault 16 1965", "renault 18 1978", "renault 19 1988", "renault 19 1992", "renault 20 1975", "renault 21 1986", "renault 21 1989", "renault 
25 1984", "renault 25 1988", "renault 4 cv 1947", "renault 5 1972", "renault 8 1962", "renault 9 1981", "renault 9 1986", "renault alaskan 2016", "renault alpine a310 1977", "renault alpine a610 1991", "renault arkana 2019", "renault arkana 2021", "renault austral 2022", "renault avantime 2001", "renault captur 2013", "renault captur 2017", "renault captur 2019", "renault clio 1990", "renault clio 2001", "renault clio 2006", "renault clio 2009", "renault clio 2012", "renault clio 2019", "renault clio 2023", "renault escape 2023", "renault espace 1985", "renault espace 1991", "renault espace 1997", "renault espace 2002", "renault espace 2006", "renault espace 2014", "renault fluence 2009", "renault fluence 2013", "renault fuego 1980", "renault kangoo 1997", "renault kangoo 2005", "renault kangoo 2008", "renault kangoo 2020", "renault kiger 2021", "renault koleos 2007", "renault koleos 2011", "renault koleos 2016", "renault kwid 2015", "renault laguna 1994", "renault laguna 1998", "renault laguna 2001", "renault laguna 2007", "renault laguna 2010", "renault latitude 2010", "renault logan 2014", "renault megane 2020", "renault megane 1996", "renault megane 1999", "renault megane 2002", "renault megane 2006", "renault megane 2014", "renault megane 2016", "renault modus 2005", "renault modus 2008", "renault rafale 2023", "renault safrane 1992", "renault safrane 1996", "renault sandero 2012", "renault scala 2012", "renault scenic 1999", "renault scenic 2003", "renault scenic 2006", "renault scenic 2009", "renault scenic 2013", "renault scenic 2016", "renault sport spider 1996", "renault triber 2019", "renault twingo 1998", "renault twingo 2007", "renault twingo 2011", "renault twingo 2014", "renault twizy 2012", "renault vel satis 2002", "renault vel satis 2005", "renault wind 2010", "renault zoe 2013", "renault zoe 2019", "rolls-royce camargue 1975", "rolls-royce corniche 1971", "rolls-royce corniche v 2000", "rolls-royce cullinan 2018", "rolls-royce flying spur 1994", "rolls-royce ghost 2009", "rolls-royce ghost 2020", "rolls-royce ghost ii 2014", "rolls-royce park ward 2000", "rolls-royce phantom 2003", "rolls-royce phantom 2009", "rolls-royce phantom 2012", "rolls-royce phantom 2017", "rolls-royce silver dawn 1949", "rolls-royce silver dawn 1996", "rolls-royce silver seraph 1998", "rolls-royce silver shadow 1965", "rolls-royce silver shadow 1977", "rolls-royce silver spirit 1977", "rolls-royce spectre 2022", "rolls-royce wraith 2013", "saturn astra 2007", "saturn aura 2006", "saturn ion 2003", "saturn outlook 2006", "saturn relay 2004", "saturn sky 2006", "saturn vue 2001", "scion fr-s 2013", "scion ia 2016", "scion im 2016", "scion iq 2011", "scion tc 2003", "scion tc 2013", "scion xa 2003", "scion xb 2003", "scion xb 2007", "scion xd 2007", "smart 1 2022", "smart city coupe 1998", "smart crossblade 2002", "smart eq forfour 2019", "smart forfour 2003", "smart fortwo 2003", "smart fortwo 2007", "smart roadster 2003", "smart forfour 2014", "smart fortwo 2016", "subaru 360 1958", "subaru ascent 2019", "subaru ascent 2022", "subaru brz 2012", "subaru brz 2018", "subaru brz 2021", "subaru baja 2003", "subaru crosstrek 2015", "subaru crosstrek 2023", "subaru crosstrek 2024", "subaru exiga 2008", "subaru forester 1997", "subaru forester 2000", "subaru forester 2008", "subaru forester 2018", "subaru g3x justy 2004", "subaru impreza 1993", "subaru impreza 2005", "subaru impreza 2007", "subaru impreza 2012", "subaru impreza 2022", "subaru justy 1989", "subaru justy 1996", "subaru justy 2008", "subaru 
legacy 1999", "subaru legacy 2002", "subaru legacy 2003", "subaru legacy 2009", "subaru legacy 2014", "subaru legacy 2019", "subaru legacy 2022", "subaru levorg 2014", "subaru mini jumbo 1988", "subaru outback 1998", "subaru outback 2003", "subaru outback 2009", "subaru outback 2014", "subaru outback 2022", "subaru r1 2005", "subaru r2 1969", "subaru r2 2003", "subaru svx 1992", "subaru solterra 2021", "subaru stella 2006", "subaru trezia 2010", "subaru tribeca 2005", "subaru tribeca 2007", "subaru vivio 1992", "subaru wrx 2014", "subaru wrx 2021", "subaru wrx sti 1998", "subaru wrx sti 2001", "suzuki 800 2000", "suzuki apv 2004", "suzuki across 2020", "suzuki aerio 2001", "suzuki alto 2002", "suzuki alto 2009", "suzuki baleno 2000", "suzuki baleno 2016", "suzuki equator 2009", "suzuki esteem 1995", "suzuki forenza 2004", "suzuki gypsy 1985", "suzuki ignis 2000", "suzuki ignis 2003", "suzuki ignis 2016", "suzuki ignis 2019", "suzuki jimny 2005", "suzuki jimny 2018", "suzuki kizashi 2009", "suzuki s-cross 2021", "suzuki sx4 2006", "suzuki sx4 2013", "suzuki swace 2020", "suzuki swift 1991", "suzuki swift 1996", "suzuki swift 2005", "suzuki swift 2010", "suzuki swift 2014", "suzuki swift 2017", "suzuki verona 2004", "suzuki vitara 1989", "suzuki vitara 2005", "suzuki vitara 2014", "suzuki vitara 2018", "suzuki wagon r 1997", "suzuki wagon r 2005", "suzuki x-90 1996", "suzuki xl7 2007", "tesla cybertruck 2021", "tesla model 3 2017", "tesla model s 2012", "tesla model s 2016", "tesla model x 2015", "tesla model x 2021", "tesla model y 2019", "tesla roadster 2008", "toyota 4runner 1990", "toyota 4runner 2003", "toyota 4runner 2009", "toyota 4runner 2013", "toyota alphard 2002", "toyota aurion 2006", "toyota aurion 2012", "toyota auris 2006", "toyota auris 2013", "toyota auris 2015", "toyota avalon 2005", "toyota avalon 2012", "toyota avalon 2018", "toyota avensis 1997", "toyota avensis 2000", "toyota avensis 2003", "toyota avensis 2006", "toyota avensis 2009", "toyota avensis 2015", "toyota aygo 2005", "toyota aygo 2014", "toyota aygo 2021", "toyota c-hr 2016", "toyota c-hr 2023", "toyota camry 1983", "toyota camry 1987", "toyota camry 1991", "toyota camry 1997", "toyota camry 2001", "toyota camry 2004", "toyota camry 2007", "toyota camry 2009", "toyota camry 2011", "toyota camry 2014", "toyota camry 2019", "toyota celica 1990", "toyota celica 1994", "toyota celica 1999", "toyota celica 2002", "toyota corolla 1987", "toyota corolla 1992", "toyota corolla 1997", "toyota corolla 2000", "toyota corolla 2002", "toyota corolla 2004", "toyota corolla 2007", "toyota corolla 2010", "toyota corolla 2013", "toyota corolla 2019", "toyota crown 1979", "toyota crown 2022", "toyota etios 2010", "toyota fj cruiser 2006", "toyota fj cruiser 2011", "toyota fortuner 2011", "toyota fortuner 2015", "toyota gr 86 2021", "toyota gr corolla 2022", "toyota gt 86 2012", "toyota highlander 2000", "toyota highlander 2008", "toyota highlander 2014", "toyota hilux 2005", "toyota hilux 2011", "toyota hilux 2015", "toyota hilux 2018", "toyota hilux 2020", "toyota land cruiser 100 1998", "toyota land cruiser 100 2002", "toyota land cruiser 120 2003", "toyota land cruiser 150 2009", "toyota land cruiser 150 2013", "toyota land cruiser 150 2017", "toyota land cruiser 200 2007", "toyota land cruiser 200 2011", "toyota land cruiser 200 2015", "toyota land cruiser 2019", "toyota land cruiser fj70 1984", "toyota land cruiser fj80 1989", "toyota mr2 1985", "toyota mr2 1990", "toyota mr2 2002", "toyota matrix 2009", "toyota mirai 
2015", "toyota paseo 1996", "toyota picnic 1996", "toyota previa 1992", "toyota previa 2000", "toyota previa 2007", "toyota prius 1997", "toyota prius 2004", "toyota prius 2009", "toyota prius 2015", "toyota prius 2022", "toyota rav4 2000", "toyota rav4 2003", "toyota rav4 2006", "toyota rav4 2008", "toyota rav4 2010", "toyota rav4 2013", "toyota rav4 2015", "toyota sequoia 2000", "toyota sequoia 2007", "toyota sequoia 2022", "toyota sienna 1998", "toyota sienna 2010", "toyota sienna 2017", "toyota starlet 1984", "toyota starlet 1989", "toyota supra 1986", "toyota supra 1993", "toyota supra 2019", "toyota tacoma 2005", "toyota tacoma 2011", "toyota tacoma 2015", "toyota tacoma trd 2019", "toyota tundra 1999", "toyota tundra 2006", "toyota tundra 2013", "toyota tundra 2021", "toyota urban cruiser 2009", "toyota venza 2009", "toyota venza 2012", "toyota venza 2020", "toyota verso 2009", "toyota yaris 1999", "toyota yaris 2003", "toyota yaris 2006", "toyota yaris 2011", "toyota yaris 2016", "toyota yaris 2020", "toyota bz4x 2021", "toyota iq 2008", "volkswagen amarok 2009", "volkswagen amarok 2011", "volkswagen amarok 2016", "volkswagen amarok 2022", "volkswagen arteon 2017", "volkswagen arteon 2020", "volkswagen atlas 2017", "volkswagen atlas 2020", "volkswagen beetle 1945", "volkswagen beetle 1998", "volkswagen beetle 2005", "volkswagen beetle 2011", "volkswagen beetle 2016", "volkswagen bora 1998", "volkswagen caddy 2005", "volkswagen caddy 2010", "volkswagen caddy 2020", "volkswagen corrado 1989", "volkswagen eos 2006", "volkswagen fox 2005", "volkswagen gol 2008", "volkswagen gol 2014", "volkswagen golf i 1974", "volkswagen golf ii 1983", "volkswagen golf iii 1991", "volkswagen golf iv 1997", "volkswagen golf iv r32 2002", "volkswagen golf r 2021", "volkswagen golf v 2004", "volkswagen golf vi 2008", "volkswagen golf vii 2012", "volkswagen golf viii 2019", "volkswagen id buzz 2023", "volkswagen id.3 2020", "volkswagen id.4 2020", "volkswagen id.5 2021", "volkswagen id.7 2023", "volkswagen jetta 1992", "volkswagen jetta 2005", "volkswagen jetta 2010", "volkswagen jetta 2014", "volkswagen jetta 2018", "volkswagen jetta 2021", "volkswagen lupo 1998", "volkswagen multivan (t7) 2021", "volkswagen multivan 2003", "volkswagen multivan 2015", "volkswagen nivus 2020", "volkswagen passat 1973", "volkswagen passat 1981", "volkswagen passat 1988", "volkswagen passat 1993", "volkswagen passat 1996", "volkswagen passat 2000", "volkswagen passat 2005", "volkswagen passat 2010", "volkswagen passat 2014", "volkswagen passat 2019", "volkswagen phaeton 2003", "volkswagen phaeton 2010", "volkswagen pointer 1994", "volkswagen polo 1994", "volkswagen polo 1999", "volkswagen polo 2005", "volkswagen polo 2009", "volkswagen polo 2014", "volkswagen polo 2017", "volkswagen polo 2021", "volkswagen routan 2008", "volkswagen scirocco 1977", "volkswagen scirocco 1981", "volkswagen scirocco 2008", "volkswagen scirocco 2014", "volkswagen sharan 1996", "volkswagen sharan 2000", "volkswagen sharan 2010", "volkswagen t-cross 2018", "volkswagen t-roc 2017", "volkswagen t-roc 2021", "volkswagen taigo 2021", "volkswagen tiguan 2008", "volkswagen tiguan 2016", "volkswagen tiguan 2021", "volkswagen touareg 2003", "volkswagen touareg 2010", "volkswagen touareg 2014", "volkswagen touareg 2018", "volkswagen touran 2006", "volkswagen touran 2015", "volkswagen vento 2010", "volkswagen xl1 2013", "volkswagen up! 
2012", "volvo 142 1967", "volvo 144 1967", "volvo 244 1975", "volvo 262 c 1977", "volvo 265 1980", "volvo 343 1976", "volvo 440 1988", "volvo 440 1993", "volvo 480 1986", "volvo 66 1975", "volvo 760 1982", "volvo 780 1986", "volvo 850 1992", "volvo 940 1990", "volvo 960 1994", "volvo c30 2006", "volvo c30 2009", "volvo c40 recharge 2022", "volvo c70 1996", "volvo c70 2005", "volvo ex90 2022", "volvo p1800 1961", "volvo s40 1996", "volvo s40 2007", "volvo s60 2000", "volvo s60 2008", "volvo s60 2018", "volvo s70 1997", "volvo s80 1998", "volvo s80 2003", "volvo s80 2009", "volvo s90 1997", "volvo s90 2016", "volvo s90 2019", "volvo v40 2000", "volvo v40 2012", "volvo v40 2016", "volvo v50 2004", "volvo v50 2007", "volvo v60 2010", "volvo v70 2000", "volvo v70 2004", "volvo v70 2007", "volvo xc60 2008", "volvo xc60 2013", "volvo xc70 2000", "volvo xc70 2004", "volvo xc70 2007", "volvo xc90 2002", "volvo xc90 2007", "volvo xc90 2014", "volvo xc90 2019", "zaz-965", "zaz-966" ]
lrzjason/noise-classifier
A ViT classifier for detecting noise images like the one below ![0b36d3c4-da8d-4fb1-bc14-a948af35f02e.jpg](https://cdn-uploads.huggingface.co/production/uploads/63891deed68e37abd59e883f/aOspZVn_4W-hUFKt5JNUj.jpeg) It has limitations on borderline cases between clear and noise.

```python
from PIL import Image
from transformers import ViTImageProcessor, ViTForImageClassification
import torch
import os

model_name_or_path = 'lrzjason/noise-classifier'
image_processor = ViTImageProcessor.from_pretrained(model_name_or_path)
model = ViTForImageClassification.from_pretrained(model_name_or_path)

input_dir = ''
file = 'b5b457f4-5b52-4d68-be1b-9a2f557465f6.jpg'
image = Image.open(os.path.join(input_dir, file))

inputs = image_processor(image, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# The model predicts one of two classes: "clear" or "noise"
predicted_label = logits.argmax(-1).item()
print(model.config.id2label[predicted_label])
```
[ "clear", "noise" ]
Rodolfo1987/vit-base-patch16-224-finetuned-flower
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-finetuned-flower This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results ### Framework versions - Transformers 4.24.0 - Pytorch 2.1.0+cu121 - Datasets 2.7.1 - Tokenizers 0.13.3
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
BttDenis/vit-base-patch16-224-finetuned-flower
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-finetuned-flower This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results ### Framework versions - Transformers 4.24.0 - Pytorch 2.1.0+cu121 - Datasets 2.7.1 - Tokenizers 0.13.3
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
jzeep3/vit-base-patch16-224-finetuned-flower
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-finetuned-flower This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results ### Framework versions - Transformers 4.24.0 - Pytorch 2.1.0+cu121 - Datasets 2.7.1 - Tokenizers 0.13.3
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
porkchopEnjoyer/food_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. --> # porkchopEnjoyer/food_classifier This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Train Loss: 0.0822 - Validation Loss: 0.6913 - Train Accuracy: 0.5 - Epoch: 4 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 1515, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01} - training_precision: float32 ### Training results | Train Loss | Validation Loss | Train Accuracy | Epoch | |:----------:|:---------------:|:--------------:|:-----:| | 0.6312 | 0.7057 | 0.5 | 0 | | 0.4490 | 0.7302 | 0.5 | 1 | | 0.2669 | 0.8137 | 0.75 | 2 | | 0.1450 | 0.6267 | 0.5 | 3 | | 0.0822 | 0.6913 | 0.5 | 4 | ### Framework versions - Transformers 4.35.2 - TensorFlow 2.15.0 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "loss", "notloss" ]
joeZhuang/my_awesome_food_model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # my_awesome_food_model This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.4765 - Accuracy: 0.921 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 2.5391 | 0.99 | 62 | 2.3444 | 0.855 | | 1.6948 | 2.0 | 125 | 1.6518 | 0.906 | | 1.4136 | 2.98 | 186 | 1.4765 | 0.921 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
nicolasdupuisroy/vit-gabor-detection-v3
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-gabor-detection-v3 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.4139 - Accuracy: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 200 - eval_batch_size: 200 - seed: 1337 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 120.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 1 | 0.6629 | 0.5 | | No log | 2.0 | 2 | 0.6564 | 0.5 | | No log | 3.0 | 3 | 0.6496 | 0.5 | | No log | 4.0 | 4 | 0.6428 | 0.5 | | No log | 5.0 | 5 | 0.6362 | 0.5 | | No log | 6.0 | 6 | 0.6296 | 0.5 | | No log | 7.0 | 7 | 0.6232 | 0.5 | | No log | 8.0 | 8 | 0.6172 | 0.5 | | No log | 9.0 | 9 | 0.6113 | 1.0 | | 0.509 | 10.0 | 10 | 0.6058 | 1.0 | | 0.509 | 11.0 | 11 | 0.6005 | 1.0 | | 0.509 | 12.0 | 12 | 0.5950 | 1.0 | | 0.509 | 13.0 | 13 | 0.5892 | 1.0 | | 0.509 | 14.0 | 14 | 0.5832 | 1.0 | | 0.509 | 15.0 | 15 | 0.5765 | 1.0 | | 0.509 | 16.0 | 16 | 0.5699 | 1.0 | | 0.509 | 17.0 | 17 | 0.5630 | 1.0 | | 0.509 | 18.0 | 18 | 0.5562 | 1.0 | | 0.509 | 19.0 | 19 | 0.5494 | 1.0 | | 0.248 | 20.0 | 20 | 0.5426 | 1.0 | | 0.248 | 21.0 | 21 | 0.5360 | 1.0 | | 0.248 | 22.0 | 22 | 0.5295 | 1.0 | | 0.248 | 23.0 | 23 | 0.5231 | 1.0 | | 0.248 | 24.0 | 24 | 0.5175 | 1.0 | | 0.248 | 25.0 | 25 | 0.5126 | 1.0 | | 0.248 | 26.0 | 26 | 0.5079 | 1.0 | | 0.248 | 27.0 | 27 | 0.5034 | 1.0 | | 0.248 | 28.0 | 28 | 0.4991 | 1.0 | | 0.248 | 29.0 | 29 | 0.4949 | 1.0 | | 0.119 | 30.0 | 30 | 0.4908 | 1.0 | | 0.119 | 31.0 | 31 | 0.4868 | 1.0 | | 0.119 | 32.0 | 32 | 0.4833 | 1.0 | | 0.119 | 33.0 | 33 | 0.4803 | 1.0 | | 0.119 | 34.0 | 34 | 0.4777 | 1.0 | | 0.119 | 35.0 | 35 | 0.4751 | 1.0 | | 0.119 | 36.0 | 36 | 0.4727 | 1.0 | | 0.119 | 37.0 | 37 | 0.4704 | 1.0 | | 0.119 | 38.0 | 38 | 0.4681 | 1.0 | | 0.119 | 39.0 | 39 | 0.4658 | 1.0 | | 0.0692 | 40.0 | 40 | 0.4635 | 1.0 | | 0.0692 | 41.0 | 41 | 0.4612 | 1.0 | | 0.0692 | 42.0 | 42 | 0.4588 | 1.0 | | 0.0692 | 43.0 | 43 | 0.4564 | 1.0 | | 0.0692 | 44.0 | 44 | 0.4542 | 1.0 | | 0.0692 | 45.0 | 45 | 0.4522 | 1.0 | | 0.0692 | 46.0 | 46 | 0.4504 | 1.0 | | 0.0692 | 47.0 | 47 | 0.4488 | 1.0 | | 0.0692 | 48.0 | 48 | 0.4474 | 1.0 | | 0.0692 | 49.0 | 49 | 0.4463 | 1.0 | | 0.0487 | 50.0 | 50 | 0.4453 | 1.0 | | 0.0487 | 51.0 | 51 | 0.4444 | 1.0 | | 0.0487 | 52.0 | 52 | 0.4435 | 1.0 | | 0.0487 | 53.0 | 53 | 0.4427 | 1.0 | | 0.0487 | 54.0 | 54 | 0.4419 | 1.0 | | 0.0487 | 55.0 | 55 | 0.4410 | 1.0 | | 0.0487 | 56.0 | 56 | 0.4402 | 1.0 | | 0.0487 | 57.0 | 57 | 0.4394 | 1.0 | | 0.0487 | 58.0 | 58 | 0.4385 | 1.0 | | 0.0487 | 59.0 | 59 | 0.4375 | 1.0 | | 0.0374 | 60.0 | 60 | 0.4366 | 1.0 | | 0.0374 | 61.0 | 61 | 0.4356 | 1.0 | | 0.0374 | 62.0 | 62 | 0.4347 | 1.0 | | 0.0374 | 63.0 | 63 | 0.4338 | 1.0 | | 0.0374 | 64.0 | 64 | 0.4328 | 1.0 | | 0.0374 | 65.0 | 65 | 0.4319 | 1.0 | | 0.0374 | 66.0 | 66 | 0.4311 | 1.0 | | 0.0374 | 67.0 | 67 | 0.4302 | 1.0 
| | 0.0374 | 68.0 | 68 | 0.4294 | 1.0 | | 0.0374 | 69.0 | 69 | 0.4286 | 1.0 | | 0.0321 | 70.0 | 70 | 0.4278 | 1.0 | | 0.0321 | 71.0 | 71 | 0.4271 | 1.0 | | 0.0321 | 72.0 | 72 | 0.4264 | 1.0 | | 0.0321 | 73.0 | 73 | 0.4257 | 1.0 | | 0.0321 | 74.0 | 74 | 0.4251 | 1.0 | | 0.0321 | 75.0 | 75 | 0.4245 | 1.0 | | 0.0321 | 76.0 | 76 | 0.4239 | 1.0 | | 0.0321 | 77.0 | 77 | 0.4233 | 1.0 | | 0.0321 | 78.0 | 78 | 0.4228 | 1.0 | | 0.0321 | 79.0 | 79 | 0.4223 | 1.0 | | 0.0285 | 80.0 | 80 | 0.4219 | 1.0 | | 0.0285 | 81.0 | 81 | 0.4215 | 1.0 | | 0.0285 | 82.0 | 82 | 0.4211 | 1.0 | | 0.0285 | 83.0 | 83 | 0.4206 | 1.0 | | 0.0285 | 84.0 | 84 | 0.4201 | 1.0 | | 0.0285 | 85.0 | 85 | 0.4197 | 1.0 | | 0.0285 | 86.0 | 86 | 0.4192 | 1.0 | | 0.0285 | 87.0 | 87 | 0.4189 | 1.0 | | 0.0285 | 88.0 | 88 | 0.4185 | 1.0 | | 0.0285 | 89.0 | 89 | 0.4182 | 1.0 | | 0.0268 | 90.0 | 90 | 0.4179 | 1.0 | | 0.0268 | 91.0 | 91 | 0.4176 | 1.0 | | 0.0268 | 92.0 | 92 | 0.4173 | 1.0 | | 0.0268 | 93.0 | 93 | 0.4170 | 1.0 | | 0.0268 | 94.0 | 94 | 0.4168 | 1.0 | | 0.0268 | 95.0 | 95 | 0.4165 | 1.0 | | 0.0268 | 96.0 | 96 | 0.4163 | 1.0 | | 0.0268 | 97.0 | 97 | 0.4161 | 1.0 | | 0.0268 | 98.0 | 98 | 0.4159 | 1.0 | | 0.0268 | 99.0 | 99 | 0.4157 | 1.0 | | 0.0249 | 100.0 | 100 | 0.4155 | 1.0 | | 0.0249 | 101.0 | 101 | 0.4154 | 1.0 | | 0.0249 | 102.0 | 102 | 0.4152 | 1.0 | | 0.0249 | 103.0 | 103 | 0.4151 | 1.0 | | 0.0249 | 104.0 | 104 | 0.4150 | 1.0 | | 0.0249 | 105.0 | 105 | 0.4148 | 1.0 | | 0.0249 | 106.0 | 106 | 0.4147 | 1.0 | | 0.0249 | 107.0 | 107 | 0.4146 | 1.0 | | 0.0249 | 108.0 | 108 | 0.4145 | 1.0 | | 0.0249 | 109.0 | 109 | 0.4144 | 1.0 | | 0.0242 | 110.0 | 110 | 0.4144 | 1.0 | | 0.0242 | 111.0 | 111 | 0.4143 | 1.0 | | 0.0242 | 112.0 | 112 | 0.4142 | 1.0 | | 0.0242 | 113.0 | 113 | 0.4141 | 1.0 | | 0.0242 | 114.0 | 114 | 0.4141 | 1.0 | | 0.0242 | 115.0 | 115 | 0.4140 | 1.0 | | 0.0242 | 116.0 | 116 | 0.4140 | 1.0 | | 0.0242 | 117.0 | 117 | 0.4139 | 1.0 | | 0.0242 | 118.0 | 118 | 0.4139 | 1.0 | | 0.0242 | 119.0 | 119 | 0.4139 | 1.0 | | 0.0292 | 120.0 | 120 | 0.4139 | 1.0 | ### Framework versions - Transformers 4.38.0.dev0 - Pytorch 2.1.0+cu121 - Datasets 2.4.0 - Tokenizers 0.15.0
[ "absent", "present" ]
kashiram/swin-tiny-patch4-window7-224-finetuned-crop-classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-crop-classification This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.6271 - Accuracy: 0.7468 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 2 - total_train_batch_size: 32 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.7134 | 1.0 | 733 | 0.6852 | 0.7265 | | 0.6432 | 2.0 | 1467 | 0.6506 | 0.7349 | | 0.6945 | 3.0 | 2200 | 0.6434 | 0.7415 | | 0.6312 | 4.0 | 2932 | 0.6271 | 0.7468 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "dr", "g", "nd", "wd", "other" ]
sowbaranika13/vit-base-patch16-224-finetuned-flower
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-finetuned-flower This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results ### Framework versions - Transformers 4.24.0 - Pytorch 2.1.0+cu121 - Datasets 2.7.1 - Tokenizers 0.13.3
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
jayspring/vit-base-patch16-224-finetuned-flower
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-finetuned-flower This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results ### Framework versions - Transformers 4.24.0 - Pytorch 2.1.0+cu121 - Datasets 2.7.1 - Tokenizers 0.13.3
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
Yuuki0/anime-cartoon-detect
# Train Result: - epoch: 1.0 - loss: 4895933612886589.0 - runtime: 6608.2118 - samples_per_second: 1.211 - steps_per_second: 0.03 --- # Eval Result: - epoch: 1.0 - accuracy: 0.9292134831460674 - loss: 443748739710976.0 - runtime: 142.4015 - samples_per_second: 6.25 - steps_per_second: 0.625 ---
[ "anime", "cartoon" ]
LukeJacob2023/nsfw-image-detector
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # nsfw-image-detector This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.8138 - Accuracy: 0.9316 - Accuracy K: 0.9887 You can access the 384 version at: https://huggingface.co/LukeJacob2023/nsfw-image-detector-384 ## Model description Labels: ['drawings', 'hentai', 'neutral', 'porn', 'sexy'] ## Intended uses & limitations ## Training and evaluation data A custom dataset of about 28k images; if you want to improve accuracy for your domain, you can contribute your dataset to me. ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 500 - num_epochs: 10 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | Accuracy K | |:-------------:|:-----:|:----:|:---------------:|:--------:|:----------:| | 0.7836 | 1.0 | 720 | 0.3188 | 0.9085 | 0.9891 | | 0.2441 | 2.0 | 1440 | 0.2382 | 0.9257 | 0.9936 | | 0.1412 | 3.0 | 2160 | 0.2334 | 0.9335 | 0.9932 | | 0.0857 | 4.0 | 2880 | 0.2934 | 0.9347 | 0.9934 | | 0.0569 | 5.0 | 3600 | 0.4500 | 0.9307 | 0.9927 | | 0.0371 | 6.0 | 4320 | 0.5524 | 0.9357 | 0.9910 | | 0.0232 | 7.0 | 5040 | 0.6691 | 0.9347 | 0.9913 | | 0.02 | 8.0 | 5760 | 0.7408 | 0.9335 | 0.9917 | | 0.0154 | 9.0 | 6480 | 0.8138 | 0.9316 | 0.9887 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.0.0 - Datasets 2.15.0 - Tokenizers 0.15.0
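The card ships without example code; a minimal inference sketch using the `pipeline` API (the image path is a placeholder, not from the card):

```python
from transformers import pipeline

# Load the classifier; id2label covers drawings, hentai, neutral, porn, sexy
classifier = pipeline("image-classification", model="LukeJacob2023/nsfw-image-detector")

# Placeholder path: substitute your own image
scores = classifier("my_image.jpg", top_k=5)

# top_k=5 returns a confidence for every label, sorted descending
for item in scores:
    print(f"{item['label']}: {item['score']:.4f}")
```

Summing the `porn`, `hentai`, and `sexy` scores is one common way to collapse the output into a single NSFW probability, though the card itself does not prescribe a threshold.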
[ "drawings", "hentai", "neutral", "porn", "sexy" ]
ambarish004/vit-base-patch16-224-finetuned-polyterrasse
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-finetuned-polyterrasse This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.2635 - Accuracy: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 4 - eval_batch_size: 4 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 16 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 0.86 | 3 | 0.5713 | 0.6667 | | No log | 2.0 | 7 | 0.2635 | 1.0 | | 0.3363 | 2.86 | 10 | 0.1832 | 1.0 | | 0.3363 | 4.0 | 14 | 0.1458 | 1.0 | | 0.3363 | 4.29 | 15 | 0.1437 | 1.0 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "no", "yes" ]
atuo/vit-base-patch16-224-in21k-finetuned-crop-classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-in21k-finetuned-crop-classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.6450 - Accuracy: 0.7472 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.8031 | 1.0 | 183 | 0.7603 | 0.7050 | | 0.7311 | 2.0 | 367 | 0.7047 | 0.7250 | | 0.7144 | 3.0 | 550 | 0.6968 | 0.7211 | | 0.6516 | 4.0 | 734 | 0.6569 | 0.7376 | | 0.6371 | 5.0 | 917 | 0.6483 | 0.7376 | | 0.6246 | 6.0 | 1101 | 0.6492 | 0.7365 | | 0.5659 | 7.0 | 1284 | 0.6481 | 0.7411 | | 0.533 | 8.0 | 1468 | 0.6450 | 0.7472 | | 0.5416 | 9.0 | 1651 | 0.6382 | 0.7453 | | 0.5062 | 9.97 | 1830 | 0.6395 | 0.7461 | ### Framework versions - Transformers 4.36.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
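As elsewhere in these cards, the bulleted hyperparameters map one-to-one onto `TrainingArguments`. A minimal sketch of that mapping (the output directory is a placeholder, not part of the card):

```python
from transformers import TrainingArguments

# Mirrors the hyperparameters listed in the card above
training_args = TrainingArguments(
    output_dir="vit-crop-classification",  # placeholder path
    learning_rate=5e-5,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    seed=42,
    gradient_accumulation_steps=4,  # effective batch size: 32 * 4 = 128
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=10,
)
```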
[ "dr", "g", "nd", "wd", "other" ]
satyam2652/vit-base-patch16-224-in21k-euroSat
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. --> # satyam2652/vit-base-patch16-224-in21k-euroSat This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Train Loss: 3.1431 - Train Accuracy: 1.0 - Train Top-3-accuracy: 1.0 - Validation Loss: 3.6270 - Validation Accuracy: 0.9566 - Validation Top-3-accuracy: 0.9948 - Epoch: 9 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 1680, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01} - training_precision: float32 ### Training results | Train Loss | Train Accuracy | Train Top-3-accuracy | Validation Loss | Validation Accuracy | Validation Top-3-accuracy | Epoch | |:----------:|:--------------:|:--------------------:|:---------------:|:-------------------:|:-------------------------:|:-----:| | 5.3716 | 0.1168 | 0.1875 | 5.2023 | 0.3802 | 0.5556 | 0 | | 4.8783 | 0.7768 | 0.8929 | 4.7966 | 0.7257 | 0.9115 | 1 | | 4.4274 | 0.9360 | 0.9933 | 4.4790 | 0.8212 | 0.9618 | 2 | | 4.0789 | 0.9769 | 0.9985 | 4.2315 | 0.8837 | 0.9740 | 3 | | 3.8025 | 0.9926 | 1.0 | 4.0327 | 0.9062 | 0.9878 | 4 | | 3.5840 | 0.9970 | 1.0 | 3.8840 | 0.9253 | 0.9913 | 5 | | 3.4123 | 1.0 | 1.0 | 3.7695 | 0.9392 | 0.9931 | 6 | | 3.2826 | 1.0 | 1.0 | 3.6876 | 0.9462 | 0.9948 | 7 | | 3.1937 | 1.0 | 1.0 | 3.6414 | 0.9583 | 0.9948 | 8 | | 3.1431 | 1.0 | 1.0 | 3.6270 | 0.9566 | 0.9948 | 9 | ### Framework versions - Transformers 4.37.2 - TensorFlow 2.15.0 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "u119", "u115", "u122", "u210", "u211", "u215", "u207", "u212", "u206", "u222", "u223", "u220", "u217", "u126", "u218", "u225", "u226", "u224", "u219", "u221", "u227", "u232", "u233", "u228", "u123", "u229", "u231", "u234", "u230", "u235", "u238", "u237", "u239", "u236", "u001", "u120", "u000", "u002", "u003", "u009", "u014", "u016", "u011", "u017", "u013", "u005", "u130", "u010", "u018", "u008", "u007", "u004", "u019", "u006", "u012", "u015", "u020", "u128", "u027", "u031", "u021", "u024", "u030", "u029", "u026", "u028", "u023", "u032", "u121", "u025", "u022", "u033", "u042", "u037", "u034", "u043", "u041", "u040", "u035", "u129", "u039", "u044", "u038", "u036", "u048", "u053", "u047", "u052", "u050", "u046", "u127", "u045", "u049", "u051", "u058", "u057", "u054", "u062", "u056", "u063", "u055", "u124", "u061", "u059", "u060", "u068", "u069", "u070", "u067", "u072", "u064", "u065", "u113", "u131", "u071", "u066", "u078", "u076", "u077", "u073", "u075", "u074", "u080", "u081", "u135", "u079", "u085", "u087", "u084", "u088", "u082", "u086", "u083", "u089", "u099", "u132", "u090", "u095", "u092", "u093", "u098", "u100", "u097", "u094", "u096", "u091", "u133", "u101", "u109", "u104", "u103", "u108", "u110", "u102", "u105", "u107", "u106", "u134", "u136", "u140", "u138", "u139", "u141", "u112", "u137", "u149", "u148", "u143", "u142", "u145", "u147", "u146", "u144", "u150", "u118", "u151", "u158", "u154", "u153", "u152", "u159", "u160", "u162", "u161", "u155", "u111", "u157", "u156", "u163", "u168", "u165", "u167", "u170", "u166", "u172", "u173", "u116", "u169", "u164", "u171", "u174", "u176", "u178", "u181", "u184", "u179", "u183", "u117", "u175", "u182", "u177", "u180", "u188", "u192", "u190", "u195", "u186", "u185", "u114", "u189", "u194", "u191", "u193", "u196", "u197", "u187", "u202", "u204", "u200", "u125", "u205", "u199", "u198", "u201", "u203", "u213", "u214", "u216", "u209", "u208" ]
amaye15/ViT-Standford-Dogs
# CanineNet: Dog Breed Classification for 'Le Refuge' **Github:** [Here](https://github.com/amaye15/CanineNet/tree/main) **Huggingface Model:** [Here](https://huggingface.co/amaye15/ViT-Standford-Dogs) **Streamlit App:** [Here](https://caninenet.streamlit.app/) **DagsHub & Mlflow Experiment Tracking:** [Here](https://dagshub.com/andrewmayes14/mlflow/experiments/#/) ## Project Overview As a volunteer at 'Le Refuge,' a local animal protection association, I embarked on a mission to develop a machine learning tool that could aid in the classification of dog breeds from their vast image database. The project was inspired by my own experience finding my beloved pet, Snooky, through this association. To give back, I aimed to streamline their data management process by implementing an advanced breed classification algorithm.

```python
from transformers import pipeline

# Specify the model you want to use
model_name = "amaye15/ViT-Standford-Dogs"  # Example: Vision Transformer

# Load the image classification pipeline with the specified model
image_classifier = pipeline("image-classification", model=model_name)

# Path or URL to your image
image_path_or_url = "path_or_url_to_your_image"

# Define the number of top predictions you want to see (top k)
top_k = 5

# Get the top k predictions
predictions = image_classifier(image_path_or_url, top_k=top_k)

# Print the top k predictions
for prediction in predictions:
    print(f"Class: {prediction['label']}, Confidence: {prediction['score']:.2f}")
```
[ "bull mastiff", "chesapeake bay retriever", "tibetan mastiff", "bouvier des flandres", "cardigan", "miniature pinscher", "toy poodle", "kerry blue terrier", "miniature schnauzer", "bernese mountain dog", "gordon setter", "borzoi", "bloodhound", "irish setter", "scottish deerhound", "norwegian elkhound", "keeshond", "otterhound", "dhole", "norwich terrier", "basenji", "italian greyhound", "pembroke", "malinois", "standard schnauzer", "kelpie", "old english sheepdog", "yorkshire terrier", "greater swiss mountain dog", "miniature poodle", "ibizan hound", "english springer", "german shepherd", "schipperke", "airedale", "irish wolfhound", "komondor", "cairn", "silky terrier", "eskimo dog", "english setter", "tibetan terrier", "saint bernard", "redbone", "labrador retriever", "american staffordshire terrier", "shetland sheepdog", "toy terrier", "west highland white terrier", "pomeranian", "pekinese", "african hunting dog", "brabancon griffon", "dingo", "collie", "cocker spaniel", "great pyrenees", "japanese spaniel", "standard poodle", "papillon", "english foxhound", "french bulldog", "chihuahua", "australian terrier", "black and tan coonhound", "walker hound", "border collie", "samoyed", "rhodesian ridgeback", "malamute", "bluetick", "staffordshire bullterrier", "scotch terrier", "norfolk terrier", "great dane", "clumber", "groenendael", "saluki", "leonberg", "briard", "basset", "vizsla", "irish terrier", "newfoundland", "entlebucher", "mexican hairless", "brittany spaniel", "beagle", "bedlington terrier", "soft coated wheaten terrier", "siberian husky", "giant schnauzer", "lakeland terrier", "border terrier", "flat coated retriever", "irish water spaniel", "curly coated retriever", "blenheim spaniel", "affenpinscher", "lhasa", "whippet", "german short haired pointer", "pug", "boxer", "wire haired fox terrier", "sealyham terrier", "welsh springer spaniel", "golden retriever", "maltese dog", "afghan hound", "appenzeller", "rottweiler", "dandie dinmont", "weimaraner", "boston bull", "kuvasz", "doberman", "sussex spaniel", "shih tzu", "chow" ]
Warins/food_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. --> # Warins/food_classifier This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Train Loss: 2.8240 - Validation Loss: 1.6827 - Train Accuracy: 0.818 - Epoch: 0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 4000, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01} - training_precision: float32 ### Training results | Train Loss | Validation Loss | Train Accuracy | Epoch | |:----------:|:---------------:|:--------------:|:-----:| | 2.8240 | 1.6827 | 0.818 | 0 | ### Framework versions - Transformers 4.35.2 - TensorFlow 2.15.0 - Tokenizers 0.15.0
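The optimizer dictionary above is what `transformers.create_optimizer` serializes to for TensorFlow; a plausible reconstruction of the call (the zero warmup is an inference, since the card records no warmup steps):

```python
from transformers import create_optimizer

# AdamWeightDecay with a linear PolynomialDecay schedule, as in the card
optimizer, lr_schedule = create_optimizer(
    init_lr=3e-5,
    num_train_steps=4000,  # decay_steps from the card
    num_warmup_steps=0,    # assumption: the card lists no warmup
    weight_decay_rate=0.01,
)
```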
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
kevintran/autotrain-image-classififier-3
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 0.15451037883758545 f1_macro: 0.9467270357903562 f1_micro: 0.946 f1_weighted: 0.9456145469707284 precision_macro: 0.9494787542337152 precision_micro: 0.946 precision_weighted: 0.9483015719746657 recall_macro: 0.946937588084916 recall_micro: 0.946 recall_weighted: 0.946 accuracy: 0.946
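The macro, micro, and weighted variants above differ only in how per-class scores are averaged; a quick illustration with scikit-learn (the label arrays are toy data, not from this model):

```python
from sklearn.metrics import f1_score

# Toy 3-class labels, purely illustrative
y_true = [0, 0, 1, 1, 2, 2, 2, 2]
y_pred = [0, 1, 1, 1, 2, 2, 2, 0]

print(f1_score(y_true, y_pred, average="macro"))     # unweighted mean of per-class F1
print(f1_score(y_true, y_pred, average="micro"))     # computed from global TP/FP/FN counts
print(f1_score(y_true, y_pred, average="weighted"))  # per-class F1 weighted by class support
```

When classes are roughly balanced, the macro and weighted averages coincide, which is consistent with the near-identical values reported above.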
[ "buildings", "forest", "glacier", "mountain", "sea", "street" ]
nomis15/checkpoint-100
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # checkpoint-100 This model is a fine-tuned version of [facebook/deit-base-patch16-224](https://huggingface.co/facebook/deit-base-patch16-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.5022 - Accuracy: 0.8050 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 20 - eval_batch_size: 8 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 80 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 15 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.2967 | 1.24 | 50 | 0.9589 | 0.5975 | | 0.721 | 2.48 | 100 | 0.6094 | 0.7528 | | 0.5443 | 3.73 | 150 | 0.5702 | 0.7702 | | 0.4597 | 4.97 | 200 | 0.5168 | 0.7938 | | 0.3985 | 6.21 | 250 | 0.5091 | 0.7925 | | 0.3407 | 7.45 | 300 | 0.5011 | 0.8 | | 0.2989 | 8.7 | 350 | 0.5031 | 0.7963 | | 0.271 | 9.94 | 400 | 0.4951 | 0.8012 | | 0.2355 | 11.18 | 450 | 0.4973 | 0.8050 | | 0.2047 | 12.42 | 500 | 0.5011 | 0.8025 | | 0.1934 | 13.66 | 550 | 0.5018 | 0.8037 | | 0.1861 | 14.91 | 600 | 0.5022 | 0.8050 | ### Framework versions - Transformers 4.37.0 - Pytorch 2.1.2+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "adenocarcinoma", "adenoma", "normal_tissue", "serrated_lesion" ]
MattyB95/VIT-VoxCelebSpoof-MFCC-Synthetic-Voice-Detection
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # VIT-VoxCelebSpoof-MFCC-Synthetic-Voice-Detection This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the None dataset. It achieves the following results on the evaluation set: - Loss: 0.0001 - Accuracy: 1.0000 - F1: 1.0000 - Precision: 1.0 - Recall: 1.0000 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | Precision | Recall | |:-------------:|:-----:|:-----:|:---------------:|:--------:|:------:|:---------:|:------:| | 0.0 | 1.0 | 29527 | 0.0006 | 0.9999 | 0.9999 | 0.9999 | 0.9999 | | 0.0 | 2.0 | 59054 | 0.0002 | 0.9999 | 1.0000 | 1.0000 | 1.0000 | | 0.0 | 3.0 | 88581 | 0.0001 | 1.0000 | 1.0000 | 1.0 | 1.0000 | ### Framework versions - Transformers 4.37.0 - Pytorch 2.1.2+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "bonafide", "spoof" ]
MattyB95/VIT-VoxCelebSpoof-ConstantQ-Synthetic-Voice-Detection
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # VIT-VoxCelebSpoof-ConstantQ-Synthetic-Voice-Detection This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the None dataset. It achieves the following results on the evaluation set: - Accuracy: 1.0 - F1: 1.0 - Loss: 0.0000 - Precision: 1.0 - Recall: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3.0 ### Training results | Training Loss | Epoch | Step | Accuracy | F1 | Validation Loss | Precision | Recall | |:-------------:|:-----:|:-----:|:--------:|:------:|:---------------:|:---------:|:------:| | 0.0 | 1.0 | 29527 | 0.9999 | 0.9999 | 0.0006 | 0.9999 | 1.0 | | 0.0 | 2.0 | 59054 | 1.0000 | 1.0000 | 0.0002 | 1.0 | 0.9999 | | 0.0 | 3.0 | 88581 | 1.0 | 1.0 | 0.0000 | 1.0 | 1.0 | ### Framework versions - Transformers 4.37.0 - Pytorch 2.1.2+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "bonafide", "spoof" ]
YoelCanaza/base-beans-classification-vit-model-yoel
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # prueba-vit-model-yoel This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0081 - Accuracy: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.0212 | 3.85 | 500 | 0.0081 | 1.0 | ### Framework versions - Transformers 4.28.0 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.13.3
[ "angular_leaf_spot", "bean_rust", "healthy" ]
eskayML/swin-tiny-patch4-window7-224-finetuned-crop-classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-crop-classification This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.6979 - Accuracy: 0.7208 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 2 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.7918 | 1.0 | 183 | 0.7514 | 0.6989 | | 0.7053 | 1.99 | 366 | 0.6979 | 0.7208 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "dr", "g", "nd", "wd", "other" ]
MattyB95/VIT-VoxCelebSpoof-Mel_Spectrogram-Synthetic-Voice-Detection
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # VIT-VoxCelebSpoof-Mel_Spectrogram-Synthetic-Voice-Detection This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the None dataset. It achieves the following results on the evaluation set: - Loss: 0.0002 - Accuracy: 1.0000 - F1: 1.0000 - Precision: 1.0000 - Recall: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3.0 ### Training results | Training Loss | Epoch | Step | Accuracy | F1 | Validation Loss | Precision | Recall | |:-------------:|:-----:|:-----:|:--------:|:------:|:---------------:|:---------:|:------:| | 0.0048 | 1.0 | 29527 | 0.9998 | 0.9999 | 0.0010 | 0.9998 | 1.0 | | 0.0 | 2.0 | 59054 | 0.9999 | 0.9999 | 0.0006 | 0.9999 | 0.9999 | | 0.0 | 3.0 | 88581 | 1.0000 | 1.0000 | 0.0002 | 1.0000 | 1.0 | ### Framework versions - Transformers 4.37.0 - Pytorch 2.1.2+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
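None of the VoxCelebSpoof cards document the audio-to-image preprocessing; one plausible sketch of the mel-spectrogram step using librosa (the file name, sampling rate, and image export are assumptions):

```python
import numpy as np
import librosa
import matplotlib.pyplot as plt

# Placeholder file; the card does not specify the sampling rate or hop length
y, sr = librosa.load("utterance.flac", sr=16000)

# Mel-power spectrogram converted to decibels
mel = librosa.feature.melspectrogram(y=y, sr=sr)
mel_db = librosa.power_to_db(mel, ref=np.max)

# Save as an image so a ViT image classifier can consume it
plt.imsave("utterance.png", mel_db)
```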
[ "bonafide", "spoof" ]
hlillemark/my_awesome_food_model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # my_awesome_food_model This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 3.8640 - Accuracy: 0.573 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 512 - eval_batch_size: 512 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 2048 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Accuracy | Validation Loss | |:-------------:|:-----:|:----:|:--------:|:---------------:| | No log | 1.0 | 2 | 0.036 | 4.5210 | | No log | 2.0 | 4 | 0.278 | 4.4151 | | No log | 3.0 | 6 | 0.437 | 4.3629 | | No log | 4.0 | 8 | 0.547 | 4.2960 | | 4.3122 | 5.0 | 10 | 0.589 | 4.1697 | | 4.3122 | 6.0 | 12 | 0.568 | 4.0601 | | 4.3122 | 7.0 | 14 | 0.521 | 3.9770 | | 4.3122 | 8.0 | 16 | 0.539 | 3.9177 | | 4.3122 | 9.0 | 18 | 0.545 | 3.8843 | | 3.9792 | 10.0 | 20 | 0.573 | 3.8640 | ### Framework versions - Transformers 4.37.0 - Pytorch 2.1.2+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
mmervecerit/vit-base-beans-tutorial
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-beans-tutorial This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the beans dataset. It achieves the following results on the evaluation set: - Loss: 0.0442 - Accuracy: 0.9850 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 16 - eval_batch_size: 8 - seed: 1787 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.0664 | 1.54 | 100 | 0.0698 | 0.9774 | | 0.0115 | 3.08 | 200 | 0.0600 | 0.9850 | | 0.008 | 4.62 | 300 | 0.0442 | 0.9850 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "angular_leaf_spot", "bean_rust", "healthy" ]
judith0/classification_INE_v1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.2135 - Accuracy: 0.9767 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 0.96 | 6 | 0.4536 | 0.9186 | | 0.691 | 1.92 | 12 | 0.2135 | 0.9767 | | 0.691 | 2.88 | 18 | 0.1602 | 0.9767 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "other", "anverso", "reverso" ]
judith0/classification_INE_v2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # classification_INE_v1-finetuned-eurosat This model is a fine-tuned version of [judith0/classification_INE_v1](https://huggingface.co/judith0/classification_INE_v1) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0261 - Accuracy: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 0.96 | 6 | 0.0528 | 0.9884 | | 0.1411 | 1.92 | 12 | 0.0261 | 1.0 | | 0.1411 | 2.88 | 18 | 0.0182 | 1.0 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "other", "anverso", "reverso" ]
ansilmbabl/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0690 - Accuracy: 0.9756 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.2345 | 1.0 | 190 | 0.1822 | 0.9348 | | 0.1618 | 2.0 | 380 | 0.0947 | 0.9670 | | 0.1439 | 3.0 | 570 | 0.0690 | 0.9756 | ### Framework versions - Transformers 4.37.0 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.0
[ "annualcrop", "forest", "herbaceousvegetation", "highway", "industrial", "pasture", "permanentcrop", "residential", "river", "sealake" ]
jalaneunos/vit-base-patch16-224-in21k-finetuned-FER2013
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-in21k-finetuned-FER2013 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.3264 - Accuracy: 0.8732 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.4811 | 1.0 | 202 | 0.4315 | 0.8004 | | 0.4287 | 2.0 | 404 | 0.3579 | 0.8433 | | 0.4184 | 3.0 | 606 | 0.3517 | 0.8467 | | 0.3931 | 4.0 | 808 | 0.3308 | 0.8555 | | 0.3667 | 5.0 | 1010 | 0.3204 | 0.8610 | | 0.3545 | 6.0 | 1212 | 0.3144 | 0.8659 | | 0.3137 | 7.0 | 1414 | 0.3308 | 0.8642 | | 0.3178 | 8.0 | 1616 | 0.3230 | 0.8645 | | 0.2998 | 9.0 | 1818 | 0.3206 | 0.8708 | | 0.2773 | 10.0 | 2020 | 0.3264 | 0.8732 | ### Framework versions - Transformers 4.38.2 - Pytorch 2.2.1+cu121 - Datasets 2.18.0 - Tokenizers 0.15.2
[ "negative", "positive" ]
ansilmbabl/cards-swin-tiny-patch4-window7-224-finetuned-v1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # cards-swin-tiny-patch4-window7-224-finetuned-v1 This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.3763 - Accuracy: 0.4107 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.6417 | 1.0 | 734 | 1.6075 | 0.3000 | | 1.577 | 2.0 | 1468 | 1.5511 | 0.3355 | | 1.5699 | 3.0 | 2202 | 1.4887 | 0.3567 | | 1.5361 | 4.0 | 2936 | 1.4659 | 0.3686 | | 1.525 | 5.0 | 3670 | 1.4169 | 0.3920 | | 1.4744 | 6.0 | 4404 | 1.4029 | 0.3957 | | 1.4846 | 7.0 | 5138 | 1.3962 | 0.4029 | | 1.4729 | 8.0 | 5872 | 1.3932 | 0.4026 | | 1.4416 | 9.0 | 6606 | 1.3821 | 0.4088 | | 1.4255 | 10.0 | 7340 | 1.3763 | 0.4107 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.0.1+cu117 - Datasets 2.17.0 - Tokenizers 0.15.2
[ "grade_01", "grade_02", "grade_03", "grade_04", "grade_05", "grade_06", "grade_07", "grade_08", "grade_09" ]
enverkulahli/cat-sounds
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# cat-sounds

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2256
- Accuracy: 0.9462
- F1: 0.9464
- Precision: 0.9477
- Recall: 0.9462

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 16
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy | F1     | Precision | Recall |
|:-------------:|:-----:|:----:|:---------------:|:--------:|:------:|:---------:|:------:|
| 0.2716        | 1.0   | 297  | 0.3630          | 0.8957   | 0.8961 | 0.9047    | 0.8957 |
| 0.098         | 2.0   | 594  | 0.2674          | 0.9344   | 0.9350 | 0.9372    | 0.9344 |
| 0.0487        | 3.0   | 891  | 0.2256          | 0.9462   | 0.9464 | 0.9477    | 0.9462 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.16.1
- Tokenizers 0.15.1
[ "angry", "defence", "fighting", "happy", "huntingmind", "mating", "mothercall", "paining", "resting", "warning" ]
dvs/autotrain-kisd2-y8ibj
# Model Trained Using AutoTrain

- Problem type: Image Classification

## Validation Metrics

- loss: 0.4466552734375
- f1: 1.0
- precision: 1.0
- recall: 1.0
- auc: 1.0
- accuracy: 1.0
[ "mulder", "scully" ]
riyasankhe/vit-base-beans-tutorial
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-base-beans-tutorial

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the beans dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0180
- Accuracy: 0.9925

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 16
- eval_batch_size: 8
- seed: 1787
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 5
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.0469        | 1.54  | 100  | 0.1320          | 0.9624   |
| 0.0186        | 3.08  | 200  | 0.0315          | 0.9925   |
| 0.0084        | 4.62  | 300  | 0.0180          | 0.9925   |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.16.1
- Tokenizers 0.15.1
[ "angular_leaf_spot", "bean_rust", "healthy" ]
samink/vit-base-beans-tutorial-samink
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-base-beans-tutorial-samink

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the beans dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0472
- Accuracy: 0.9925

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 16
- eval_batch_size: 8
- seed: 1787
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 5
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.0515        | 1.54  | 100  | 0.1152          | 0.9624   |
| 0.0279        | 3.08  | 200  | 0.0472          | 0.9925   |
| 0.0083        | 4.62  | 300  | 0.0680          | 0.9850   |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.16.1
- Tokenizers 0.15.1
[ "angular_leaf_spot", "bean_rust", "healthy" ]
moreover18/vit-part1-friends-YB
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-part1-friends-YB

This model is a fine-tuned version of [moreover18/vit-base-patch16-224-in21k-YB](https://huggingface.co/moreover18/vit-base-patch16-224-in21k-YB) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2325
- Accuracy: 0.9218

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 64
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.5297        | 2.6   | 100  | 0.2325          | 0.9218   |

### Framework versions

- Transformers 4.37.1
- Pytorch 1.12.1+cu116
- Datasets 2.4.0
- Tokenizers 0.15.1
[ "friends", "other" ]
moreover18/vit-part1-friends
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-part1-friends

This model is a fine-tuned version of [moreover18/vit-base-patch16-224-in21k-YB](https://huggingface.co/moreover18/vit-base-patch16-224-in21k-YB) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2036
- Accuracy: 0.9381

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 64
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 7

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.1627        | 1.3   | 50   | 0.2258          | 0.9202   |
| 0.1183        | 2.6   | 100  | 0.2148          | 0.9235   |
| 0.1223        | 3.9   | 150  | 0.2055          | 0.9267   |
| 0.0992        | 5.19  | 200  | 0.1976          | 0.9332   |
| 0.0824        | 6.49  | 250  | 0.2036          | 0.9381   |

### Framework versions

- Transformers 4.37.1
- Pytorch 1.12.1+cu116
- Datasets 2.4.0
- Tokenizers 0.15.1
[ "friends", "other" ]
sahild23d23e23re23r/vit-base-patch16-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-base-patch16-224-finetuned-eurosat

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- eval_loss: 0.0077
- eval_accuracy: 0.9990
- eval_runtime: 172.7146
- eval_samples_per_second: 50.372
- eval_steps_per_second: 1.575
- epoch: 1.0
- step: 611

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 3

### Framework versions

- Transformers 4.34.0
- Pytorch 2.1.0
- Datasets 2.12.0
- Tokenizers 0.14.1
[ "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "del", "nothing", "space" ]
ares1123/photo_age_detection
**Photo Age Detection**

This model is designed to analyze facial features and predict the age of the person in an image.
[ "01", "02", "03", "04", "05", "06-07", "08-09", "10-12", "13-15", "16-20", "21-25", "26-30", "31-35", "36-40", "41-45", "46-50", "51-55", "56-60", "61-65", "66-70", "71-80", "81-90", "90+" ]
sahild23d23e23re23r/sign-lan-model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# sign-lan-model

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0087
- Accuracy: 0.9993

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 1

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.2769        | 1.0   | 611  | 0.0087          | 0.9993   |

### Framework versions

- Transformers 4.34.0
- Pytorch 2.1.0
- Datasets 2.12.0
- Tokenizers 0.14.1
[ "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "del", "nothing", "space" ]
Hector001/emotion-vit-model-hector
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# emotion-vit-model-hector

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 4

### Training results

### Framework versions

- Transformers 4.28.0
- Pytorch 2.1.0+cu121
- Datasets 2.16.1
- Tokenizers 0.13.3
[ "happy", "neutral", "sad" ]
moreover18/vit-part1-friends2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-part1-friends2

This model is a fine-tuned version of [moreover18/vit-part1-friends](https://huggingface.co/moreover18/vit-part1-friends) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.7298
- Accuracy: 0.7395

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 12

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.1497        | 7.41  | 50   | 0.7298          | 0.7395   |

### Framework versions

- Transformers 4.37.1
- Pytorch 1.12.1+cu116
- Datasets 2.4.0
- Tokenizers 0.15.1
[ "friends", "other" ]