Columns: `model_id` (string, 7–105 chars) · `model_card` (string, 1–130k chars) · `model_labels` (list, 2–80k items)
crocutacrocuto/dinov2-base-MEG5-5
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # dinov2-base-MEG5-5 This model is a fine-tuned version of [facebook/dinov2-base](https://huggingface.co/facebook/dinov2-base) on an unspecified dataset. It achieves the following results on the evaluation set: - Loss: 0.6964 - Accuracy: 0.8704 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 64 - eval_batch_size: 64 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 256 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:------:|:-----:|:---------------:|:--------:| | 0.1774 | 1.0000 | 12653 | 0.6342 | 0.8221 | | 0.1053 | 2.0 | 25307 | 0.5782 | 0.8484 | | 0.0592 | 3.0000 | 37960 | 0.5909 | 0.8606 | | 0.0452 | 4.0 | 50614 | 0.6203 | 0.8698 | | 0.0121 | 4.9998 | 63265 | 0.6964 | 0.8704 | ### Framework versions - Transformers 4.46.3 - Pytorch 2.3.0+cu121 - Datasets 2.19.1 - Tokenizers 0.20.3
[ "aardvark", "baboon", "badger", "bird", "black-and-white colobus", "blue duiker", "blue monkey", "buffalo", "bushbuck", "bushpig", "chimpanzee", "civet_genet", "elephant", "galago_potto", "golden cat", "gorilla", "guineafowl", "hyrax", "jackal", "leopard", "lhoests monkey", "mandrill", "mongoose", "monkey", "pangolin", "porcupine", "red colobus_red-capped mangabey", "red duiker", "rodent", "serval", "spotted hyena", "squirrel", "water chevrotain", "yellow-backed duiker" ]
msudhanshu10/vit-base-oxford-iiit-pets
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-oxford-iiit-pets This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the pcuenq/oxford-pets dataset. It achieves the following results on the evaluation set: - Loss: 0.1847 - Accuracy: 0.9310 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0003 - train_batch_size: 16 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.3992 | 1.0 | 370 | 0.3184 | 0.9242 | | 0.217 | 2.0 | 740 | 0.2497 | 0.9296 | | 0.1648 | 3.0 | 1110 | 0.2375 | 0.9310 | | 0.1369 | 4.0 | 1480 | 0.2321 | 0.9323 | | 0.1437 | 5.0 | 1850 | 0.2298 | 0.9323 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "siamese", "birman", "shiba inu", "staffordshire bull terrier", "basset hound", "bombay", "japanese chin", "chihuahua", "german shorthaired", "pomeranian", "beagle", "english cocker spaniel", "american pit bull terrier", "ragdoll", "persian", "egyptian mau", "miniature pinscher", "sphynx", "maine coon", "keeshond", "yorkshire terrier", "havanese", "leonberger", "wheaten terrier", "american bulldog", "english setter", "boxer", "newfoundland", "bengal", "samoyed", "british shorthair", "great pyrenees", "abyssinian", "pug", "saint bernard", "russian blue", "scottish terrier" ]
touchtech/fashion-images-perspectives-vit-large-patch16-384-v3
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # fashion-images-perspectives-vit-large-patch16-384-v3 This model is a fine-tuned version of [google/vit-large-patch16-384](https://huggingface.co/google/vit-large-patch16-384) on the touchtech/fashion-images-perspectives-v2 dataset. It achieves the following results on the evaluation set: - Loss: 0.1759 - Accuracy: 0.9518 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 1337 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 5.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:-----:|:---------------:|:--------:| | 0.1754 | 1.0 | 3423 | 0.1759 | 0.9518 | | 0.1168 | 2.0 | 6846 | 0.2239 | 0.9499 | | 0.0424 | 3.0 | 10269 | 0.2223 | 0.9607 | | 0.0165 | 4.0 | 13692 | 0.2298 | 0.9627 | | 0.0043 | 5.0 | 17115 | 0.2514 | 0.9619 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.1.0+cu118 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "model-back-close", "model-back-full", "pack-detail", "pack-front", "pack-side", "pack-top", "model-detail", "model-front-close", "model-front-full", "model-side-close", "model-side-full", "pack-angled", "pack-back", "pack-bottom" ]
Phi1lzA/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0530 - Accuracy: 0.9811 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.2168 | 1.0 | 190 | 0.0890 | 0.97 | | 0.1355 | 2.0 | 380 | 0.0846 | 0.9722 | | 0.1072 | 3.0 | 570 | 0.0530 | 0.9811 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu118 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "annualcrop", "forest", "herbaceousvegetation", "highway", "industrial", "pasture", "permanentcrop", "residential", "river", "sealake" ]
Josh8T/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.5032 - Accuracy: 0.5375 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0003 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 15 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.0207 | 1.0 | 10 | 1.3284 | 0.4875 | | 0.9015 | 2.0 | 20 | 1.2407 | 0.5687 | | 0.7775 | 3.0 | 30 | 1.3203 | 0.55 | | 0.649 | 4.0 | 40 | 1.2159 | 0.5312 | | 0.5012 | 5.0 | 50 | 1.4273 | 0.5 | | 0.546 | 6.0 | 60 | 1.3676 | 0.5 | | 0.4387 | 7.0 | 70 | 1.4644 | 0.5062 | | 0.3772 | 8.0 | 80 | 1.3247 | 0.5563 | | 0.3367 | 9.0 | 90 | 1.5443 | 0.5125 | | 0.3075 | 10.0 | 100 | 1.3807 | 0.575 | | 0.3138 | 11.0 | 110 | 1.4366 | 0.5188 | | 0.2298 | 12.0 | 120 | 1.5350 | 0.4938 | | 0.2454 | 13.0 | 130 | 1.4485 | 0.5312 | | 0.2273 | 14.0 | 140 | 1.5771 | 0.475 | | 0.187 | 15.0 | 150 | 1.4757 | 0.5312 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
andro-flock/b2-classification
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "background", "hat", "hair", "sunglasses", "upper-clothes", "skirt", "pants", "dress", "belt", "left-shoe", "right-shoe", "face", "left-leg", "right-leg", "left-arm", "right-arm", "bag", "scarf" ]
Lucaslym/vit-base-oxford-iiit-pets
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-oxford-iiit-pets This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the pcuenq/oxford-pets dataset. It achieves the following results on the evaluation set: - Loss: 0.1954 - Accuracy: 0.9391 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0003 - train_batch_size: 16 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.3905 | 1.0 | 370 | 0.2737 | 0.9337 | | 0.2228 | 2.0 | 740 | 0.2048 | 0.9445 | | 0.1811 | 3.0 | 1110 | 0.1884 | 0.9499 | | 0.1567 | 4.0 | 1480 | 0.1784 | 0.9445 | | 0.1329 | 5.0 | 1850 | 0.1761 | 0.9432 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "siamese", "birman", "shiba inu", "staffordshire bull terrier", "basset hound", "bombay", "japanese chin", "chihuahua", "german shorthaired", "pomeranian", "beagle", "english cocker spaniel", "american pit bull terrier", "ragdoll", "persian", "egyptian mau", "miniature pinscher", "sphynx", "maine coon", "keeshond", "yorkshire terrier", "havanese", "leonberger", "wheaten terrier", "american bulldog", "english setter", "boxer", "newfoundland", "bengal", "samoyed", "british shorthair", "great pyrenees", "abyssinian", "pug", "saint bernard", "russian blue", "scottish terrier" ]
inginjadibuparto/emotion-with-augmentation-data
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # emotion-with-augmentation-data This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2647 - Accuracy: 0.625 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 2.0245 | 1.0 | 30 | 1.9436 | 0.4125 | | 1.7584 | 2.0 | 60 | 1.6910 | 0.4688 | | 1.4786 | 3.0 | 90 | 1.4755 | 0.5125 | | 1.1628 | 4.0 | 120 | 1.3309 | 0.5437 | | 0.8958 | 5.0 | 150 | 1.2398 | 0.5625 | | 0.6309 | 6.0 | 180 | 1.2162 | 0.5625 | | 0.4298 | 7.0 | 210 | 1.1475 | 0.6 | | 0.2922 | 8.0 | 240 | 1.1929 | 0.5687 | | 0.1959 | 9.0 | 270 | 1.1736 | 0.575 | | 0.1518 | 10.0 | 300 | 1.1897 | 0.6188 | | 0.1305 | 11.0 | 330 | 1.2232 | 0.6 | | 0.1152 | 12.0 | 360 | 1.2435 | 0.6062 | | 0.1043 | 13.0 | 390 | 1.2601 | 0.6125 | | 0.0986 | 14.0 | 420 | 1.2647 | 0.625 | | 0.0922 | 15.0 | 450 | 1.2797 | 0.6 | | 0.0852 | 16.0 | 480 | 1.2887 | 0.6125 | | 0.0851 | 17.0 | 510 | 1.2970 | 0.6062 | | 0.0804 | 18.0 | 540 | 1.3018 | 0.6062 | | 0.0788 | 19.0 | 570 | 1.3053 | 0.6 | | 0.0785 | 20.0 | 600 | 1.3067 | 0.6 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
hasnanmr/vit-base-oxford-iiit-pets
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-oxford-iiit-pets This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the cepha-cutoutCLAHE dataset. It achieves the following results on the evaluation set: - Loss: 0.6194 - Accuracy: 0.7639 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 16 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 50 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.0901 | 1.0 | 32 | 0.9278 | 0.4931 | | 0.5383 | 2.0 | 64 | 0.6985 | 0.6319 | | 0.2707 | 3.0 | 96 | 0.6691 | 0.7222 | | 0.0366 | 4.0 | 128 | 0.9557 | 0.6806 | | 0.0066 | 5.0 | 160 | 0.8927 | 0.7083 | | 0.0075 | 6.0 | 192 | 1.2046 | 0.7014 | | 0.0013 | 7.0 | 224 | 1.2583 | 0.7083 | | 0.0006 | 8.0 | 256 | 1.3180 | 0.6944 | | 0.0004 | 9.0 | 288 | 1.3468 | 0.7014 | | 0.0002 | 10.0 | 320 | 1.3582 | 0.6875 | | 0.0002 | 11.0 | 352 | 1.3868 | 0.6875 | | 0.0002 | 12.0 | 384 | 1.4094 | 0.6806 | | 0.0002 | 13.0 | 416 | 1.4392 | 0.6806 | | 0.0002 | 14.0 | 448 | 1.4536 | 0.6875 | | 0.0001 | 15.0 | 480 | 1.4695 | 0.6875 | | 0.0001 | 16.0 | 512 | 1.4850 | 0.6875 | | 0.0001 | 17.0 | 544 | 1.5004 | 0.6875 | | 0.0001 | 18.0 | 576 | 1.5110 | 0.6875 | | 0.0001 | 19.0 | 608 | 1.5219 | 0.6875 | | 0.0001 | 20.0 | 640 | 1.5340 | 0.6875 | | 0.0001 | 21.0 | 672 | 1.5461 | 0.6875 | | 0.0001 | 22.0 | 704 | 1.5541 | 0.6875 | | 0.0001 | 23.0 | 736 | 1.5624 | 0.6875 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "label_0", "label_1", "label_2" ]
hasnanmr/vit-base-finetuned-cephalometric
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-finetuned-cephalometric This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the cepha-cutoutCLAHE dataset. It achieves the following results on the evaluation set: - Loss: 0.7340 - Accuracy: 0.6528 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 32 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 50 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 16 | 0.9458 | 0.5486 | | 0.9879 | 2.0 | 32 | 0.6947 | 0.6597 | | 0.4628 | 3.0 | 48 | 0.6375 | 0.6597 | | 0.135 | 4.0 | 64 | 0.7060 | 0.6944 | | 0.0339 | 5.0 | 80 | 0.7301 | 0.6597 | | 0.0339 | 6.0 | 96 | 0.9236 | 0.6875 | | 0.0059 | 7.0 | 112 | 0.9261 | 0.6806 | | 0.0024 | 8.0 | 128 | 0.9961 | 0.6875 | | 0.0012 | 9.0 | 144 | 1.0060 | 0.6736 | | 0.0008 | 10.0 | 160 | 1.0329 | 0.6875 | | 0.0008 | 11.0 | 176 | 1.0575 | 0.6944 | | 0.0006 | 12.0 | 192 | 1.0768 | 0.6944 | | 0.0006 | 13.0 | 208 | 1.1002 | 0.6944 | | 0.0005 | 14.0 | 224 | 1.1220 | 0.6875 | | 0.0004 | 15.0 | 240 | 1.1367 | 0.6875 | | 0.0004 | 16.0 | 256 | 1.1538 | 0.6875 | | 0.0004 | 17.0 | 272 | 1.1707 | 0.6875 | | 0.0003 | 18.0 | 288 | 1.1855 | 0.6875 | | 0.0003 | 19.0 | 304 | 1.2007 | 0.6875 | | 0.0003 | 20.0 | 320 | 1.2066 | 0.6806 | | 0.0003 | 21.0 | 336 | 1.2211 | 0.6806 | | 0.0003 | 22.0 | 352 | 1.2291 | 0.6875 | | 0.0002 | 23.0 | 368 | 1.2385 | 0.6875 | | 0.0002 | 24.0 | 384 | 1.2508 | 0.6875 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "label_0", "label_1", "label_2" ]
rain930/BEiT-Beans-Finetuned
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "label_0", "label_1", "label_2" ]
gekina/results
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # results This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 3 ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
vjeux/bazaar
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # bazaar This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "arbalest", "athanor", "boomerang", "bootstraps", "brassknuckles", "brawler", "charcole", "chum", "citrus", "clamera", "concealeddagger", "coral", "balcony", "cosmicwind", "counterstrike", "crypto", "curry", "docklines", "electrifiedhull", "emergencyburn", "emergencydraught", "energypotion", "eyeofthecolossus", "barofgold", "feather", "forebodingwinds", "frostpotion", "fullarsenal", "gpu", "gatlinggun", "gavel", "golfclubs", "greasefire", "hacksaw", "bellelista", "hammer", "hardenedshield", "harpoon", "hatchet", "heatlover", "honingsteel", "hypnoticdrain", "icebullets", "icepick", "igloo", "blackice", "infernalgreatsword", "initialdose", "insectwing", "intrusioncountermeasures", "juggler", "kneebrace", "landscraper", "lettingoffsteam", "likeclockwork", "lioncane", "bluepigglesl", "lumboars", "magiccarpet", "magmacore", "makeshiftbarricade", "motherboard", "outmaneuver", "palanquin", "phonograph", "powersander", "precisiondiver", "bluepigglesx", "propertymogul", "quickignition", "racecarl", "ramrod", "rearshielding", "redenvelope", "rocketlauncher", "rollingpin", "safe", "shieldbash", "boarmarket", "slowburn", "spikedbuckler", "stopthat", "sunderer", "tempering", "theboulder", "thrusters", "tinydancer", "tommoogun", "toolsofthetrade", "boobytrap", "toughness", "toxicweapons", "trainedspider", "trickledowneconomics", "uzi", "vengeance", "venom", "venomander", "voidshield", "welloiledmachine" ]
Anonymous123345/rps_vit
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "paper", "rock", "scissors", "none" ]
distill-lab/distill-n4_01_combined_cls_v0b4
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "star_0", "star_1" ]
lumenggan/atla-characters-detect
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "aang", "azula", "katara", "no_characters", "sokka", "zuko" ]
distill-lab/distill-n4_00-01_combined_cls_v0b4
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "star_0", "star_1" ]
Biolbe/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0544 - Accuracy: 0.98 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.2069 | 1.0 | 190 | 0.0867 | 0.9681 | | 0.1617 | 2.0 | 380 | 0.0670 | 0.9756 | | 0.1308 | 3.0 | 570 | 0.0544 | 0.98 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "annualcrop", "forest", "herbaceousvegetation", "highway", "industrial", "pasture", "permanentcrop", "residential", "river", "sealake" ]
goktug14/dinov2-small-fer
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # dinov2-small-fer This model is a fine-tuned version of [facebook/dinov2-small-imagenet1k-1-layer](https://huggingface.co/facebook/dinov2-small-imagenet1k-1-layer) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.8495 - Accuracy: 0.6964 - F1 angry: 0.6087 - F1 disgust: 0.5556 - F1 fear: 0.5091 - F1 happy: 0.8972 - F1 neutral: 0.6705 - F1 sad: 0.5893 - F1 surprise: 0.7923 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 64 - eval_batch_size: 64 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 12 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 angry | F1 disgust | F1 fear | F1 happy | F1 neutral | F1 sad | F1 surprise | |:-------------:|:-----:|:----:|:---------------:|:--------:|:--------:|:----------:|:-------:|:--------:|:----------:|:------:|:-----------:| | No log | 1.0 | 449 | 1.1298 | 0.5659 | 0.3647 | 0.0 | 0.3323 | 0.8466 | 0.5333 | 0.4221 | 0.6871 | | 1.3025 | 2.0 | 898 | 0.9742 | 0.6322 | 0.5274 | 0.2029 | 0.4050 | 0.8734 | 0.6080 | 0.5073 | 0.7528 | | 1.0413 | 3.0 | 1347 | 0.9270 | 0.6499 | 0.5517 | 0.4157 | 0.4662 | 0.8846 | 0.6287 | 0.4934 | 0.7694 | | 0.954 | 4.0 | 1796 | 0.9256 | 0.6537 | 0.5383 | 0.3975 | 0.4553 | 0.8801 | 0.6342 | 0.5153 | 0.7714 | | 0.8994 | 5.0 | 2245 | 0.8979 | 0.6655 | 0.5820 | 0.3896 | 0.4075 | 0.8881 | 0.6377 | 0.5610 | 0.7759 | | 0.8501 | 6.0 | 2694 | 0.8805 | 0.6776 | 0.6003 | 0.4599 | 0.4657 | 0.8932 | 0.6599 | 0.5382 | 0.7816 | | 0.7972 | 7.0 | 3143 | 0.9028 | 0.6723 | 0.5918 | 0.4094 | 0.3981 | 0.8890 | 0.6547 | 0.5487 | 0.7811 | | 0.7676 | 8.0 | 3592 | 0.8530 | 0.6884 | 0.6152 | 0.5085 | 0.5093 | 0.8922 | 0.6667 | 0.5447 | 0.7933 | | 0.7138 | 9.0 | 4041 | 0.8927 | 0.6761 | 0.5794 | 0.5771 | 0.4487 | 0.8852 | 0.6494 | 0.5670 | 0.7833 | | 0.7138 | 10.0 | 4490 | 0.8556 | 0.6896 | 0.6114 | 0.5668 | 0.4927 | 0.8957 | 0.6613 | 0.5799 | 0.7859 | | 0.6799 | 11.0 | 4939 | 0.8477 | 0.6973 | 0.6174 | 0.5672 | 0.5118 | 0.8974 | 0.6694 | 0.5843 | 0.7900 | | 0.643 | 12.0 | 5388 | 0.8495 | 0.6964 | 0.6087 | 0.5556 | 0.5091 | 0.8972 | 0.6705 | 0.5893 | 0.7923 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "angry", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
NekoJar/pretrained_result
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# pretrained_result

This model is a fine-tuned version of [motheecreator/vit-Facial-Expression-Recognition](https://huggingface.co/motheecreator/vit-Facial-Expression-Recognition) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.6537
- Accuracy: 0.7843

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 3e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 8
- total_train_batch_size: 256
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: cosine
- lr_scheduler_warmup_steps: 1000
- num_epochs: 10

### Training results

| Training Loss | Epoch  | Step | Validation Loss | Accuracy |
|:-------------:|:------:|:----:|:---------------:|:--------:|
| 0.6131        | 0.9904 | 90   | 0.6537          | 0.7843   |
| 0.5963        | 1.9904 | 180  | 0.6502          | 0.7824   |
| 0.5716        | 2.9904 | 270  | 0.6506          | 0.7783   |
| 0.5616        | 3.9904 | 360  | 0.6429          | 0.7821   |
| 0.5272        | 4.9904 | 450  | 0.6516          | 0.7772   |
| 0.5064        | 5.9904 | 540  | 0.6446          | 0.7764   |

### Framework versions

- Transformers 4.48.2
- Pytorch 2.6.0+cu126
- Datasets 3.2.0
- Tokenizers 0.21.0
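The total train batch size of 256 comes from gradient accumulation (32 × 8). A sketch of the corresponding `TrainingArguments`, assuming a recent `transformers` version for the argument names:

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="pretrained_result",
    learning_rate=3e-5,
    per_device_train_batch_size=32,   # micro-batch per optimizer sub-step
    per_device_eval_batch_size=32,
    gradient_accumulation_steps=8,    # 32 * 8 = effective batch of 256
    lr_scheduler_type="cosine",
    warmup_steps=1000,
    num_train_epochs=10,
    seed=42,
)
```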
[ "angry", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
DomiCormi/results
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# results

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset.

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 3

### Framework versions

- Transformers 4.49.0
- Pytorch 2.6.0+cu124
- Datasets 3.4.1
- Tokenizers 0.21.1
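The label list below is generic (`label_0` … `label_6`), which usually means no `id2label` mapping was supplied at training time. If the real class names are known, they can be patched into the config afterwards; a hypothetical sketch (the class names here are placeholders):

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("DomiCormi/results")
names = ["class_0", "class_1", "class_2", "class_3",
         "class_4", "class_5", "class_6"]  # placeholders for the true names
config.id2label = {i: n for i, n in enumerate(names)}
config.label2id = {n: i for i, n in config.id2label.items()}
config.save_pretrained("results-with-labels")  # hypothetical local path
```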
[ "label_0", "label_1", "label_2", "label_3", "label_4", "label_5", "label_6" ]
distill-lab/distill-n4_00-01_combined_cls_v1b0
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "star_0", "star_1" ]
lewisnjue/vit-base-oxford-iiit-pets
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-base-oxford-iiit-pets

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the pcuenq/oxford-pets dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2087
- Accuracy: 0.9350

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 5

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.372         | 1.0   | 370  | 0.2946          | 0.9296   |
| 0.2137        | 2.0   | 740  | 0.2215          | 0.9323   |
| 0.1663        | 3.0   | 1110 | 0.2002          | 0.9432   |
| 0.148         | 4.0   | 1480 | 0.1884          | 0.9459   |
| 0.1303        | 5.0   | 1850 | 0.1857          | 0.9472   |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.5.1+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
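A minimal inference sketch for this checkpoint, using the processor/model pair directly (the image path is a placeholder):

```python
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

repo = "lewisnjue/vit-base-oxford-iiit-pets"
processor = AutoImageProcessor.from_pretrained(repo)
model = AutoModelForImageClassification.from_pretrained(repo)

image = Image.open("pet.jpg").convert("RGB")  # any cat/dog photo
inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(model.config.id2label[logits.argmax(-1).item()])  # predicted breed
```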
[ "siamese", "birman", "shiba inu", "staffordshire bull terrier", "basset hound", "bombay", "japanese chin", "chihuahua", "german shorthaired", "pomeranian", "beagle", "english cocker spaniel", "american pit bull terrier", "ragdoll", "persian", "egyptian mau", "miniature pinscher", "sphynx", "maine coon", "keeshond", "yorkshire terrier", "havanese", "leonberger", "wheaten terrier", "american bulldog", "english setter", "boxer", "newfoundland", "bengal", "samoyed", "british shorthair", "great pyrenees", "abyssinian", "pug", "saint bernard", "russian blue", "scottish terrier" ]
distill-lab/distill-n4_00-01_combined_cls_v1b1
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "star_0", "star_1" ]
synonym/vit-base-oxford-iiit-pets
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit-base-oxford-iiit-pets

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the pcuenq/oxford-pets dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2031
- Accuracy: 0.9459

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 5

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.3727        | 1.0   | 370  | 0.2756          | 0.9337   |
| 0.2145        | 2.0   | 740  | 0.2168          | 0.9378   |
| 0.1835        | 3.0   | 1110 | 0.1918          | 0.9459   |
| 0.147         | 4.0   | 1480 | 0.1857          | 0.9472   |
| 0.1315        | 5.0   | 1850 | 0.1818          | 0.9472   |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.5.1+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
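Since the labels are 37 fine-grained breeds, the top few predictions are often more informative than the argmax alone; a pipeline sketch (the image path is a placeholder):

```python
from transformers import pipeline

clf = pipeline("image-classification", model="synonym/vit-base-oxford-iiit-pets")
for pred in clf("pet.jpg", top_k=3):  # three most likely breeds
    print(f"{pred['label']}: {pred['score']:.3f}")
```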
[ "siamese", "birman", "shiba inu", "staffordshire bull terrier", "basset hound", "bombay", "japanese chin", "chihuahua", "german shorthaired", "pomeranian", "beagle", "english cocker spaniel", "american pit bull terrier", "ragdoll", "persian", "egyptian mau", "miniature pinscher", "sphynx", "maine coon", "keeshond", "yorkshire terrier", "havanese", "leonberger", "wheaten terrier", "american bulldog", "english setter", "boxer", "newfoundland", "bengal", "samoyed", "british shorthair", "great pyrenees", "abyssinian", "pug", "saint bernard", "russian blue", "scottish terrier" ]
honeia11/resnet-18
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
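The "How to Get Started" section above is left empty, but the label list below is ImageNet-1k, so standard image-classification loading should apply; a hedged sketch using the repo id from this entry (whether the checkpoint actually ships the required config is not stated in the card):

```python
from transformers import pipeline

clf = pipeline("image-classification", model="honeia11/resnet-18")
print(clf("cat.jpg")[:3])  # placeholder image path; prints the top predictions
```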
[ "tench, tinca tinca", "goldfish, carassius auratus", "great white shark, white shark, man-eater, man-eating shark, carcharodon carcharias", "tiger shark, galeocerdo cuvieri", "hammerhead, hammerhead shark", "electric ray, crampfish, numbfish, torpedo", "stingray", "cock", "hen", "ostrich, struthio camelus", "brambling, fringilla montifringilla", "goldfinch, carduelis carduelis", "house finch, linnet, carpodacus mexicanus", "junco, snowbird", "indigo bunting, indigo finch, indigo bird, passerina cyanea", "robin, american robin, turdus migratorius", "bulbul", "jay", "magpie", "chickadee", "water ouzel, dipper", "kite", "bald eagle, american eagle, haliaeetus leucocephalus", "vulture", "great grey owl, great gray owl, strix nebulosa", "european fire salamander, salamandra salamandra", "common newt, triturus vulgaris", "eft", "spotted salamander, ambystoma maculatum", "axolotl, mud puppy, ambystoma mexicanum", "bullfrog, rana catesbeiana", "tree frog, tree-frog", "tailed frog, bell toad, ribbed toad, tailed toad, ascaphus trui", "loggerhead, loggerhead turtle, caretta caretta", "leatherback turtle, leatherback, leathery turtle, dermochelys coriacea", "mud turtle", "terrapin", "box turtle, box tortoise", "banded gecko", "common iguana, iguana, iguana iguana", "american chameleon, anole, anolis carolinensis", "whiptail, whiptail lizard", "agama", "frilled lizard, chlamydosaurus kingi", "alligator lizard", "gila monster, heloderma suspectum", "green lizard, lacerta viridis", "african chameleon, chamaeleo chamaeleon", "komodo dragon, komodo lizard, dragon lizard, giant lizard, varanus komodoensis", "african crocodile, nile crocodile, crocodylus niloticus", "american alligator, alligator mississipiensis", "triceratops", "thunder snake, worm snake, carphophis amoenus", "ringneck snake, ring-necked snake, ring snake", "hognose snake, puff adder, sand viper", "green snake, grass snake", "king snake, kingsnake", "garter snake, grass snake", "water snake", "vine snake", "night snake, hypsiglena torquata", "boa constrictor, constrictor constrictor", "rock python, rock snake, python sebae", "indian cobra, naja naja", "green mamba", "sea snake", "horned viper, cerastes, sand viper, horned asp, cerastes cornutus", "diamondback, diamondback rattlesnake, crotalus adamanteus", "sidewinder, horned rattlesnake, crotalus cerastes", "trilobite", "harvestman, daddy longlegs, phalangium opilio", "scorpion", "black and gold garden spider, argiope aurantia", "barn spider, araneus cavaticus", "garden spider, aranea diademata", "black widow, latrodectus mactans", "tarantula", "wolf spider, hunting spider", "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse, partridge, bonasa umbellus", "prairie chicken, prairie grouse, prairie fowl", "peacock", "quail", "partridge", "african grey, african gray, psittacus erithacus", "macaw", "sulphur-crested cockatoo, kakatoe galerita, cacatua galerita", "lorikeet", "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "drake", "red-breasted merganser, mergus serrator", "goose", "black swan, cygnus atratus", "tusker", "echidna, spiny anteater, anteater", "platypus, duckbill, duckbilled platypus, duck-billed platypus, ornithorhynchus anatinus", "wallaby, brush kangaroo", "koala, koala bear, kangaroo bear, native bear, phascolarctos cinereus", "wombat", "jellyfish", "sea anemone, anemone", "brain coral", "flatworm, platyhelminth", "nematode, nematode worm, roundworm", "conch", "snail", "slug", "sea slug, nudibranch", "chiton, coat-of-mail shell, sea 
cradle, polyplacophore", "chambered nautilus, pearly nautilus, nautilus", "dungeness crab, cancer magister", "rock crab, cancer irroratus", "fiddler crab", "king crab, alaska crab, alaskan king crab, alaska king crab, paralithodes camtschatica", "american lobster, northern lobster, maine lobster, homarus americanus", "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish", "crayfish, crawfish, crawdad, crawdaddy", "hermit crab", "isopod", "white stork, ciconia ciconia", "black stork, ciconia nigra", "spoonbill", "flamingo", "little blue heron, egretta caerulea", "american egret, great white heron, egretta albus", "bittern", "crane", "limpkin, aramus pictus", "european gallinule, porphyrio porphyrio", "american coot, marsh hen, mud hen, water hen, fulica americana", "bustard", "ruddy turnstone, arenaria interpres", "red-backed sandpiper, dunlin, erolia alpina", "redshank, tringa totanus", "dowitcher", "oystercatcher, oyster catcher", "pelican", "king penguin, aptenodytes patagonica", "albatross, mollymawk", "grey whale, gray whale, devilfish, eschrichtius gibbosus, eschrichtius robustus", "killer whale, killer, orca, grampus, sea wolf, orcinus orca", "dugong, dugong dugon", "sea lion", "chihuahua", "japanese spaniel", "maltese dog, maltese terrier, maltese", "pekinese, pekingese, peke", "shih-tzu", "blenheim spaniel", "papillon", "toy terrier", "rhodesian ridgeback", "afghan hound, afghan", "basset, basset hound", "beagle", "bloodhound, sleuthhound", "bluetick", "black-and-tan coonhound", "walker hound, walker foxhound", "english foxhound", "redbone", "borzoi, russian wolfhound", "irish wolfhound", "italian greyhound", "whippet", "ibizan hound, ibizan podenco", "norwegian elkhound, elkhound", "otterhound, otter hound", "saluki, gazelle hound", "scottish deerhound, deerhound", "weimaraner", "staffordshire bullterrier, staffordshire bull terrier", "american staffordshire terrier, staffordshire terrier, american pit bull terrier, pit bull terrier", "bedlington terrier", "border terrier", "kerry blue terrier", "irish terrier", "norfolk terrier", "norwich terrier", "yorkshire terrier", "wire-haired fox terrier", "lakeland terrier", "sealyham terrier, sealyham", "airedale, airedale terrier", "cairn, cairn terrier", "australian terrier", "dandie dinmont, dandie dinmont terrier", "boston bull, boston terrier", "miniature schnauzer", "giant schnauzer", "standard schnauzer", "scotch terrier, scottish terrier, scottie", "tibetan terrier, chrysanthemum dog", "silky terrier, sydney silky", "soft-coated wheaten terrier", "west highland white terrier", "lhasa, lhasa apso", "flat-coated retriever", "curly-coated retriever", "golden retriever", "labrador retriever", "chesapeake bay retriever", "german short-haired pointer", "vizsla, hungarian pointer", "english setter", "irish setter, red setter", "gordon setter", "brittany spaniel", "clumber, clumber spaniel", "english springer, english springer spaniel", "welsh springer spaniel", "cocker spaniel, english cocker spaniel, cocker", "sussex spaniel", "irish water spaniel", "kuvasz", "schipperke", "groenendael", "malinois", "briard", "kelpie", "komondor", "old english sheepdog, bobtail", "shetland sheepdog, shetland sheep dog, shetland", "collie", "border collie", "bouvier des flandres, bouviers des flandres", "rottweiler", "german shepherd, german shepherd dog, german police dog, alsatian", "doberman, doberman pinscher", "miniature pinscher", "greater swiss mountain dog", "bernese mountain dog", "appenzeller", "entlebucher", "boxer", "bull 
mastiff", "tibetan mastiff", "french bulldog", "great dane", "saint bernard, st bernard", "eskimo dog, husky", "malamute, malemute, alaskan malamute", "siberian husky", "dalmatian, coach dog, carriage dog", "affenpinscher, monkey pinscher, monkey dog", "basenji", "pug, pug-dog", "leonberg", "newfoundland, newfoundland dog", "great pyrenees", "samoyed, samoyede", "pomeranian", "chow, chow chow", "keeshond", "brabancon griffon", "pembroke, pembroke welsh corgi", "cardigan, cardigan welsh corgi", "toy poodle", "miniature poodle", "standard poodle", "mexican hairless", "timber wolf, grey wolf, gray wolf, canis lupus", "white wolf, arctic wolf, canis lupus tundrarum", "red wolf, maned wolf, canis rufus, canis niger", "coyote, prairie wolf, brush wolf, canis latrans", "dingo, warrigal, warragal, canis dingo", "dhole, cuon alpinus", "african hunting dog, hyena dog, cape hunting dog, lycaon pictus", "hyena, hyaena", "red fox, vulpes vulpes", "kit fox, vulpes macrotis", "arctic fox, white fox, alopex lagopus", "grey fox, gray fox, urocyon cinereoargenteus", "tabby, tabby cat", "tiger cat", "persian cat", "siamese cat, siamese", "egyptian cat", "cougar, puma, catamount, mountain lion, painter, panther, felis concolor", "lynx, catamount", "leopard, panthera pardus", "snow leopard, ounce, panthera uncia", "jaguar, panther, panthera onca, felis onca", "lion, king of beasts, panthera leo", "tiger, panthera tigris", "cheetah, chetah, acinonyx jubatus", "brown bear, bruin, ursus arctos", "american black bear, black bear, ursus americanus, euarctos americanus", "ice bear, polar bear, ursus maritimus, thalarctos maritimus", "sloth bear, melursus ursinus, ursus ursinus", "mongoose", "meerkat, mierkat", "tiger beetle", "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle", "ground beetle, carabid beetle", "long-horned beetle, longicorn, longicorn beetle", "leaf beetle, chrysomelid", "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant, emmet, pismire", "grasshopper, hopper", "cricket", "walking stick, walkingstick, stick insect", "cockroach, roach", "mantis, mantid", "cicada, cicala", "leafhopper", "lacewing, lacewing fly", "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", "damselfly", "admiral", "ringlet, ringlet butterfly", "monarch, monarch butterfly, milkweed butterfly, danaus plexippus", "cabbage butterfly", "sulphur butterfly, sulfur butterfly", "lycaenid, lycaenid butterfly", "starfish, sea star", "sea urchin", "sea cucumber, holothurian", "wood rabbit, cottontail, cottontail rabbit", "hare", "angora, angora rabbit", "hamster", "porcupine, hedgehog", "fox squirrel, eastern fox squirrel, sciurus niger", "marmot", "beaver", "guinea pig, cavia cobaya", "sorrel", "zebra", "hog, pig, grunter, squealer, sus scrofa", "wild boar, boar, sus scrofa", "warthog", "hippopotamus, hippo, river horse, hippopotamus amphibius", "ox", "water buffalo, water ox, asiatic buffalo, bubalus bubalis", "bison", "ram, tup", "bighorn, bighorn sheep, cimarron, rocky mountain bighorn, rocky mountain sheep, ovis canadensis", "ibex, capra ibex", "hartebeest", "impala, aepyceros melampus", "gazelle", "arabian camel, dromedary, camelus dromedarius", "llama", "weasel", "mink", "polecat, fitch, foulmart, foumart, mustela putorius", "black-footed ferret, ferret, mustela nigripes", "otter", "skunk, polecat, wood pussy", "badger", "armadillo", "three-toed sloth, ai, bradypus tridactylus", "orangutan, orang, orangutang, pongo pygmaeus", "gorilla, 
gorilla gorilla", "chimpanzee, chimp, pan troglodytes", "gibbon, hylobates lar", "siamang, hylobates syndactylus, symphalangus syndactylus", "guenon, guenon monkey", "patas, hussar monkey, erythrocebus patas", "baboon", "macaque", "langur", "colobus, colobus monkey", "proboscis monkey, nasalis larvatus", "marmoset", "capuchin, ringtail, cebus capucinus", "howler monkey, howler", "titi, titi monkey", "spider monkey, ateles geoffroyi", "squirrel monkey, saimiri sciureus", "madagascar cat, ring-tailed lemur, lemur catta", "indri, indris, indri indri, indri brevicaudatus", "indian elephant, elephas maximus", "african elephant, loxodonta africana", "lesser panda, red panda, panda, bear cat, cat bear, ailurus fulgens", "giant panda, panda, panda bear, coon bear, ailuropoda melanoleuca", "barracouta, snoek", "eel", "coho, cohoe, coho salmon, blue jack, silver salmon, oncorhynchus kisutch", "rock beauty, holocanthus tricolor", "anemone fish", "sturgeon", "gar, garfish, garpike, billfish, lepisosteus osseus", "lionfish", "puffer, pufferfish, blowfish, globefish", "abacus", "abaya", "academic gown, academic robe, judge's robe", "accordion, piano accordion, squeeze box", "acoustic guitar", "aircraft carrier, carrier, flattop, attack aircraft carrier", "airliner", "airship, dirigible", "altar", "ambulance", "amphibian, amphibious vehicle", "analog clock", "apiary, bee house", "apron", "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin", "assault rifle, assault gun", "backpack, back pack, knapsack, packsack, rucksack, haversack", "bakery, bakeshop, bakehouse", "balance beam, beam", "balloon", "ballpoint, ballpoint pen, ballpen, biro", "band aid", "banjo", "bannister, banister, balustrade, balusters, handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel, cask", "barrow, garden cart, lawn cart, wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "bathing cap, swimming cap", "bath towel", "bathtub, bathing tub, bath, tub", "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon", "beacon, lighthouse, beacon light, pharos", "beaker", "bearskin, busby, shako", "beer bottle", "beer glass", "bell cote, bell cot", "bib", "bicycle-built-for-two, tandem bicycle, tandem", "bikini, two-piece", "binder, ring-binder", "binoculars, field glasses, opera glasses", "birdhouse", "boathouse", "bobsled, bobsleigh, bob", "bolo tie, bolo, bola tie, bola", "bonnet, poke bonnet", "bookcase", "bookshop, bookstore, bookstall", "bottlecap", "bow", "bow tie, bow-tie, bowtie", "brass, memorial tablet, plaque", "brassiere, bra, bandeau", "breakwater, groin, groyne, mole, bulwark, seawall, jetty", "breastplate, aegis, egis", "broom", "bucket, pail", "buckle", "bulletproof vest", "bullet train, bullet", "butcher shop, meat market", "cab, hack, taxi, taxicab", "caldron, cauldron", "candle, taper, wax light", "cannon", "canoe", "can opener, tin opener", "cardigan", "car mirror", "carousel, carrousel, merry-go-round, roundabout, whirligig", "carpenter's kit, tool kit", "carton", "car wheel", "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, atm", "cassette", "cassette player", "castle", "catamaran", "cd player", "cello, violoncello", "cellular telephone, cellular phone, cellphone, cell, mobile phone", "chain", "chainlink fence", "chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour", "chain saw, chainsaw", "chest", "chiffonier, 
commode", "chime, bell, gong", "china cabinet, china closet", "christmas stocking", "church, church building", "cinema, movie theater, movie theatre, movie house, picture palace", "cleaver, meat cleaver, chopper", "cliff dwelling", "cloak", "clog, geta, patten, sabot", "cocktail shaker", "coffee mug", "coffeepot", "coil, spiral, volute, whorl, helix", "combination lock", "computer keyboard, keypad", "confectionery, confectionary, candy store", "container ship, containership, container vessel", "convertible", "corkscrew, bottle screw", "cornet, horn, trumpet, trump", "cowboy boot", "cowboy hat, ten-gallon hat", "cradle", "crane", "crash helmet", "crate", "crib, cot", "crock pot", "croquet ball", "crutch", "cuirass", "dam, dike, dyke", "desk", "desktop computer", "dial telephone, dial phone", "diaper, nappy, napkin", "digital clock", "digital watch", "dining table, board", "dishrag, dishcloth", "dishwasher, dish washer, dishwashing machine", "disk brake, disc brake", "dock, dockage, docking facility", "dogsled, dog sled, dog sleigh", "dome", "doormat, welcome mat", "drilling platform, offshore rig", "drum, membranophone, tympan", "drumstick", "dumbbell", "dutch oven", "electric fan, blower", "electric guitar", "electric locomotive", "entertainment center", "envelope", "espresso maker", "face powder", "feather boa, boa", "file, file cabinet, filing cabinet", "fireboat", "fire engine, fire truck", "fire screen, fireguard", "flagpole, flagstaff", "flute, transverse flute", "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster", "freight car", "french horn, horn", "frying pan, frypan, skillet", "fur coat", "garbage truck, dustcart", "gasmask, respirator, gas helmet", "gas pump, gasoline pump, petrol pump, island dispenser", "goblet", "go-kart", "golf ball", "golfcart, golf cart", "gondola", "gong, tam-tam", "gown", "grand piano, grand", "greenhouse, nursery, glasshouse", "grille, radiator grille", "grocery store, grocery, food market, market", "guillotine", "hair slide", "hair spray", "half track", "hammer", "hamper", "hand blower, blow dryer, blow drier, hair dryer, hair drier", "hand-held computer, hand-held microcomputer", "handkerchief, hankie, hanky, hankey", "hard disc, hard disk, fixed disk", "harmonica, mouth organ, harp, mouth harp", "harp", "harvester, reaper", "hatchet", "holster", "home theater, home theatre", "honeycomb", "hook, claw", "hoopskirt, crinoline", "horizontal bar, high bar", "horse cart, horse-cart", "hourglass", "ipod", "iron, smoothing iron", "jack-o'-lantern", "jean, blue jean, denim", "jeep, landrover", "jersey, t-shirt, tee shirt", "jigsaw puzzle", "jinrikisha, ricksha, rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat, laboratory coat", "ladle", "lampshade, lamp shade", "laptop, laptop computer", "lawn mower, mower", "lens cap, lens cover", "letter opener, paper knife, paperknife", "library", "lifeboat", "lighter, light, igniter, ignitor", "limousine, limo", "liner, ocean liner", "lipstick, lip rouge", "loafer", "lotion", "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system", "loupe, jeweler's loupe", "lumbermill, sawmill", "magnetic compass", "mailbag, postbag", "mailbox, letter box", "maillot", "maillot, tank suit", "manhole cover", "maraca", "marimba, xylophone", "mask", "matchstick", "maypole", "maze, labyrinth", "measuring cup", "medicine chest, medicine cabinet", "megalith, megalithic structure", "microphone, mike", "microwave, microwave oven", "military uniform", "milk can", "minibus", 
"miniskirt, mini", "minivan", "missile", "mitten", "mixing bowl", "mobile home, manufactured home", "model t", "modem", "monastery", "monitor", "moped", "mortar", "mortarboard", "mosque", "mosquito net", "motor scooter, scooter", "mountain bike, all-terrain bike, off-roader", "mountain tent", "mouse, computer mouse", "mousetrap", "moving van", "muzzle", "nail", "neck brace", "necklace", "nipple", "notebook, notebook computer", "obelisk", "oboe, hautboy, hautbois", "ocarina, sweet potato", "odometer, hodometer, mileometer, milometer", "oil filter", "organ, pipe organ", "oscilloscope, scope, cathode-ray oscilloscope, cro", "overskirt", "oxcart", "oxygen mask", "packet", "paddle, boat paddle", "paddlewheel, paddle wheel", "padlock", "paintbrush", "pajama, pyjama, pj's, jammies", "palace", "panpipe, pandean pipe, syrinx", "paper towel", "parachute, chute", "parallel bars, bars", "park bench", "parking meter", "passenger car, coach, carriage", "patio, terrace", "pay-phone, pay-station", "pedestal, plinth, footstall", "pencil box, pencil case", "pencil sharpener", "perfume, essence", "petri dish", "photocopier", "pick, plectrum, plectron", "pickelhaube", "picket fence, paling", "pickup, pickup truck", "pier", "piggy bank, penny bank", "pill bottle", "pillow", "ping-pong ball", "pinwheel", "pirate, pirate ship", "pitcher, ewer", "plane, carpenter's plane, woodworking plane", "planetarium", "plastic bag", "plate rack", "plow, plough", "plunger, plumber's helper", "polaroid camera, polaroid land camera", "pole", "police van, police wagon, paddy wagon, patrol wagon, wagon, black maria", "poncho", "pool table, billiard table, snooker table", "pop bottle, soda bottle", "pot, flowerpot", "potter's wheel", "power drill", "prayer rug, prayer mat", "printer", "prison, prison house", "projectile, missile", "projector", "puck, hockey puck", "punching bag, punch bag, punching ball, punchball", "purse", "quill, quill pen", "quilt, comforter, comfort, puff", "racer, race car, racing car", "racket, racquet", "radiator", "radio, wireless", "radio telescope, radio reflector", "rain barrel", "recreational vehicle, rv, r.v.", "reel", "reflex camera", "refrigerator, icebox", "remote control, remote", "restaurant, eating house, eating place, eatery", "revolver, six-gun, six-shooter", "rifle", "rocking chair, rocker", "rotisserie", "rubber eraser, rubber, pencil eraser", "rugby ball", "rule, ruler", "running shoe", "safe", "safety pin", "saltshaker, salt shaker", "sandal", "sarong", "sax, saxophone", "scabbard", "scale, weighing machine", "school bus", "schooner", "scoreboard", "screen, crt screen", "screw", "screwdriver", "seat belt, seatbelt", "sewing machine", "shield, buckler", "shoe shop, shoe-shop, shoe store", "shoji", "shopping basket", "shopping cart", "shovel", "shower cap", "shower curtain", "ski", "ski mask", "sleeping bag", "slide rule, slipstick", "sliding door", "slot, one-armed bandit", "snorkel", "snowmobile", "snowplow, snowplough", "soap dispenser", "soccer ball", "sock", "solar dish, solar collector, solar furnace", "sombrero", "soup bowl", "space bar", "space heater", "space shuttle", "spatula", "speedboat", "spider web, spider's web", "spindle", "sports car, sport car", "spotlight, spot", "stage", "steam locomotive", "steel arch bridge", "steel drum", "stethoscope", "stole", "stone wall", "stopwatch, stop watch", "stove", "strainer", "streetcar, tram, tramcar, trolley, trolley car", "stretcher", "studio couch, day bed", "stupa, tope", "submarine, pigboat, sub, u-boat", "suit, suit of clothes", 
"sundial", "sunglass", "sunglasses, dark glasses, shades", "sunscreen, sunblock, sun blocker", "suspension bridge", "swab, swob, mop", "sweatshirt", "swimming trunks, bathing trunks", "swing", "switch, electric switch, electrical switch", "syringe", "table lamp", "tank, army tank, armored combat vehicle, armoured combat vehicle", "tape player", "teapot", "teddy, teddy bear", "television, television system", "tennis ball", "thatch, thatched roof", "theater curtain, theatre curtain", "thimble", "thresher, thrasher, threshing machine", "throne", "tile roof", "toaster", "tobacco shop, tobacconist shop, tobacconist", "toilet seat", "torch", "totem pole", "tow truck, tow car, wrecker", "toyshop", "tractor", "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi", "tray", "trench coat", "tricycle, trike, velocipede", "trimaran", "tripod", "triumphal arch", "trolleybus, trolley coach, trackless trolley", "trombone", "tub, vat", "turnstile", "typewriter keyboard", "umbrella", "unicycle, monocycle", "upright, upright piano", "vacuum, vacuum cleaner", "vase", "vault", "velvet", "vending machine", "vestment", "viaduct", "violin, fiddle", "volleyball", "waffle iron", "wall clock", "wallet, billfold, notecase, pocketbook", "wardrobe, closet, press", "warplane, military plane", "washbasin, handbasin, washbowl, lavabo, wash-hand basin", "washer, automatic washer, washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle", "wig", "window screen", "window shade", "windsor tie", "wine bottle", "wing", "wok", "wooden spoon", "wool, woolen, woollen", "worm fence, snake fence, snake-rail fence, virginia fence", "wreck", "yawl", "yurt", "web site, website, internet site, site", "comic book", "crossword puzzle, crossword", "street sign", "traffic light, traffic signal, stoplight", "book jacket, dust cover, dust jacket, dust wrapper", "menu", "plate", "guacamole", "consomme", "hot pot, hotpot", "trifle", "ice cream, icecream", "ice lolly, lolly, lollipop, popsicle", "french loaf", "bagel, beigel", "pretzel", "cheeseburger", "hotdog, hot dog, red hot", "mashed potato", "head cabbage", "broccoli", "cauliflower", "zucchini, courgette", "spaghetti squash", "acorn squash", "butternut squash", "cucumber, cuke", "artichoke, globe artichoke", "bell pepper", "cardoon", "mushroom", "granny smith", "strawberry", "orange", "lemon", "fig", "pineapple, ananas", "banana", "jackfruit, jak, jack", "custard apple", "pomegranate", "hay", "carbonara", "chocolate sauce, chocolate syrup", "dough", "meat loaf, meatloaf", "pizza, pizza pie", "potpie", "burrito", "red wine", "espresso", "cup", "eggnog", "alp", "bubble", "cliff, drop, drop-off", "coral reef", "geyser", "lakeside, lakeshore", "promontory, headland, head, foreland", "sandbar, sand bar", "seashore, coast, seacoast, sea-coast", "valley, vale", "volcano", "ballplayer, baseball player", "groom, bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper, yellow lady-slipper, cypripedium calceolus, cypripedium parviflorum", "corn", "acorn", "hip, rose hip, rosehip", "buckeye, horse chestnut, conker", "coral fungus", "agaric", "gyromitra", "stinkhorn, carrion fungus", "earthstar", "hen-of-the-woods, hen of the woods, polyporus frondosus, grifola frondosa", "bolete", "ear, spike, capitulum", "toilet tissue, toilet paper, bathroom tissue" ]
natix-network-org/roadwork
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "none", "roadwork" ]
darthraider/vit-base-fruit-punch
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit4fruveg

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0004
- Accuracy: 1.0

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 16
- seed: 42
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 8
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.0488        | 0.8   | 100  | 0.0470          | 1.0      |
| 0.0174        | 1.6   | 200  | 0.0173          | 1.0      |
| 0.0074        | 2.4   | 300  | 0.0075          | 1.0      |
| 0.0035        | 3.2   | 400  | 0.0035          | 1.0      |
| 0.0018        | 4.0   | 500  | 0.0023          | 1.0      |
| 0.001         | 4.8   | 600  | 0.0011          | 1.0      |
| 0.0006        | 5.6   | 700  | 0.0007          | 1.0      |
| 0.0005        | 6.4   | 800  | 0.0005          | 1.0      |
| 0.0004        | 7.2   | 900  | 0.0004          | 1.0      |
| 0.0003        | 8.0   | 1000 | 0.0004          | 1.0      |

### Framework versions

- Transformers 4.47.0
- Pytorch 2.5.1+cu121
- Datasets 3.2.0
- Tokenizers 0.21.0
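The fractional epochs in the table pin down the training-set size; a quick back-of-the-envelope check:

```python
# 100 optimizer steps cover 0.8 epoch, per the first table row.
steps_per_epoch = 100 / 0.8          # = 125
train_batch_size = 32                # from the hyperparameters above
print(int(steps_per_epoch * train_batch_size))  # ~4000 training images
```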
[ "raw_banana", "raw_mango", "ripe_banana", "ripe_mango" ]
touchtech/fashion-images-pack-types-vit-large-patch16-384-v1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# fashion-images-pack-types-vit-large-patch16-384-v1

This model is a fine-tuned version of [google/vit-large-patch16-384](https://huggingface.co/google/vit-large-patch16-384) on the touchtech/fashion-images-pack-types dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0446
- Accuracy: 0.9900

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 1337
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 5.0

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.0823        | 1.0   | 1697 | 0.0661          | 0.9837   |
| 0.0197        | 2.0   | 3394 | 0.0900          | 0.9812   |
| 0.015         | 3.0   | 5091 | 0.0446          | 0.9900   |
| 0.0017        | 4.0   | 6788 | 0.0518          | 0.9912   |
| 0.0           | 5.0   | 8485 | 0.0485          | 0.9917   |

### Framework versions

- Transformers 4.49.0
- Pytorch 2.6.0+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
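Note that the result reported at the top matches the epoch-3 row (the lowest validation loss) rather than the final epoch; a tiny check against the table, which also shows what selecting on accuracy would have picked instead:

```python
# (epoch, eval_loss, accuracy) rows copied from the table above
results = [
    (1, 0.0661, 0.9837),
    (2, 0.0900, 0.9812),
    (3, 0.0446, 0.9900),
    (4, 0.0518, 0.9912),
    (5, 0.0485, 0.9917),
]
print(min(results, key=lambda r: r[1]))  # best by loss -> epoch 3, as reported
print(max(results, key=lambda r: r[2]))  # best by accuracy -> epoch 5
```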
[ "model", "pack-accessory", "pack-top-l3", "pack-bottom-l0", "pack-bottom-l1", "pack-multi", "pack-one-piece-l1", "pack-shoes", "pack-top-bottom", "pack-top-l1", "pack-top-l2" ]
distill-lab/distill-n4_00-01_combined_cls_v1b2
# Model Card for distill-lab/distill-n4_00-01_combined_cls_v1b2

current batches: `nv3[v0] (1700) | nv4[v1-2k] (4000) | nv4[v1-210k] (b1b2: 4000)`

metrics:

```
***** train metrics *****
  epoch                    =       20.0
  total_flos               = 66966619017GF
  train_loss               =     0.2338
  train_runtime            = 0:58:49.65
  train_samples_per_second =     56.736
  train_steps_per_second   =       0.89

***** eval metrics *****
  epoch                   =      20.0
  eval_accuracy           =     0.7521
  eval_loss               =     0.8814
  eval_runtime            = 0:00:12.42
  eval_samples_per_second =    142.171
  eval_steps_per_second   =      2.977
```

## Model details:

<!-- Provide a quick summary of what the model is/does. -->

```python
# since ordinal regression on the anime-pretrained model is really bad,
# let's try training the pretrained backbone as a plain classifier instead:
BASE_MODEL = "facebook/dinov2-with-registers-large"
DATASET = "distill-lab/COMBINE_nai-distill_00-01_eagle.library"
TASK = "classification"

# using a single card to train it, so had to use a higher batch size
cmd = f"""python -m trainlib.hf_trainer.cli \
    --model_name_or_path {BASE_MODEL} \
    --dataset_name {DATASET} \
    --output_dir distill-n4_00-01_combined_cls_v1b2-100e \
    --remove_unused_columns False \
    --label_column_name star \
    --task {TASK} \
    --do_train \
    --do_eval \
    --eval_strategy steps \
    --eval_steps 100 \
    --learning_rate 1e-5 \
    --num_train_epochs 20 \
    --per_device_train_batch_size 64 \
    --per_device_eval_batch_size 48 \
    --logging_strategy steps \
    --logging_steps 2 \
    --save_total_limit 1 \
    --seed 1337 \
    --lr_scheduler_type cosine \
    --dataloader_num_workers 16 \
    --ignore_mismatched_sizes True
"""

# hub-push arguments; defined but never appended to `cmd` above
rest = f""" --push_to_hub True \
    --push_to_hub_organization distill-lab \
    --hub_model_id nai-distill_00-01_combined_eagle_{TASK} \
    --hub_strategy end"""

print(cmd)
!{cmd}  # IPython cell magic: run the command
```
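What `--ignore_mismatched_sizes True` amounts to here, sketched directly against the `transformers` API (label names taken from the list below; the classification head is freshly initialized for the two classes):

```python
from transformers import AutoModelForImageClassification

model = AutoModelForImageClassification.from_pretrained(
    "facebook/dinov2-with-registers-large",
    num_labels=2,
    id2label={0: "star_0", 1: "star_1"},
    label2id={"star_0": 0, "star_1": 1},
    ignore_mismatched_sizes=True,  # drop/re-init any head whose shape disagrees
)
```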
[ "star_0", "star_1" ]
distill-lab/distill-n4_00-01_combined_cls_v1b2-100e
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> current batches: `nv3[v0] (1700) | nv4[v1-2k] (4000) | nv4[v1-210k] (b1b2: 4000)` Same as https://huggingface.co/distill-lab/distill-n4_00-01_combined_cls_v1b2 but trained for 100 epochs instead of 20. metrics: ``` ***** train metrics ***** epoch = 100.0 total_flos = 334833095087GF train_loss = 0.0776 train_runtime = 4:53:00.40 train_samples_per_second = 56.955 train_steps_per_second = 0.893 ***** eval metrics ***** epoch = 100.0 eval_accuracy = 0.7487 eval_loss = 1.9947 eval_runtime = 0:00:12.56 eval_samples_per_second = 140.622 eval_steps_per_second = 2.945 ``` ## Model details: (no significant accuracy jump, and eval loss rose from 0.88 to 1.99, i.e. the longer run overfits; this was just to see what happens) ```python BASE_MODEL = "facebook/dinov2-with-registers-large" DATASET = "distill-lab/COMBINE_nai-distill_00-01_eagle.library" TASK = "classification" # using a single card to train it, so had to use a higher batch size cmd = f"""python -m trainlib.hf_trainer.cli \ --model_name_or_path {BASE_MODEL} \ --dataset_name {DATASET} \ --output_dir distill-n4_00-01_combined_cls_v1b2-100e \ --remove_unused_columns False \ --label_column_name star \ --task {TASK} \ --do_train \ --do_eval \ --eval_strategy steps \ --eval_steps 100 \ --learning_rate 1e-5 \ --num_train_epochs 100 \ --per_device_train_batch_size 64 \ --per_device_eval_batch_size 48 \ --logging_strategy steps \ --logging_steps 2 \ --save_total_limit 1 \ --seed 1337 \ --lr_scheduler_type cosine \ --dataloader_num_workers 16 \ --ignore_mismatched_sizes True """ # note: `rest` holds the push-to-hub arguments but is never appended to `cmd` rest = f""" --push_to_hub True \ --push_to_hub_organization distill-lab \ --hub_model_id nai-distill_00-01_combined_eagle_{TASK} \ --hub_strategy end""" print(cmd) !{cmd} ```
[ "star_0", "star_1" ]
distill-lab/distill-n4_00-01_combined_cls_v1b2-siglip2-large-patch16-512
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> current batches: `nv3[v0] (1700) | nv4[v1-2k] (4000) | nv4[v1-210k] (b1b2: 4000)` Tries `google/siglip2-large-patch16-512` instead of DINOv2 for model variety (turns out 1% better than `google/siglip2-base-patch16-512`). eval metrics: ``` wandb: Run summary: wandb: eval/accuracy 0.77533 wandb: eval/loss 0.4809 wandb: eval/runtime 15.9025 wandb: eval/samples_per_second 111.114 wandb: eval/steps_per_second 0.692 wandb: total_flos 1.4915777670524436e+20 wandb: train/epoch 10.0 wandb: train/global_step 570 wandb: train/grad_norm 375217.9375 wandb: train/learning_rate 0.0 wandb: train/loss 0.286 wandb: train_loss 0.40591 wandb: train_runtime 1032.5423 wandb: train_samples_per_second 96.974 wandb: train_steps_per_second 0.552 ``` ## Model Details trainlib commit: 1b17bfef5ccbb5a22157e56ab8da71ba7c8c0ed6 - (it was committed right after the augmentation was changed for a later task) training script: ```bash #!/bin/bash # =================== BEGIN NOTES ======================= # bs24 OOMs; bs18 66943MiB / 81559MiB; try bs22 # bs22 (try to match siglip2-base for large as much as possible): 77679MiB / 81559MiB # ORIGINAL AUGMENTATION: # - the model trained with this exact config had eval/accuracy 0.77533 # train_transforms = Compose([ # RandomResizedCrop(size), # RandomHorizontalFlip(), # ToTensor(), # normalize, # ]) # MODIFIED AUGMENTATION: # from torchvision.transforms import Compose, RandomResizedCrop, RandomRotation, RandomHorizontalFlip, ColorJitter, RandomApply, GaussianBlur, ToTensor # train_transforms = Compose([ # RandomResizedCrop(size=224, scale=(0.8, 1.0), ratio=(0.9, 1.1)), # RandomRotation(5), # RandomHorizontalFlip(p=0.2), # ColorJitter(brightness=0.1, contrast=0.1, saturation=0.1, hue=0.05), # RandomApply([GaussianBlur(kernel_size=3, sigma=(0.5, 1.5))], p=0.1), # ToTensor(), # normalize, # ]) # =================== END NOTES ========================== # Define variables BASE_MODEL="google/siglip2-large-patch16-512" DATASET="distill-lab/COMBINE_nai-distill_00-01_eagle.library" TASK="classification" NUM_EPOCHS=10 # Run training command (note: $BASE_MODEL contains a slash, so the output lands in a nested google/ directory) python -m trainlib.hf_trainer.cli \ --model_name_or_path $BASE_MODEL \ --dataset_name $DATASET \ --output_dir distill-n4_00-01_combined_cls_v1b2_classification_$BASE_MODEL \ --remove_unused_columns False \ --label_column_name star \ --task $TASK \ --do_train \ --do_eval \ --eval_strategy steps \ --eval_steps 100 \ --learning_rate 5e-6 \ --num_train_epochs $NUM_EPOCHS \ --per_device_train_batch_size 22 \ --per_device_eval_batch_size 22 \ --logging_strategy steps \ --logging_steps 2 \ --save_total_limit 1 \ --seed 1337 \ --lr_scheduler_type cosine \ --dataloader_num_workers 16 \ --ignore_mismatched_sizes True \ --fp16 True # EXTRA ARGUMENT ```
[ "star_0", "star_1" ]
ishaansaxena/vit-base-beans
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-beans This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0013 - Accuracy: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 16 - eval_batch_size: 8 - seed: 42 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 4 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:------:|:----:|:---------------:|:--------:| | 0.1405 | 0.3448 | 100 | 0.0269 | 0.9981 | | 0.0862 | 0.6897 | 200 | 0.0432 | 0.9865 | | 0.0058 | 1.0345 | 300 | 0.0057 | 1.0 | | 0.0038 | 1.3793 | 400 | 0.0037 | 1.0 | | 0.0028 | 1.7241 | 500 | 0.0028 | 1.0 | | 0.0022 | 2.0690 | 600 | 0.0022 | 1.0 | | 0.0019 | 2.4138 | 700 | 0.0018 | 1.0 | | 0.0016 | 2.7586 | 800 | 0.0016 | 1.0 | | 0.0015 | 3.1034 | 900 | 0.0015 | 1.0 | | 0.0014 | 3.4483 | 1000 | 0.0014 | 1.0 | | 0.0013 | 3.7931 | 1100 | 0.0013 | 1.0 | ### Framework versions - Transformers 4.47.0 - Pytorch 2.5.1+cu121 - Datasets 3.3.1 - Tokenizers 0.21.0
[ "bacterial spot rot", "black rot", "disease free", "downy mildew" ]
Snppuzzle/Lanna-model-mobilenet_v2_1.0_224
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "aabnam", "amnat", "anong", "anu", "aphet", "athik", "banman", "binbon", "bochai", "bokin", "bolao", "bopen", "boron", "buppe", "chaehom", "chaeyang", "chaidi", "chanan", "changhan", "chaofa", "chaomom", "chaomueang", "chata", "chatu", "chaya", "chiangdao", "chiangmai", "chingchang", "chokdi", "dangsaap", "deklek", "deumnai", "doilo", "doiluang", "doitao", "dokbua", "eka", "fanhan", "hangdong", "hangsat", "hungtam", "huwai", "inta", "iti", "itom", "jara", "kadi", "kamyao", "kanmo", "kapmo", "kephet", "kepphak", "khaikai", "khaipa", "khamaen", "khaoma", "khata", "kheumnguem", "khomchai", "khongbo", "khongtua", "khunyuam", "khwaluat", "khwamsuk", "kinkhao", "kinkhong", "kinmuea", "kinru", "kluaibo", "laemai", "laichiao", "lailong", "lampang", "lattho", "loka", "luathak", "luatok", "maechaem", "maechai", "maechan", "maecharim", "maelao", "maelim", "maemo", "maephrik", "maetaeng", "maeth", "maetha", "maewang", "maha", "mahachai", "mam", "manpen", "manu", "mueangphan", "mueangyong", "nakrian", "nambo", "nanglong", "nangsue", "naokhong", "nara", "newin", "nganban", "nguenchae", "nguenchat", "omkoi", "oprom", "oram", "osot", "padaet", "phaideuan", "phaka", "phakhawa", "phayao", "phoenwai", "phuphiang", "phusang", "phuttha", "phuttho", "pikat", "pikot", "piso", "puri", "rakha", "ratna", "roisai", "ruluem", "saichai", "saket", "sana", "sanam", "sanya", "sapha", "sawa", "sayong", "siri", "sitth", "soekho", "soekman", "somkhuan", "songkho", "sukhato", "sukka", "taefai", "taehai", "tanam", "taro", "thairat", "thamam", "thawai", "thewa", "thuti", "uru", "wailang", "wasa", "wati", "wihan", "witcha", "witwo", "yapheng", "yukloek" ]
corranm/square_run_square_run_first_vote_full_pic_25_age
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # square_run_square_run_first_vote_full_pic_25_age This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.8420 - F1 Macro: 0.2101 - F1 Micro: 0.3182 - F1 Weighted: 0.2588 - Precision Macro: 0.1853 - Precision Micro: 0.3182 - Precision Weighted: 0.2261 - Recall Macro: 0.2563 - Recall Micro: 0.3182 - Recall Weighted: 0.3182 - Accuracy: 0.3182 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_BNB with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 Macro | F1 Micro | F1 Weighted | Precision Macro | Precision Micro | Precision Weighted | Recall Macro | Recall Micro | Recall Weighted | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:|:--------:|:-----------:|:---------------:|:---------------:|:------------------:|:------------:|:------------:|:---------------:|:--------:| | 1.8947 | 1.0 | 58 | 1.9103 | 0.0802 | 0.1591 | 0.0926 | 0.0654 | 0.1591 | 0.0737 | 0.1311 | 0.1591 | 0.1591 | 0.1591 | | 1.9864 | 2.0 | 116 | 2.0017 | 0.0614 | 0.1439 | 0.0665 | 0.0434 | 0.1439 | 0.0473 | 0.1340 | 0.1439 | 0.1439 | 0.1439 | | 1.9069 | 3.0 | 174 | 1.8861 | 0.1323 | 0.2348 | 0.1697 | 0.1197 | 0.2348 | 0.1496 | 0.1773 | 0.2348 | 0.2348 | 0.2348 | | 1.7102 | 4.0 | 232 | 1.8780 | 0.0963 | 0.2273 | 0.1303 | 0.0927 | 0.2273 | 0.1189 | 0.1631 | 0.2273 | 0.2273 | 0.2273 | | 1.9048 | 5.0 | 290 | 1.8504 | 0.1544 | 0.2424 | 0.1936 | 0.1677 | 0.2424 | 0.2005 | 0.1873 | 0.2424 | 0.2424 | 0.2424 | | 1.8432 | 6.0 | 348 | 1.9349 | 0.1092 | 0.1591 | 0.1296 | 0.0978 | 0.1591 | 0.1203 | 0.1407 | 0.1591 | 0.1591 | 0.1591 | | 2.051 | 7.0 | 406 | 1.9871 | 0.1542 | 0.2273 | 0.1718 | 0.2630 | 0.2273 | 0.2834 | 0.1932 | 0.2273 | 0.2273 | 0.2273 | | 1.6873 | 8.0 | 464 | 2.1073 | 0.1197 | 0.1742 | 0.1432 | 0.1470 | 0.1742 | 0.1808 | 0.1434 | 0.1742 | 0.1742 | 0.1742 | | 1.6756 | 9.0 | 522 | 2.0864 | 0.1541 | 0.2121 | 0.1829 | 0.1584 | 0.2121 | 0.1847 | 0.1760 | 0.2121 | 0.2121 | 0.2121 | | 1.3861 | 10.0 | 580 | 2.1820 | 0.2356 | 0.2879 | 0.2457 | 0.3112 | 0.2879 | 0.2483 | 0.2629 | 0.2879 | 0.2879 | 0.2879 | | 1.4967 | 11.0 | 638 | 2.3178 | 0.1792 | 0.2121 | 0.2035 | 0.1951 | 0.2121 | 0.2294 | 0.1895 | 0.2121 | 0.2121 | 0.2121 | | 0.647 | 12.0 | 696 | 2.5355 | 0.2424 | 0.2348 | 0.2357 | 0.3563 | 0.2348 | 0.2855 | 0.2339 | 0.2348 | 0.2348 | 0.2348 | | 1.0499 | 13.0 | 754 | 2.6150 | 0.2180 | 0.2197 | 0.2148 | 0.3053 | 0.2197 | 0.2945 | 0.2269 | 0.2197 | 0.2197 | 0.2197 | | 0.8517 | 14.0 | 812 | 2.5920 | 0.2557 | 0.2576 | 0.2599 | 0.3949 | 0.2576 | 0.3338 | 0.2385 | 0.2576 | 0.2576 | 0.2576 | | 0.9049 | 15.0 | 870 | 2.7174 | 0.2563 | 0.2652 | 0.2625 | 0.2770 | 0.2652 | 0.2709 | 0.2515 | 0.2652 | 0.2652 | 0.2652 | | 0.4174 | 16.0 | 928 | 2.8881 | 0.2089 | 0.2121 | 
0.2107 | 0.3001 | 0.2121 | 0.2618 | 0.1958 | 0.2121 | 0.2121 | 0.2121 | | 0.3634 | 17.0 | 986 | 3.1611 | 0.2103 | 0.2348 | 0.2209 | 0.2170 | 0.2348 | 0.2139 | 0.2145 | 0.2348 | 0.2348 | 0.2348 | | 0.4008 | 18.0 | 1044 | 3.4658 | 0.2233 | 0.2576 | 0.2354 | 0.2480 | 0.2576 | 0.2431 | 0.2321 | 0.2576 | 0.2576 | 0.2576 | | 0.1012 | 19.0 | 1102 | 3.5065 | 0.2435 | 0.2652 | 0.2569 | 0.3072 | 0.2652 | 0.2987 | 0.2398 | 0.2652 | 0.2652 | 0.2652 | | 0.1552 | 20.0 | 1160 | 3.5254 | 0.2306 | 0.25 | 0.2416 | 0.2414 | 0.25 | 0.2520 | 0.2357 | 0.25 | 0.25 | 0.25 | | 0.0613 | 21.0 | 1218 | 3.6319 | 0.2040 | 0.2197 | 0.2084 | 0.2162 | 0.2197 | 0.2073 | 0.2057 | 0.2197 | 0.2197 | 0.2197 | | 0.1634 | 22.0 | 1276 | 3.6378 | 0.2652 | 0.2803 | 0.2734 | 0.2815 | 0.2803 | 0.2789 | 0.2623 | 0.2803 | 0.2803 | 0.2803 | | 0.2401 | 23.0 | 1334 | 3.6470 | 0.2371 | 0.25 | 0.2479 | 0.2416 | 0.25 | 0.2497 | 0.2360 | 0.25 | 0.25 | 0.25 | | 0.0739 | 24.0 | 1392 | 3.9052 | 0.2123 | 0.2197 | 0.2194 | 0.2290 | 0.2197 | 0.2298 | 0.2076 | 0.2197 | 0.2197 | 0.2197 | | 0.2851 | 25.0 | 1450 | 3.8456 | 0.2345 | 0.2424 | 0.2424 | 0.2478 | 0.2424 | 0.2458 | 0.2300 | 0.2424 | 0.2424 | 0.2424 | | 0.0082 | 26.0 | 1508 | 4.0511 | 0.2375 | 0.25 | 0.2412 | 0.2737 | 0.25 | 0.2497 | 0.2332 | 0.25 | 0.25 | 0.25 | | 0.012 | 27.0 | 1566 | 4.1588 | 0.2219 | 0.2348 | 0.2280 | 0.2510 | 0.2348 | 0.2411 | 0.2176 | 0.2348 | 0.2348 | 0.2348 | | 0.0052 | 28.0 | 1624 | 4.2070 | 0.2218 | 0.2348 | 0.2256 | 0.2390 | 0.2348 | 0.2339 | 0.2245 | 0.2348 | 0.2348 | 0.2348 | | 0.0197 | 29.0 | 1682 | 4.1533 | 0.2259 | 0.2424 | 0.2355 | 0.2402 | 0.2424 | 0.2388 | 0.2256 | 0.2424 | 0.2424 | 0.2424 | | 0.0225 | 30.0 | 1740 | 4.1562 | 0.2426 | 0.2576 | 0.2530 | 0.2587 | 0.2576 | 0.2561 | 0.2388 | 0.2576 | 0.2576 | 0.2576 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu124 - Datasets 3.3.1 - Tokenizers 0.21.0
[ "-", "0", "1", "2", "3", "4", "5" ]
corranm/square_run_square_run_second_vote_full_pic_25_age_gender
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # square_run_square_run_second_vote_full_pic_25_age_gender This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.8291 - F1 Macro: 0.0852 - F1 Micro: 0.2576 - F1 Weighted: 0.1329 - Precision Macro: 0.0700 - Precision Micro: 0.2576 - Precision Weighted: 0.1080 - Recall Macro: 0.1633 - Recall Micro: 0.2576 - Recall Weighted: 0.2576 - Accuracy: 0.2576 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_BNB with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 Macro | F1 Micro | F1 Weighted | Precision Macro | Precision Micro | Precision Weighted | Recall Macro | Recall Micro | Recall Weighted | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:|:--------:|:-----------:|:---------------:|:---------------:|:------------------:|:------------:|:------------:|:---------------:|:--------:| | 1.8826 | 1.0 | 58 | 1.8802 | 0.1022 | 0.1818 | 0.1321 | 0.0986 | 0.1818 | 0.1212 | 0.1360 | 0.1818 | 0.1818 | 0.1818 | | 1.8382 | 2.0 | 116 | 2.0223 | 0.0738 | 0.1515 | 0.0863 | 0.0660 | 0.1515 | 0.0799 | 0.1467 | 0.1515 | 0.1515 | 0.1515 | | 1.8296 | 3.0 | 174 | 1.8755 | 0.0841 | 0.2273 | 0.1108 | 0.0646 | 0.2273 | 0.0809 | 0.1598 | 0.2273 | 0.2273 | 0.2273 | | 1.7755 | 4.0 | 232 | 1.8591 | 0.0794 | 0.2197 | 0.1135 | 0.0792 | 0.2197 | 0.1056 | 0.1467 | 0.2197 | 0.2197 | 0.2197 | | 1.8011 | 5.0 | 290 | 1.9143 | 0.1504 | 0.1818 | 0.1636 | 0.1863 | 0.1818 | 0.2045 | 0.1744 | 0.1818 | 0.1818 | 0.1818 | | 1.921 | 6.0 | 348 | 1.9060 | 0.0595 | 0.1818 | 0.0832 | 0.0416 | 0.1818 | 0.0563 | 0.1242 | 0.1818 | 0.1818 | 0.1818 | | 1.9152 | 7.0 | 406 | 1.8987 | 0.1780 | 0.2348 | 0.2040 | 0.1785 | 0.2348 | 0.2065 | 0.2078 | 0.2348 | 0.2348 | 0.2348 | | 1.7616 | 8.0 | 464 | 1.9606 | 0.1223 | 0.1970 | 0.1511 | 0.1341 | 0.1970 | 0.1662 | 0.1699 | 0.1970 | 0.1970 | 0.1970 | | 1.8596 | 9.0 | 522 | 2.0854 | 0.1110 | 0.1818 | 0.1461 | 0.1146 | 0.1818 | 0.1422 | 0.1331 | 0.1818 | 0.1818 | 0.1818 | | 1.9122 | 10.0 | 580 | 1.9908 | 0.1603 | 0.2121 | 0.1830 | 0.1931 | 0.2121 | 0.2296 | 0.1841 | 0.2121 | 0.2121 | 0.2121 | | 1.6702 | 11.0 | 638 | 2.0234 | 0.1220 | 0.1742 | 0.1508 | 0.1251 | 0.1742 | 0.1473 | 0.1360 | 0.1742 | 0.1742 | 0.1742 | | 1.1316 | 12.0 | 696 | 2.2998 | 0.1124 | 0.2121 | 0.1523 | 0.1126 | 0.2121 | 0.1479 | 0.1492 | 0.2121 | 0.2121 | 0.2121 | | 1.3513 | 13.0 | 754 | 2.2459 | 0.1855 | 0.2803 | 0.2326 | 0.2088 | 0.2803 | 0.2422 | 0.2144 | 0.2803 | 0.2803 | 0.2803 | | 0.6987 | 14.0 | 812 | 2.3719 | 0.1576 | 0.2045 | 0.1913 | 0.1537 | 0.2045 | 0.1849 | 0.1668 | 0.2045 | 0.2045 | 0.2045 | | 1.3304 | 15.0 | 870 | 2.9644 | 0.1378 | 0.1591 | 0.1728 | 0.1675 | 0.1591 | 0.2196 | 0.1337 | 0.1591 | 0.1591 | 0.1591 | | 0.6744 | 16.0 | 928 | 2.9702 | 0.1575 | 
0.1894 | 0.1838 | 0.1582 | 0.1894 | 0.1908 | 0.1688 | 0.1894 | 0.1894 | 0.1894 | | 0.6061 | 17.0 | 986 | 3.0238 | 0.1174 | 0.1894 | 0.1617 | 0.1122 | 0.1894 | 0.1537 | 0.1360 | 0.1894 | 0.1894 | 0.1894 | | 1.0875 | 18.0 | 1044 | 3.1215 | 0.1095 | 0.1667 | 0.1468 | 0.1067 | 0.1667 | 0.1401 | 0.1216 | 0.1667 | 0.1667 | 0.1667 | | 0.2082 | 19.0 | 1102 | 3.4389 | 0.1217 | 0.1667 | 0.1589 | 0.1198 | 0.1667 | 0.1549 | 0.1271 | 0.1667 | 0.1667 | 0.1667 | | 0.4382 | 20.0 | 1160 | 3.6764 | 0.1607 | 0.1970 | 0.2010 | 0.1736 | 0.1970 | 0.2200 | 0.1607 | 0.1970 | 0.1970 | 0.1970 | | 0.0939 | 21.0 | 1218 | 3.8913 | 0.1524 | 0.1894 | 0.1829 | 0.1524 | 0.1894 | 0.1856 | 0.1603 | 0.1894 | 0.1894 | 0.1894 | | 0.1365 | 22.0 | 1276 | 4.0012 | 0.1509 | 0.1894 | 0.1828 | 0.1545 | 0.1894 | 0.1878 | 0.1571 | 0.1894 | 0.1894 | 0.1894 | | 0.4348 | 23.0 | 1334 | 4.1969 | 0.1558 | 0.1818 | 0.1880 | 0.1638 | 0.1818 | 0.2027 | 0.1548 | 0.1818 | 0.1818 | 0.1818 | | 0.4376 | 24.0 | 1392 | 4.3269 | 0.1256 | 0.1591 | 0.1580 | 0.1256 | 0.1591 | 0.1588 | 0.1270 | 0.1591 | 0.1591 | 0.1591 | | 0.1977 | 25.0 | 1450 | 4.2918 | 0.1182 | 0.1439 | 0.1433 | 0.1202 | 0.1439 | 0.1473 | 0.1207 | 0.1439 | 0.1439 | 0.1439 | | 0.1903 | 26.0 | 1508 | 4.4985 | 0.1132 | 0.1439 | 0.1412 | 0.1132 | 0.1439 | 0.1438 | 0.1172 | 0.1439 | 0.1439 | 0.1439 | | 0.0205 | 27.0 | 1566 | 4.5622 | 0.1232 | 0.1515 | 0.1500 | 0.1250 | 0.1515 | 0.1525 | 0.1248 | 0.1515 | 0.1515 | 0.1515 | | 0.2659 | 28.0 | 1624 | 4.5656 | 0.1106 | 0.1439 | 0.1396 | 0.1077 | 0.1439 | 0.1358 | 0.1139 | 0.1439 | 0.1439 | 0.1439 | | 0.0062 | 29.0 | 1682 | 4.6734 | 0.1174 | 0.1515 | 0.1460 | 0.1133 | 0.1515 | 0.1412 | 0.1223 | 0.1515 | 0.1515 | 0.1515 | | 0.224 | 30.0 | 1740 | 4.6959 | 0.1164 | 0.1515 | 0.1454 | 0.1115 | 0.1515 | 0.1402 | 0.1223 | 0.1515 | 0.1515 | 0.1515 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu124 - Datasets 3.3.1 - Tokenizers 0.21.0
[ "-", "0", "1", "2", "3", "4", "5" ]
corranm/square_run_square_run_second_vote_full_pic_25
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # square_run_square_run_second_vote_full_pic_25 This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.8562 - F1 Macro: 0.1224 - F1 Micro: 0.2273 - F1 Weighted: 0.1585 - Precision Macro: 0.1354 - Precision Micro: 0.2273 - Precision Weighted: 0.1677 - Recall Macro: 0.1689 - Recall Micro: 0.2273 - Recall Weighted: 0.2273 - Accuracy: 0.2273 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_BNB with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 Macro | F1 Micro | F1 Weighted | Precision Macro | Precision Micro | Precision Weighted | Recall Macro | Recall Micro | Recall Weighted | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:|:--------:|:-----------:|:---------------:|:---------------:|:------------------:|:------------:|:------------:|:---------------:|:--------:| | 1.8549 | 1.0 | 58 | 1.8882 | 0.0921 | 0.1818 | 0.1204 | 0.0853 | 0.1818 | 0.1146 | 0.1396 | 0.1818 | 0.1818 | 0.1818 | | 1.7658 | 2.0 | 116 | 1.9512 | 0.0767 | 0.1818 | 0.0967 | 0.0520 | 0.1818 | 0.0664 | 0.1495 | 0.1818 | 0.1818 | 0.1818 | | 1.8358 | 3.0 | 174 | 1.9288 | 0.0518 | 0.2197 | 0.0796 | 0.0316 | 0.2197 | 0.0486 | 0.1429 | 0.2197 | 0.2197 | 0.2197 | | 1.838 | 4.0 | 232 | 1.8900 | 0.0913 | 0.1818 | 0.1280 | 0.0727 | 0.1818 | 0.1001 | 0.1271 | 0.1818 | 0.1818 | 0.1818 | | 1.7559 | 5.0 | 290 | 1.9283 | 0.0850 | 0.1212 | 0.0951 | 0.1143 | 0.1212 | 0.1319 | 0.1191 | 0.1212 | 0.1212 | 0.1212 | | 1.8566 | 6.0 | 348 | 1.9484 | 0.0706 | 0.2045 | 0.0945 | 0.0620 | 0.2045 | 0.0752 | 0.1400 | 0.2045 | 0.2045 | 0.2045 | | 2.2934 | 7.0 | 406 | 1.9065 | 0.1343 | 0.2045 | 0.1672 | 0.1147 | 0.2045 | 0.1420 | 0.1633 | 0.2045 | 0.2045 | 0.2045 | | 1.7536 | 8.0 | 464 | 1.9479 | 0.1478 | 0.2197 | 0.1850 | 0.1421 | 0.2197 | 0.1848 | 0.1805 | 0.2197 | 0.2197 | 0.2197 | | 1.8144 | 9.0 | 522 | 2.0296 | 0.1669 | 0.2121 | 0.2033 | 0.1791 | 0.2121 | 0.2094 | 0.1677 | 0.2121 | 0.2121 | 0.2121 | | 1.8444 | 10.0 | 580 | 2.0174 | 0.1767 | 0.2121 | 0.2074 | 0.1983 | 0.2121 | 0.2457 | 0.1882 | 0.2121 | 0.2121 | 0.2121 | | 1.5718 | 11.0 | 638 | 2.0928 | 0.1532 | 0.2197 | 0.1994 | 0.1432 | 0.2197 | 0.1846 | 0.1674 | 0.2197 | 0.2197 | 0.2197 | | 1.3007 | 12.0 | 696 | 2.3367 | 0.1347 | 0.2273 | 0.1843 | 0.1228 | 0.2273 | 0.1637 | 0.1619 | 0.2273 | 0.2273 | 0.2273 | | 1.6815 | 13.0 | 754 | 2.3941 | 0.1582 | 0.25 | 0.1983 | 0.1550 | 0.25 | 0.1865 | 0.1920 | 0.25 | 0.25 | 0.25 | | 0.7993 | 14.0 | 812 | 2.4611 | 0.1382 | 0.1818 | 0.1763 | 0.1368 | 0.1818 | 0.1758 | 0.1447 | 0.1818 | 0.1818 | 0.1818 | | 1.0532 | 15.0 | 870 | 2.6119 | 0.1591 | 0.2121 | 0.2028 | 0.1857 | 0.2121 | 0.2359 | 0.1652 | 0.2121 | 0.2121 | 0.2121 | | 0.676 | 16.0 | 928 | 2.6952 | 0.1942 | 0.2424 | 0.2419 | 0.1998 
| 0.2424 | 0.2520 | 0.1986 | 0.2424 | 0.2424 | 0.2424 | | 0.5739 | 17.0 | 986 | 2.9870 | 0.1704 | 0.2197 | 0.1996 | 0.1794 | 0.2197 | 0.2173 | 0.1894 | 0.2197 | 0.2197 | 0.2197 | | 0.3949 | 18.0 | 1044 | 2.9761 | 0.1543 | 0.2197 | 0.2029 | 0.1522 | 0.2197 | 0.2002 | 0.1658 | 0.2197 | 0.2197 | 0.2197 | | 0.4349 | 19.0 | 1102 | 3.2431 | 0.1300 | 0.1591 | 0.1608 | 0.1364 | 0.1591 | 0.1727 | 0.1333 | 0.1591 | 0.1591 | 0.1591 | | 0.2835 | 20.0 | 1160 | 3.0830 | 0.1741 | 0.2121 | 0.2163 | 0.1864 | 0.2121 | 0.2394 | 0.1769 | 0.2121 | 0.2121 | 0.2121 | | 0.0613 | 21.0 | 1218 | 3.3210 | 0.1730 | 0.2348 | 0.2231 | 0.1745 | 0.2348 | 0.2228 | 0.1794 | 0.2348 | 0.2348 | 0.2348 | | 0.331 | 22.0 | 1276 | 3.4701 | 0.1809 | 0.2197 | 0.2204 | 0.1805 | 0.2197 | 0.2219 | 0.1821 | 0.2197 | 0.2197 | 0.2197 | | 0.0503 | 23.0 | 1334 | 3.5875 | 0.1869 | 0.2424 | 0.2379 | 0.1892 | 0.2424 | 0.2406 | 0.1896 | 0.2424 | 0.2424 | 0.2424 | | 0.4794 | 24.0 | 1392 | 3.6579 | 0.1739 | 0.2197 | 0.2200 | 0.1790 | 0.2197 | 0.2291 | 0.1753 | 0.2197 | 0.2197 | 0.2197 | | 0.2527 | 25.0 | 1450 | 3.7844 | 0.1786 | 0.2121 | 0.2154 | 0.1819 | 0.2121 | 0.2244 | 0.1796 | 0.2121 | 0.2121 | 0.2121 | | 0.1714 | 26.0 | 1508 | 3.9514 | 0.1805 | 0.2121 | 0.2206 | 0.1908 | 0.2121 | 0.2407 | 0.1793 | 0.2121 | 0.2121 | 0.2121 | | 0.0291 | 27.0 | 1566 | 3.9734 | 0.1864 | 0.2197 | 0.2268 | 0.1929 | 0.2197 | 0.2392 | 0.1842 | 0.2197 | 0.2197 | 0.2197 | | 0.2084 | 28.0 | 1624 | 4.0018 | 0.1767 | 0.2197 | 0.2223 | 0.1813 | 0.2197 | 0.2307 | 0.1762 | 0.2197 | 0.2197 | 0.2197 | | 0.0191 | 29.0 | 1682 | 4.0030 | 0.2000 | 0.2424 | 0.2448 | 0.2038 | 0.2424 | 0.2526 | 0.2004 | 0.2424 | 0.2424 | 0.2424 | | 0.0414 | 30.0 | 1740 | 4.0048 | 0.2004 | 0.2424 | 0.2450 | 0.2047 | 0.2424 | 0.2526 | 0.2000 | 0.2424 | 0.2424 | 0.2424 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu124 - Datasets 3.3.1 - Tokenizers 0.21.0
[ "-", "0", "1", "2", "3", "4", "5" ]
corranm/square_run_square_run_first_vote_full_pic_25_age_gender
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # square_run_square_run_first_vote_full_pic_25_age_gender This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.8346 - F1 Macro: 0.1851 - F1 Micro: 0.3182 - F1 Weighted: 0.2345 - Precision Macro: 0.1760 - Precision Micro: 0.3182 - Precision Weighted: 0.2208 - Recall Macro: 0.2432 - Recall Micro: 0.3182 - Recall Weighted: 0.3182 - Accuracy: 0.3182 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_BNB with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 Macro | F1 Micro | F1 Weighted | Precision Macro | Precision Micro | Precision Weighted | Recall Macro | Recall Micro | Recall Weighted | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:|:--------:|:-----------:|:---------------:|:---------------:|:------------------:|:------------:|:------------:|:---------------:|:--------:| | 1.9467 | 1.0 | 58 | 1.8976 | 0.0492 | 0.1439 | 0.0606 | 0.0509 | 0.1439 | 0.0569 | 0.1108 | 0.1439 | 0.1439 | 0.1439 | | 2.0139 | 2.0 | 116 | 2.0406 | 0.0794 | 0.1894 | 0.0861 | 0.0546 | 0.1894 | 0.0593 | 0.1762 | 0.1894 | 0.1894 | 0.1894 | | 1.9091 | 3.0 | 174 | 1.8605 | 0.1264 | 0.25 | 0.1652 | 0.1148 | 0.25 | 0.1468 | 0.1847 | 0.25 | 0.25 | 0.25 | | 1.7652 | 4.0 | 232 | 1.8629 | 0.0886 | 0.2348 | 0.1154 | 0.0844 | 0.2348 | 0.1043 | 0.1701 | 0.2348 | 0.2348 | 0.2348 | | 1.841 | 5.0 | 290 | 1.8822 | 0.1340 | 0.1970 | 0.1697 | 0.1327 | 0.1970 | 0.1698 | 0.1569 | 0.1970 | 0.1970 | 0.1970 | | 1.8134 | 6.0 | 348 | 1.8746 | 0.1157 | 0.2197 | 0.1513 | 0.0930 | 0.2197 | 0.1211 | 0.1672 | 0.2197 | 0.2197 | 0.2197 | | 1.9708 | 7.0 | 406 | 1.9749 | 0.1497 | 0.2197 | 0.1805 | 0.2764 | 0.2197 | 0.3067 | 0.1825 | 0.2197 | 0.2197 | 0.2197 | | 1.7448 | 8.0 | 464 | 1.9512 | 0.1740 | 0.2424 | 0.2025 | 0.1928 | 0.2424 | 0.2300 | 0.2042 | 0.2424 | 0.2424 | 0.2424 | | 1.7039 | 9.0 | 522 | 1.9580 | 0.1514 | 0.2121 | 0.1770 | 0.1520 | 0.2121 | 0.1761 | 0.1786 | 0.2121 | 0.2121 | 0.2121 | | 1.5542 | 10.0 | 580 | 2.2252 | 0.1736 | 0.2197 | 0.2007 | 0.1875 | 0.2197 | 0.2203 | 0.1986 | 0.2197 | 0.2197 | 0.2197 | | 1.4417 | 11.0 | 638 | 2.1616 | 0.2483 | 0.2955 | 0.2879 | 0.2504 | 0.2955 | 0.2902 | 0.2555 | 0.2955 | 0.2955 | 0.2955 | | 0.9508 | 12.0 | 696 | 2.3267 | 0.2034 | 0.2348 | 0.2246 | 0.2047 | 0.2348 | 0.2280 | 0.2150 | 0.2348 | 0.2348 | 0.2348 | | 1.169 | 13.0 | 754 | 2.6085 | 0.1204 | 0.1515 | 0.1285 | 0.1492 | 0.1515 | 0.1657 | 0.1474 | 0.1515 | 0.1515 | 0.1515 | | 0.7098 | 14.0 | 812 | 2.4232 | 0.2598 | 0.2727 | 0.2634 | 0.3326 | 0.2727 | 0.3027 | 0.2485 | 0.2727 | 0.2727 | 0.2727 | | 0.9771 | 15.0 | 870 | 2.8032 | 0.1870 | 0.2121 | 0.2126 | 0.1962 | 0.2121 | 0.2269 | 0.1891 | 0.2121 | 0.2121 | 0.2121 | | 0.9971 | 16.0 | 928 | 2.9563 | 0.1644 | 0.2197 | 
0.1996 | 0.1659 | 0.2197 | 0.1975 | 0.1780 | 0.2197 | 0.2197 | 0.2197 | | 0.3475 | 17.0 | 986 | 3.2126 | 0.1811 | 0.1894 | 0.1821 | 0.1894 | 0.1894 | 0.1906 | 0.1887 | 0.1894 | 0.1894 | 0.1894 | | 0.5477 | 18.0 | 1044 | 3.4777 | 0.2024 | 0.2045 | 0.2103 | 0.2774 | 0.2045 | 0.2715 | 0.1933 | 0.2045 | 0.2045 | 0.2045 | | 0.3557 | 19.0 | 1102 | 3.4563 | 0.2104 | 0.2424 | 0.2279 | 0.2264 | 0.2424 | 0.2417 | 0.2212 | 0.2424 | 0.2424 | 0.2424 | | 0.1263 | 20.0 | 1160 | 3.4877 | 0.1997 | 0.2197 | 0.2172 | 0.2041 | 0.2197 | 0.2225 | 0.2037 | 0.2197 | 0.2197 | 0.2197 | | 0.103 | 21.0 | 1218 | 3.7280 | 0.1908 | 0.2045 | 0.2011 | 0.2249 | 0.2045 | 0.2354 | 0.1978 | 0.2045 | 0.2045 | 0.2045 | | 0.1128 | 22.0 | 1276 | 4.0516 | 0.1865 | 0.2197 | 0.2075 | 0.1924 | 0.2197 | 0.2103 | 0.1936 | 0.2197 | 0.2197 | 0.2197 | | 0.0164 | 23.0 | 1334 | 4.1125 | 0.2020 | 0.2197 | 0.2203 | 0.2047 | 0.2197 | 0.2234 | 0.2016 | 0.2197 | 0.2197 | 0.2197 | | 0.0668 | 24.0 | 1392 | 4.1906 | 0.1919 | 0.2121 | 0.2070 | 0.1952 | 0.2121 | 0.2098 | 0.1960 | 0.2121 | 0.2121 | 0.2121 | | 0.3535 | 25.0 | 1450 | 4.2915 | 0.1868 | 0.2197 | 0.2092 | 0.1875 | 0.2197 | 0.2078 | 0.1945 | 0.2197 | 0.2197 | 0.2197 | | 0.0093 | 26.0 | 1508 | 4.2088 | 0.2287 | 0.2424 | 0.2372 | 0.2337 | 0.2424 | 0.2378 | 0.2293 | 0.2424 | 0.2424 | 0.2424 | | 0.0277 | 27.0 | 1566 | 4.2956 | 0.2216 | 0.25 | 0.2480 | 0.2238 | 0.25 | 0.2483 | 0.2217 | 0.25 | 0.25 | 0.25 | | 0.0922 | 28.0 | 1624 | 4.3624 | 0.1949 | 0.2273 | 0.2167 | 0.1910 | 0.2273 | 0.2106 | 0.2025 | 0.2273 | 0.2273 | 0.2273 | | 0.2617 | 29.0 | 1682 | 4.4012 | 0.1979 | 0.2348 | 0.2227 | 0.1930 | 0.2348 | 0.2157 | 0.2071 | 0.2348 | 0.2348 | 0.2348 | | 0.0108 | 30.0 | 1740 | 4.4032 | 0.2052 | 0.2424 | 0.2315 | 0.2016 | 0.2424 | 0.2249 | 0.2124 | 0.2424 | 0.2424 | 0.2424 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu124 - Datasets 3.3.1 - Tokenizers 0.21.0
[ "-", "0", "1", "2", "3", "4", "5" ]
touchtech/fashion-images-gender-age-vit-large-patch16-384-v1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # fashion-images-gender-age-vit-large-patch16-384-v1 This model is a fine-tuned version of [google/vit-large-patch16-384](https://huggingface.co/google/vit-large-patch16-384) on the touchtech/fashion-images-gender-age dataset. It achieves the following results on the evaluation set: - Loss: 0.0306 - Accuracy: 0.9951 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 1337 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 5.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:-----:|:---------------:|:--------:| | 0.084 | 1.0 | 2457 | 0.0423 | 0.9905 | | 0.0233 | 2.0 | 4914 | 0.0353 | 0.9928 | | 0.009 | 3.0 | 7371 | 0.0404 | 0.9942 | | 0.0056 | 4.0 | 9828 | 0.0312 | 0.9951 | | 0.0 | 5.0 | 12285 | 0.0306 | 0.9951 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "model-female-adult", "model-female-child", "model-male-adult", "model-male-child", "pack" ]
msoczka/vit-base-oxford-iiit-pets
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-oxford-iiit-pets This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the pcuenq/oxford-pets dataset. It achieves the following results on the evaluation set: - Loss: 0.2193 - Accuracy: 0.9418 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0003 - train_batch_size: 16 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.361 | 1.0 | 370 | 0.2664 | 0.9405 | | 0.2042 | 2.0 | 740 | 0.2037 | 0.9350 | | 0.1622 | 3.0 | 1110 | 0.1811 | 0.9391 | | 0.1555 | 4.0 | 1480 | 0.1696 | 0.9432 | | 0.1275 | 5.0 | 1850 | 0.1670 | 0.9432 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu118 - Datasets 2.16.1 - Tokenizers 0.21.0
[ "siamese", "birman", "shiba inu", "staffordshire bull terrier", "basset hound", "bombay", "japanese chin", "chihuahua", "german shorthaired", "pomeranian", "beagle", "english cocker spaniel", "american pit bull terrier", "ragdoll", "persian", "egyptian mau", "miniature pinscher", "sphynx", "maine coon", "keeshond", "yorkshire terrier", "havanese", "leonberger", "wheaten terrier", "american bulldog", "english setter", "boxer", "newfoundland", "bengal", "samoyed", "british shorthair", "great pyrenees", "abyssinian", "pug", "saint bernard", "russian blue", "scottish terrier" ]
zhuchi76/vit-base-transfer-learning-oxford-pets
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "siamese", "birman", "shiba inu", "staffordshire bull terrier", "basset hound", "bombay", "japanese chin", "chihuahua", "german shorthaired", "pomeranian", "beagle", "english cocker spaniel", "american pit bull terrier", "ragdoll", "persian", "egyptian mau", "miniature pinscher", "sphynx", "maine coon", "keeshond", "yorkshire terrier", "havanese", "leonberger", "wheaten terrier", "american bulldog", "english setter", "boxer", "newfoundland", "bengal", "samoyed", "british shorthair", "great pyrenees", "abyssinian", "pug", "saint bernard", "russian blue", "scottish terrier" ]
zhuchi76/vit-base-lora-oxford-pets
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
[ "siamese", "birman", "shiba inu", "staffordshire bull terrier", "basset hound", "bombay", "japanese chin", "chihuahua", "german shorthaired", "pomeranian", "beagle", "english cocker spaniel", "american pit bull terrier", "ragdoll", "persian", "egyptian mau", "miniature pinscher", "sphynx", "maine coon", "keeshond", "yorkshire terrier", "havanese", "leonberger", "wheaten terrier", "american bulldog", "english setter", "boxer", "newfoundland", "bengal", "samoyed", "british shorthair", "great pyrenees", "abyssinian", "pug", "saint bernard", "russian blue", "scottish terrier" ]
dskong07/plug-classif-model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # plug-classif-model This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.5418 - Accuracy: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 2 - eval_batch_size: 2 - seed: 42 - gradient_accumulation_steps: 2 - total_train_batch_size: 4 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 8 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:------:|:----:|:---------------:|:--------:| | 0.695 | 0.1538 | 1 | 0.7619 | 0.1429 | | 0.6096 | 0.3077 | 2 | 0.7630 | 0.2857 | | 0.7567 | 0.4615 | 3 | 0.7897 | 0.2857 | | 0.6185 | 0.6154 | 4 | 0.7943 | 0.2857 | | 0.5869 | 0.7692 | 5 | 0.7740 | 0.2857 | | 0.8098 | 0.9231 | 6 | 0.7680 | 0.4286 | | 0.402 | 1.0 | 7 | 0.7535 | 0.2857 | | 0.5498 | 1.1538 | 8 | 0.7027 | 0.2857 | | 0.5556 | 1.3077 | 9 | 0.7100 | 0.2857 | | 0.4257 | 1.4615 | 10 | 0.6922 | 0.4286 | | 0.5488 | 1.6154 | 11 | 0.6592 | 0.4286 | | 0.4829 | 1.7692 | 12 | 0.7471 | 0.2857 | | 0.677 | 1.9231 | 13 | 0.6789 | 0.4286 | | 0.3105 | 2.0 | 14 | 0.6908 | 0.4286 | | 0.461 | 2.1538 | 15 | 0.6732 | 0.4286 | | 0.388 | 2.3077 | 16 | 0.6960 | 0.5714 | | 0.4678 | 2.4615 | 17 | 0.6274 | 0.5714 | | 0.4753 | 2.6154 | 18 | 0.6437 | 0.5714 | | 0.5482 | 2.7692 | 19 | 0.6570 | 0.5714 | | 0.4301 | 2.9231 | 20 | 0.6745 | 0.7143 | | 0.177 | 3.0 | 21 | 0.6477 | 0.4286 | | 0.4159 | 3.1538 | 22 | 0.6018 | 0.5714 | | 0.3089 | 3.3077 | 23 | 0.5951 | 0.5714 | | 0.4568 | 3.4615 | 24 | 0.5659 | 0.8571 | | 0.4791 | 3.6154 | 25 | 0.5845 | 0.8571 | | 0.4097 | 3.7692 | 26 | 0.6343 | 0.8571 | | 0.4327 | 3.9231 | 27 | 0.5930 | 0.8571 | | 0.1493 | 4.0 | 28 | 0.5458 | 1.0 | | 0.3021 | 4.1538 | 29 | 0.5421 | 1.0 | | 0.3166 | 4.3077 | 30 | 0.5646 | 1.0 | | 0.2537 | 4.4615 | 31 | 0.5960 | 0.8571 | | 0.2853 | 4.6154 | 32 | 0.5636 | 0.8571 | | 0.3353 | 4.7692 | 33 | 0.5513 | 1.0 | | 0.3462 | 4.9231 | 34 | 0.5735 | 0.8571 | | 0.1871 | 5.0 | 35 | 0.5109 | 1.0 | | 0.2953 | 5.1538 | 36 | 0.5797 | 1.0 | | 0.2655 | 5.3077 | 37 | 0.5374 | 1.0 | | 0.352 | 5.4615 | 38 | 0.5245 | 1.0 | | 0.3536 | 5.6154 | 39 | 0.5387 | 0.8571 | | 0.2579 | 5.7692 | 40 | 0.5067 | 1.0 | | 0.3356 | 5.9231 | 41 | 0.5992 | 0.8571 | | 0.1094 | 6.0 | 42 | 0.5778 | 0.8571 | | 0.3345 | 6.1538 | 43 | 0.4571 | 1.0 | | 0.2314 | 6.3077 | 44 | 0.4651 | 1.0 | | 0.3312 | 6.4615 | 45 | 0.4798 | 1.0 | | 0.206 | 6.6154 | 46 | 0.4911 | 1.0 | | 0.3101 | 6.7692 | 47 | 0.4788 | 1.0 | | 0.3 | 6.9231 | 48 | 0.5418 | 1.0 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.6.0+cpu - Datasets 3.2.0 - Tokenizers 0.21.0
[ "healthy", "broken" ]
touchtech/fashion-images-perspectives-vit-large-patch16-384-v1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # fashion-images-perspectives-vit-large-patch16-384-v1 This model is a fine-tuned version of [google/vit-large-patch16-384](https://huggingface.co/google/vit-large-patch16-384) on the touchtech/fashion-images-perspectives-v2 dataset. It achieves the following results on the evaluation set: - Loss: 0.1572 - Accuracy: 0.9545 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 1337 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 5.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:-----:|:---------------:|:--------:| | 0.1766 | 1.0 | 3423 | 0.1572 | 0.9545 | | 0.1108 | 2.0 | 6846 | 0.1968 | 0.9580 | | 0.0456 | 3.0 | 10269 | 0.2058 | 0.9607 | | 0.0118 | 4.0 | 13692 | 0.2166 | 0.9634 | | 0.007 | 5.0 | 17115 | 0.2290 | 0.9642 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "model-back-close", "model-back-full", "pack-detail", "pack-front", "pack-side", "pack-top", "model-detail", "model-front-close", "model-front-full", "model-side-close", "model-side-full", "pack-angled", "pack-back", "pack-bottom" ]
dskong07/cord-classif-model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# cord-classif-model

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2013
- Accuracy: 1.0

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 2
- eval_batch_size: 2
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 4
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 8

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:------:|:----:|:---------------:|:--------:|
| 0.7042 | 0.1111 | 1 | 0.6871 | 0.5 |
| 0.7058 | 0.2222 | 2 | 0.6750 | 0.6 |
| 0.6416 | 0.3333 | 3 | 0.6667 | 0.9 |
| 0.6936 | 0.4444 | 4 | 0.6343 | 0.7 |
| 0.6629 | 0.5556 | 5 | 0.6190 | 0.9 |
| 0.7195 | 0.6667 | 6 | 0.5947 | 0.9 |
| 0.6868 | 0.7778 | 7 | 0.6155 | 0.9 |
| 0.6476 | 0.8889 | 8 | 0.5540 | 0.9 |
| 0.7552 | 1.0 | 9 | 0.5931 | 0.9 |
| 0.5168 | 1.1111 | 10 | 0.5694 | 0.9 |
| 0.4808 | 1.2222 | 11 | 0.5690 | 0.9 |
| 0.6157 | 1.3333 | 12 | 0.5573 | 0.9 |
| 0.5479 | 1.4444 | 13 | 0.5512 | 0.9 |
| 0.4646 | 1.5556 | 14 | 0.5307 | 0.9 |
| 0.4772 | 1.6667 | 15 | 0.5170 | 0.9 |
| 0.4864 | 1.7778 | 16 | 0.5357 | 0.9 |
| 0.4775 | 1.8889 | 17 | 0.4613 | 0.9 |
| 0.6061 | 2.0 | 18 | 0.4886 | 0.9 |
| 0.3524 | 2.1111 | 19 | 0.4830 | 0.9 |
| 0.3927 | 2.2222 | 20 | 0.4916 | 0.9 |
| 0.4264 | 2.3333 | 21 | 0.4799 | 0.9 |
| 0.3172 | 2.4444 | 22 | 0.4445 | 0.9 |
| 0.3645 | 2.5556 | 23 | 0.4737 | 0.9 |
| 0.3675 | 2.6667 | 24 | 0.4502 | 0.9 |
| 0.5295 | 2.7778 | 25 | 0.4212 | 0.9 |
| 0.2749 | 2.8889 | 26 | 0.4278 | 0.9 |
| 0.3156 | 3.0 | 27 | 0.4320 | 0.9 |
| 0.3443 | 3.1111 | 28 | 0.3981 | 0.9 |
| 0.3151 | 3.2222 | 29 | 0.3999 | 0.9 |
| 0.3343 | 3.3333 | 30 | 0.3813 | 0.9 |
| 0.2849 | 3.4444 | 31 | 0.3708 | 0.9 |
| 0.203 | 3.5556 | 32 | 0.3883 | 0.9 |
| 0.2974 | 3.6667 | 33 | 0.3516 | 0.9 |
| 0.39 | 3.7778 | 34 | 0.3712 | 0.9 |
| 0.3439 | 3.8889 | 35 | 0.3459 | 0.9 |
| 0.311 | 4.0 | 36 | 0.3271 | 0.9 |
| 0.2814 | 4.1111 | 37 | 0.3801 | 0.9 |
| 0.161 | 4.2222 | 38 | 0.3165 | 0.9 |
| 0.14 | 4.3333 | 39 | 0.2890 | 0.9 |
| 0.3928 | 4.4444 | 40 | 0.3259 | 0.9 |
| 0.1812 | 4.5556 | 41 | 0.2839 | 0.9 |
| 0.21 | 4.6667 | 42 | 0.2612 | 1.0 |
| 0.1424 | 4.7778 | 43 | 0.3312 | 1.0 |
| 0.2238 | 4.8889 | 44 | 0.2660 | 0.9 |
| 0.2472 | 5.0 | 45 | 0.2653 | 0.9 |
| 0.1143 | 5.1111 | 46 | 0.2353 | 1.0 |
| 0.1888 | 5.2222 | 47 | 0.2542 | 1.0 |
| 0.2183 | 5.3333 | 48 | 0.2679 | 1.0 |
| 0.1019 | 5.4444 | 49 | 0.2618 | 1.0 |
| 0.2266 | 5.5556 | 50 | 0.2353 | 1.0 |
| 0.15 | 5.6667 | 51 | 0.2337 | 1.0 |
| 0.2253 | 5.7778 | 52 | 0.2540 | 1.0 |
| 0.1451 | 5.8889 | 53 | 0.2390 | 1.0 |
| 0.1481 | 6.0 | 54 | 0.2346 | 0.9 |
| 0.1112 | 6.1111 | 55 | 0.2171 | 1.0 |
| 0.1482 | 6.2222 | 56 | 0.2044 | 1.0 |
| 0.181 | 6.3333 | 57 | 0.1914 | 1.0 |
| 0.1091 | 6.4444 | 58 | 0.1911 | 1.0 |
| 0.1205 | 6.5556 | 59 | 0.1990 | 1.0 |
| 0.16 | 6.6667 | 60 | 0.1984 | 1.0 |
| 0.0967 | 6.7778 | 61 | 0.1852 | 1.0 |
| 0.1812 | 6.8889 | 62 | 0.1976 | 1.0 |
| 0.1711 | 7.0 | 63 | 0.1766 | 1.0 |
| 0.1959 | 7.1111 | 64 | 0.2000 | 1.0 |
| 0.4228 | 7.2222 | 65 | 0.2017 | 1.0 |
| 0.506 | 7.3333 | 66 | 0.1828 | 1.0 |
| 0.1869 | 7.4444 | 67 | 0.1728 | 1.0 |
| 0.0914 | 7.5556 | 68 | 0.1846 | 1.0 |
| 0.1622 | 7.6667 | 69 | 0.2005 | 1.0 |
| 0.0884 | 7.7778 | 70 | 0.2367 | 1.0 |
| 0.1018 | 7.8889 | 71 | 0.1785 | 1.0 |
| 0.1132 | 8.0 | 72 | 0.2013 | 1.0 |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.6.0+cpu
- Datasets 3.2.0
- Tokenizers 0.21.0
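A minimal inference sketch for this checkpoint, assuming it is published on the Hub under `dskong07/cord-classif-model` (the pattern mirrors the pipeline example used elsewhere in this collection; the input filename is a placeholder):

```python
# Minimal smoke test with the high-level pipeline helper.
from PIL import Image
from transformers import pipeline

pipe = pipeline("image-classification", model="dskong07/cord-classif-model")
image = Image.open("cord.jpg")  # hypothetical input image
print(pipe(image))  # e.g. [{'label': 'healthy', ...}, {'label': 'broken', ...}]
```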
[ "healthy", "broken" ]
dskong07/charger-classif-model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# charger-classif-model

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2678
- Accuracy: 0.9231

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 2
- eval_batch_size: 2
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 4
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 8

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:------:|:----:|:---------------:|:--------:|
| 0.4057 | 0.0769 | 1 | 0.5508 | 0.6923 |
| 0.5194 | 0.1538 | 2 | 0.5735 | 0.6923 |
| 0.4141 | 0.2308 | 3 | 0.5007 | 0.7692 |
| 0.5442 | 0.3077 | 4 | 0.5160 | 0.8462 |
| 0.43 | 0.3846 | 5 | 0.5931 | 0.7692 |
| 0.4126 | 0.4615 | 6 | 0.5228 | 0.7692 |
| 0.4151 | 0.5385 | 7 | 0.5552 | 0.7692 |
| 0.3753 | 0.6154 | 8 | 0.5825 | 0.6154 |
| 0.3468 | 0.6923 | 9 | 0.5637 | 0.6923 |
| 0.3467 | 0.7692 | 10 | 0.5148 | 0.6923 |
| 0.5188 | 0.8462 | 11 | 0.4735 | 0.7692 |
| 0.4342 | 0.9231 | 12 | 0.5058 | 0.7692 |
| 0.3888 | 1.0 | 13 | 0.5176 | 0.6923 |
| 0.3977 | 1.0769 | 14 | 0.4865 | 0.7692 |
| 0.1799 | 1.1538 | 15 | 0.5299 | 0.6923 |
| 0.4628 | 1.2308 | 16 | 0.5614 | 0.6923 |
| 0.8787 | 1.3077 | 17 | 0.5826 | 0.6923 |
| 0.3396 | 1.3846 | 18 | 0.5337 | 0.7692 |
| 0.2144 | 1.4615 | 19 | 0.5531 | 0.6923 |
| 0.242 | 1.5385 | 20 | 0.5317 | 0.6923 |
| 1.1866 | 1.6154 | 21 | 0.5042 | 0.6923 |
| 0.2689 | 1.6923 | 22 | 0.4067 | 0.8462 |
| 0.3953 | 1.7692 | 23 | 0.4513 | 0.8462 |
| 0.1978 | 1.8462 | 24 | 0.5103 | 0.6923 |
| 0.3293 | 1.9231 | 25 | 0.4829 | 0.6923 |
| 0.3324 | 2.0 | 26 | 0.4915 | 0.8462 |
| 0.2096 | 2.0769 | 27 | 0.5136 | 0.8462 |
| 0.4142 | 2.1538 | 28 | 0.4490 | 0.7692 |
| 0.4267 | 2.2308 | 29 | 0.4697 | 0.7692 |
| 0.1871 | 2.3077 | 30 | 0.4744 | 0.7692 |
| 0.3145 | 2.3846 | 31 | 0.5596 | 0.6923 |
| 0.3417 | 2.4615 | 32 | 0.4589 | 0.6923 |
| 0.1548 | 2.5385 | 33 | 0.5245 | 0.6923 |
| 0.3131 | 2.6154 | 34 | 0.4507 | 0.6923 |
| 0.1974 | 2.6923 | 35 | 0.4068 | 0.8462 |
| 0.3148 | 2.7692 | 36 | 0.5019 | 0.6923 |
| 0.5036 | 2.8462 | 37 | 0.4761 | 0.6923 |
| 0.2178 | 2.9231 | 38 | 0.4132 | 0.9231 |
| 0.4536 | 3.0 | 39 | 0.4745 | 0.7692 |
| 0.3118 | 3.0769 | 40 | 0.4869 | 0.7692 |
| 0.3465 | 3.1538 | 41 | 0.4473 | 0.7692 |
| 0.096 | 3.2308 | 42 | 0.4376 | 0.8462 |
| 0.1726 | 3.3077 | 43 | 0.5971 | 0.7692 |
| 0.1685 | 3.3846 | 44 | 0.4768 | 0.7692 |
| 0.2046 | 3.4615 | 45 | 0.3595 | 0.8462 |
| 0.1297 | 3.5385 | 46 | 0.4701 | 0.7692 |
| 0.4597 | 3.6154 | 47 | 0.4054 | 0.7692 |
| 0.3474 | 3.6923 | 48 | 0.3927 | 0.8462 |
| 0.4476 | 3.7692 | 49 | 0.5063 | 0.8462 |
| 0.1062 | 3.8462 | 50 | 0.4741 | 0.7692 |
| 0.5484 | 3.9231 | 51 | 0.4950 | 0.6923 |
| 0.0945 | 4.0 | 52 | 0.4647 | 0.7692 |
| 0.1053 | 4.0769 | 53 | 0.3743 | 0.8462 |
| 0.4122 | 4.1538 | 54 | 0.4350 | 0.8462 |
| 0.2825 | 4.2308 | 55 | 0.4246 | 0.8462 |
| 0.2912 | 4.3077 | 56 | 0.5250 | 0.6923 |
| 0.3193 | 4.3846 | 57 | 0.3639 | 0.8462 |
| 0.066 | 4.4615 | 58 | 0.3574 | 0.9231 |
| 0.0888 | 4.5385 | 59 | 0.4897 | 0.6923 |
| 0.1046 | 4.6154 | 60 | 0.3032 | 0.9231 |
| 0.2573 | 4.6923 | 61 | 0.5662 | 0.6154 |
| 0.368 | 4.7692 | 62 | 0.3699 | 0.8462 |
| 0.1484 | 4.8462 | 63 | 0.3517 | 0.8462 |
| 0.1444 | 4.9231 | 64 | 0.2988 | 0.9231 |
| 0.1492 | 5.0 | 65 | 0.3523 | 0.8462 |
| 0.112 | 5.0769 | 66 | 0.4245 | 0.8462 |
| 0.0711 | 5.1538 | 67 | 0.4451 | 0.6923 |
| 0.2455 | 5.2308 | 68 | 0.4774 | 0.7692 |
| 0.3981 | 5.3077 | 69 | 0.5084 | 0.7692 |
| 0.1682 | 5.3846 | 70 | 0.4053 | 0.8462 |
| 0.2809 | 5.4615 | 71 | 0.4574 | 0.6923 |
| 0.1929 | 5.5385 | 72 | 0.3242 | 0.7692 |
| 0.161 | 5.6154 | 73 | 0.3854 | 0.7692 |
| 0.1475 | 5.6923 | 74 | 0.3935 | 0.7692 |
| 0.1058 | 5.7692 | 75 | 0.5751 | 0.6923 |
| 0.1103 | 5.8462 | 76 | 0.3874 | 0.8462 |
| 0.1057 | 5.9231 | 77 | 0.3984 | 0.7692 |
| 0.1593 | 6.0 | 78 | 0.3299 | 0.8462 |
| 0.1154 | 6.0769 | 79 | 0.4778 | 0.7692 |
| 0.3131 | 6.1538 | 80 | 0.4863 | 0.7692 |
| 0.0791 | 6.2308 | 81 | 0.4897 | 0.7692 |
| 0.0635 | 6.3077 | 82 | 0.5831 | 0.7692 |
| 0.0704 | 6.3846 | 83 | 0.4384 | 0.8462 |
| 0.0597 | 6.4615 | 84 | 0.5519 | 0.7692 |
| 0.1117 | 6.5385 | 85 | 0.4525 | 0.7692 |
| 0.1542 | 6.6154 | 86 | 0.5354 | 0.8462 |
| 0.5737 | 6.6923 | 87 | 0.5034 | 0.7692 |
| 0.4216 | 6.7692 | 88 | 0.4514 | 0.7692 |
| 0.3276 | 6.8462 | 89 | 0.5688 | 0.7692 |
| 0.119 | 6.9231 | 90 | 0.3433 | 0.9231 |
| 0.1519 | 7.0 | 91 | 0.4454 | 0.7692 |
| 0.1155 | 7.0769 | 92 | 0.3323 | 0.7692 |
| 0.1264 | 7.1538 | 93 | 0.4030 | 0.6923 |
| 0.0585 | 7.2308 | 94 | 0.3404 | 0.8462 |
| 0.1404 | 7.3077 | 95 | 0.3507 | 0.8462 |
| 0.0417 | 7.3846 | 96 | 0.4860 | 0.7692 |
| 0.0873 | 7.4615 | 97 | 0.4896 | 0.8462 |
| 0.0801 | 7.5385 | 98 | 0.4383 | 0.7692 |
| 0.2163 | 7.6154 | 99 | 0.3764 | 0.8462 |
| 0.1823 | 7.6923 | 100 | 0.4258 | 0.8462 |
| 0.1832 | 7.7692 | 101 | 0.2890 | 0.8462 |
| 0.0879 | 7.8462 | 102 | 0.2909 | 0.8462 |
| 0.2345 | 7.9231 | 103 | 0.3617 | 0.8462 |
| 0.1096 | 8.0 | 104 | 0.2678 | 0.9231 |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.6.0+cpu
- Datasets 3.2.0
- Tokenizers 0.21.0
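For finer control than the pipeline helper, the checkpoint can also be driven through the auto classes directly; a sketch under the assumption that the repo ships both weights and an image processor (the input filename is hypothetical):

```python
# Lower-level inference: preprocess, forward pass, then per-class probabilities.
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

model_id = "dskong07/charger-classif-model"
processor = AutoImageProcessor.from_pretrained(model_id)
model = AutoModelForImageClassification.from_pretrained(model_id)

image = Image.open("charger.jpg")  # hypothetical input image
inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
probs = logits.softmax(dim=-1)[0]
for i, p in enumerate(probs):
    print(model.config.id2label[i], float(p))  # "healthy" / "broken" scores
```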
[ "healthy", "broken" ]
Snppuzzle/Lanna-model-con
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
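The "How to Get Started with the Model" section above is left unfilled. A minimal sketch, assuming (as the surrounding records suggest) that this checkpoint is an image classifier over the Lanna-script labels listed below; the model id comes from this record and the input filename is hypothetical:

```python
# Hedged getting-started example: load the checkpoint as an image classifier
# and show the five highest-scoring labels.
from PIL import Image
from transformers import pipeline

pipe = pipeline("image-classification", model="Snppuzzle/Lanna-model-con")
print(pipe(Image.open("glyph.png"), top_k=5))  # hypothetical input image
```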
[ "aabnam", "aemo", "amnat", "anong", "anu", "aphet", "athik", "banman", "binbon", "bochai", "bokin", "bolao", "bopen", "boron", "buppe", "chaehom", "chaeyang", "chaidi", "chanan", "changhan", "chaofa", "chaomom", "chaomueang", "chata", "chatu", "chaya", "chiangdao", "chiangmai", "chingchang", "chokdi", "chon", "dangsaap", "deklek", "deumnai", "doilo", "doiluang", "doitao", "dokbua", "eka", "fanhan", "hangdong", "hangsat", "hungtam", "huwai", "inta", "itam", "iti", "kadi", "kamyao", "kanmo", "kapmo", "kephet", "kepphak", "khaikai", "khaipa", "khamaen", "khaoma", "khata", "kheumnguem", "khomchai", "khongbo", "khongtua", "khunyuam", "khwaluat", "khwamsuk", "kinkhao", "kinkhong", "kinmuea", "kinru", "kluaibo", "laemai", "laichiao", "lailong", "lampang", "lattho", "lok", "luathak", "luatok", "maechaem", "maechai", "maechan", "maecharim", "maelao", "maelim", "maephrik", "maetaeng", "maeth", "maetha", "maewang", "maha", "mahachai", "mam", "manpen", "manu", "mueangphan", "mueangyong", "nakrian", "nambo", "nanglong", "nangsue", "naokhong", "nara", "newin", "nganban", "nguenchae", "nguenchat", "omkoi", "oprom", "oram", "osot", "padaet", "phaideuan", "phak", "phakhawa", "phayao", "phoemwai", "phuphiang", "phusang", "phuttha", "phuttho", "pikad", "pikot", "piso", "puri", "rakha", "ratna", "roisai", "ruluem", "saichai", "saket", "sana", "sanam", "sanya", "sapha", "sawa", "sayong", "siri", "sitth", "soekho", "soekman", "somkhuan", "songkho", "sukhato", "sukka", "taefai", "taehai", "tanam", "taro", "thairat", "thamam", "thawai", "thewa", "thuti", "uru", "wailang", "wasa", "wati", "wihan", "witcha", "withu", "yapheng", "yukloek" ]
thenewsupercell/New_DF_Image_VIT_V1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# New_DF_Image_VIT_V1

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0157
- Accuracy: 0.9971

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 4
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:-----:|:---------------:|:--------:|
| 0.0253 | 1.0 | 5252 | 0.0254 | 0.9936 |
| 0.0011 | 2.0 | 10504 | 0.0205 | 0.9954 |
| 0.0275 | 3.0 | 15756 | 0.0134 | 0.9970 |
| 0.0084 | 4.0 | 21008 | 0.0157 | 0.9971 |

### Framework versions

- Transformers 4.47.0
- Pytorch 2.5.1+cu121
- Datasets 3.2.0
- Tokenizers 0.21.0
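A sketch of a `TrainingArguments` object matching the hyperparameters listed above; the output directory is a hypothetical placeholder, and the dataset/Trainer wiring is omitted since the training data is not documented:

```python
# Reconstruction of the reported training configuration.
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="new-df-image-vit-v1",  # hypothetical
    learning_rate=5e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=4,
    fp16=True,  # "Native AMP" mixed precision
)
```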
[ "fake", "real" ]
djibri/mri_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. -->

# djibri/mri_classifier

This model is a fine-tuned version of [djibri/mri_classifier](https://huggingface.co/djibri/mri_classifier) on an unknown dataset.
It achieves the following results on the evaluation set:
- Train Loss: 0.7116
- Validation Loss: 0.7235
- Train Accuracy: 0.6880
- Epoch: 19

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- optimizer: {'name': 'Adam', 'weight_decay': None, 'clipnorm': None, 'global_clipnorm': None, 'clipvalue': None, 'use_ema': False, 'ema_momentum': 0.99, 'ema_overwrite_frequency': None, 'jit_compile': True, 'is_legacy_optimizer': False, 'learning_rate': 0.001, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False}
- training_precision: float32

### Training results

| Train Loss | Validation Loss | Train Accuracy | Epoch |
|:----------:|:---------------:|:--------------:|:-----:|
| 1.1610 | 0.9961 | 0.5301 | 0 |
| 1.0175 | 1.0906 | 0.5408 | 1 |
| 0.9817 | 1.1593 | 0.5324 | 2 |
| 0.9390 | 0.9281 | 0.5957 | 3 |
| 0.8854 | 0.9142 | 0.5934 | 4 |
| 0.8572 | 0.8900 | 0.5652 | 5 |
| 0.8584 | 0.8369 | 0.6056 | 6 |
| 0.8174 | 0.8710 | 0.5812 | 7 |
| 0.8190 | 0.8142 | 0.6209 | 8 |
| 0.7977 | 0.8600 | 0.5774 | 9 |
| 0.8046 | 0.8344 | 0.5988 | 10 |
| 0.7905 | 0.7853 | 0.6568 | 11 |
| 0.7773 | 0.9653 | 0.5728 | 12 |
| 0.7825 | 0.7865 | 0.6217 | 13 |
| 0.7833 | 0.8118 | 0.6575 | 14 |
| 0.7693 | 0.8081 | 0.6484 | 15 |
| 0.7503 | 0.7707 | 0.6690 | 16 |
| 0.7425 | 0.7434 | 0.6903 | 17 |
| 0.7700 | 0.7249 | 0.7109 | 18 |
| 0.7116 | 0.7235 | 0.6880 | 19 |

### Framework versions

- Transformers 4.47.0
- TensorFlow 2.17.1
- Datasets 3.3.1
- Tokenizers 0.21.0
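Since this card was generated by Keras, a TensorFlow-side inference sketch; it assumes TF weights and an image processor are available under `djibri/mri_classifier`, which the card itself does not confirm (the input filename is hypothetical):

```python
# TensorFlow inference over the four MRI classes listed below.
import tensorflow as tf
from PIL import Image
from transformers import AutoImageProcessor, TFAutoModelForImageClassification

model_id = "djibri/mri_classifier"
processor = AutoImageProcessor.from_pretrained(model_id)
model = TFAutoModelForImageClassification.from_pretrained(model_id)

image = Image.open("scan.png").convert("RGB")  # hypothetical MRI slice
inputs = processor(images=image, return_tensors="tf")
probs = tf.nn.softmax(model(**inputs).logits, axis=-1)[0]
for i, p in enumerate(probs.numpy()):
    print(model.config.id2label[i], p)  # glioma / meningioma / notumor / pituitary
```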
[ "glioma", "meningioma", "notumor", "pituitary" ]
argish/vit-base-patch16-224-in21k-facial-emotion-classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# vit_fer_finetuned

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.5470
- Accuracy: 0.8399
- F1 Macro: 0.8459
- F1 Micro: 0.8399
- Precision Macro: 0.8451
- Recall Macro: 0.8472
- F1 Angry: 0.7908
- Precision Angry: 0.7735
- Recall Angry: 0.8090
- F1 Disgust: 0.9321
- Precision Disgust: 0.9364
- Recall Disgust: 0.9279
- F1 Fear: 0.7547
- Precision Fear: 0.7677
- Recall Fear: 0.7422
- F1 Happy: 0.9386
- Precision Happy: 0.9556
- Recall Happy: 0.9222
- F1 Neutral: 0.8262
- Precision Neutral: 0.8105
- Recall Neutral: 0.8427
- F1 Sad: 0.7626
- Precision Sad: 0.7692
- Recall Sad: 0.7562
- F1 Surprise: 0.9164
- Precision Surprise: 0.9030
- Recall Surprise: 0.9302

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 64
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 5
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 Macro | F1 Micro | Precision Macro | Recall Macro | F1 Angry | Precision Angry | Recall Angry | F1 Disgust | Precision Disgust | Recall Disgust | F1 Fear | Precision Fear | Recall Fear | F1 Happy | Precision Happy | Recall Happy | F1 Neutral | Precision Neutral | Recall Neutral | F1 Sad | Precision Sad | Recall Sad | F1 Surprise | Precision Surprise | Recall Surprise |
|:-------------:|:-----:|:----:|:---------------:|:--------:|:--------:|:--------:|:---------------:|:------------:|:--------:|:---------------:|:------------:|:----------:|:-----------------:|:--------------:|:-------:|:--------------:|:-----------:|:--------:|:---------------:|:------------:|:----------:|:-----------------:|:--------------:|:------:|:-------------:|:----------:|:-----------:|:------------------:|:---------------:|
| 1.0353 | 1.0 | 899 | 0.9539 | 0.6659 | 0.6128 | 0.6659 | 0.6136 | 0.6759 | 0.5457 | 0.6230 | 0.4854 | 0.4869 | 0.3312 | 0.9189 | 0.3857 | 0.6196 | 0.2800 | 0.8874 | 0.8872 | 0.8877 | 0.6550 | 0.5993 | 0.7220 | 0.5604 | 0.5187 | 0.6093 | 0.7683 | 0.7166 | 0.8281 |
| 0.7413 | 2.0 | 1798 | 0.7735 | 0.7400 | 0.7367 | 0.7400 | 0.7297 | 0.7520 | 0.6732 | 0.5915 | 0.7812 | 0.8264 | 0.7634 | 0.9009 | 0.5777 | 0.6519 | 0.5187 | 0.9088 | 0.9434 | 0.8767 | 0.7174 | 0.7219 | 0.7130 | 0.6216 | 0.6297 | 0.6137 | 0.8318 | 0.8059 | 0.8595 |
| 0.5094 | 3.0 | 2697 | 0.6381 | 0.7955 | 0.8039 | 0.7955 | 0.8033 | 0.8053 | 0.7633 | 0.7405 | 0.7875 | 0.9364 | 0.9450 | 0.9279 | 0.6834 | 0.6727 | 0.6945 | 0.9246 | 0.9425 | 0.9074 | 0.7696 | 0.7606 | 0.7788 | 0.6779 | 0.7035 | 0.6541 | 0.8723 | 0.8580 | 0.8871 |
| 0.4165 | 4.0 | 3596 | 0.5596 | 0.8319 | 0.8391 | 0.8319 | 0.8428 | 0.8366 | 0.7983 | 0.8326 | 0.7667 | 0.9401 | 0.9623 | 0.9189 | 0.7452 | 0.7219 | 0.7701 | 0.9327 | 0.9515 | 0.9145 | 0.8100 | 0.7809 | 0.8413 | 0.7446 | 0.7482 | 0.7410 | 0.9028 | 0.9023 | 0.9034 |
| 0.2772 | 5.0 | 4495 | 0.5151 | 0.8528 | 0.8575 | 0.8528 | 0.8582 | 0.8571 | 0.8179 | 0.7972 | 0.8396 | 0.9364 | 0.9450 | 0.9279 | 0.7839 | 0.8025 | 0.7662 | 0.9397 | 0.9486 | 0.9310 | 0.8335 | 0.8251 | 0.8421 | 0.7720 | 0.7696 | 0.7744 | 0.9190 | 0.9196 | 0.9184 |

### Framework versions

- Transformers 4.47.0
- Pytorch 2.5.1+cu121
- Datasets 3.3.1
- Tokenizers 0.21.0
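The macro-averaged F1 reported above is simply the unweighted mean of the seven per-class F1 scores, which can be checked directly from the card's own numbers:

```python
# Sanity check: macro F1 is the unweighted mean of the per-class F1 scores.
per_class_f1 = {
    "angry": 0.7908, "disgust": 0.9321, "fear": 0.7547, "happy": 0.9386,
    "neutral": 0.8262, "sad": 0.7626, "surprise": 0.9164,
}
macro_f1 = sum(per_class_f1.values()) / len(per_class_f1)
print(round(macro_f1, 4))  # 0.8459, matching the reported F1 Macro
```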
[ "angry", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
iamhjoo/resnet-18
# Model Card for Model ID <!-- Provide a quick summary of what the model is/does. --> ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated. - **Developed by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] - **Finetuned from model [optional]:** [More Information Needed] ### Model Sources [optional] <!-- Provide the basic links for the model. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Dataset Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. 
--> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
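This card is likewise an unfilled template, but the 1000-class ImageNet label list that follows suggests a standard image classifier. A hedged top-5 inference sketch, assuming the checkpoint loads with the generic image-classification auto classes (the input filename is a placeholder):

```python
# Top-5 prediction over the ImageNet-style label set.
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

model_id = "iamhjoo/resnet-18"
processor = AutoImageProcessor.from_pretrained(model_id)
model = AutoModelForImageClassification.from_pretrained(model_id)

inputs = processor(images=Image.open("example.jpg"), return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits[0]
top5 = torch.topk(logits.softmax(dim=-1), k=5)
for score, idx in zip(top5.values, top5.indices):
    print(model.config.id2label[int(idx)], float(score))
```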
[ "tench, tinca tinca", "goldfish, carassius auratus", "great white shark, white shark, man-eater, man-eating shark, carcharodon carcharias", "tiger shark, galeocerdo cuvieri", "hammerhead, hammerhead shark", "electric ray, crampfish, numbfish, torpedo", "stingray", "cock", "hen", "ostrich, struthio camelus", "brambling, fringilla montifringilla", "goldfinch, carduelis carduelis", "house finch, linnet, carpodacus mexicanus", "junco, snowbird", "indigo bunting, indigo finch, indigo bird, passerina cyanea", "robin, american robin, turdus migratorius", "bulbul", "jay", "magpie", "chickadee", "water ouzel, dipper", "kite", "bald eagle, american eagle, haliaeetus leucocephalus", "vulture", "great grey owl, great gray owl, strix nebulosa", "european fire salamander, salamandra salamandra", "common newt, triturus vulgaris", "eft", "spotted salamander, ambystoma maculatum", "axolotl, mud puppy, ambystoma mexicanum", "bullfrog, rana catesbeiana", "tree frog, tree-frog", "tailed frog, bell toad, ribbed toad, tailed toad, ascaphus trui", "loggerhead, loggerhead turtle, caretta caretta", "leatherback turtle, leatherback, leathery turtle, dermochelys coriacea", "mud turtle", "terrapin", "box turtle, box tortoise", "banded gecko", "common iguana, iguana, iguana iguana", "american chameleon, anole, anolis carolinensis", "whiptail, whiptail lizard", "agama", "frilled lizard, chlamydosaurus kingi", "alligator lizard", "gila monster, heloderma suspectum", "green lizard, lacerta viridis", "african chameleon, chamaeleo chamaeleon", "komodo dragon, komodo lizard, dragon lizard, giant lizard, varanus komodoensis", "african crocodile, nile crocodile, crocodylus niloticus", "american alligator, alligator mississipiensis", "triceratops", "thunder snake, worm snake, carphophis amoenus", "ringneck snake, ring-necked snake, ring snake", "hognose snake, puff adder, sand viper", "green snake, grass snake", "king snake, kingsnake", "garter snake, grass snake", "water snake", "vine snake", "night snake, hypsiglena torquata", "boa constrictor, constrictor constrictor", "rock python, rock snake, python sebae", "indian cobra, naja naja", "green mamba", "sea snake", "horned viper, cerastes, sand viper, horned asp, cerastes cornutus", "diamondback, diamondback rattlesnake, crotalus adamanteus", "sidewinder, horned rattlesnake, crotalus cerastes", "trilobite", "harvestman, daddy longlegs, phalangium opilio", "scorpion", "black and gold garden spider, argiope aurantia", "barn spider, araneus cavaticus", "garden spider, aranea diademata", "black widow, latrodectus mactans", "tarantula", "wolf spider, hunting spider", "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse, partridge, bonasa umbellus", "prairie chicken, prairie grouse, prairie fowl", "peacock", "quail", "partridge", "african grey, african gray, psittacus erithacus", "macaw", "sulphur-crested cockatoo, kakatoe galerita, cacatua galerita", "lorikeet", "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "drake", "red-breasted merganser, mergus serrator", "goose", "black swan, cygnus atratus", "tusker", "echidna, spiny anteater, anteater", "platypus, duckbill, duckbilled platypus, duck-billed platypus, ornithorhynchus anatinus", "wallaby, brush kangaroo", "koala, koala bear, kangaroo bear, native bear, phascolarctos cinereus", "wombat", "jellyfish", "sea anemone, anemone", "brain coral", "flatworm, platyhelminth", "nematode, nematode worm, roundworm", "conch", "snail", "slug", "sea slug, nudibranch", "chiton, coat-of-mail shell, sea 
cradle, polyplacophore", "chambered nautilus, pearly nautilus, nautilus", "dungeness crab, cancer magister", "rock crab, cancer irroratus", "fiddler crab", "king crab, alaska crab, alaskan king crab, alaska king crab, paralithodes camtschatica", "american lobster, northern lobster, maine lobster, homarus americanus", "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish", "crayfish, crawfish, crawdad, crawdaddy", "hermit crab", "isopod", "white stork, ciconia ciconia", "black stork, ciconia nigra", "spoonbill", "flamingo", "little blue heron, egretta caerulea", "american egret, great white heron, egretta albus", "bittern", "crane", "limpkin, aramus pictus", "european gallinule, porphyrio porphyrio", "american coot, marsh hen, mud hen, water hen, fulica americana", "bustard", "ruddy turnstone, arenaria interpres", "red-backed sandpiper, dunlin, erolia alpina", "redshank, tringa totanus", "dowitcher", "oystercatcher, oyster catcher", "pelican", "king penguin, aptenodytes patagonica", "albatross, mollymawk", "grey whale, gray whale, devilfish, eschrichtius gibbosus, eschrichtius robustus", "killer whale, killer, orca, grampus, sea wolf, orcinus orca", "dugong, dugong dugon", "sea lion", "chihuahua", "japanese spaniel", "maltese dog, maltese terrier, maltese", "pekinese, pekingese, peke", "shih-tzu", "blenheim spaniel", "papillon", "toy terrier", "rhodesian ridgeback", "afghan hound, afghan", "basset, basset hound", "beagle", "bloodhound, sleuthhound", "bluetick", "black-and-tan coonhound", "walker hound, walker foxhound", "english foxhound", "redbone", "borzoi, russian wolfhound", "irish wolfhound", "italian greyhound", "whippet", "ibizan hound, ibizan podenco", "norwegian elkhound, elkhound", "otterhound, otter hound", "saluki, gazelle hound", "scottish deerhound, deerhound", "weimaraner", "staffordshire bullterrier, staffordshire bull terrier", "american staffordshire terrier, staffordshire terrier, american pit bull terrier, pit bull terrier", "bedlington terrier", "border terrier", "kerry blue terrier", "irish terrier", "norfolk terrier", "norwich terrier", "yorkshire terrier", "wire-haired fox terrier", "lakeland terrier", "sealyham terrier, sealyham", "airedale, airedale terrier", "cairn, cairn terrier", "australian terrier", "dandie dinmont, dandie dinmont terrier", "boston bull, boston terrier", "miniature schnauzer", "giant schnauzer", "standard schnauzer", "scotch terrier, scottish terrier, scottie", "tibetan terrier, chrysanthemum dog", "silky terrier, sydney silky", "soft-coated wheaten terrier", "west highland white terrier", "lhasa, lhasa apso", "flat-coated retriever", "curly-coated retriever", "golden retriever", "labrador retriever", "chesapeake bay retriever", "german short-haired pointer", "vizsla, hungarian pointer", "english setter", "irish setter, red setter", "gordon setter", "brittany spaniel", "clumber, clumber spaniel", "english springer, english springer spaniel", "welsh springer spaniel", "cocker spaniel, english cocker spaniel, cocker", "sussex spaniel", "irish water spaniel", "kuvasz", "schipperke", "groenendael", "malinois", "briard", "kelpie", "komondor", "old english sheepdog, bobtail", "shetland sheepdog, shetland sheep dog, shetland", "collie", "border collie", "bouvier des flandres, bouviers des flandres", "rottweiler", "german shepherd, german shepherd dog, german police dog, alsatian", "doberman, doberman pinscher", "miniature pinscher", "greater swiss mountain dog", "bernese mountain dog", "appenzeller", "entlebucher", "boxer", "bull 
mastiff", "tibetan mastiff", "french bulldog", "great dane", "saint bernard, st bernard", "eskimo dog, husky", "malamute, malemute, alaskan malamute", "siberian husky", "dalmatian, coach dog, carriage dog", "affenpinscher, monkey pinscher, monkey dog", "basenji", "pug, pug-dog", "leonberg", "newfoundland, newfoundland dog", "great pyrenees", "samoyed, samoyede", "pomeranian", "chow, chow chow", "keeshond", "brabancon griffon", "pembroke, pembroke welsh corgi", "cardigan, cardigan welsh corgi", "toy poodle", "miniature poodle", "standard poodle", "mexican hairless", "timber wolf, grey wolf, gray wolf, canis lupus", "white wolf, arctic wolf, canis lupus tundrarum", "red wolf, maned wolf, canis rufus, canis niger", "coyote, prairie wolf, brush wolf, canis latrans", "dingo, warrigal, warragal, canis dingo", "dhole, cuon alpinus", "african hunting dog, hyena dog, cape hunting dog, lycaon pictus", "hyena, hyaena", "red fox, vulpes vulpes", "kit fox, vulpes macrotis", "arctic fox, white fox, alopex lagopus", "grey fox, gray fox, urocyon cinereoargenteus", "tabby, tabby cat", "tiger cat", "persian cat", "siamese cat, siamese", "egyptian cat", "cougar, puma, catamount, mountain lion, painter, panther, felis concolor", "lynx, catamount", "leopard, panthera pardus", "snow leopard, ounce, panthera uncia", "jaguar, panther, panthera onca, felis onca", "lion, king of beasts, panthera leo", "tiger, panthera tigris", "cheetah, chetah, acinonyx jubatus", "brown bear, bruin, ursus arctos", "american black bear, black bear, ursus americanus, euarctos americanus", "ice bear, polar bear, ursus maritimus, thalarctos maritimus", "sloth bear, melursus ursinus, ursus ursinus", "mongoose", "meerkat, mierkat", "tiger beetle", "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle", "ground beetle, carabid beetle", "long-horned beetle, longicorn, longicorn beetle", "leaf beetle, chrysomelid", "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant, emmet, pismire", "grasshopper, hopper", "cricket", "walking stick, walkingstick, stick insect", "cockroach, roach", "mantis, mantid", "cicada, cicala", "leafhopper", "lacewing, lacewing fly", "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", "damselfly", "admiral", "ringlet, ringlet butterfly", "monarch, monarch butterfly, milkweed butterfly, danaus plexippus", "cabbage butterfly", "sulphur butterfly, sulfur butterfly", "lycaenid, lycaenid butterfly", "starfish, sea star", "sea urchin", "sea cucumber, holothurian", "wood rabbit, cottontail, cottontail rabbit", "hare", "angora, angora rabbit", "hamster", "porcupine, hedgehog", "fox squirrel, eastern fox squirrel, sciurus niger", "marmot", "beaver", "guinea pig, cavia cobaya", "sorrel", "zebra", "hog, pig, grunter, squealer, sus scrofa", "wild boar, boar, sus scrofa", "warthog", "hippopotamus, hippo, river horse, hippopotamus amphibius", "ox", "water buffalo, water ox, asiatic buffalo, bubalus bubalis", "bison", "ram, tup", "bighorn, bighorn sheep, cimarron, rocky mountain bighorn, rocky mountain sheep, ovis canadensis", "ibex, capra ibex", "hartebeest", "impala, aepyceros melampus", "gazelle", "arabian camel, dromedary, camelus dromedarius", "llama", "weasel", "mink", "polecat, fitch, foulmart, foumart, mustela putorius", "black-footed ferret, ferret, mustela nigripes", "otter", "skunk, polecat, wood pussy", "badger", "armadillo", "three-toed sloth, ai, bradypus tridactylus", "orangutan, orang, orangutang, pongo pygmaeus", "gorilla, 
gorilla gorilla", "chimpanzee, chimp, pan troglodytes", "gibbon, hylobates lar", "siamang, hylobates syndactylus, symphalangus syndactylus", "guenon, guenon monkey", "patas, hussar monkey, erythrocebus patas", "baboon", "macaque", "langur", "colobus, colobus monkey", "proboscis monkey, nasalis larvatus", "marmoset", "capuchin, ringtail, cebus capucinus", "howler monkey, howler", "titi, titi monkey", "spider monkey, ateles geoffroyi", "squirrel monkey, saimiri sciureus", "madagascar cat, ring-tailed lemur, lemur catta", "indri, indris, indri indri, indri brevicaudatus", "indian elephant, elephas maximus", "african elephant, loxodonta africana", "lesser panda, red panda, panda, bear cat, cat bear, ailurus fulgens", "giant panda, panda, panda bear, coon bear, ailuropoda melanoleuca", "barracouta, snoek", "eel", "coho, cohoe, coho salmon, blue jack, silver salmon, oncorhynchus kisutch", "rock beauty, holocanthus tricolor", "anemone fish", "sturgeon", "gar, garfish, garpike, billfish, lepisosteus osseus", "lionfish", "puffer, pufferfish, blowfish, globefish", "abacus", "abaya", "academic gown, academic robe, judge's robe", "accordion, piano accordion, squeeze box", "acoustic guitar", "aircraft carrier, carrier, flattop, attack aircraft carrier", "airliner", "airship, dirigible", "altar", "ambulance", "amphibian, amphibious vehicle", "analog clock", "apiary, bee house", "apron", "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin", "assault rifle, assault gun", "backpack, back pack, knapsack, packsack, rucksack, haversack", "bakery, bakeshop, bakehouse", "balance beam, beam", "balloon", "ballpoint, ballpoint pen, ballpen, biro", "band aid", "banjo", "bannister, banister, balustrade, balusters, handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel, cask", "barrow, garden cart, lawn cart, wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "bathing cap, swimming cap", "bath towel", "bathtub, bathing tub, bath, tub", "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon", "beacon, lighthouse, beacon light, pharos", "beaker", "bearskin, busby, shako", "beer bottle", "beer glass", "bell cote, bell cot", "bib", "bicycle-built-for-two, tandem bicycle, tandem", "bikini, two-piece", "binder, ring-binder", "binoculars, field glasses, opera glasses", "birdhouse", "boathouse", "bobsled, bobsleigh, bob", "bolo tie, bolo, bola tie, bola", "bonnet, poke bonnet", "bookcase", "bookshop, bookstore, bookstall", "bottlecap", "bow", "bow tie, bow-tie, bowtie", "brass, memorial tablet, plaque", "brassiere, bra, bandeau", "breakwater, groin, groyne, mole, bulwark, seawall, jetty", "breastplate, aegis, egis", "broom", "bucket, pail", "buckle", "bulletproof vest", "bullet train, bullet", "butcher shop, meat market", "cab, hack, taxi, taxicab", "caldron, cauldron", "candle, taper, wax light", "cannon", "canoe", "can opener, tin opener", "cardigan", "car mirror", "carousel, carrousel, merry-go-round, roundabout, whirligig", "carpenter's kit, tool kit", "carton", "car wheel", "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, atm", "cassette", "cassette player", "castle", "catamaran", "cd player", "cello, violoncello", "cellular telephone, cellular phone, cellphone, cell, mobile phone", "chain", "chainlink fence", "chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour", "chain saw, chainsaw", "chest", "chiffonier, 
commode", "chime, bell, gong", "china cabinet, china closet", "christmas stocking", "church, church building", "cinema, movie theater, movie theatre, movie house, picture palace", "cleaver, meat cleaver, chopper", "cliff dwelling", "cloak", "clog, geta, patten, sabot", "cocktail shaker", "coffee mug", "coffeepot", "coil, spiral, volute, whorl, helix", "combination lock", "computer keyboard, keypad", "confectionery, confectionary, candy store", "container ship, containership, container vessel", "convertible", "corkscrew, bottle screw", "cornet, horn, trumpet, trump", "cowboy boot", "cowboy hat, ten-gallon hat", "cradle", "crane", "crash helmet", "crate", "crib, cot", "crock pot", "croquet ball", "crutch", "cuirass", "dam, dike, dyke", "desk", "desktop computer", "dial telephone, dial phone", "diaper, nappy, napkin", "digital clock", "digital watch", "dining table, board", "dishrag, dishcloth", "dishwasher, dish washer, dishwashing machine", "disk brake, disc brake", "dock, dockage, docking facility", "dogsled, dog sled, dog sleigh", "dome", "doormat, welcome mat", "drilling platform, offshore rig", "drum, membranophone, tympan", "drumstick", "dumbbell", "dutch oven", "electric fan, blower", "electric guitar", "electric locomotive", "entertainment center", "envelope", "espresso maker", "face powder", "feather boa, boa", "file, file cabinet, filing cabinet", "fireboat", "fire engine, fire truck", "fire screen, fireguard", "flagpole, flagstaff", "flute, transverse flute", "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster", "freight car", "french horn, horn", "frying pan, frypan, skillet", "fur coat", "garbage truck, dustcart", "gasmask, respirator, gas helmet", "gas pump, gasoline pump, petrol pump, island dispenser", "goblet", "go-kart", "golf ball", "golfcart, golf cart", "gondola", "gong, tam-tam", "gown", "grand piano, grand", "greenhouse, nursery, glasshouse", "grille, radiator grille", "grocery store, grocery, food market, market", "guillotine", "hair slide", "hair spray", "half track", "hammer", "hamper", "hand blower, blow dryer, blow drier, hair dryer, hair drier", "hand-held computer, hand-held microcomputer", "handkerchief, hankie, hanky, hankey", "hard disc, hard disk, fixed disk", "harmonica, mouth organ, harp, mouth harp", "harp", "harvester, reaper", "hatchet", "holster", "home theater, home theatre", "honeycomb", "hook, claw", "hoopskirt, crinoline", "horizontal bar, high bar", "horse cart, horse-cart", "hourglass", "ipod", "iron, smoothing iron", "jack-o'-lantern", "jean, blue jean, denim", "jeep, landrover", "jersey, t-shirt, tee shirt", "jigsaw puzzle", "jinrikisha, ricksha, rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat, laboratory coat", "ladle", "lampshade, lamp shade", "laptop, laptop computer", "lawn mower, mower", "lens cap, lens cover", "letter opener, paper knife, paperknife", "library", "lifeboat", "lighter, light, igniter, ignitor", "limousine, limo", "liner, ocean liner", "lipstick, lip rouge", "loafer", "lotion", "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system", "loupe, jeweler's loupe", "lumbermill, sawmill", "magnetic compass", "mailbag, postbag", "mailbox, letter box", "maillot", "maillot, tank suit", "manhole cover", "maraca", "marimba, xylophone", "mask", "matchstick", "maypole", "maze, labyrinth", "measuring cup", "medicine chest, medicine cabinet", "megalith, megalithic structure", "microphone, mike", "microwave, microwave oven", "military uniform", "milk can", "minibus", 
"miniskirt, mini", "minivan", "missile", "mitten", "mixing bowl", "mobile home, manufactured home", "model t", "modem", "monastery", "monitor", "moped", "mortar", "mortarboard", "mosque", "mosquito net", "motor scooter, scooter", "mountain bike, all-terrain bike, off-roader", "mountain tent", "mouse, computer mouse", "mousetrap", "moving van", "muzzle", "nail", "neck brace", "necklace", "nipple", "notebook, notebook computer", "obelisk", "oboe, hautboy, hautbois", "ocarina, sweet potato", "odometer, hodometer, mileometer, milometer", "oil filter", "organ, pipe organ", "oscilloscope, scope, cathode-ray oscilloscope, cro", "overskirt", "oxcart", "oxygen mask", "packet", "paddle, boat paddle", "paddlewheel, paddle wheel", "padlock", "paintbrush", "pajama, pyjama, pj's, jammies", "palace", "panpipe, pandean pipe, syrinx", "paper towel", "parachute, chute", "parallel bars, bars", "park bench", "parking meter", "passenger car, coach, carriage", "patio, terrace", "pay-phone, pay-station", "pedestal, plinth, footstall", "pencil box, pencil case", "pencil sharpener", "perfume, essence", "petri dish", "photocopier", "pick, plectrum, plectron", "pickelhaube", "picket fence, paling", "pickup, pickup truck", "pier", "piggy bank, penny bank", "pill bottle", "pillow", "ping-pong ball", "pinwheel", "pirate, pirate ship", "pitcher, ewer", "plane, carpenter's plane, woodworking plane", "planetarium", "plastic bag", "plate rack", "plow, plough", "plunger, plumber's helper", "polaroid camera, polaroid land camera", "pole", "police van, police wagon, paddy wagon, patrol wagon, wagon, black maria", "poncho", "pool table, billiard table, snooker table", "pop bottle, soda bottle", "pot, flowerpot", "potter's wheel", "power drill", "prayer rug, prayer mat", "printer", "prison, prison house", "projectile, missile", "projector", "puck, hockey puck", "punching bag, punch bag, punching ball, punchball", "purse", "quill, quill pen", "quilt, comforter, comfort, puff", "racer, race car, racing car", "racket, racquet", "radiator", "radio, wireless", "radio telescope, radio reflector", "rain barrel", "recreational vehicle, rv, r.v.", "reel", "reflex camera", "refrigerator, icebox", "remote control, remote", "restaurant, eating house, eating place, eatery", "revolver, six-gun, six-shooter", "rifle", "rocking chair, rocker", "rotisserie", "rubber eraser, rubber, pencil eraser", "rugby ball", "rule, ruler", "running shoe", "safe", "safety pin", "saltshaker, salt shaker", "sandal", "sarong", "sax, saxophone", "scabbard", "scale, weighing machine", "school bus", "schooner", "scoreboard", "screen, crt screen", "screw", "screwdriver", "seat belt, seatbelt", "sewing machine", "shield, buckler", "shoe shop, shoe-shop, shoe store", "shoji", "shopping basket", "shopping cart", "shovel", "shower cap", "shower curtain", "ski", "ski mask", "sleeping bag", "slide rule, slipstick", "sliding door", "slot, one-armed bandit", "snorkel", "snowmobile", "snowplow, snowplough", "soap dispenser", "soccer ball", "sock", "solar dish, solar collector, solar furnace", "sombrero", "soup bowl", "space bar", "space heater", "space shuttle", "spatula", "speedboat", "spider web, spider's web", "spindle", "sports car, sport car", "spotlight, spot", "stage", "steam locomotive", "steel arch bridge", "steel drum", "stethoscope", "stole", "stone wall", "stopwatch, stop watch", "stove", "strainer", "streetcar, tram, tramcar, trolley, trolley car", "stretcher", "studio couch, day bed", "stupa, tope", "submarine, pigboat, sub, u-boat", "suit, suit of clothes", 
"sundial", "sunglass", "sunglasses, dark glasses, shades", "sunscreen, sunblock, sun blocker", "suspension bridge", "swab, swob, mop", "sweatshirt", "swimming trunks, bathing trunks", "swing", "switch, electric switch, electrical switch", "syringe", "table lamp", "tank, army tank, armored combat vehicle, armoured combat vehicle", "tape player", "teapot", "teddy, teddy bear", "television, television system", "tennis ball", "thatch, thatched roof", "theater curtain, theatre curtain", "thimble", "thresher, thrasher, threshing machine", "throne", "tile roof", "toaster", "tobacco shop, tobacconist shop, tobacconist", "toilet seat", "torch", "totem pole", "tow truck, tow car, wrecker", "toyshop", "tractor", "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi", "tray", "trench coat", "tricycle, trike, velocipede", "trimaran", "tripod", "triumphal arch", "trolleybus, trolley coach, trackless trolley", "trombone", "tub, vat", "turnstile", "typewriter keyboard", "umbrella", "unicycle, monocycle", "upright, upright piano", "vacuum, vacuum cleaner", "vase", "vault", "velvet", "vending machine", "vestment", "viaduct", "violin, fiddle", "volleyball", "waffle iron", "wall clock", "wallet, billfold, notecase, pocketbook", "wardrobe, closet, press", "warplane, military plane", "washbasin, handbasin, washbowl, lavabo, wash-hand basin", "washer, automatic washer, washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle", "wig", "window screen", "window shade", "windsor tie", "wine bottle", "wing", "wok", "wooden spoon", "wool, woolen, woollen", "worm fence, snake fence, snake-rail fence, virginia fence", "wreck", "yawl", "yurt", "web site, website, internet site, site", "comic book", "crossword puzzle, crossword", "street sign", "traffic light, traffic signal, stoplight", "book jacket, dust cover, dust jacket, dust wrapper", "menu", "plate", "guacamole", "consomme", "hot pot, hotpot", "trifle", "ice cream, icecream", "ice lolly, lolly, lollipop, popsicle", "french loaf", "bagel, beigel", "pretzel", "cheeseburger", "hotdog, hot dog, red hot", "mashed potato", "head cabbage", "broccoli", "cauliflower", "zucchini, courgette", "spaghetti squash", "acorn squash", "butternut squash", "cucumber, cuke", "artichoke, globe artichoke", "bell pepper", "cardoon", "mushroom", "granny smith", "strawberry", "orange", "lemon", "fig", "pineapple, ananas", "banana", "jackfruit, jak, jack", "custard apple", "pomegranate", "hay", "carbonara", "chocolate sauce, chocolate syrup", "dough", "meat loaf, meatloaf", "pizza, pizza pie", "potpie", "burrito", "red wine", "espresso", "cup", "eggnog", "alp", "bubble", "cliff, drop, drop-off", "coral reef", "geyser", "lakeside, lakeshore", "promontory, headland, head, foreland", "sandbar, sand bar", "seashore, coast, seacoast, sea-coast", "valley, vale", "volcano", "ballplayer, baseball player", "groom, bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper, yellow lady-slipper, cypripedium calceolus, cypripedium parviflorum", "corn", "acorn", "hip, rose hip, rosehip", "buckeye, horse chestnut, conker", "coral fungus", "agaric", "gyromitra", "stinkhorn, carrion fungus", "earthstar", "hen-of-the-woods, hen of the woods, polyporus frondosus, grifola frondosa", "bolete", "ear, spike, capitulum", "toilet tissue, toilet paper, bathroom tissue" ]
lumenggan/katara-detector
# Model Card for Katara Detector

This model identifies whether an image contains Katara from Avatar: The Last Airbender. It achieves 96% accuracy and 96.1% F1 score on the validation set.

## Model Details

### Model Description

A binary image classifier that determines if Katara from the animated series "Avatar: The Last Airbender" is present in an image.

- **Developed by:** Your Name/Organization
- **Model type:** Image Classification
- **License:** MIT
- **Finetuned from model:** facebook/dinov2-small

## Uses

### Direct Use

This model can be used to:
- Identify Katara in screenshots or fan art
- Filter or categorize ATLA-related image collections
- Power fan applications that track character appearances

```python
# Use a pipeline as a high-level helper
from PIL import Image
from transformers import pipeline

pipe = pipeline("image-classification", model="lumenggan/katara-detector")
image = Image.open("yourimage.png")
pipe(image)
```

### Out-of-Scope Use

This model should not be used for:
- Critical identification tasks
- Monitoring or surveillance purposes
- Making judgments about real people

## Training Details

### Training Data

The model was trained on a custom dataset of Katara images and non-Katara images from Avatar: The Last Airbender. The dataset was split 80/20 for training and validation.

### Training Procedure

The model was fine-tuned from DINOv2-small using the following techniques:
- Dropout regularization (rate=0.3)
- Weight decay (0.01-0.05)
- Cosine learning rate schedule with restarts

#### Training Hyperparameters

- **Learning rate:** 2e-5
- **Weight decay:** 0.01-0.05
- **Epochs:** 5-15
- **Batch size:** 16 (effective 32 with gradient accumulation)
- **Training regime:** fp16 mixed precision

## Evaluation

### Metrics

- **Accuracy:** 96.0%
- **F1 Score:** 96.1%
- **Precision:** 96.8%
- **Recall:** 95.5%
- **ROC AUC:** 99.4%

## Technical Specifications

### Model Architecture

- Base model: facebook/dinov2-with-registers-small
- Custom classification head with dropout
- Input size: 224x224 RGB images

### Compute Infrastructure

- GPU: (e.g., NVIDIA T4, A100, etc.)
- Training time: Approximately 1-2 hours

## Model Card Contact

https://github.com/unLomTrois/
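A sketch of the fine-tuning arguments this card describes (learning rate 2e-5, batch 16 with gradient accumulation for an effective batch of 32, cosine schedule with restarts, fp16). Weight decay and epochs were reported as ranges, so the concrete values below are illustrative picks, and the output directory is hypothetical:

```python
# Reconstruction of the described training setup; not the author's exact script.
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="katara-detector",      # hypothetical
    learning_rate=2e-5,
    per_device_train_batch_size=16,
    gradient_accumulation_steps=2,     # 16 x 2 = effective batch of 32
    weight_decay=0.01,                 # reported range: 0.01-0.05
    num_train_epochs=5,                # reported range: 5-15
    lr_scheduler_type="cosine_with_restarts",
    fp16=True,
)
```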
[ "katara", "non_katara" ]
Master-Rapha7/mobilenetv2-typecoffee-2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. -->

# mobilenetv2-typecoffee-2

This model is a fine-tuned version of [google/mobilenet_v2_1.0_224](https://huggingface.co/google/mobilenet_v2_1.0_224) on the Master-Rapha7/TypeCoffee_128x128 dataset.
It achieves the following results on the evaluation set:
- Loss: 0.3384
- Accuracy: 0.8939
- Precision: 0.8983
- Recall: 0.8949
- F1: 0.8958
- Precision Durariadorio 128x128: 0.9545
- Recall Durariadorio 128x128: 0.875
- F1 Durariadorio 128x128: 0.9130
- Precision Mole 128x128: 0.9858
- Recall Mole 128x128: 0.9653
- F1 Mole 128x128: 0.9754
- Precision Quebrado 128x128: 0.8063
- Recall Quebrado 128x128: 0.8958
- F1 Quebrado 128x128: 0.8487
- Precision Riadorio 128x128: 0.8158
- Recall Riadorio 128x128: 0.8158
- F1 Riadorio 128x128: 0.8158
- Precision Riofechado 128x128: 0.9291
- Recall Riofechado 128x128: 0.9225
- F1 Riofechado 128x128: 0.9258

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 100.0

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | Recall | F1 | Precision Durariadorio 128x128 | Recall Durariadorio 128x128 | F1 Durariadorio 128x128 | Precision Mole 128x128 | Recall Mole 128x128 | F1 Mole 128x128 | Precision Quebrado 128x128 | Recall Quebrado 128x128 | F1 Quebrado 128x128 | Precision Riadorio 128x128 | Recall Riadorio 128x128 | F1 Riadorio 128x128 | Precision Riofechado 128x128 | Recall Riofechado 128x128 | F1 Riofechado 128x128 |
|:-------------:|:-----:|:-----:|:---------------:|:--------:|:---------:|:------:|:------:|:------------------------------:|:---------------------------:|:-----------------------:|:----------------------:|:-------------------:|:---------------:|:--------------------------:|:-----------------------:|:-------------------:|:--------------------------:|:-----------------------:|:-------------------:|:----------------------------:|:-------------------------:|:---------------------:|
| 1.0709 | 1.0 | 364 | 1.2074 | 0.5179 | 0.5840 | 0.5157 | 0.4791 | 0.75 | 0.25 | 0.375 | 0.6444 | 0.2014 | 0.3069 | 0.5543 | 0.9931 | 0.7114 | 0.3970 | 0.6974 | 0.5060 | 0.5741 | 0.4366 | 0.496 |
| 0.7944 | 2.0 | 728 | 0.9347 | 0.6364 | 0.6970 | 0.6353 | 0.6462 | 0.7333 | 0.4583 | 0.5641 | 0.7431 | 0.5625 | 0.6403 | 0.8718 | 0.7083 | 0.7816 | 0.4191 | 0.75 | 0.5377 | 0.7174 | 0.6972 | 0.7071 |
| 0.8081 | 3.0 | 1092 | 1.7043 | 0.3747 | 0.6537 | 0.3782 | 0.3347 | 0.5405 | 0.4167 | 0.4706 | 0.8 | 0.0833 | 0.1509 | 0.9630 | 0.1806 | 0.3041 | 0.6957 | 0.2105 | 0.3232 | 0.2694 | 1.0 | 0.4245 |
| 0.8024 | 4.0 | 1456 | 1.5073 | 0.5014 | 0.6260 | 0.5063 | 0.4472 | 0.7778 | 0.1944 | 0.3111 | 0.6373 | 0.4514 | 0.5285 | 0.4613 | 0.9931 | 0.6300 | 0.8261 | 0.125 | 0.2171 | 0.4275 | 0.7676 | 0.5491 |
| 0.9541 | 5.0 | 1820 | 2.2240 | 0.4504 | 0.5819 | 0.4514 | 0.4270 | 0.3275 | 0.7847 | 0.4622 | 0.4975 | 0.6875 | 0.5773 | 0.9286 | 0.1806 | 0.3023 | 0.6364 | 0.3224 | 0.4279 | 0.5195 | 0.2817 | 0.3653 |
| 0.5358 | 6.0 | 2184 | 0.7352 | 0.7493 | 0.7677 | 0.7499 | 0.7374 | 0.8615 | 0.3889 | 0.5359 | 0.7517 | 0.7569 | 0.7543 | 0.8442 | 0.9028 | 0.8725 | 0.6096 | 0.75 | 0.6726 | 0.7714 | 0.9507 | 0.8517 |
| 0.4848 | 7.0 | 2548 | 1.4124 | 0.5978 | 0.7078 | 0.5949 | 0.5972 | 0.8108 | 0.4167 | 0.5505 | 0.5612 | 0.7639 | 0.6471 | 0.8165 | 0.6181 | 0.7036 | 0.4207 | 0.8026 | 0.5520 | 0.9298 | 0.3732 | 0.5327 |
| 0.6271 | 8.0 | 2912 | 1.1025 | 0.6433 | 0.7142 | 0.6495 | 0.6022 | 0.8143 | 0.3958 | 0.5327 | 0.6078 | 0.8611 | 0.7126 | 0.7421 | 0.8194 | 0.7789 | 0.8667 | 0.1711 | 0.2857 | 0.5399 | 1.0 | 0.7012 |
| 0.4748 | 9.0 | 3276 | 1.0572 | 0.6694 | 0.7185 | 0.6700 | 0.6822 | 0.8319 | 0.6528 | 0.7315 | 0.6493 | 0.6042 | 0.6259 | 0.7987 | 0.8819 | 0.8383 | 0.4062 | 0.5987 | 0.4840 | 0.9062 | 0.6127 | 0.7311 |
| 0.4721 | 10.0 | 3640 | 2.0767 | 0.5854 | 0.7156 | 0.5921 | 0.5216 | 0.5939 | 0.8125 | 0.6862 | 0.5519 | 0.9236 | 0.6909 | 0.9123 | 0.3611 | 0.5174 | 1.0 | 0.0395 | 0.0759 | 0.52 | 0.8239 | 0.6376 |
| 0.3891 | 11.0 | 4004 | 0.8680 | 0.7493 | 0.8243 | 0.7488 | 0.7505 | 0.8083 | 0.6736 | 0.7348 | 1.0 | 0.4028 | 0.5743 | 0.8693 | 0.9236 | 0.8956 | 0.4885 | 0.8355 | 0.6165 | 0.9556 | 0.9085 | 0.9314 |
| 0.4385 | 12.0 | 4368 | 0.5012 | 0.8320 | 0.8392 | 0.8343 | 0.8301 | 0.7821 | 0.8472 | 0.8133 | 0.9302 | 0.8333 | 0.8791 | 0.7692 | 0.9722 | 0.8589 | 0.8571 | 0.6316 | 0.7273 | 0.8571 | 0.8873 | 0.8720 |
| 0.5092 | 13.0 | 4732 | 0.8346 | 0.7782 | 0.8347 | 0.7790 | 0.7706 | 0.9667 | 0.4028 | 0.5686 | 0.9160 | 0.8333 | 0.8727 | 0.8658 | 0.8958 | 0.8805 | 0.8593 | 0.7632 | 0.8084 | 0.5657 | 1.0 | 0.7226 |
| 0.3634 | 14.0 | 5096 | 1.3165 | 0.6446 | 0.7551 | 0.6448 | 0.6445 | 0.7348 | 0.6736 | 0.7029 | 0.4410 | 0.9861 | 0.6094 | 0.9815 | 0.3681 | 0.5354 | 0.7881 | 0.6118 | 0.6889 | 0.83 | 0.5845 | 0.6860 |
| 0.4783 | 15.0 | 5460 | 1.6690 | 0.5565 | 0.7117 | 0.5523 | 0.5504 | 0.7286 | 0.3542 | 0.4766 | 0.6891 | 0.5694 | 0.6236 | 0.7868 | 0.7431 | 0.7643 | 0.3542 | 0.8553 | 0.5010 | 1.0 | 0.2394 | 0.3864 |
| 0.3286 | 16.0 | 5824 | 0.8830 | 0.7369 | 0.7873 | 0.7387 | 0.7307 | 0.8258 | 0.7569 | 0.7899 | 0.825 | 0.9167 | 0.8684 | 0.5517 | 1.0 | 0.7111 | 0.8681 | 0.5197 | 0.6502 | 0.8659 | 0.5 | 0.6339 |
| 0.441 | 17.0 | 6188 | 1.1930 | 0.6694 | 0.7584 | 0.6686 | 0.6672 | 0.9344 | 0.3958 | 0.5561 | 0.9570 | 0.6181 | 0.7511 | 0.5187 | 0.9653 | 0.6748 | 0.5781 | 0.7303 | 0.6453 | 0.8036 | 0.6338 | 0.7087 |
| 0.4224 | 18.0 | 6552 | 0.5639 | 0.8278 | 0.8410 | 0.8293 | 0.8258 | 0.9320 | 0.6667 | 0.7773 | 0.8274 | 0.9653 | 0.8910 | 0.7553 | 0.9861 | 0.8554 | 0.7465 | 0.6974 | 0.7211 | 0.944 | 0.8310 | 0.8839 |
| 0.3715 | 19.0 | 6916 | 0.9846 | 0.7025 | 0.7610 | 0.7047 | 0.7044 | 0.8632 | 0.5694 | 0.6862 | 0.9307 | 0.6528 | 0.7673 | 0.5182 | 0.9861 | 0.6794 | 0.7207 | 0.5263 | 0.6084 | 0.7724 | 0.7887 | 0.7805 |
| 0.4228 | 20.0 | 7280 | 0.9673 | 0.7452 | 0.7956 | 0.7449 | 0.7434 | 0.9125 | 0.5069 | 0.6518 | 0.9583 | 0.6389 | 0.7667 | 0.8382 | 0.7917 | 0.8143 | 0.5841 | 0.8224 | 0.6831 | 0.685 | 0.9648 | 0.8012 |
| 0.3596 | 21.0 | 7644 | 0.7574 | 0.7796 | 0.8143 | 0.7803 | 0.7759 | 0.8585 | 0.6319 | 0.728 | 0.9884 | 0.5903 | 0.7391 | 0.8221 | 0.9306 | 0.8730 | 0.7647 | 0.7697 | 0.7672 | 0.6376 | 0.9789 | 0.7722 |
| 0.3422 | 22.0 | 8008 | 2.4545 | 0.4931 | 0.7341 | 0.4988 | 0.4481 | 0.4324 | 0.7778 | 0.5558 | 0.975 | 0.2708 | 0.4239 | 0.9259 | 0.3472 | 0.5051 | 0.9412 | 0.1053 | 0.1893 | 0.3961 | 0.9930 | 0.5663 |
| 0.4047 | 23.0 | 8372 | 0.8565 | 0.7645 | 0.8034 | 0.7642 | 0.7735 | 0.6859 | 0.7431 | 0.7133 | 0.9118 | 0.8611 | 0.8857 | 0.8615 | 0.7778 | 0.8175 | 0.5577 | 0.7632 | 0.6444 | 1.0 | 0.6761 | 0.8067 |
| 0.3281 | 24.0 | 8736 | 1.9366 | 0.5923 | 0.7187 | 0.5887 | 0.5898 | 0.7324 | 0.3611 | 0.4837 | 0.7226 | 0.6875 | 0.7046 | 0.7794 | 0.7361 | 0.7571 | 0.3810 | 0.8421 | 0.5246 | 0.9783 | 0.3169 | 0.4787 |
| 0.4524 | 25.0 | 9100 | 1.9752 | 0.6460 | 0.7253 | 0.6464 | 0.6129 | 1.0 | 0.1667 | 0.2857 | 0.775 | 0.6458 | 0.7045 | 0.6667 | 0.8472 | 0.7462 | 0.6623 | 0.6711 | 0.6667 | 0.5224 | 0.9014 | 0.6615 |
| 0.2905 | 26.0 | 9464 | 0.9349 | 0.7645 | 0.7864 | 0.7653 | 0.7682 | 0.7872 | 0.7708 | 0.7789 | 0.8738 | 0.625 | 0.7287 | 0.8571 | 0.75 | 0.8 | 0.5657 | 0.7368 | 0.64 | 0.8481 | 0.9437 | 0.8933 |
| 0.3872 | 27.0 | 9828 | 1.3411 | 0.6915 | 0.7712 | 0.6931 | 0.6904 | 0.6748 | 0.7639 | 0.7166 | 0.9538 | 0.4306 | 0.5933 | 0.9065 | 0.6736 | 0.7729 | 0.8174 | 0.6184 | 0.7041 | 0.5036 | 0.9789 | 0.6651 |
| 0.2387 | 28.0 | 10192 | 1.0073 | 0.7672 | 0.8063 | 0.7670 | 0.7638 | 0.9167 | 0.6111 | 0.7333 | 0.8609 | 0.9028 | 0.8814 | 0.6372 | 1.0 | 0.7784 | 0.6848 | 0.7434 | 0.7129 | 0.9318 | 0.5775 | 0.7130 |
| 0.2321 | 29.0 | 10556 | 0.6733 | 0.8085 | 0.8290 | 0.8107 | 0.8001 | 0.9615 | 0.5208 | 0.6757 | 0.6961 | 0.9861 | 0.8161 | 0.8526 | 0.9236 | 0.8867 | 0.7615 | 0.6513 | 0.7021 | 0.8734 | 0.9718 | 0.92 |
| 0.2687 | 30.0 | 10920 | 1.1098 | 0.7603 | 0.8017 | 0.7601 | 0.7616 | 0.8198 | 0.6319 | 0.7137 | 0.9765 | 0.5764 | 0.7249 | 0.7037 | 0.9236 | 0.7988 | 0.5913 | 0.8092 | 0.6833 | 0.9173 | 0.8592 | 0.8873 |
| 0.2047 | 31.0 | 11284 | 0.5737 | 0.8333 | 0.8484 | 0.8353 | 0.8309 | 0.7382 | 0.9792 | 0.8418 | 0.9545 | 0.875 | 0.9130 | 0.8509 | 0.6736 | 0.7519 | 0.9211 | 0.6908 | 0.7895 | 0.7771 | 0.9577 | 0.8580 |
| 0.3821 | 32.0 | 11648 | 0.6089 | 0.7741 | 0.8123 | 0.7745 | 0.7798 | 0.9438 | 0.5833 | 0.7210 | 0.7578 | 0.8472 | 0.8 | 0.8862 | 0.7569 | 0.8165 | 0.5519 | 0.7697 | 0.6429 | 0.9220 | 0.9155 | 0.9187 |
| 0.3214 | 33.0 | 12012 | 1.1225 | 0.7879 | 0.8225 | 0.7884 | 0.7823 | 0.8433 | 0.7847 | 0.8129 | 1.0 | 0.4722 | 0.6415 | 0.8118 | 0.9583 | 0.8790 | 0.6041 | 0.7829 | 0.6819 | 0.8535 | 0.9437 | 0.8963 |
| 0.3972 | 34.0 | 12376 | 0.4285 | 0.8815 | 0.8951 | 0.8855 | 0.8747 | 0.7877 | 0.9792 | 0.8731 | 0.8812 | 0.9792 | 0.9276 | 0.8494 | 0.9792 | 0.9097 | 0.9647 | 0.5395 | 0.6920 | 0.9926 | 0.9507 | 0.9712 |
| 0.1635 | 35.0 | 12740 | 0.9206 | 0.7686 | 0.8260 | 0.7678 | 0.7673 | 0.975 | 0.5417 | 0.6964 | 0.9556 | 0.8958 | 0.9247 | 0.9011 | 0.5694 | 0.6979 | 0.5726 | 0.8816 | 0.6943 | 0.7258 | 0.9507 | 0.8232 |
| 0.2116 | 36.0 | 13104 | 2.1126 | 0.5868 | 0.6752 | 0.5881 | 0.5671 | 0.8667 | 0.4514 | 0.5936 | 0.5756 | 0.8194 | 0.6762 | 0.8333 | 0.2431 | 0.3763 | 0.6457 | 0.5395 | 0.5878 | 0.4549 | 0.8873 | 0.6014 |
| 0.2941 | 37.0 | 13468 | 0.8622 | 0.7741 | 0.8090 | 0.7754 | 0.7795 | 0.9145 | 0.7431 | 0.8199 | 0.5939 | 0.9444 | 0.7292 | 0.8870 | 0.7083 | 0.7876 | 0.7214 | 0.6645 | 0.6918 | 0.928 | 0.8169 | 0.8689 |
| 0.1385 | 38.0 | 13832 | 1.1555 | 0.7837 | 0.8127 | 0.7849 | 0.7840 | 0.9495 | 0.6528 | 0.7737 | 0.9167 | 0.6875 | 0.7857 | 0.8435 | 0.8611 | 0.8522 | 0.6727 | 0.7303 | 0.7003 | 0.6812 | 0.9930 | 0.8080 |
| 0.1781 | 39.0 | 14196 | 0.5697 | 0.8182 | 0.8548 | 0.8182 | 0.8208 | 0.9524 | 0.5556 | 0.7018 | 0.9084 | 0.8264 | 0.8655 | 0.8639 | 0.8819 | 0.8729 | 0.5909 | 0.8553 | 0.6989 | 0.9583 | 0.9718 | 0.9650 |
| 0.2708 | 40.0 | 14560 | 0.3549 | 0.8871 | 0.8911 | 0.8885 | 0.8879 | 0.8397 | 0.9097 | 0.8733 | 0.9478 | 0.8819 | 0.9137 | 0.8424 | 0.9653 | 0.8997 | 0.8406 | 0.7632 | 0.8 | 0.9850 | 0.9225
| 0.9527 | | 0.2474 | 41.0 | 14924 | 1.4169 | 0.6997 | 0.7712 | 0.6990 | 0.7026 | 0.8723 | 0.5694 | 0.6891 | 0.9451 | 0.5972 | 0.7319 | 0.8842 | 0.5833 | 0.7029 | 0.5274 | 0.8224 | 0.6427 | 0.6268 | 0.9225 | 0.7464 | | 0.3544 | 42.0 | 15288 | 1.5741 | 0.7094 | 0.7636 | 0.7140 | 0.6947 | 0.7890 | 0.5972 | 0.6798 | 0.8803 | 0.7153 | 0.7893 | 0.6278 | 0.9722 | 0.7629 | 0.9138 | 0.3487 | 0.5048 | 0.6073 | 0.9366 | 0.7368 | | 0.3016 | 43.0 | 15652 | 3.1537 | 0.4931 | 0.7287 | 0.4873 | 0.4791 | 0.6304 | 0.2014 | 0.3053 | 0.7710 | 0.7014 | 0.7345 | 0.9298 | 0.3681 | 0.5274 | 0.3124 | 0.9474 | 0.4698 | 1.0 | 0.2183 | 0.3584 | | 0.2507 | 44.0 | 16016 | 0.7001 | 0.8072 | 0.8686 | 0.8066 | 0.8156 | 1.0 | 0.5486 | 0.7085 | 0.9338 | 0.8819 | 0.9071 | 0.8824 | 0.8333 | 0.8571 | 0.5425 | 0.8816 | 0.6717 | 0.9844 | 0.8873 | 0.9333 | | 0.3607 | 45.0 | 16380 | 0.9100 | 0.8017 | 0.8393 | 0.8043 | 0.7983 | 0.9540 | 0.5764 | 0.7186 | 0.6111 | 0.9931 | 0.7566 | 0.8701 | 0.9306 | 0.8993 | 0.8571 | 0.5921 | 0.7004 | 0.9041 | 0.9296 | 0.9167 | | 0.2023 | 46.0 | 16744 | 1.1856 | 0.7121 | 0.7901 | 0.7086 | 0.6832 | 0.8101 | 0.8889 | 0.8477 | 0.9924 | 0.9028 | 0.9455 | 0.6062 | 0.6736 | 0.6382 | 0.5418 | 0.8947 | 0.6749 | 1.0 | 0.1831 | 0.3095 | | 0.2303 | 47.0 | 17108 | 0.5505 | 0.8388 | 0.8703 | 0.8387 | 0.8466 | 0.9573 | 0.7778 | 0.8582 | 0.9328 | 0.8681 | 0.8993 | 0.8852 | 0.75 | 0.8120 | 0.6129 | 0.875 | 0.7209 | 0.9632 | 0.9225 | 0.9424 | | 0.2238 | 48.0 | 17472 | 1.6496 | 0.5950 | 0.7650 | 0.5905 | 0.6007 | 0.7207 | 0.5556 | 0.6275 | 0.8889 | 0.6111 | 0.7243 | 0.8431 | 0.5972 | 0.6992 | 0.3723 | 0.9211 | 0.5303 | 1.0 | 0.2676 | 0.4222 | | 0.2632 | 49.0 | 17836 | 1.6022 | 0.7507 | 0.8091 | 0.7501 | 0.7150 | 1.0 | 0.1806 | 0.3059 | 0.8072 | 0.9306 | 0.8645 | 0.7667 | 0.9583 | 0.8519 | 0.5753 | 0.8289 | 0.6792 | 0.8963 | 0.8521 | 0.8736 | | 0.1897 | 50.0 | 18200 | 1.0318 | 0.7824 | 0.8140 | 0.7825 | 0.7778 | 0.9375 | 0.5208 | 0.6696 | 0.7416 | 0.9167 | 0.8199 | 0.75 | 0.9792 | 0.8494 | 0.6686 | 0.7566 | 0.7099 | 0.9722 | 0.7394 | 0.84 | | 0.3303 | 51.0 | 18564 | 0.7386 | 0.7865 | 0.8104 | 0.7865 | 0.7834 | 0.8582 | 0.8403 | 0.8491 | 0.8046 | 0.9722 | 0.8805 | 0.7625 | 0.8472 | 0.8026 | 0.6395 | 0.7237 | 0.6790 | 0.9873 | 0.5493 | 0.7059 | | 0.3398 | 52.0 | 18928 | 0.5642 | 0.8154 | 0.8351 | 0.8157 | 0.8154 | 0.8881 | 0.8264 | 0.8561 | 0.9580 | 0.7917 | 0.8669 | 0.8785 | 0.6528 | 0.7490 | 0.7456 | 0.8289 | 0.7850 | 0.7056 | 0.9789 | 0.8201 | | 0.3094 | 53.0 | 19292 | 0.9977 | 0.7562 | 0.8091 | 0.7556 | 0.7492 | 0.7530 | 0.8681 | 0.8065 | 1.0 | 0.4097 | 0.5813 | 0.8644 | 0.7083 | 0.7786 | 0.5702 | 0.8553 | 0.6842 | 0.8581 | 0.9366 | 0.8956 | | 0.3199 | 54.0 | 19656 | 0.3904 | 0.8595 | 0.8734 | 0.8600 | 0.8621 | 0.9433 | 0.9236 | 0.9333 | 1.0 | 0.8264 | 0.9049 | 0.8537 | 0.7292 | 0.7865 | 0.6957 | 0.8421 | 0.7619 | 0.8742 | 0.9789 | 0.9236 | | 0.539 | 55.0 | 20020 | 0.3384 | 0.8939 | 0.8983 | 0.8949 | 0.8958 | 0.9545 | 0.875 | 0.9130 | 0.9858 | 0.9653 | 0.9754 | 0.8063 | 0.8958 | 0.8487 | 0.8158 | 0.8158 | 0.8158 | 0.9291 | 0.9225 | 0.9258 | | 0.1798 | 56.0 | 20384 | 0.5997 | 0.8017 | 0.8244 | 0.8024 | 0.8031 | 0.9077 | 0.8194 | 0.8613 | 0.9706 | 0.6875 | 0.8049 | 0.8359 | 0.7431 | 0.7868 | 0.68 | 0.7829 | 0.7278 | 0.7277 | 0.9789 | 0.8348 | | 0.2783 | 57.0 | 20748 | 0.7346 | 0.8567 | 0.8622 | 0.8609 | 0.8497 | 0.7624 | 0.9583 | 0.8492 | 0.9184 | 0.9375 | 0.9278 | 0.8313 | 0.9236 | 0.875 | 0.8667 | 0.5132 | 0.6446 | 0.9324 | 0.9718 | 0.9517 | | 0.2132 | 58.0 | 21112 | 1.3325 | 0.7645 | 0.8246 | 0.7652 | 0.7498 | 
0.9717 | 0.7153 | 0.824 | 0.715 | 0.9931 | 0.8314 | 0.9636 | 0.3681 | 0.5327 | 0.8647 | 0.7566 | 0.8070 | 0.6078 | 0.9930 | 0.7540 | | 0.1655 | 59.0 | 21476 | 2.4867 | 0.7011 | 0.7585 | 0.7015 | 0.6968 | 0.7857 | 0.6875 | 0.7333 | 0.9833 | 0.4097 | 0.5784 | 0.8527 | 0.7639 | 0.8059 | 0.5556 | 0.7237 | 0.6286 | 0.6150 | 0.9225 | 0.7380 | | 0.3627 | 60.0 | 21840 | 1.6926 | 0.7011 | 0.7439 | 0.6996 | 0.6998 | 0.6892 | 0.7083 | 0.6986 | 0.7188 | 0.7986 | 0.7566 | 0.8485 | 0.7778 | 0.8116 | 0.5367 | 0.7697 | 0.6324 | 0.9265 | 0.4437 | 0.6 | | 0.2631 | 61.0 | 22204 | 1.3416 | 0.7094 | 0.7883 | 0.7087 | 0.7048 | 0.925 | 0.5139 | 0.6607 | 0.9855 | 0.4722 | 0.6385 | 0.8254 | 0.7222 | 0.7704 | 0.5246 | 0.8421 | 0.6465 | 0.6812 | 0.9930 | 0.8080 | | 0.2012 | 62.0 | 22568 | 0.6623 | 0.8457 | 0.8531 | 0.8475 | 0.8449 | 0.7797 | 0.9583 | 0.8598 | 0.9658 | 0.7847 | 0.8659 | 0.8540 | 0.8125 | 0.8327 | 0.8258 | 0.7171 | 0.7676 | 0.8405 | 0.9648 | 0.8984 | | 0.2564 | 63.0 | 22932 | 1.3535 | 0.7204 | 0.7862 | 0.7195 | 0.7135 | 0.8529 | 0.8056 | 0.8286 | 0.9841 | 0.4306 | 0.5990 | 0.8602 | 0.5556 | 0.6751 | 0.5485 | 0.8553 | 0.6684 | 0.6853 | 0.9507 | 0.7965 | | 0.1741 | 64.0 | 23296 | 0.5313 | 0.8485 | 0.8566 | 0.8511 | 0.8473 | 0.8980 | 0.9167 | 0.9072 | 0.8393 | 0.9792 | 0.9038 | 0.7371 | 0.8958 | 0.8088 | 0.8087 | 0.6118 | 0.6966 | 1.0 | 0.8521 | 0.9202 | | 0.3129 | 65.0 | 23660 | 1.4864 | 0.6488 | 0.7434 | 0.6484 | 0.6380 | 0.9 | 0.25 | 0.3913 | 0.5612 | 0.9236 | 0.6982 | 0.8058 | 0.7778 | 0.7915 | 0.4615 | 0.6711 | 0.5469 | 0.9888 | 0.6197 | 0.7619 | | 0.2494 | 66.0 | 24024 | 1.5495 | 0.7287 | 0.8151 | 0.7278 | 0.7267 | 0.9143 | 0.4444 | 0.5981 | 1.0 | 0.4931 | 0.6605 | 0.8333 | 0.9028 | 0.8667 | 0.4797 | 0.8553 | 0.6147 | 0.8481 | 0.9437 | 0.8933 | | 0.2422 | 67.0 | 24388 | 1.2390 | 0.7686 | 0.7879 | 0.7718 | 0.7642 | 0.8129 | 0.7847 | 0.7986 | 0.8235 | 0.6806 | 0.7452 | 0.6355 | 0.8958 | 0.7435 | 0.8710 | 0.5329 | 0.6612 | 0.7965 | 0.9648 | 0.8726 | | 0.1332 | 68.0 | 24752 | 1.7990 | 0.7410 | 0.8158 | 0.7404 | 0.7342 | 1.0 | 0.3333 | 0.5 | 0.8235 | 0.7778 | 0.8 | 0.7829 | 0.9514 | 0.8589 | 0.5061 | 0.8224 | 0.6266 | 0.9667 | 0.8169 | 0.8855 | | 0.2093 | 69.0 | 25116 | 0.5063 | 0.8540 | 0.8663 | 0.8569 | 0.8529 | 0.7222 | 0.9931 | 0.8363 | 0.9444 | 0.9444 | 0.9444 | 0.8456 | 0.875 | 0.8601 | 0.8273 | 0.5987 | 0.6947 | 0.992 | 0.8732 | 0.9288 | | 0.3587 | 70.0 | 25480 | 0.9657 | 0.7879 | 0.8288 | 0.7878 | 0.7808 | 1.0 | 0.4583 | 0.6286 | 0.8836 | 0.8958 | 0.8897 | 0.8618 | 0.7361 | 0.7940 | 0.6143 | 0.8487 | 0.7127 | 0.7845 | 1.0 | 0.8793 | | 0.2032 | 71.0 | 25844 | 0.9718 | 0.7590 | 0.8034 | 0.7588 | 0.7572 | 0.9091 | 0.625 | 0.7407 | 0.9884 | 0.5903 | 0.7391 | 0.8 | 0.75 | 0.7742 | 0.63 | 0.8289 | 0.7159 | 0.6893 | 1.0 | 0.8161 | | 0.2602 | 72.0 | 26208 | 1.3336 | 0.7052 | 0.7281 | 0.7065 | 0.7031 | 0.8242 | 0.5208 | 0.6383 | 0.5919 | 0.9167 | 0.7193 | 0.7721 | 0.7292 | 0.75 | 0.6525 | 0.6053 | 0.6280 | 0.8 | 0.7606 | 0.7798 | | 0.2728 | 73.0 | 26572 | 0.7606 | 0.8058 | 0.8219 | 0.8061 | 0.8084 | 0.8248 | 0.7847 | 0.8043 | 0.9434 | 0.6944 | 0.8 | 0.7945 | 0.8056 | 0.8 | 0.6543 | 0.8092 | 0.7235 | 0.8926 | 0.9366 | 0.9141 | | 0.1652 | 74.0 | 26936 | 0.8673 | 0.7906 | 0.8111 | 0.7909 | 0.7869 | 0.9224 | 0.7431 | 0.8231 | 0.8926 | 0.9236 | 0.9078 | 0.8556 | 0.5347 | 0.6581 | 0.6816 | 0.8026 | 0.7372 | 0.7031 | 0.9507 | 0.8084 | | 0.2047 | 75.0 | 27300 | 1.5109 | 0.7218 | 0.7986 | 0.7243 | 0.7181 | 1.0 | 0.4653 | 0.6351 | 0.9912 | 0.7847 | 0.8760 | 0.5743 | 0.9931 | 0.7277 | 0.8163 | 0.5263 | 0.64 | 
0.6111 | 0.8521 | 0.7118 | | 0.1685 | 76.0 | 27664 | 0.9601 | 0.8030 | 0.8222 | 0.8043 | 0.8044 | 0.8846 | 0.7986 | 0.8394 | 0.9596 | 0.6597 | 0.7819 | 0.6757 | 0.8681 | 0.7599 | 0.7070 | 0.7303 | 0.7184 | 0.8839 | 0.9648 | 0.9226 | | 0.2161 | 77.0 | 28028 | 0.8242 | 0.7686 | 0.8093 | 0.7676 | 0.7768 | 0.8195 | 0.7569 | 0.7870 | 0.9380 | 0.8403 | 0.8864 | 0.8165 | 0.6181 | 0.7036 | 0.5485 | 0.8553 | 0.6684 | 0.9237 | 0.7676 | 0.8385 | | 0.1214 | 78.0 | 28392 | 1.5758 | 0.7066 | 0.7803 | 0.7059 | 0.7097 | 0.7946 | 0.6181 | 0.6953 | 1.0 | 0.4792 | 0.6479 | 0.5917 | 0.8958 | 0.7127 | 0.5509 | 0.7829 | 0.6467 | 0.9640 | 0.7535 | 0.8458 | | 0.1458 | 79.0 | 28756 | 1.7201 | 0.6667 | 0.8239 | 0.6641 | 0.6700 | 0.9756 | 0.2778 | 0.4324 | 0.9615 | 0.6944 | 0.8065 | 0.7857 | 0.9167 | 0.8462 | 0.3964 | 0.8684 | 0.5443 | 1.0 | 0.5634 | 0.7207 | | 0.3304 | 80.0 | 29120 | 1.1946 | 0.7824 | 0.8309 | 0.7823 | 0.7837 | 0.9524 | 0.5556 | 0.7018 | 0.9798 | 0.6736 | 0.7984 | 0.8378 | 0.8611 | 0.8493 | 0.5714 | 0.8421 | 0.6809 | 0.8129 | 0.9789 | 0.8882 | | 0.1858 | 81.0 | 29484 | 2.7812 | 0.6460 | 0.7563 | 0.6460 | 0.6443 | 0.9062 | 0.4028 | 0.5577 | 0.9205 | 0.5625 | 0.6983 | 0.9070 | 0.5417 | 0.6783 | 0.5635 | 0.7303 | 0.6361 | 0.4845 | 0.9930 | 0.6513 | | 0.1235 | 82.0 | 29848 | 1.0269 | 0.7865 | 0.8142 | 0.7862 | 0.7930 | 0.8571 | 0.7083 | 0.7757 | 0.8333 | 0.8681 | 0.8503 | 0.8346 | 0.7361 | 0.7823 | 0.5877 | 0.8158 | 0.6832 | 0.9580 | 0.8028 | 0.8736 | | 0.134 | 83.0 | 30212 | 1.6236 | 0.6818 | 0.8405 | 0.6797 | 0.7026 | 0.975 | 0.5417 | 0.6964 | 1.0 | 0.4514 | 0.6220 | 0.8692 | 0.6458 | 0.7410 | 0.3983 | 0.9145 | 0.5549 | 0.96 | 0.8451 | 0.8989 | | 0.2552 | 84.0 | 30576 | 1.6298 | 0.7548 | 0.7811 | 0.7574 | 0.7412 | 0.9841 | 0.4306 | 0.5990 | 0.6875 | 0.8403 | 0.7562 | 0.8354 | 0.9514 | 0.8896 | 0.6617 | 0.5789 | 0.6175 | 0.7368 | 0.9859 | 0.8434 | | 0.1101 | 85.0 | 30940 | 1.9185 | 0.6956 | 0.8079 | 0.6932 | 0.7031 | 0.9375 | 0.4167 | 0.5769 | 0.8519 | 0.7986 | 0.8244 | 0.8414 | 0.8472 | 0.8443 | 0.4342 | 0.8684 | 0.5789 | 0.9744 | 0.5352 | 0.6909 | | 0.1947 | 86.0 | 31304 | 1.8975 | 0.7355 | 0.7956 | 0.7393 | 0.7341 | 0.8607 | 0.7292 | 0.7895 | 0.8151 | 0.8264 | 0.8207 | 0.8033 | 0.6806 | 0.7368 | 0.9589 | 0.4605 | 0.6222 | 0.5399 | 1.0 | 0.7012 | | 0.1428 | 87.0 | 31668 | 0.6888 | 0.7796 | 0.8120 | 0.7798 | 0.7845 | 0.7901 | 0.8889 | 0.8366 | 0.9380 | 0.8403 | 0.8864 | 0.8105 | 0.5347 | 0.6444 | 0.5535 | 0.7829 | 0.6485 | 0.968 | 0.8521 | 0.9064 | | 0.2229 | 88.0 | 32032 | 2.4581 | 0.5744 | 0.8051 | 0.5697 | 0.5661 | 0.9762 | 0.2847 | 0.4409 | 0.8284 | 0.7708 | 0.7986 | 0.8860 | 0.7014 | 0.7829 | 0.3350 | 0.9013 | 0.4884 | 1.0 | 0.1901 | 0.3195 | | 0.1858 | 89.0 | 32396 | 2.8383 | 0.6804 | 0.7342 | 0.6809 | 0.6517 | 1.0 | 0.2153 | 0.3543 | 0.6294 | 0.7431 | 0.6815 | 0.7468 | 0.8194 | 0.7815 | 0.5889 | 0.6974 | 0.6386 | 0.7059 | 0.9296 | 0.8024 | | 0.1742 | 90.0 | 32760 | 1.0500 | 0.7879 | 0.8373 | 0.7878 | 0.7895 | 0.9759 | 0.5625 | 0.7137 | 0.9804 | 0.6944 | 0.8130 | 0.8662 | 0.8542 | 0.8601 | 0.5818 | 0.8421 | 0.6882 | 0.7821 | 0.9859 | 0.8723 | | 0.2212 | 91.0 | 33124 | 0.9733 | 0.7906 | 0.8096 | 0.7951 | 0.7814 | 0.6823 | 0.9097 | 0.7798 | 0.7240 | 0.9653 | 0.8274 | 0.8582 | 0.7986 | 0.8273 | 0.8289 | 0.4145 | 0.5526 | 0.9545 | 0.8873 | 0.9197 | | 0.276 | 92.0 | 33488 | 1.9019 | 0.7507 | 0.7940 | 0.7512 | 0.7318 | 1.0 | 0.3194 | 0.4842 | 0.8671 | 0.8611 | 0.8641 | 0.7020 | 0.9653 | 0.8129 | 0.6257 | 0.7368 | 0.6767 | 0.775 | 0.8732 | 0.8212 | | 0.2309 | 93.0 | 33852 | 2.3563 | 0.6309 | 0.7691 | 
0.6332 | 0.6273 | 0.7966 | 0.6528 | 0.7176 | 0.9167 | 0.7639 | 0.8333 | 0.8974 | 0.2431 | 0.3825 | 0.8387 | 0.5132 | 0.6367 | 0.3961 | 0.9930 | 0.5663 | | 0.2288 | 94.0 | 34216 | 1.2082 | 0.7397 | 0.7988 | 0.7402 | 0.7399 | 0.6168 | 0.9167 | 0.7374 | 1.0 | 0.4514 | 0.6220 | 0.8028 | 0.7917 | 0.7972 | 0.5829 | 0.7171 | 0.6431 | 0.9915 | 0.8239 | 0.9 | | 0.2247 | 95.0 | 34580 | 1.0517 | 0.7686 | 0.7968 | 0.7701 | 0.7677 | 0.8830 | 0.5764 | 0.6975 | 0.6745 | 0.9931 | 0.8034 | 0.8038 | 0.8819 | 0.8411 | 0.6316 | 0.6316 | 0.6316 | 0.9909 | 0.7676 | 0.8651 | | 0.1909 | 96.0 | 34944 | 3.2917 | 0.5744 | 0.7583 | 0.5718 | 0.5322 | 0.8404 | 0.5486 | 0.6639 | 0.9818 | 0.375 | 0.5427 | 1.0 | 0.1042 | 0.1887 | 0.4331 | 0.8947 | 0.5837 | 0.5363 | 0.9366 | 0.6821 | | 0.2927 | 97.0 | 35308 | 4.0534 | 0.6129 | 0.7066 | 0.6129 | 0.5919 | 0.9091 | 0.2083 | 0.3390 | 0.8545 | 0.6528 | 0.7402 | 0.7476 | 0.5347 | 0.6235 | 0.5271 | 0.7039 | 0.6028 | 0.4946 | 0.9648 | 0.6539 | | 0.1792 | 98.0 | 35672 | 1.4354 | 0.7176 | 0.7802 | 0.7177 | 0.7066 | 0.9808 | 0.3542 | 0.5204 | 0.6558 | 0.9792 | 0.7855 | 0.7349 | 0.8472 | 0.7871 | 0.5684 | 0.7105 | 0.6316 | 0.9612 | 0.6972 | 0.8082 | | 0.2467 | 99.0 | 36036 | 1.2316 | 0.7493 | 0.7880 | 0.7493 | 0.7279 | 0.7829 | 0.9514 | 0.8589 | 0.8424 | 0.9653 | 0.8997 | 0.6294 | 0.8611 | 0.7273 | 0.6853 | 0.6447 | 0.6644 | 1.0 | 0.3239 | 0.4894 | | 0.2984 | 100.0 | 36400 | 2.6716 | 0.6708 | 0.8169 | 0.6696 | 0.6597 | 1.0 | 0.2569 | 0.4088 | 0.9839 | 0.4236 | 0.5922 | 0.8633 | 0.8333 | 0.8481 | 0.4050 | 0.8553 | 0.5497 | 0.8323 | 0.9789 | 0.8997 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "durariadorio_128x128", "mole_128x128", "quebrado_128x128", "riadorio_128x128", "riofechado_128x128" ]
ricardoSLabs/jaffe_V2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # jaffe_V2 This model is a fine-tuned version of [microsoft/beit-base-patch16-224-pt22k-ft22k](https://huggingface.co/microsoft/beit-base-patch16-224-pt22k-ft22k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.9499 - Accuracy: 0.2344 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 1 | 2.1789 | 0.1562 | | No log | 2.0 | 2 | 2.2850 | 0.1406 | | No log | 3.0 | 3 | 2.1473 | 0.1562 | | No log | 4.0 | 4 | 2.0046 | 0.1562 | | No log | 5.0 | 5 | 1.9499 | 0.2344 | ### Framework versions - Transformers 4.47.0 - Pytorch 2.5.1+cu121 - Datasets 3.3.1 - Tokenizers 0.21.0
[ "an", "di", "fe", "ha", "ne", "sa", "su" ]
ricardoSLabs/fer_plus_V2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # fer_plus_V2 This model is a fine-tuned version of [microsoft/beit-base-patch16-224-pt22k-ft22k](https://huggingface.co/microsoft/beit-base-patch16-224-pt22k-ft22k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.7483 - Accuracy: 0.7598 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.0697 | 1.0 | 222 | 1.0167 | 0.6354 | | 0.7784 | 2.0 | 444 | 0.8059 | 0.7124 | | 0.5911 | 3.0 | 666 | 0.7499 | 0.7384 | | 0.4609 | 4.0 | 888 | 0.7586 | 0.7502 | | 0.3712 | 5.0 | 1110 | 0.7483 | 0.7598 | ### Framework versions - Transformers 4.47.0 - Pytorch 2.5.1+cu121 - Datasets 3.3.1 - Tokenizers 0.21.0
[ "anger", "contempt", "disgust", "fear", "happiness", "neutral", "sadness", "surprise" ]
Master-Rapha7/mobilenetv2-typecoffee-3
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # mobilenetv2-typecoffee-3 This model is a fine-tuned version of [google/mobilenet_v2_1.0_224](https://huggingface.co/google/mobilenet_v2_1.0_224) on the Master-Rapha7/TypeCoffee_64x64 dataset. It achieves the following results on the evaluation set: - Loss: 0.4428 - Accuracy: 0.8733 - Precision: 0.8809 - Recall: 0.8742 - F1: 0.8751 - Precision Durariadorio 64x64: 0.9512 - Recall Durariadorio 64x64: 0.8125 - F1 Durariadorio 64x64: 0.8764 - Precision Mole 64x64: 0.9357 - Recall Mole 64x64: 0.9097 - F1 Mole 64x64: 0.9225 - Precision Quebrado 64x64: 0.8059 - Recall Quebrado 64x64: 0.9514 - F1 Quebrado 64x64: 0.8726 - Precision Riadorio 64x64: 0.7707 - Recall Riadorio 64x64: 0.7961 - F1 Riadorio 64x64: 0.7832 - Precision Riofechado 64x64: 0.9412 - Recall Riofechado 64x64: 0.9014 - F1 Riofechado 64x64: 0.9209 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 100.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | Recall | F1 | Precision Durariadorio 64x64 | Recall Durariadorio 64x64 | F1 Durariadorio 64x64 | Precision Mole 64x64 | Recall Mole 64x64 | F1 Mole 64x64 | Precision Quebrado 64x64 | Recall Quebrado 64x64 | F1 Quebrado 64x64 | Precision Riadorio 64x64 | Recall Riadorio 64x64 | F1 Riadorio 64x64 | Precision Riofechado 64x64 | Recall Riofechado 64x64 | F1 Riofechado 64x64 | |:-------------:|:-----:|:-----:|:---------------:|:--------:|:---------:|:------:|:------:|:----------------------------:|:-------------------------:|:---------------------:|:--------------------:|:-----------------:|:-------------:|:------------------------:|:---------------------:|:-----------------:|:------------------------:|:---------------------:|:-----------------:|:--------------------------:|:-----------------------:|:-------------------:| | 1.8339 | 1.0 | 364 | 1.3719 | 0.3815 | 0.5231 | 0.3834 | 0.3480 | 0.3265 | 0.2222 | 0.2645 | 0.2779 | 0.8472 | 0.4185 | 0.75 | 0.6042 | 0.6692 | 0.3860 | 0.1447 | 0.2105 | 0.875 | 0.0986 | 0.1772 | | 0.8841 | 2.0 | 728 | 1.2402 | 0.4959 | 0.6099 | 0.4958 | 0.4690 | 0.4345 | 0.4375 | 0.4360 | 0.75 | 0.25 | 0.375 | 0.9231 | 0.25 | 0.3934 | 0.5254 | 0.6118 | 0.5653 | 0.4164 | 0.9296 | 0.5752 | | 0.9358 | 3.0 | 1092 | 0.5863 | 0.7975 | 0.8044 | 0.8001 | 0.7957 | 0.6630 | 0.8472 | 0.7439 | 0.8403 | 0.6944 | 0.7605 | 0.8503 | 0.9861 | 0.9132 | 0.7417 | 0.5855 | 0.6544 | 0.9265 | 0.8873 | 0.9065 | | 0.5744 | 4.0 | 1456 | 0.5526 | 0.8058 | 0.8012 | 0.8085 | 0.8009 | 0.784 | 0.6806 | 0.7286 | 0.8 | 0.8056 | 0.8028 | 0.8606 | 0.9861 | 0.9191 | 0.736 | 0.6053 | 0.6643 | 0.8253 | 0.9648 | 0.8896 | | 3.8423 | 5.0 | 1820 | 2.2945 | 0.4545 | 0.5793 | 0.4494 | 0.4180 | 0.4943 | 0.2986 | 0.3723 | 0.4118 | 0.6806 | 0.5131 | 1.0 | 0.2361 | 0.3820 | 0.3994 | 0.8487 | 0.5432 | 0.5909 | 0.1831 | 0.2796 | | 0.9289 | 6.0 | 2184 | 1.0301 | 0.6736 | 0.7565 | 0.6724 | 0.6429 | 1.0 | 0.1806 | 0.3059 | 
0.8067 | 0.6667 | 0.7300 | 0.6606 | 1.0 | 0.7956 | 0.5106 | 0.7895 | 0.6202 | 0.8047 | 0.7254 | 0.7630 | | 1.4282 | 7.0 | 2548 | 2.2074 | 0.4573 | 0.6324 | 0.4593 | 0.4141 | 0.3813 | 0.7917 | 0.5147 | 0.3992 | 0.7014 | 0.5088 | 1.0 | 0.0625 | 0.1176 | 0.7925 | 0.2763 | 0.4098 | 0.5893 | 0.4648 | 0.5197 | | 0.8611 | 8.0 | 2912 | 0.7746 | 0.7879 | 0.8003 | 0.7895 | 0.7744 | 0.8732 | 0.4306 | 0.5767 | 0.8759 | 0.8819 | 0.8789 | 0.8218 | 0.9931 | 0.8994 | 0.7222 | 0.6842 | 0.7027 | 0.7083 | 0.9577 | 0.8144 | | 0.9293 | 9.0 | 3276 | 1.0409 | 0.7107 | 0.7522 | 0.7112 | 0.7095 | 0.8068 | 0.4931 | 0.6121 | 0.9545 | 0.7292 | 0.8268 | 0.5975 | 1.0 | 0.7481 | 0.5952 | 0.6579 | 0.625 | 0.8067 | 0.6761 | 0.7356 | | 0.1113 | 10.0 | 3640 | 1.3318 | 0.6474 | 0.7588 | 0.6525 | 0.6272 | 0.9048 | 0.3958 | 0.5507 | 0.4537 | 0.9861 | 0.6214 | 0.8846 | 0.6389 | 0.7419 | 0.8542 | 0.2697 | 0.41 | 0.6970 | 0.9718 | 0.8118 | | 0.5141 | 11.0 | 4004 | 1.3893 | 0.6653 | 0.7754 | 0.6668 | 0.6485 | 0.4462 | 0.9792 | 0.6130 | 0.9667 | 0.2014 | 0.3333 | 0.8898 | 0.7847 | 0.8339 | 0.7107 | 0.5658 | 0.6300 | 0.8636 | 0.8028 | 0.8321 | | 0.2037 | 12.0 | 4368 | 1.9170 | 0.6405 | 0.7596 | 0.6422 | 0.6220 | 0.9032 | 0.1944 | 0.32 | 0.9121 | 0.5764 | 0.7064 | 0.7901 | 0.8889 | 0.8366 | 0.7699 | 0.5724 | 0.6566 | 0.4225 | 0.9789 | 0.5902 | | 0.7657 | 13.0 | 4732 | 1.2913 | 0.7094 | 0.7793 | 0.7150 | 0.6832 | 0.5721 | 0.8819 | 0.6940 | 0.9737 | 0.5139 | 0.6727 | 0.7989 | 0.9653 | 0.8742 | 0.9091 | 0.2632 | 0.4082 | 0.6429 | 0.9507 | 0.7670 | | 1.7904 | 14.0 | 5096 | 1.5750 | 0.5771 | 0.7363 | 0.5736 | 0.5883 | 0.6311 | 0.5347 | 0.5789 | 1.0 | 0.3403 | 0.5078 | 0.8972 | 0.6667 | 0.7649 | 0.3607 | 0.8684 | 0.5097 | 0.7927 | 0.4577 | 0.5804 | | 1.8144 | 15.0 | 5460 | 0.8439 | 0.7397 | 0.7902 | 0.7404 | 0.7316 | 0.5870 | 0.9375 | 0.7219 | 0.7258 | 0.9375 | 0.8182 | 0.8605 | 0.7708 | 0.8132 | 0.7931 | 0.6053 | 0.6866 | 0.9846 | 0.4507 | 0.6184 | | 0.787 | 16.0 | 5824 | 0.7087 | 0.7851 | 0.7908 | 0.7871 | 0.7813 | 0.8349 | 0.6319 | 0.7194 | 0.7862 | 0.8681 | 0.8251 | 0.8092 | 0.8542 | 0.8311 | 0.7967 | 0.6447 | 0.7127 | 0.7268 | 0.9366 | 0.8185 | | 1.6885 | 17.0 | 6188 | 1.1906 | 0.6667 | 0.8084 | 0.6644 | 0.6495 | 1.0 | 0.2014 | 0.3353 | 1.0 | 0.6111 | 0.7586 | 0.6825 | 1.0 | 0.8113 | 0.4396 | 0.8618 | 0.5822 | 0.92 | 0.6479 | 0.7603 | | 0.7317 | 18.0 | 6552 | 0.9377 | 0.7342 | 0.7888 | 0.7337 | 0.7421 | 0.7063 | 0.7847 | 0.7434 | 0.9036 | 0.5208 | 0.6608 | 0.8643 | 0.8403 | 0.8521 | 0.5064 | 0.7763 | 0.6130 | 0.9636 | 0.7465 | 0.8413 | | 1.1095 | 19.0 | 6916 | 1.2331 | 0.7548 | 0.7869 | 0.7556 | 0.7375 | 0.9091 | 0.3472 | 0.5025 | 0.8926 | 0.75 | 0.8151 | 0.8114 | 0.9861 | 0.8903 | 0.6550 | 0.7368 | 0.6935 | 0.6667 | 0.9577 | 0.7861 | | 0.3625 | 20.0 | 7280 | 0.9931 | 0.7410 | 0.7846 | 0.7424 | 0.7329 | 0.6329 | 0.9097 | 0.7464 | 0.9853 | 0.4653 | 0.6321 | 0.8673 | 0.6806 | 0.7626 | 0.7357 | 0.6776 | 0.7055 | 0.7020 | 0.9789 | 0.8176 | | 0.9243 | 21.0 | 7644 | 1.7097 | 0.6309 | 0.7730 | 0.6292 | 0.5856 | 0.9032 | 0.1944 | 0.32 | 1.0 | 0.2222 | 0.3636 | 0.6792 | 1.0 | 0.8090 | 0.4197 | 0.8421 | 0.5602 | 0.8630 | 0.8873 | 0.875 | | 1.4692 | 22.0 | 8008 | 3.5682 | 0.4518 | 0.6827 | 0.4515 | 0.4158 | 0.8696 | 0.1389 | 0.2395 | 0.8615 | 0.3889 | 0.5359 | 0.76 | 0.1319 | 0.2249 | 0.62 | 0.6118 | 0.6159 | 0.3024 | 0.9859 | 0.4628 | | 0.11 | 23.0 | 8372 | 1.5003 | 0.6983 | 0.7763 | 0.6972 | 0.6926 | 0.8889 | 0.2778 | 0.4233 | 0.7656 | 0.6806 | 0.7206 | 0.8733 | 0.9097 | 0.8912 | 0.4496 | 0.8224 | 0.5814 | 0.904 | 0.7958 | 0.8464 | | 0.1686 | 
24.0 | 8736 | 0.7475 | 0.7837 | 0.8280 | 0.7834 | 0.7885 | 0.7288 | 0.8958 | 0.8037 | 1.0 | 0.5903 | 0.7424 | 0.8864 | 0.8125 | 0.8478 | 0.5905 | 0.8158 | 0.6851 | 0.9344 | 0.8028 | 0.8636 | | 2.2972 | 25.0 | 9100 | 2.4067 | 0.6433 | 0.7428 | 0.6423 | 0.5978 | 0.9375 | 0.1042 | 0.1875 | 0.8514 | 0.4375 | 0.5780 | 0.7833 | 0.9792 | 0.8704 | 0.44 | 0.7961 | 0.5667 | 0.7017 | 0.8944 | 0.7864 | | 0.7441 | 26.0 | 9464 | 1.5594 | 0.6736 | 0.8149 | 0.6714 | 0.6613 | 0.7969 | 0.7083 | 0.75 | 1.0 | 0.1597 | 0.2754 | 0.8731 | 0.8125 | 0.8417 | 0.4134 | 0.8947 | 0.5655 | 0.9911 | 0.7817 | 0.8740 | | 0.2518 | 27.0 | 9828 | 1.4697 | 0.6956 | 0.7674 | 0.6954 | 0.6803 | 1.0 | 0.2778 | 0.4348 | 0.7642 | 0.6528 | 0.7041 | 0.8623 | 0.8264 | 0.8440 | 0.4979 | 0.7763 | 0.6067 | 0.7128 | 0.9437 | 0.8121 | | 0.8784 | 28.0 | 10192 | 1.6700 | 0.6391 | 0.7410 | 0.6380 | 0.6292 | 0.95 | 0.2639 | 0.4130 | 0.7048 | 0.5139 | 0.5944 | 0.7065 | 0.9861 | 0.8232 | 0.4116 | 0.75 | 0.5315 | 0.9320 | 0.6761 | 0.7837 | | 1.1915 | 29.0 | 10556 | 0.9770 | 0.7975 | 0.8223 | 0.7984 | 0.7919 | 0.9351 | 0.5 | 0.6516 | 0.9058 | 0.8681 | 0.8865 | 0.8759 | 0.8819 | 0.8789 | 0.7030 | 0.7632 | 0.7319 | 0.6915 | 0.9789 | 0.8105 | | 0.2653 | 30.0 | 10920 | 0.6579 | 0.8499 | 0.8601 | 0.8510 | 0.8497 | 0.9550 | 0.7361 | 0.8314 | 0.8759 | 0.8819 | 0.8789 | 0.7688 | 0.9931 | 0.8667 | 0.7877 | 0.7566 | 0.7718 | 0.9130 | 0.8873 | 0.9 | | 0.6757 | 31.0 | 11284 | 1.1712 | 0.7204 | 0.7686 | 0.7212 | 0.7020 | 0.6450 | 0.7569 | 0.6965 | 1.0 | 0.2986 | 0.4599 | 0.8759 | 0.8819 | 0.8789 | 0.5815 | 0.7039 | 0.6369 | 0.7405 | 0.9648 | 0.8379 | | 0.1863 | 32.0 | 11648 | 0.7389 | 0.7824 | 0.8141 | 0.7817 | 0.7839 | 0.7355 | 0.7917 | 0.7625 | 0.8947 | 0.8264 | 0.8592 | 0.825 | 0.9167 | 0.8684 | 0.6154 | 0.7895 | 0.6916 | 1.0 | 0.5845 | 0.7378 | | 4.8154 | 33.0 | 12012 | 2.8633 | 0.6143 | 0.7092 | 0.6140 | 0.5631 | 0.7808 | 0.3958 | 0.5253 | 1.0 | 0.1111 | 0.2 | 0.6636 | 0.9861 | 0.7933 | 0.4756 | 0.7039 | 0.5676 | 0.6263 | 0.8732 | 0.7294 | | 0.3842 | 34.0 | 12376 | 0.5939 | 0.8430 | 0.8488 | 0.8459 | 0.8397 | 0.9084 | 0.8264 | 0.8655 | 0.7727 | 0.9444 | 0.85 | 0.8070 | 0.9583 | 0.8762 | 0.8411 | 0.5921 | 0.6950 | 0.9149 | 0.9085 | 0.9117 | | 0.9703 | 35.0 | 12740 | 2.1655 | 0.5317 | 0.7189 | 0.5326 | 0.5165 | 0.3357 | 0.9722 | 0.4991 | 0.7703 | 0.7917 | 0.7808 | 0.8649 | 0.2222 | 0.3536 | 0.7867 | 0.3882 | 0.5198 | 0.8367 | 0.2887 | 0.4293 | | 4.9934 | 36.0 | 13104 | 1.3484 | 0.6860 | 0.8082 | 0.6833 | 0.7067 | 0.8390 | 0.6875 | 0.7557 | 0.9434 | 0.6944 | 0.8 | 0.8632 | 0.5694 | 0.6862 | 0.4198 | 0.8947 | 0.5714 | 0.9759 | 0.5704 | 0.72 | | 0.1061 | 37.0 | 13468 | 1.9914 | 0.6047 | 0.7035 | 0.6071 | 0.5994 | 0.7606 | 0.375 | 0.5023 | 0.4167 | 0.9722 | 0.5833 | 0.9 | 0.4375 | 0.5888 | 0.6634 | 0.4408 | 0.5296 | 0.7770 | 0.8099 | 0.7931 | | 1.1014 | 38.0 | 13832 | 0.7707 | 0.8278 | 0.8347 | 0.8294 | 0.8284 | 0.7432 | 0.7639 | 0.7534 | 0.9825 | 0.7778 | 0.8682 | 0.8808 | 0.9236 | 0.9017 | 0.7415 | 0.7171 | 0.7291 | 0.8253 | 0.9648 | 0.8896 | | 0.4693 | 39.0 | 14196 | 0.8032 | 0.7796 | 0.8153 | 0.7806 | 0.7751 | 0.9595 | 0.4931 | 0.6514 | 0.8897 | 0.8403 | 0.8643 | 0.8759 | 0.8333 | 0.8541 | 0.7107 | 0.7434 | 0.7267 | 0.6409 | 0.9930 | 0.7790 | | 0.03 | 40.0 | 14560 | 2.2705 | 0.6212 | 0.7527 | 0.6183 | 0.6087 | 0.8983 | 0.3681 | 0.5222 | 0.9821 | 0.3819 | 0.55 | 0.6278 | 0.9722 | 0.7629 | 0.4348 | 0.8553 | 0.5765 | 0.8202 | 0.5141 | 0.6320 | | 1.0635 | 41.0 | 14924 | 0.8656 | 0.7700 | 0.8146 | 0.7694 | 0.7701 | 0.8731 | 0.8125 | 0.8417 | 0.9610 | 0.5139 | 
0.6697 | 0.8917 | 0.7431 | 0.8106 | 0.5822 | 0.8618 | 0.6950 | 0.7647 | 0.9155 | 0.8333 | | 1.7808 | 42.0 | 15288 | 0.4428 | 0.8733 | 0.8809 | 0.8742 | 0.8751 | 0.9512 | 0.8125 | 0.8764 | 0.9357 | 0.9097 | 0.9225 | 0.8059 | 0.9514 | 0.8726 | 0.7707 | 0.7961 | 0.7832 | 0.9412 | 0.9014 | 0.9209 | | 1.2959 | 43.0 | 15652 | 2.8245 | 0.5386 | 0.6741 | 0.5340 | 0.5194 | 0.4741 | 0.3819 | 0.4231 | 0.7273 | 0.8333 | 0.7767 | 0.8627 | 0.3056 | 0.4513 | 0.3796 | 0.8816 | 0.5307 | 0.9268 | 0.2676 | 0.4153 | | 1.0648 | 44.0 | 16016 | 2.2485 | 0.6419 | 0.7630 | 0.6411 | 0.6168 | 0.9714 | 0.2361 | 0.3799 | 0.9667 | 0.4028 | 0.5686 | 0.8182 | 0.8125 | 0.8153 | 0.4706 | 0.7895 | 0.5897 | 0.5880 | 0.9648 | 0.7307 | | 1.6683 | 45.0 | 16380 | 1.3286 | 0.7576 | 0.8073 | 0.7586 | 0.7488 | 0.9492 | 0.3889 | 0.5517 | 0.8794 | 0.8611 | 0.8702 | 0.8971 | 0.8472 | 0.8714 | 0.7365 | 0.7171 | 0.7267 | 0.5744 | 0.9789 | 0.7240 | | 1.4987 | 46.0 | 16744 | 1.4056 | 0.6915 | 0.7938 | 0.6885 | 0.6877 | 0.7051 | 0.7639 | 0.7333 | 1.0 | 0.7292 | 0.8434 | 0.7958 | 0.7847 | 0.7902 | 0.4679 | 0.8618 | 0.6065 | 1.0 | 0.3028 | 0.4649 | | 0.1716 | 47.0 | 17108 | 0.9573 | 0.7645 | 0.8122 | 0.7645 | 0.7661 | 0.9310 | 0.5625 | 0.7013 | 0.6438 | 0.9792 | 0.7769 | 0.8561 | 0.8264 | 0.8410 | 0.6298 | 0.75 | 0.6847 | 1.0 | 0.7042 | 0.8264 | | 1.2902 | 48.0 | 17472 | 0.7744 | 0.8168 | 0.8580 | 0.8163 | 0.8245 | 0.8905 | 0.8472 | 0.8683 | 0.9901 | 0.6944 | 0.8163 | 0.8333 | 0.9028 | 0.8667 | 0.5936 | 0.8553 | 0.7008 | 0.9823 | 0.7817 | 0.8706 | | 1.074 | 49.0 | 17836 | 5.5839 | 0.4105 | 0.6861 | 0.4055 | 0.3065 | 1.0 | 0.0486 | 0.0927 | 1.0 | 0.0556 | 0.1053 | 0.4780 | 0.9792 | 0.6424 | 0.3159 | 0.7961 | 0.4523 | 0.6364 | 0.1479 | 0.24 | | 0.3016 | 50.0 | 18200 | 1.3900 | 0.7273 | 0.7944 | 0.7266 | 0.7389 | 0.7636 | 0.5833 | 0.6614 | 0.9457 | 0.6042 | 0.7373 | 0.8176 | 0.9028 | 0.8581 | 0.4727 | 0.7961 | 0.5931 | 0.9725 | 0.7465 | 0.8446 | | 0.7923 | 51.0 | 18564 | 1.1039 | 0.7369 | 0.8038 | 0.7354 | 0.7273 | 1.0 | 0.5417 | 0.7027 | 0.7394 | 0.9653 | 0.8373 | 0.5917 | 0.8958 | 0.7127 | 0.7029 | 0.8092 | 0.7523 | 0.9851 | 0.4648 | 0.6316 | | 0.079 | 52.0 | 18928 | 0.9145 | 0.7686 | 0.8069 | 0.7685 | 0.7712 | 0.8440 | 0.6389 | 0.7273 | 0.9804 | 0.6944 | 0.8130 | 0.8974 | 0.7292 | 0.8046 | 0.6188 | 0.8224 | 0.7062 | 0.6939 | 0.9577 | 0.8047 | | 0.7387 | 53.0 | 19292 | 1.1572 | 0.7645 | 0.8182 | 0.7636 | 0.7750 | 0.7744 | 0.7153 | 0.7437 | 0.9896 | 0.6597 | 0.7917 | 0.8699 | 0.7431 | 0.8015 | 0.5347 | 0.8618 | 0.6599 | 0.9225 | 0.8380 | 0.8782 | | 0.4333 | 54.0 | 19656 | 0.8459 | 0.7727 | 0.8256 | 0.7723 | 0.7789 | 0.9074 | 0.6806 | 0.7778 | 0.9681 | 0.6319 | 0.7647 | 0.8814 | 0.7222 | 0.7939 | 0.5458 | 0.8618 | 0.6684 | 0.8253 | 0.9648 | 0.8896 | | 0.135 | 55.0 | 20020 | 0.9281 | 0.7534 | 0.8099 | 0.7532 | 0.7554 | 0.6813 | 0.7569 | 0.7171 | 1.0 | 0.4583 | 0.6286 | 0.8613 | 0.8194 | 0.8399 | 0.5439 | 0.8158 | 0.6526 | 0.9630 | 0.9155 | 0.9386 | | 1.4615 | 56.0 | 20384 | 1.5940 | 0.7245 | 0.7685 | 0.7254 | 0.7098 | 0.6688 | 0.7153 | 0.6913 | 1.0 | 0.3333 | 0.5 | 0.8121 | 0.9306 | 0.8673 | 0.5544 | 0.7039 | 0.6203 | 0.8072 | 0.9437 | 0.8701 | | 0.6157 | 57.0 | 20748 | 0.7882 | 0.7810 | 0.8076 | 0.7860 | 0.7739 | 0.5907 | 0.9722 | 0.7349 | 0.9580 | 0.7917 | 0.8669 | 0.8129 | 0.875 | 0.8428 | 0.7273 | 0.3684 | 0.4891 | 0.9493 | 0.9225 | 0.9357 | | 0.2383 | 58.0 | 21112 | 1.1022 | 0.7466 | 0.8318 | 0.7452 | 0.7589 | 0.8739 | 0.6736 | 0.7608 | 0.9895 | 0.6528 | 0.7866 | 0.9022 | 0.5764 | 0.7034 | 0.4842 | 0.9079 | 0.6316 | 0.9091 | 0.9155 | 0.9123 
| | 0.0679 | 59.0 | 21476 | 1.0813 | 0.7851 | 0.8158 | 0.7855 | 0.7816 | 0.8411 | 0.625 | 0.7171 | 1.0 | 0.6111 | 0.7586 | 0.8408 | 0.9167 | 0.8771 | 0.7289 | 0.7961 | 0.7610 | 0.6683 | 0.9789 | 0.7943 | | 2.4259 | 60.0 | 21840 | 2.6947 | 0.5909 | 0.7476 | 0.5867 | 0.5688 | 0.5792 | 0.8125 | 0.6763 | 0.9252 | 0.6875 | 0.7888 | 0.8261 | 0.3958 | 0.5352 | 0.4074 | 0.8684 | 0.5546 | 1.0 | 0.1690 | 0.2892 | | 0.011 | 61.0 | 22204 | 0.8938 | 0.7603 | 0.8205 | 0.7599 | 0.7670 | 0.9647 | 0.5694 | 0.7162 | 0.9905 | 0.7222 | 0.8353 | 0.6753 | 0.9097 | 0.7751 | 0.5616 | 0.8092 | 0.6631 | 0.9106 | 0.7887 | 0.8453 | | 0.4961 | 62.0 | 22568 | 1.7525 | 0.6915 | 0.8236 | 0.6889 | 0.7159 | 0.9681 | 0.6319 | 0.7647 | 0.9327 | 0.6736 | 0.7823 | 0.8739 | 0.6736 | 0.7608 | 0.4121 | 0.8947 | 0.5643 | 0.9310 | 0.5704 | 0.7074 | | 0.0413 | 63.0 | 22932 | 0.7127 | 0.8264 | 0.8341 | 0.8281 | 0.8268 | 0.7673 | 0.8472 | 0.8053 | 0.9576 | 0.7847 | 0.8626 | 0.8797 | 0.8125 | 0.8448 | 0.7622 | 0.7171 | 0.7390 | 0.8035 | 0.9789 | 0.8825 | | 1.2568 | 64.0 | 23296 | 3.7240 | 0.5055 | 0.7495 | 0.5005 | 0.5234 | 0.9649 | 0.3819 | 0.5473 | 0.8171 | 0.4653 | 0.5929 | 0.8056 | 0.4028 | 0.5370 | 0.3028 | 0.9145 | 0.4550 | 0.8571 | 0.3380 | 0.4848 | | 0.3057 | 65.0 | 23660 | 1.0150 | 0.7479 | 0.7988 | 0.7474 | 0.7517 | 0.9109 | 0.6389 | 0.7510 | 0.6233 | 0.9306 | 0.7465 | 0.8409 | 0.7708 | 0.8043 | 0.6190 | 0.7697 | 0.6862 | 1.0 | 0.6268 | 0.7706 | | 0.5195 | 66.0 | 24024 | 7.4947 | 0.3953 | 0.6566 | 0.3941 | 0.3246 | 0.5714 | 0.0278 | 0.0530 | 1.0 | 0.2014 | 0.3353 | 0.8947 | 0.1181 | 0.2086 | 0.5323 | 0.6513 | 0.5858 | 0.2845 | 0.9718 | 0.4402 | | 0.0658 | 67.0 | 24388 | 0.5080 | 0.8320 | 0.8389 | 0.8330 | 0.8335 | 0.7892 | 0.9097 | 0.8452 | 0.9609 | 0.8542 | 0.9044 | 0.848 | 0.7361 | 0.7881 | 0.7188 | 0.7566 | 0.7372 | 0.8776 | 0.9085 | 0.8927 | | 1.3597 | 68.0 | 24752 | 1.2537 | 0.7576 | 0.8153 | 0.7580 | 0.7413 | 1.0 | 0.3056 | 0.4681 | 0.6812 | 0.9792 | 0.8034 | 0.8506 | 0.9097 | 0.8792 | 0.5765 | 0.7434 | 0.6494 | 0.968 | 0.8521 | 0.9064 | | 1.0185 | 69.0 | 25116 | 0.6910 | 0.8237 | 0.8381 | 0.8252 | 0.8283 | 0.7167 | 0.8958 | 0.7963 | 0.9535 | 0.8542 | 0.9011 | 0.8686 | 0.8264 | 0.8470 | 0.6839 | 0.6974 | 0.6906 | 0.968 | 0.8521 | 0.9064 | | 1.5741 | 70.0 | 25480 | 4.0807 | 0.5826 | 0.7741 | 0.5802 | 0.5317 | 0.9583 | 0.1597 | 0.2738 | 0.9412 | 0.1111 | 0.1988 | 0.7791 | 0.9306 | 0.8481 | 0.3548 | 0.8684 | 0.5038 | 0.8369 | 0.8310 | 0.8339 | | 0.0528 | 71.0 | 25844 | 0.9495 | 0.7645 | 0.8058 | 0.7642 | 0.7715 | 0.7257 | 0.8819 | 0.7962 | 0.9516 | 0.8194 | 0.8806 | 0.7879 | 0.7222 | 0.7536 | 0.5637 | 0.7566 | 0.6461 | 1.0 | 0.6408 | 0.7811 | | 2.4556 | 72.0 | 26208 | 3.1869 | 0.5620 | 0.6926 | 0.5614 | 0.5393 | 0.7326 | 0.4375 | 0.5478 | 0.8246 | 0.6528 | 0.7287 | 1.0 | 0.1597 | 0.2754 | 0.4234 | 0.6908 | 0.525 | 0.4824 | 0.8662 | 0.6196 | | 1.1308 | 73.0 | 26572 | 1.5193 | 0.7452 | 0.8115 | 0.7445 | 0.7366 | 0.8468 | 0.6528 | 0.7373 | 1.0 | 0.3611 | 0.5306 | 0.8562 | 0.9097 | 0.8822 | 0.5221 | 0.8553 | 0.6484 | 0.8323 | 0.9437 | 0.8845 | | 0.1167 | 74.0 | 26936 | 1.2731 | 0.7094 | 0.7975 | 0.7074 | 0.7241 | 0.7478 | 0.5972 | 0.6641 | 0.9333 | 0.7778 | 0.8485 | 0.8538 | 0.7708 | 0.8102 | 0.4523 | 0.8421 | 0.5885 | 1.0 | 0.5493 | 0.7091 | | 0.2747 | 75.0 | 27300 | 0.5770 | 0.8650 | 0.8689 | 0.8674 | 0.8623 | 0.9048 | 0.7917 | 0.8444 | 0.8718 | 0.9444 | 0.9067 | 0.8274 | 0.9653 | 0.8910 | 0.8957 | 0.6776 | 0.7715 | 0.8447 | 0.9577 | 0.8977 | | 0.7676 | 76.0 | 27664 | 0.9433 | 0.8030 | 0.8342 | 0.8030 | 0.8042 | 0.9107 | 
0.7083 | 0.7969 | 1.0 | 0.6458 | 0.7848 | 0.7611 | 0.9514 | 0.8457 | 0.6477 | 0.8224 | 0.7246 | 0.8514 | 0.8873 | 0.8690 | | 0.8577 | 77.0 | 28028 | 1.2856 | 0.7521 | 0.7730 | 0.7578 | 0.7285 | 0.7016 | 0.9306 | 0.8 | 0.6290 | 0.9653 | 0.7616 | 0.8377 | 0.8958 | 0.8658 | 0.7059 | 0.2368 | 0.3547 | 0.9908 | 0.7606 | 0.8606 | | 0.3569 | 78.0 | 28392 | 1.0886 | 0.7741 | 0.8440 | 0.7733 | 0.7850 | 0.9506 | 0.5347 | 0.6844 | 0.9826 | 0.7847 | 0.8726 | 0.8623 | 0.8264 | 0.8440 | 0.5077 | 0.8684 | 0.6408 | 0.9167 | 0.8521 | 0.8832 | | 0.8389 | 79.0 | 28756 | 2.0285 | 0.6226 | 0.8028 | 0.6193 | 0.6470 | 0.9178 | 0.4653 | 0.6175 | 0.9368 | 0.6181 | 0.7448 | 0.8333 | 0.6944 | 0.7576 | 0.3566 | 0.875 | 0.5067 | 0.9692 | 0.4437 | 0.6087 | | 1.0882 | 80.0 | 29120 | 1.5803 | 0.6970 | 0.7947 | 0.6953 | 0.6929 | 0.8652 | 0.5347 | 0.6609 | 0.9815 | 0.3681 | 0.5354 | 0.7010 | 0.9444 | 0.8047 | 0.4781 | 0.8618 | 0.6150 | 0.9478 | 0.7676 | 0.8482 | | 0.6209 | 81.0 | 29484 | 1.2037 | 0.7796 | 0.8182 | 0.7853 | 0.7647 | 0.5755 | 0.9792 | 0.7249 | 0.8797 | 0.8125 | 0.8448 | 0.8693 | 0.9236 | 0.8956 | 0.8519 | 0.3026 | 0.4466 | 0.9149 | 0.9085 | 0.9117 | | 0.1106 | 82.0 | 29848 | 0.6860 | 0.8168 | 0.8427 | 0.8176 | 0.8179 | 0.9775 | 0.6042 | 0.7468 | 0.7473 | 0.9444 | 0.8344 | 0.8551 | 0.8194 | 0.8369 | 0.6556 | 0.7763 | 0.7108 | 0.9781 | 0.9437 | 0.9606 | | 0.0101 | 83.0 | 30212 | 6.1928 | 0.4904 | 0.6662 | 0.4894 | 0.4496 | 0.8571 | 0.0833 | 0.1519 | 0.9245 | 0.3403 | 0.4975 | 0.6543 | 0.3681 | 0.4711 | 0.5381 | 0.6974 | 0.6074 | 0.3570 | 0.9577 | 0.5201 | | 0.9885 | 84.0 | 30576 | 1.1667 | 0.7479 | 0.7940 | 0.7517 | 0.7439 | 0.8571 | 0.625 | 0.7229 | 0.8193 | 0.9444 | 0.8774 | 0.8843 | 0.7431 | 0.8075 | 0.8554 | 0.4671 | 0.6043 | 0.5538 | 0.9789 | 0.7074 | | 0.078 | 85.0 | 30940 | 0.7273 | 0.8168 | 0.8514 | 0.8164 | 0.8224 | 0.9905 | 0.7222 | 0.8353 | 0.8725 | 0.9028 | 0.8874 | 0.7590 | 0.875 | 0.8129 | 0.635 | 0.8355 | 0.7216 | 1.0 | 0.7465 | 0.8548 | | 1.4247 | 86.0 | 31304 | 1.0005 | 0.7769 | 0.8310 | 0.7813 | 0.7733 | 0.5586 | 0.9931 | 0.715 | 0.9907 | 0.7361 | 0.8446 | 0.8611 | 0.8611 | 0.8611 | 0.8732 | 0.4079 | 0.5561 | 0.8716 | 0.9085 | 0.8897 | | 0.3335 | 87.0 | 31668 | 1.4127 | 0.7617 | 0.7912 | 0.7621 | 0.7646 | 0.8947 | 0.5903 | 0.7113 | 0.8681 | 0.8681 | 0.8681 | 0.8621 | 0.6944 | 0.7692 | 0.5659 | 0.7632 | 0.6499 | 0.7651 | 0.8944 | 0.8247 | | 0.0995 | 88.0 | 32032 | 1.0813 | 0.7658 | 0.8151 | 0.7675 | 0.7694 | 0.5738 | 0.9722 | 0.7216 | 0.9464 | 0.7361 | 0.8281 | 0.8377 | 0.8958 | 0.8658 | 0.7177 | 0.5855 | 0.6449 | 1.0 | 0.6479 | 0.7863 | | 1.8463 | 89.0 | 32396 | 3.0008 | 0.5716 | 0.7663 | 0.5672 | 0.5426 | 0.9643 | 0.375 | 0.54 | 0.9359 | 0.5069 | 0.6577 | 0.5895 | 0.9375 | 0.7239 | 0.3871 | 0.8684 | 0.5355 | 0.9545 | 0.1479 | 0.2561 | | 0.0148 | 90.0 | 32760 | 0.5001 | 0.8623 | 0.8811 | 0.8622 | 0.8675 | 0.9021 | 0.8958 | 0.8990 | 0.9597 | 0.8264 | 0.8881 | 0.8806 | 0.8194 | 0.8489 | 0.6786 | 0.875 | 0.7644 | 0.9845 | 0.8944 | 0.9373 | | 0.2192 | 91.0 | 33124 | 0.6181 | 0.8595 | 0.8669 | 0.8606 | 0.8587 | 0.9630 | 0.7222 | 0.8254 | 0.9060 | 0.9375 | 0.9215 | 0.8639 | 0.8819 | 0.8729 | 0.7947 | 0.7895 | 0.7921 | 0.8070 | 0.9718 | 0.8818 | | 2.2473 | 92.0 | 33488 | 0.6960 | 0.8306 | 0.8506 | 0.8301 | 0.8323 | 0.9344 | 0.7917 | 0.8571 | 0.9433 | 0.9236 | 0.9333 | 0.7333 | 0.9167 | 0.8148 | 0.7095 | 0.8355 | 0.7674 | 0.9327 | 0.6831 | 0.7886 | | 0.0272 | 93.0 | 33852 | 1.6524 | 0.6584 | 0.7209 | 0.6600 | 0.6521 | 0.5794 | 0.8611 | 0.6927 | 0.6182 | 0.9444 | 0.7473 | 0.9012 | 0.5069 | 0.6489 | 
0.5328 | 0.4803 | 0.5052 | 0.9730 | 0.5070 | 0.6667 | | 0.0859 | 94.0 | 34216 | 1.7388 | 0.6529 | 0.8228 | 0.6502 | 0.6715 | 0.9375 | 0.625 | 0.75 | 0.9818 | 0.375 | 0.5427 | 0.8322 | 0.8264 | 0.8293 | 0.375 | 0.8684 | 0.5238 | 0.9875 | 0.5563 | 0.7117 | | 0.6228 | 95.0 | 34580 | 1.6017 | 0.6570 | 0.8381 | 0.6528 | 0.6709 | 0.9252 | 0.6875 | 0.7888 | 1.0 | 0.7917 | 0.8837 | 0.8795 | 0.5069 | 0.6432 | 0.3856 | 0.9539 | 0.5492 | 1.0 | 0.3239 | 0.4894 | | 0.3703 | 96.0 | 34944 | 1.7469 | 0.6970 | 0.7750 | 0.6965 | 0.6883 | 0.5455 | 0.9583 | 0.6952 | 0.9839 | 0.4236 | 0.5922 | 0.9231 | 0.5 | 0.6486 | 0.6464 | 0.7697 | 0.7027 | 0.7763 | 0.8310 | 0.8027 | | 2.6558 | 97.0 | 35308 | 1.6194 | 0.7025 | 0.8071 | 0.7008 | 0.7179 | 0.9070 | 0.5417 | 0.6783 | 0.9062 | 0.8056 | 0.8529 | 0.7945 | 0.8056 | 0.8 | 0.4276 | 0.8158 | 0.5611 | 1.0 | 0.5352 | 0.6972 | | 1.1283 | 98.0 | 35672 | 1.3466 | 0.7287 | 0.8044 | 0.7269 | 0.7223 | 0.9579 | 0.6319 | 0.7615 | 0.7657 | 0.9306 | 0.8401 | 0.7901 | 0.8889 | 0.8366 | 0.5083 | 0.8026 | 0.6224 | 1.0 | 0.3803 | 0.5510 | | 2.2577 | 99.0 | 36036 | 0.7778 | 0.7920 | 0.8171 | 0.7929 | 0.7984 | 0.7143 | 0.9028 | 0.7975 | 0.928 | 0.8056 | 0.8625 | 0.8359 | 0.7431 | 0.7868 | 0.6158 | 0.7171 | 0.6626 | 0.9912 | 0.7958 | 0.8828 | | 0.8955 | 100.0 | 36400 | 1.7110 | 0.7369 | 0.8185 | 0.7359 | 0.7259 | 0.98 | 0.3403 | 0.5052 | 0.9663 | 0.5972 | 0.7382 | 0.7901 | 0.9931 | 0.88 | 0.5116 | 0.8684 | 0.6439 | 0.8446 | 0.8803 | 0.8621 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
[ "durariadorio_64x64", "mole_64x64", "quebrado_64x64", "riadorio_64x64", "riofechado_64x64" ]
Master-Rapha7/mobilenetv2-typecoffee-4
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # mobilenetv2-typecoffee-4 This model is a fine-tuned version of [google/mobilenet_v2_1.0_224](https://huggingface.co/google/mobilenet_v2_1.0_224) on the Master-Rapha7/TypeCoffee_64x64 dataset. It achieves the following results on the evaluation set: - Loss: 0.5280 - Accuracy: 0.8581 - Precision: 0.8602 - Recall: 0.8599 - F1: 0.8583 - Precision Durariadorio 64x64: 0.7963 - Recall Durariadorio 64x64: 0.8958 - F1 Durariadorio 64x64: 0.8431 - Precision Mole 64x64: 0.9385 - Recall Mole 64x64: 0.8472 - F1 Mole 64x64: 0.8905 - Precision Quebrado 64x64: 0.8671 - Recall Quebrado 64x64: 0.8611 - F1 Quebrado 64x64: 0.8641 - Precision Riadorio 64x64: 0.8029 - Recall Riadorio 64x64: 0.7237 - F1 Riadorio 64x64: 0.7612 - Precision Riofechado 64x64: 0.8961 - Recall Riofechado 64x64: 0.9718 - F1 Riofechado 64x64: 0.9324 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 150.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | Recall | F1 | Precision Durariadorio 64x64 | Recall Durariadorio 64x64 | F1 Durariadorio 64x64 | Precision Mole 64x64 | Recall Mole 64x64 | F1 Mole 64x64 | Precision Quebrado 64x64 | Recall Quebrado 64x64 | F1 Quebrado 64x64 | Precision Riadorio 64x64 | Recall Riadorio 64x64 | F1 Riadorio 64x64 | Precision Riofechado 64x64 | Recall Riofechado 64x64 | F1 Riofechado 64x64 | |:-------------:|:-----:|:-----:|:---------------:|:--------:|:---------:|:------:|:------:|:----------------------------:|:-------------------------:|:---------------------:|:--------------------:|:-----------------:|:-------------:|:------------------------:|:---------------------:|:-----------------:|:------------------------:|:---------------------:|:-----------------:|:--------------------------:|:-----------------------:|:-------------------:| | 1.1481 | 1.0 | 364 | 1.3938 | 0.3609 | 0.4758 | 0.3633 | 0.3250 | 0.3093 | 0.2083 | 0.2490 | 0.2604 | 0.8264 | 0.3960 | 0.7391 | 0.5903 | 0.6564 | 0.3429 | 0.0789 | 0.1283 | 0.7273 | 0.1127 | 0.1951 | | 1.0723 | 2.0 | 728 | 1.1584 | 0.5399 | 0.6226 | 0.5421 | 0.5264 | 0.4620 | 0.5486 | 0.5016 | 0.6638 | 0.5347 | 0.5923 | 0.9070 | 0.2708 | 0.4171 | 0.6321 | 0.4408 | 0.5194 | 0.4483 | 0.9155 | 0.6019 | | 0.7898 | 3.0 | 1092 | 0.6299 | 0.7741 | 0.8007 | 0.7769 | 0.7725 | 0.5853 | 0.8819 | 0.7036 | 0.88 | 0.6111 | 0.7213 | 0.8314 | 0.9931 | 0.9051 | 0.7615 | 0.5461 | 0.6360 | 0.9453 | 0.8521 | 0.8963 | | 0.7548 | 4.0 | 1456 | 0.5709 | 0.8003 | 0.8014 | 0.8040 | 0.7931 | 0.7044 | 0.7778 | 0.7393 | 0.7815 | 0.8194 | 0.8 | 0.8402 | 0.9861 | 0.9073 | 0.8172 | 0.5 | 0.6204 | 0.8636 | 0.9366 | 0.8986 | | 0.9531 | 5.0 | 1820 | 2.1597 | 0.4559 | 0.5832 | 0.4511 | 0.4305 | 0.5 | 0.3819 | 0.4331 | 0.4241 | 0.5625 | 0.4836 | 0.9667 | 0.2014 | 0.3333 | 0.3810 | 0.8421 | 0.5246 | 0.6441 | 0.2676 | 0.3781 | | 0.5557 | 6.0 | 2184 | 0.7730 | 0.7052 | 0.7442 | 0.7074 | 0.6834 | 0.9130 | 0.2917 | 
0.4421 | 0.5650 | 0.875 | 0.6866 | 0.7598 | 0.9444 | 0.8421 | 0.6829 | 0.5526 | 0.6109 | 0.8 | 0.8732 | 0.8350 | | 0.6538 | 7.0 | 2548 | 1.9892 | 0.4408 | 0.6329 | 0.4432 | 0.3945 | 0.3541 | 0.9097 | 0.5097 | 0.4204 | 0.6597 | 0.5135 | 1.0 | 0.0972 | 0.1772 | 0.7143 | 0.1974 | 0.3093 | 0.6757 | 0.3521 | 0.4630 | | 0.6547 | 8.0 | 2912 | 0.8791 | 0.7534 | 0.7689 | 0.7552 | 0.7392 | 0.8116 | 0.3889 | 0.5258 | 0.8686 | 0.8264 | 0.8470 | 0.8103 | 0.9792 | 0.8868 | 0.7206 | 0.6447 | 0.6806 | 0.6333 | 0.9366 | 0.7557 | | 0.6081 | 9.0 | 3276 | 0.9145 | 0.7204 | 0.7610 | 0.7211 | 0.7197 | 0.8 | 0.5 | 0.6154 | 0.9160 | 0.7569 | 0.8289 | 0.5902 | 1.0 | 0.7423 | 0.6149 | 0.6513 | 0.6326 | 0.8839 | 0.6972 | 0.7795 | | 0.4939 | 10.0 | 3640 | 1.0787 | 0.6763 | 0.7720 | 0.6816 | 0.6545 | 0.9344 | 0.3958 | 0.5561 | 0.4764 | 0.9792 | 0.6409 | 0.8889 | 0.7778 | 0.8296 | 0.84 | 0.2763 | 0.4158 | 0.7202 | 0.9789 | 0.8299 | | 0.5342 | 11.0 | 4004 | 1.0320 | 0.7052 | 0.7919 | 0.7079 | 0.6955 | 0.4844 | 0.9722 | 0.6467 | 0.9423 | 0.3403 | 0.5 | 0.8777 | 0.8472 | 0.8622 | 0.8652 | 0.5066 | 0.6390 | 0.7898 | 0.8732 | 0.8294 | | 0.4245 | 12.0 | 4368 | 1.6048 | 0.6391 | 0.7139 | 0.6422 | 0.6172 | 0.8125 | 0.2708 | 0.4062 | 0.8763 | 0.5903 | 0.7054 | 0.675 | 0.9375 | 0.7849 | 0.7283 | 0.4408 | 0.5492 | 0.4775 | 0.9718 | 0.6404 | | 0.47 | 13.0 | 4732 | 1.3465 | 0.6570 | 0.7300 | 0.6616 | 0.6355 | 0.5789 | 0.7639 | 0.6587 | 0.9636 | 0.3681 | 0.5327 | 0.8165 | 0.8958 | 0.8543 | 0.7656 | 0.3224 | 0.4537 | 0.5251 | 0.9577 | 0.6783 | | 0.3446 | 14.0 | 5096 | 1.4362 | 0.6116 | 0.7217 | 0.6087 | 0.6093 | 0.8315 | 0.5139 | 0.6352 | 0.8333 | 0.7292 | 0.7778 | 0.9574 | 0.3125 | 0.4712 | 0.4328 | 0.8684 | 0.5777 | 0.5535 | 0.6197 | 0.5847 | | 0.5964 | 15.0 | 5460 | 0.7144 | 0.7617 | 0.7975 | 0.7626 | 0.7612 | 0.6280 | 0.9028 | 0.7407 | 0.7065 | 0.9028 | 0.7927 | 0.8810 | 0.7708 | 0.8222 | 0.7951 | 0.6382 | 0.7080 | 0.9770 | 0.5986 | 0.7424 | | 0.4593 | 16.0 | 5824 | 0.8750 | 0.7672 | 0.7855 | 0.7698 | 0.7641 | 0.8584 | 0.6736 | 0.7549 | 0.7665 | 0.8889 | 0.8232 | 0.8168 | 0.7431 | 0.7782 | 0.8318 | 0.5855 | 0.6873 | 0.6538 | 0.9577 | 0.7771 | | 0.4267 | 17.0 | 6188 | 0.7799 | 0.7534 | 0.8171 | 0.7525 | 0.7514 | 0.9275 | 0.4444 | 0.6009 | 0.9902 | 0.7014 | 0.8211 | 0.7114 | 0.9931 | 0.8290 | 0.5541 | 0.8421 | 0.6684 | 0.9024 | 0.7817 | 0.8377 | | 0.5223 | 18.0 | 6552 | 0.7103 | 0.7810 | 0.7983 | 0.7814 | 0.7848 | 0.7671 | 0.7778 | 0.7724 | 0.8704 | 0.6528 | 0.7460 | 0.8741 | 0.8681 | 0.8711 | 0.6042 | 0.7632 | 0.6744 | 0.8759 | 0.8451 | 0.8602 | | 0.4332 | 19.0 | 6916 | 0.8565 | 0.7879 | 0.7985 | 0.7891 | 0.7781 | 0.8947 | 0.4722 | 0.6182 | 0.7593 | 0.8542 | 0.8039 | 0.8528 | 0.9653 | 0.9055 | 0.7032 | 0.7171 | 0.7101 | 0.7824 | 0.9366 | 0.8526 | | 0.5014 | 20.0 | 7280 | 0.8282 | 0.7810 | 0.8140 | 0.7822 | 0.7774 | 0.6895 | 0.9097 | 0.7844 | 1.0 | 0.5625 | 0.72 | 0.8852 | 0.75 | 0.8120 | 0.7842 | 0.7171 | 0.7491 | 0.7113 | 0.9718 | 0.8214 | | 0.3175 | 21.0 | 7644 | 2.0891 | 0.6019 | 0.7480 | 0.6007 | 0.5468 | 0.9048 | 0.1319 | 0.2303 | 0.9697 | 0.2222 | 0.3616 | 0.5275 | 1.0 | 0.6906 | 0.4595 | 0.7829 | 0.5791 | 0.8786 | 0.8662 | 0.8723 | | 0.386 | 22.0 | 8008 | 2.6377 | 0.4725 | 0.7236 | 0.4744 | 0.4505 | 0.8889 | 0.1667 | 0.2807 | 0.8087 | 0.6458 | 0.7181 | 0.88 | 0.1528 | 0.2604 | 0.7442 | 0.4211 | 0.5378 | 0.2960 | 0.9859 | 0.4553 | | 0.3569 | 23.0 | 8372 | 1.2083 | 0.7438 | 0.8051 | 0.7432 | 0.7255 | 0.9487 | 0.2569 | 0.4044 | 0.75 | 0.8542 | 0.7987 | 0.8712 | 0.9861 | 0.9251 | 0.5210 | 0.8158 | 0.6359 | 0.9344 | 0.8028 | 
0.8636 | | 0.4323 | 24.0 | 8736 | 0.6160 | 0.8347 | 0.8466 | 0.8363 | 0.8360 | 0.9153 | 0.75 | 0.8244 | 0.7297 | 0.9375 | 0.8207 | 0.8654 | 0.9375 | 0.9 | 0.7465 | 0.6974 | 0.7211 | 0.976 | 0.8592 | 0.9139 | | 0.4918 | 25.0 | 9100 | 2.0400 | 0.6763 | 0.7469 | 0.6786 | 0.6300 | 0.9444 | 0.1181 | 0.2099 | 0.6755 | 0.8819 | 0.7651 | 0.8377 | 0.8958 | 0.8658 | 0.7455 | 0.5395 | 0.6260 | 0.5312 | 0.9577 | 0.6834 | | 0.4046 | 26.0 | 9464 | 1.0405 | 0.7493 | 0.8171 | 0.7485 | 0.7405 | 0.7727 | 0.7083 | 0.7391 | 1.0 | 0.3333 | 0.5 | 0.8571 | 0.9167 | 0.8859 | 0.5198 | 0.8618 | 0.6485 | 0.9357 | 0.9225 | 0.9291 | | 0.3128 | 27.0 | 9828 | 1.1911 | 0.7410 | 0.7797 | 0.7414 | 0.7333 | 0.9180 | 0.3889 | 0.5463 | 0.8074 | 0.7569 | 0.7814 | 0.8865 | 0.8681 | 0.8772 | 0.5556 | 0.7566 | 0.6407 | 0.7308 | 0.9366 | 0.8210 | | 0.2976 | 28.0 | 10192 | 1.6928 | 0.6694 | 0.7389 | 0.6709 | 0.6448 | 0.9655 | 0.1944 | 0.3237 | 0.5631 | 0.8056 | 0.6629 | 0.7527 | 0.9722 | 0.8485 | 0.4780 | 0.5724 | 0.5210 | 0.9350 | 0.8099 | 0.8679 | | 0.3439 | 29.0 | 10556 | 1.2054 | 0.7328 | 0.7920 | 0.7337 | 0.7183 | 0.9583 | 0.3194 | 0.4792 | 0.8497 | 0.9028 | 0.8754 | 0.88 | 0.7639 | 0.8178 | 0.72 | 0.7105 | 0.7152 | 0.552 | 0.9718 | 0.7041 | | 0.2825 | 30.0 | 10920 | 0.7000 | 0.8209 | 0.8385 | 0.8220 | 0.8169 | 0.9659 | 0.5903 | 0.7328 | 0.7514 | 0.9236 | 0.8287 | 0.7901 | 0.9931 | 0.88 | 0.7671 | 0.7368 | 0.7517 | 0.9179 | 0.8662 | 0.8913 | | 0.368 | 31.0 | 11284 | 1.3418 | 0.6791 | 0.7540 | 0.6799 | 0.6625 | 0.5847 | 0.7431 | 0.6544 | 1.0 | 0.2847 | 0.4432 | 0.9279 | 0.7153 | 0.8078 | 0.6732 | 0.6776 | 0.6754 | 0.5840 | 0.9789 | 0.7316 | | 0.3205 | 32.0 | 11648 | 0.6867 | 0.8099 | 0.8408 | 0.8110 | 0.8065 | 0.6377 | 0.9167 | 0.7521 | 0.8590 | 0.9306 | 0.8933 | 0.8528 | 0.9653 | 0.9055 | 0.8547 | 0.6579 | 0.7435 | 1.0 | 0.5845 | 0.7378 | | 0.308 | 33.0 | 12012 | 2.3068 | 0.6488 | 0.7220 | 0.6487 | 0.6079 | 0.7053 | 0.4653 | 0.5607 | 1.0 | 0.1597 | 0.2754 | 0.7391 | 0.9444 | 0.8293 | 0.4955 | 0.7303 | 0.5904 | 0.67 | 0.9437 | 0.7836 | | 0.4305 | 34.0 | 12376 | 0.5981 | 0.8209 | 0.8234 | 0.8246 | 0.8158 | 0.8333 | 0.8681 | 0.8503 | 0.7326 | 0.9514 | 0.8278 | 0.8639 | 0.8819 | 0.8729 | 0.7723 | 0.5132 | 0.6166 | 0.9149 | 0.9085 | 0.9117 | | 0.326 | 35.0 | 12740 | 2.0705 | 0.5937 | 0.7128 | 0.5948 | 0.5693 | 0.4230 | 0.8958 | 0.5746 | 0.6044 | 0.9444 | 0.7371 | 0.8378 | 0.2153 | 0.3425 | 0.8434 | 0.4605 | 0.5957 | 0.8553 | 0.4577 | 0.5963 | | 0.247 | 36.0 | 13104 | 1.2846 | 0.7066 | 0.7747 | 0.7047 | 0.7113 | 0.7516 | 0.8194 | 0.7841 | 0.8188 | 0.7847 | 0.8014 | 0.8721 | 0.5208 | 0.6522 | 0.4904 | 0.8421 | 0.6199 | 0.9405 | 0.5563 | 0.6991 | | 0.2769 | 37.0 | 13468 | 1.9667 | 0.6143 | 0.6984 | 0.6169 | 0.6059 | 0.7358 | 0.5417 | 0.624 | 0.4434 | 0.9792 | 0.6104 | 0.8413 | 0.3681 | 0.5121 | 0.7619 | 0.4211 | 0.5424 | 0.7097 | 0.7746 | 0.7407 | | 0.2787 | 38.0 | 13832 | 0.6121 | 0.8361 | 0.8412 | 0.8377 | 0.8361 | 0.7771 | 0.8472 | 0.8106 | 0.9237 | 0.8403 | 0.88 | 0.8671 | 0.8611 | 0.8641 | 0.8571 | 0.7105 | 0.7770 | 0.7811 | 0.9296 | 0.8489 | | 0.2929 | 39.0 | 14196 | 0.9656 | 0.7245 | 0.8110 | 0.7252 | 0.7242 | 0.9333 | 0.3889 | 0.5490 | 0.9516 | 0.8194 | 0.8806 | 0.9167 | 0.6875 | 0.7857 | 0.7603 | 0.7303 | 0.7450 | 0.4931 | 1.0 | 0.6605 | | 0.2187 | 40.0 | 14560 | 1.1442 | 0.7190 | 0.7916 | 0.7177 | 0.7162 | 0.8142 | 0.6389 | 0.7160 | 1.0 | 0.4375 | 0.6087 | 0.6814 | 0.9653 | 0.7989 | 0.5359 | 0.8355 | 0.6530 | 0.9266 | 0.7113 | 0.8048 | | 0.333 | 41.0 | 14924 | 1.3527 | 0.6639 | 0.7567 | 0.6622 | 0.6582 | 0.7623 | 0.6458 | 
0.6992 | 1.0 | 0.375 | 0.5455 | 0.8876 | 0.5486 | 0.6781 | 0.4944 | 0.8684 | 0.6301 | 0.6392 | 0.8732 | 0.7381 |
| 0.2879 | 42.0 | 15288 | 0.5472 | 0.8581 | 0.8593 | 0.8596 | 0.8591 | 0.8944 | 0.8819 | 0.8881 | 0.9353 | 0.9028 | 0.9187 | 0.8414 | 0.8472 | 0.8443 | 0.7635 | 0.7434 | 0.7533 | 0.8618 | 0.9225 | 0.8912 |
| 0.3225 | 43.0 | 15652 | 3.7992 | 0.4229 | 0.6611 | 0.4168 | 0.4050 | 0.4272 | 0.3056 | 0.3563 | 0.8111 | 0.5069 | 0.6239 | 0.8684 | 0.2292 | 0.3626 | 0.2896 | 0.9013 | 0.4384 | 0.9091 | 0.1408 | 0.2439 |
| 0.3459 | 44.0 | 16016 | 3.1857 | 0.6350 | 0.7444 | 0.6340 | 0.6105 | 0.9643 | 0.1875 | 0.3140 | 0.8455 | 0.6458 | 0.7323 | 0.8660 | 0.5833 | 0.6971 | 0.5169 | 0.8026 | 0.6289 | 0.5294 | 0.9507 | 0.6801 |
| 0.4206 | 45.0 | 16380 | 1.3051 | 0.7218 | 0.7783 | 0.7231 | 0.7049 | 0.9783 | 0.3125 | 0.4737 | 0.86 | 0.8958 | 0.8776 | 0.7551 | 0.7708 | 0.7629 | 0.7372 | 0.6645 | 0.6990 | 0.5610 | 0.9718 | 0.7113 |
| 0.3223 | 46.0 | 16744 | 1.1472 | 0.7355 | 0.7930 | 0.7335 | 0.7201 | 0.7342 | 0.8056 | 0.7682 | 0.9209 | 0.8889 | 0.9046 | 0.7561 | 0.8611 | 0.8052 | 0.5541 | 0.8092 | 0.6578 | 1.0 | 0.3028 | 0.4649 |
| 0.3565 | 47.0 | 17108 | 0.9661 | 0.7782 | 0.8263 | 0.7774 | 0.7787 | 0.9351 | 0.5 | 0.6516 | 0.76 | 0.9236 | 0.8339 | 0.8690 | 0.875 | 0.8720 | 0.5766 | 0.8421 | 0.6845 | 0.9907 | 0.7465 | 0.8514 |
| 0.1936 | 48.0 | 17472 | 0.6488 | 0.8306 | 0.8447 | 0.8316 | 0.8283 | 0.7616 | 0.9097 | 0.8291 | 0.9792 | 0.6528 | 0.7833 | 0.7919 | 0.9514 | 0.8644 | 0.8394 | 0.7566 | 0.7958 | 0.8514 | 0.8873 | 0.8690 |
| 0.3125 | 49.0 | 17836 | 4.5120 | 0.4738 | 0.7968 | 0.4693 | 0.4270 | 1.0 | 0.0556 | 0.1053 | 1.0 | 0.0694 | 0.1299 | 0.84 | 0.5833 | 0.6885 | 0.2907 | 0.9408 | 0.4441 | 0.8534 | 0.6972 | 0.7674 |
| 0.286 | 50.0 | 18200 | 1.1066 | 0.7631 | 0.7712 | 0.7643 | 0.7537 | 0.7952 | 0.4583 | 0.5815 | 0.8872 | 0.8194 | 0.8520 | 0.7254 | 0.9722 | 0.8309 | 0.6980 | 0.6842 | 0.6910 | 0.75 | 0.8873 | 0.8129 |
| 0.3194 | 51.0 | 18564 | 1.5146 | 0.6543 | 0.7627 | 0.6513 | 0.6395 | 0.9825 | 0.3889 | 0.5572 | 0.7937 | 0.8819 | 0.8355 | 0.6270 | 0.8056 | 0.7052 | 0.4689 | 0.8421 | 0.6024 | 0.9412 | 0.3380 | 0.4974 |
| 0.2739 | 52.0 | 18928 | 1.0688 | 0.7452 | 0.8069 | 0.7468 | 0.7521 | 0.8534 | 0.6875 | 0.7615 | 0.9512 | 0.8125 | 0.8764 | 0.8866 | 0.5972 | 0.7137 | 0.8264 | 0.6579 | 0.7326 | 0.5167 | 0.9789 | 0.6764 |
| 0.2115 | 53.0 | 19292 | 0.9830 | 0.7837 | 0.8130 | 0.7836 | 0.7858 | 0.7628 | 0.8264 | 0.7933 | 0.9780 | 0.6181 | 0.7574 | 0.8780 | 0.75 | 0.8090 | 0.6269 | 0.8289 | 0.7139 | 0.8194 | 0.8944 | 0.8552 |
| 0.1773 | 54.0 | 19656 | 1.1812 | 0.7287 | 0.8288 | 0.7274 | 0.7351 | 0.9091 | 0.625 | 0.7407 | 1.0 | 0.4236 | 0.5951 | 0.8730 | 0.7639 | 0.8148 | 0.4642 | 0.8947 | 0.6112 | 0.8980 | 0.9296 | 0.9135 |
| 0.4166 | 55.0 | 20020 | 0.7748 | 0.7851 | 0.8210 | 0.7854 | 0.7897 | 0.7059 | 0.8333 | 0.7643 | 0.9770 | 0.5903 | 0.7359 | 0.85 | 0.8264 | 0.8380 | 0.6030 | 0.7895 | 0.6838 | 0.9692 | 0.8873 | 0.9265 |
| 0.2494 | 56.0 | 20384 | 0.8547 | 0.7782 | 0.8037 | 0.7787 | 0.7789 | 0.7917 | 0.7917 | 0.7917 | 0.9787 | 0.6389 | 0.7731 | 0.8512 | 0.7153 | 0.7774 | 0.6398 | 0.7829 | 0.7041 | 0.7569 | 0.9648 | 0.8483 |
| 0.2968 | 57.0 | 20748 | 0.7095 | 0.7810 | 0.8145 | 0.7859 | 0.7709 | 0.6422 | 0.9722 | 0.7735 | 0.9908 | 0.75 | 0.8538 | 0.7037 | 0.9236 | 0.7988 | 0.8143 | 0.375 | 0.5135 | 0.9214 | 0.9085 | 0.9149 |
| 0.2671 | 58.0 | 21112 | 1.2633 | 0.7466 | 0.8275 | 0.7452 | 0.7637 | 0.9263 | 0.6111 | 0.7364 | 0.9661 | 0.7917 | 0.8702 | 0.8333 | 0.7292 | 0.7778 | 0.4746 | 0.8618 | 0.6121 | 0.9369 | 0.7324 | 0.8221 |
| 0.1583 | 59.0 | 21476 | 0.9247 | 0.7879 | 0.8116 | 0.7890 | 0.7865 | 0.7891 | 0.7014 | 0.7426 | 0.9892 | 0.6389 | 0.7764 | 0.8497 | 0.9028 | 0.8754 | 0.7603 | 0.7303 | 0.7450 | 0.6699 | 0.9718 | 0.7931 |
| 0.5213 | 60.0 | 21840 | 2.4814 | 0.6088 | 0.7614 | 0.6051 | 0.6010 | 0.5336 | 0.8264 | 0.6485 | 0.9647 | 0.5694 | 0.7162 | 0.8675 | 0.5 | 0.6344 | 0.4411 | 0.8618 | 0.5835 | 1.0 | 0.2676 | 0.4222 |
| 0.3362 | 61.0 | 22204 | 0.9654 | 0.7810 | 0.8263 | 0.7810 | 0.7841 | 0.9438 | 0.5833 | 0.7210 | 0.9903 | 0.7083 | 0.8259 | 0.6717 | 0.9236 | 0.7778 | 0.6193 | 0.8026 | 0.6991 | 0.9065 | 0.8873 | 0.8968 |
| 0.3507 | 62.0 | 22568 | 2.6933 | 0.6074 | 0.8067 | 0.6041 | 0.6323 | 0.9677 | 0.4167 | 0.5825 | 0.9577 | 0.4722 | 0.6326 | 0.8496 | 0.6667 | 0.7471 | 0.3495 | 0.9013 | 0.5037 | 0.9091 | 0.5634 | 0.6957 |
| 0.2981 | 63.0 | 22932 | 0.5280 | 0.8581 | 0.8602 | 0.8599 | 0.8583 | 0.7963 | 0.8958 | 0.8431 | 0.9385 | 0.8472 | 0.8905 | 0.8671 | 0.8611 | 0.8641 | 0.8029 | 0.7237 | 0.7612 | 0.8961 | 0.9718 | 0.9324 |
| 0.1905 | 64.0 | 23296 | 1.9495 | 0.6212 | 0.7104 | 0.6198 | 0.6317 | 0.8986 | 0.4306 | 0.5822 | 0.7397 | 0.75 | 0.7448 | 0.75 | 0.5208 | 0.6148 | 0.3925 | 0.7566 | 0.5169 | 0.7712 | 0.6408 | 0.7 |
| 0.3515 | 65.0 | 23660 | 1.3202 | 0.7066 | 0.7771 | 0.7053 | 0.7020 | 0.8971 | 0.4236 | 0.5755 | 0.5947 | 0.9375 | 0.7278 | 0.8321 | 0.7917 | 0.8114 | 0.5735 | 0.7961 | 0.6667 | 0.9880 | 0.5775 | 0.7289 |
| 0.2444 | 66.0 | 24024 | 9.7033 | 0.3595 | 0.5270 | 0.3560 | 0.2437 | 0.0 | 0.0 | 0.0 | 1.0 | 0.0069 | 0.0138 | 0.9524 | 0.1389 | 0.2424 | 0.3538 | 0.7961 | 0.4899 | 0.3287 | 0.8380 | 0.4722 |
| 0.284 | 67.0 | 24388 | 0.5948 | 0.7810 | 0.8167 | 0.7803 | 0.7881 | 0.8158 | 0.8611 | 0.8378 | 0.9727 | 0.7431 | 0.8425 | 0.7540 | 0.6597 | 0.7037 | 0.5837 | 0.8487 | 0.6917 | 0.9573 | 0.7887 | 0.8649 |
| 0.2848 | 68.0 | 24752 | 1.3576 | 0.7727 | 0.8217 | 0.7738 | 0.7511 | 1.0 | 0.2847 | 0.4432 | 0.6635 | 0.9722 | 0.7887 | 0.8718 | 0.9444 | 0.9067 | 0.6158 | 0.7171 | 0.6626 | 0.9574 | 0.9507 | 0.9541 |
| 0.2382 | 69.0 | 25116 | 0.5605 | 0.8485 | 0.8468 | 0.8510 | 0.8469 | 0.8630 | 0.875 | 0.8690 | 0.8344 | 0.9444 | 0.8860 | 0.8503 | 0.8681 | 0.8591 | 0.776 | 0.6382 | 0.7004 | 0.9103 | 0.9296 | 0.9199 |
| 0.3392 | 70.0 | 25480 | 3.4095 | 0.6074 | 0.7559 | 0.6055 | 0.5693 | 0.8056 | 0.2014 | 0.3222 | 1.0 | 0.1875 | 0.3158 | 0.7836 | 0.9306 | 0.8508 | 0.3772 | 0.8487 | 0.5223 | 0.8133 | 0.8592 | 0.8356 |
| 0.2065 | 71.0 | 25844 | 0.6199 | 0.8264 | 0.8513 | 0.8261 | 0.8323 | 0.8247 | 0.8819 | 0.8523 | 0.9767 | 0.875 | 0.9231 | 0.8333 | 0.7639 | 0.7971 | 0.64 | 0.8421 | 0.7273 | 0.9820 | 0.7676 | 0.8617 |
| 0.2027 | 72.0 | 26208 | 2.3450 | 0.6116 | 0.7172 | 0.6119 | 0.5953 | 0.8125 | 0.4514 | 0.5804 | 0.7862 | 0.7917 | 0.7889 | 0.9730 | 0.25 | 0.3978 | 0.5650 | 0.6579 | 0.6079 | 0.4495 | 0.9085 | 0.6014 |
| 0.2532 | 73.0 | 26572 | 1.3719 | 0.7452 | 0.8450 | 0.7441 | 0.7545 | 0.9651 | 0.5764 | 0.7217 | 0.9857 | 0.4792 | 0.6449 | 0.8844 | 0.9028 | 0.8935 | 0.4634 | 0.875 | 0.6059 | 0.9265 | 0.8873 | 0.9065 |
| 0.1423 | 74.0 | 26936 | 1.2969 | 0.7342 | 0.8113 | 0.7327 | 0.7463 | 0.8810 | 0.5139 | 0.6491 | 0.9 | 0.8125 | 0.8540 | 0.8519 | 0.7986 | 0.8244 | 0.4727 | 0.8553 | 0.6089 | 0.9510 | 0.6831 | 0.7951 |
| 0.1761 | 75.0 | 27300 | 0.5368 | 0.8623 | 0.8682 | 0.8634 | 0.8632 | 0.9492 | 0.7778 | 0.8550 | 0.9 | 0.9375 | 0.9184 | 0.8 | 0.9167 | 0.8544 | 0.7697 | 0.7697 | 0.7697 | 0.9220 | 0.9155 | 0.9187 |
| 0.3582 | 76.0 | 27664 | 1.1069 | 0.7796 | 0.8323 | 0.7791 | 0.7814 | 0.8812 | 0.6181 | 0.7265 | 1.0 | 0.5625 | 0.72 | 0.8649 | 0.8889 | 0.8767 | 0.5546 | 0.8684 | 0.6769 | 0.8608 | 0.9577 | 0.9067 |
| 0.1635 | 77.0 | 28028 | 0.9810 | 0.7934 | 0.8106 | 0.7980 | 0.7840 | 0.7647 | 0.8125 | 0.7879 | 0.6605 | 0.9861 | 0.7911 | 0.8516 | 0.9167 | 0.8829 | 0.7922 | 0.4013 | 0.5328 | 0.9841 | 0.8732 | 0.9254 |
| 0.2325 | 78.0 | 28392 | 1.0597 | 0.7879 | 0.8341 | 0.7873 | 0.7961 | 0.9394 | 0.6458 | 0.7654 | 0.9652 | 0.7708 | 0.8571 | 0.8605 | 0.7708 | 0.8132 | 0.5598 | 0.8618 | 0.6788 | 0.8456 | 0.8873 | 0.8660 |
| 0.2405 | 79.0 | 28756 | 2.0543 | 0.6653 | 0.7992 | 0.6629 | 0.6757 | 0.9273 | 0.3542 | 0.5126 | 0.8521 | 0.8403 | 0.8462 | 0.8443 | 0.7153 | 0.7744 | 0.3976 | 0.8553 | 0.5428 | 0.975 | 0.5493 | 0.7027 |
| 0.4338 | 80.0 | 29120 | 1.6266 | 0.7121 | 0.7562 | 0.7119 | 0.6903 | 0.7536 | 0.3611 | 0.4883 | 1.0 | 0.4653 | 0.6351 | 0.7448 | 0.9931 | 0.8512 | 0.5941 | 0.7895 | 0.6780 | 0.6888 | 0.9507 | 0.7988 |
| 0.189 | 81.0 | 29484 | 0.9406 | 0.7879 | 0.8029 | 0.7923 | 0.7809 | 0.6347 | 0.9653 | 0.7658 | 0.8667 | 0.8125 | 0.8387 | 0.8571 | 0.8333 | 0.8451 | 0.7647 | 0.4276 | 0.5485 | 0.8912 | 0.9225 | 0.9066 |
| 0.275 | 82.0 | 29848 | 0.8044 | 0.7810 | 0.8149 | 0.7814 | 0.7838 | 0.9255 | 0.6042 | 0.7311 | 0.6650 | 0.9514 | 0.7829 | 0.8473 | 0.7708 | 0.8073 | 0.6534 | 0.7566 | 0.7012 | 0.9832 | 0.8239 | 0.8966 |
| 0.2297 | 83.0 | 30212 | 5.9964 | 0.5289 | 0.6712 | 0.5279 | 0.4845 | 0.9333 | 0.0972 | 0.1761 | 0.8364 | 0.3194 | 0.4623 | 0.5954 | 0.5417 | 0.5673 | 0.5904 | 0.7303 | 0.6529 | 0.4006 | 0.9507 | 0.5637 |
| 0.1263 | 84.0 | 30576 | 1.2614 | 0.7493 | 0.7718 | 0.7511 | 0.7400 | 0.8784 | 0.4514 | 0.5963 | 0.7778 | 0.9236 | 0.8444 | 0.8571 | 0.7917 | 0.8231 | 0.7029 | 0.6382 | 0.6690 | 0.6429 | 0.9507 | 0.7670 |
| 0.2283 | 85.0 | 30940 | 0.7674 | 0.8030 | 0.8427 | 0.8027 | 0.8098 | 0.9706 | 0.6875 | 0.8049 | 0.9104 | 0.8472 | 0.8777 | 0.7471 | 0.9028 | 0.8176 | 0.6039 | 0.8224 | 0.6964 | 0.9817 | 0.7535 | 0.8526 |
| 0.1926 | 86.0 | 31304 | 1.0907 | 0.7837 | 0.8336 | 0.7875 | 0.7812 | 0.5703 | 0.9861 | 0.7226 | 0.9891 | 0.6319 | 0.7712 | 0.8618 | 0.9097 | 0.8851 | 0.8488 | 0.4803 | 0.6134 | 0.8980 | 0.9296 | 0.9135 |
| 0.2539 | 87.0 | 31668 | 1.1948 | 0.7782 | 0.8173 | 0.7782 | 0.7832 | 0.9091 | 0.5556 | 0.6897 | 0.9147 | 0.8194 | 0.8645 | 0.8582 | 0.7986 | 0.8273 | 0.5511 | 0.8158 | 0.6578 | 0.8533 | 0.9014 | 0.8767 |
| 0.2203 | 88.0 | 32032 | 0.9832 | 0.7645 | 0.7998 | 0.7644 | 0.7664 | 0.6667 | 0.9306 | 0.7768 | 0.9355 | 0.8056 | 0.8657 | 0.7655 | 0.7708 | 0.7682 | 0.6433 | 0.7237 | 0.6811 | 0.9882 | 0.5915 | 0.7401 |
| 0.2361 | 89.0 | 32396 | 4.8401 | 0.4711 | 0.7411 | 0.4656 | 0.4000 | 1.0 | 0.1181 | 0.2112 | 0.9455 | 0.3611 | 0.5226 | 0.5625 | 0.9375 | 0.7031 | 0.3227 | 0.8618 | 0.4695 | 0.875 | 0.0493 | 0.0933 |
| 0.2091 | 90.0 | 32760 | 1.1061 | 0.7479 | 0.8401 | 0.7469 | 0.7662 | 0.9877 | 0.5556 | 0.7111 | 0.9091 | 0.7639 | 0.8302 | 0.8605 | 0.7708 | 0.8132 | 0.4607 | 0.8487 | 0.5972 | 0.9826 | 0.7958 | 0.8794 |
| 0.2262 | 91.0 | 33124 | 0.9346 | 0.7769 | 0.8112 | 0.7790 | 0.7742 | 0.9451 | 0.5972 | 0.7319 | 0.8144 | 0.9444 | 0.8746 | 0.8537 | 0.7292 | 0.7865 | 0.8291 | 0.6382 | 0.7212 | 0.6140 | 0.9859 | 0.7568 |
| 0.1587 | 92.0 | 33488 | 0.5911 | 0.8747 | 0.8747 | 0.8769 | 0.8738 | 0.9214 | 0.8958 | 0.9085 | 0.8476 | 0.9653 | 0.9026 | 0.8553 | 0.9028 | 0.8784 | 0.8125 | 0.6842 | 0.7429 | 0.9366 | 0.9366 | 0.9366 |
| 0.1668 | 93.0 | 33852 | 1.6945 | 0.6584 | 0.7523 | 0.6569 | 0.6586 | 0.4872 | 0.9236 | 0.6379 | 0.8230 | 0.6458 | 0.7237 | 0.8710 | 0.5625 | 0.6835 | 0.5968 | 0.7303 | 0.6568 | 0.9836 | 0.4225 | 0.5911 |
| 0.1473 | 94.0 | 34216 | 1.6512 | 0.6832 | 0.8307 | 0.6804 | 0.6985 | 0.9620 | 0.5278 | 0.6816 | 0.9792 | 0.6528 | 0.7833 | 0.8038 | 0.8819 | 0.8411 | 0.4085 | 0.8816 | 0.5583 | 1.0 | 0.4577 | 0.6280 |
| 0.2726 | 95.0 | 34580 | 1.9192 | 0.6143 | 0.8272 | 0.6106 | 0.6425 | 0.9545 | 0.4375 | 0.6 | 0.9789 | 0.6458 | 0.7782 | 0.8558 | 0.6181 | 0.7177 | 0.3467 | 0.9079 | 0.5018 | 1.0 | 0.4437 | 0.6146 |
| 0.2871 | 96.0 | 34944 | 1.9940 | 0.6570 | 0.7514 | 0.6566 | 0.6525 | 0.4792 | 0.9583 | 0.6389 | 0.95 | 0.3958 | 0.5588 | 0.9103 | 0.4931 | 0.6396 | 0.6545 | 0.7105 | 0.6814 | 0.7630 | 0.7254 | 0.7437 |
| 0.2305 | 97.0 | 35308 | 1.3152 | 0.7080 | 0.8119 | 0.7063 | 0.7285 | 0.9457 | 0.6042 | 0.7373 | 0.8992 | 0.8056 | 0.8498 | 0.7949 | 0.6458 | 0.7126 | 0.4310 | 0.8421 | 0.5702 | 0.9890 | 0.6338 | 0.7725 |
| 0.2465 | 98.0 | 35672 | 1.1943 | 0.7672 | 0.8213 | 0.7656 | 0.7637 | 0.9333 | 0.6806 | 0.7871 | 0.8654 | 0.9375 | 0.9 | 0.7288 | 0.8958 | 0.8037 | 0.5792 | 0.8421 | 0.6863 | 1.0 | 0.4718 | 0.6411 |
| 0.1739 | 99.0 | 36036 | 0.8183 | 0.8264 | 0.8244 | 0.8286 | 0.8254 | 0.8369 | 0.8194 | 0.8281 | 0.8693 | 0.9236 | 0.8956 | 0.84 | 0.875 | 0.8571 | 0.7424 | 0.6447 | 0.6901 | 0.8333 | 0.8803 | 0.8562 |
| 0.1917 | 100.0 | 36400 | 2.1049 | 0.6901 | 0.7855 | 0.6890 | 0.6700 | 1.0 | 0.3403 | 0.5078 | 0.9844 | 0.4375 | 0.6058 | 0.6484 | 0.9861 | 0.7824 | 0.5385 | 0.8289 | 0.6528 | 0.7562 | 0.8521 | 0.8013 |
| 0.1258 | 101.0 | 36764 | 0.9476 | 0.8264 | 0.8401 | 0.8275 | 0.8227 | 0.8788 | 0.6042 | 0.7160 | 0.9669 | 0.8125 | 0.8830 | 0.8313 | 0.9583 | 0.8903 | 0.7852 | 0.7697 | 0.7774 | 0.7382 | 0.9930 | 0.8468 |
| 0.3102 | 102.0 | 37128 | 2.2261 | 0.6474 | 0.7617 | 0.6454 | 0.6117 | 1.0 | 0.3611 | 0.5306 | 0.6832 | 0.9583 | 0.7977 | 0.6296 | 0.9444 | 0.7556 | 0.4955 | 0.7237 | 0.5882 | 1.0 | 0.2394 | 0.3864 |
| 0.2339 | 103.0 | 37492 | 4.9547 | 0.4394 | 0.7228 | 0.4347 | 0.4335 | 0.8824 | 0.2083 | 0.3371 | 0.9 | 0.25 | 0.3913 | 0.8605 | 0.2569 | 0.3957 | 0.2753 | 0.8947 | 0.4211 | 0.6957 | 0.5634 | 0.6226 |
| 0.1609 | 104.0 | 37856 | 1.2982 | 0.7700 | 0.8024 | 0.7717 | 0.7664 | 0.975 | 0.5417 | 0.6964 | 0.8199 | 0.9167 | 0.8656 | 0.8538 | 0.7708 | 0.8102 | 0.7319 | 0.6645 | 0.6966 | 0.6313 | 0.9648 | 0.7632 |
| 0.2037 | 105.0 | 38220 | 1.7334 | 0.6736 | 0.8253 | 0.6716 | 0.6974 | 0.9853 | 0.4653 | 0.6321 | 0.9176 | 0.5417 | 0.6812 | 0.8534 | 0.6875 | 0.7615 | 0.3878 | 0.875 | 0.5374 | 0.9825 | 0.7887 | 0.875 |
| 0.1627 | 106.0 | 38584 | 1.2770 | 0.7906 | 0.8201 | 0.7921 | 0.7798 | 1.0 | 0.4583 | 0.6286 | 0.8471 | 0.9236 | 0.8837 | 0.7888 | 0.8819 | 0.8328 | 0.7714 | 0.7105 | 0.7397 | 0.6931 | 0.9859 | 0.8140 |
| 0.1985 | 107.0 | 38948 | 0.7336 | 0.8223 | 0.8381 | 0.8222 | 0.8258 | 0.8551 | 0.8194 | 0.8369 | 0.9044 | 0.8542 | 0.8786 | 0.8038 | 0.8819 | 0.8411 | 0.6721 | 0.8092 | 0.7343 | 0.9550 | 0.7465 | 0.8379 |
| 0.2462 | 108.0 | 39312 | 0.6392 | 0.8554 | 0.8583 | 0.8581 | 0.8530 | 0.8198 | 0.9792 | 0.8924 | 0.9603 | 0.8403 | 0.8963 | 0.8723 | 0.8542 | 0.8632 | 0.8017 | 0.6382 | 0.7106 | 0.8373 | 0.9789 | 0.9026 |
| 0.165 | 109.0 | 39676 | 4.0560 | 0.6377 | 0.7508 | 0.6394 | 0.6200 | 0.7683 | 0.4375 | 0.5575 | 1.0 | 0.2639 | 0.4176 | 0.8481 | 0.9306 | 0.8874 | 0.704 | 0.5789 | 0.6354 | 0.4334 | 0.9859 | 0.6022 |
| 0.2116 | 110.0 | 40040 | 3.4035 | 0.5909 | 0.7279 | 0.5878 | 0.5355 | 1.0 | 0.1597 | 0.2754 | 0.6885 | 0.875 | 0.7706 | 0.6 | 0.9167 | 0.7253 | 0.4419 | 0.7763 | 0.5632 | 0.9091 | 0.2113 | 0.3429 |
| 0.1988 | 111.0 | 40404 | 0.9592 | 0.8030 | 0.8312 | 0.8031 | 0.8077 | 0.8462 | 0.8403 | 0.8432 | 0.8675 | 0.9097 | 0.8881 | 0.8472 | 0.8472 | 0.8472 | 0.5949 | 0.7632 | 0.6686 | 1.0 | 0.6549 | 0.7915 |
| 0.1728 | 112.0 | 40768 | 1.4039 | 0.7562 | 0.8180 | 0.7552 | 0.7537 | 0.9252 | 0.6875 | 0.7888 | 0.8912 | 0.9097 | 0.9003 | 0.5643 | 0.9444 | 0.7065 | 0.7091 | 0.7697 | 0.7382 | 1.0 | 0.4648 | 0.6346 |
| 0.2872 | 113.0 | 41132 | 1.0270 | 0.7603 | 0.8068 | 0.7599 | 0.7613 | 0.6453 | 0.9097 | 0.7550 | 0.9810 | 0.7153 | 0.8273 | 0.7764 | 0.8681 | 0.8197 | 0.6441 | 0.75 | 0.6930 | 0.9875 | 0.5563 | 0.7117 |
| 0.1523 | 114.0 | 41496 | 1.8516 | 0.6873 | 0.7650 | 0.6853 | 0.6985 | 0.6759 | 0.6806 | 0.6782 | 0.9231 | 0.75 | 0.8276 | 0.8539 | 0.5278 | 0.6524 | 0.4659 | 0.8553 | 0.6032 | 0.9062 | 0.6127 | 0.7311 |
| 0.1601 | 115.0 | 41860 | 1.4252 | 0.7686 | 0.8087 | 0.7690 | 0.7453 | 1.0 | 0.2986 | 0.4599 | 0.9034 | 0.9097 | 0.9066 | 0.7081 | 0.9097 | 0.7964 | 0.6941 | 0.7763 | 0.7329 | 0.7377 | 0.9507 | 0.8308 |
| 0.1934 | 116.0 | 42224 | 1.0761 | 0.8003 | 0.8124 | 0.8065 | 0.7753 | 0.6792 | 1.0 | 0.8090 | 0.8571 | 0.9167 | 0.8859 | 0.8366 | 0.8889 | 0.8620 | 0.8235 | 0.2763 | 0.4138 | 0.8654 | 0.9507 | 0.9060 |
| 0.1296 | 117.0 | 42588 | 0.9574 | 0.8182 | 0.8567 | 0.8181 | 0.8211 | 0.9759 | 0.5625 | 0.7137 | 0.9538 | 0.8611 | 0.9051 | 0.8280 | 0.9028 | 0.8638 | 0.5972 | 0.8487 | 0.7011 | 0.9286 | 0.9155 | 0.9220 |
| 0.1667 | 118.0 | 42952 | 1.5463 | 0.7052 | 0.7376 | 0.7064 | 0.7098 | 0.7218 | 0.6667 | 0.6931 | 0.655 | 0.9097 | 0.7616 | 0.8469 | 0.5764 | 0.6860 | 0.5167 | 0.6118 | 0.5602 | 0.9478 | 0.7676 | 0.8482 |
| 0.1485 | 119.0 | 43316 | 4.6610 | 0.5207 | 0.6658 | 0.5226 | 0.4893 | 0.8788 | 0.2014 | 0.3277 | 0.8571 | 0.2917 | 0.4352 | 0.4098 | 0.9306 | 0.5690 | 0.6966 | 0.4079 | 0.5145 | 0.4868 | 0.7817 | 0.6 |
| 0.1318 | 120.0 | 43680 | 1.0993 | 0.7893 | 0.8193 | 0.7892 | 0.7906 | 0.9444 | 0.5903 | 0.7265 | 0.8929 | 0.8681 | 0.8803 | 0.8455 | 0.7222 | 0.7790 | 0.6029 | 0.8289 | 0.6981 | 0.8110 | 0.9366 | 0.8693 |
| 0.2371 | 121.0 | 44044 | 8.0913 | 0.4972 | 0.6707 | 0.4972 | 0.4656 | 0.8333 | 0.1042 | 0.1852 | 0.7805 | 0.4444 | 0.5664 | 0.8070 | 0.3194 | 0.4577 | 0.5864 | 0.625 | 0.6051 | 0.3464 | 0.9930 | 0.5137 |
| 0.1721 | 122.0 | 44408 | 1.0303 | 0.8168 | 0.8227 | 0.8183 | 0.8150 | 0.8909 | 0.6806 | 0.7717 | 0.9 | 0.9375 | 0.9184 | 0.7711 | 0.8889 | 0.8258 | 0.7970 | 0.6974 | 0.7439 | 0.7545 | 0.8873 | 0.8155 |
| 0.3003 | 123.0 | 44772 | 4.5488 | 0.4339 | 0.7836 | 0.4344 | 0.4222 | 0.85 | 0.2361 | 0.3696 | 1.0 | 0.2569 | 0.4088 | 1.0 | 0.1597 | 0.2754 | 0.8 | 0.5263 | 0.6349 | 0.2681 | 0.9930 | 0.4222 |
| 0.1306 | 124.0 | 45136 | 1.0231 | 0.7879 | 0.8620 | 0.7868 | 0.8018 | 0.9789 | 0.6458 | 0.7782 | 1.0 | 0.7986 | 0.8880 | 0.8113 | 0.8958 | 0.8515 | 0.5197 | 0.8684 | 0.6502 | 1.0 | 0.7254 | 0.8408 |
| 0.1423 | 125.0 | 45500 | 0.9325 | 0.8168 | 0.8355 | 0.8174 | 0.8180 | 0.7950 | 0.8889 | 0.8393 | 1.0 | 0.7847 | 0.8794 | 0.9083 | 0.6875 | 0.7826 | 0.7186 | 0.7895 | 0.7524 | 0.7557 | 0.9366 | 0.8365 |
| 0.1867 | 126.0 | 45864 | 3.4378 | 0.6804 | 0.7780 | 0.6803 | 0.6426 | 0.6957 | 0.7778 | 0.7344 | 1.0 | 0.0833 | 0.1538 | 0.8408 | 0.9167 | 0.8771 | 0.4346 | 0.7434 | 0.5485 | 0.9191 | 0.8803 | 0.8993 |
| 0.1402 | 127.0 | 46228 | 2.3312 | 0.7025 | 0.8111 | 0.7014 | 0.6846 | 1.0 | 0.2014 | 0.3353 | 0.9596 | 0.6597 | 0.7819 | 0.7895 | 0.9375 | 0.8571 | 0.4523 | 0.8421 | 0.5885 | 0.8542 | 0.8662 | 0.8601 |
| 0.2222 | 128.0 | 46592 | 2.4998 | 0.7300 | 0.7824 | 0.7298 | 0.6892 | 1.0 | 0.1597 | 0.2754 | 0.7389 | 0.9236 | 0.8210 | 0.7616 | 0.9097 | 0.8291 | 0.6010 | 0.7829 | 0.68 | 0.8105 | 0.8732 | 0.8407 |
| 0.1835 | 129.0 | 46956 | 5.8303 | 0.4435 | 0.4147 | 0.4381 | 0.3798 | 0.4444 | 0.6111 | 0.5146 | 0.7736 | 0.5694 | 0.656 | 0.0 | 0.0 | 0.0 | 0.3439 | 0.8553 | 0.4906 | 0.5116 | 0.1549 | 0.2378 |
| 0.2269 | 130.0 | 47320 | 2.9031 | 0.6240 | 0.7550 | 0.6222 | 0.6167 | 0.9512 | 0.2708 | 0.4216 | 0.9062 | 0.6042 | 0.725 | 0.8571 | 0.5 | 0.6316 | 0.4175 | 0.8487 | 0.5597 | 0.6429 | 0.8873 | 0.7456 |
| 0.136 | 131.0 | 47684 | 1.6164 | 0.6653 | 0.7618 | 0.6647 | 0.6609 | 0.8721 | 0.5208 | 0.6522 | 0.9364 | 0.7153 | 0.8110 | 0.9298 | 0.3681 | 0.5274 | 0.5613 | 0.7829 | 0.6538 | 0.5096 | 0.9366 | 0.6600 |
| 0.1439 | 132.0 | 48048 | 2.2665 | 0.6983 | 0.7866 | 0.6974 | 0.6912 | 0.9756 | 0.2778 | 0.4324 | 0.8105 | 0.8611 | 0.8350 | 0.8559 | 0.7014 | 0.7710 | 0.4542 | 0.8158 | 0.5835 | 0.8369 | 0.8310 | 0.8339 |
| 0.1593 | 133.0 | 48412 | 0.8721 | 0.8347 | 0.8601 | 0.8347 | 0.8420 | 0.8971 | 0.8472 | 0.8714 | 0.9143 | 0.8889 | 0.9014 | 0.875 | 0.7778 | 0.8235 | 0.6225 | 0.8355 | 0.7135 | 0.9915 | 0.8239 | 0.9 |
| 0.202 | 134.0 | 48776 | 0.7894 | 0.8182 | 0.8372 | 0.8188 | 0.8206 | 0.9394 | 0.6458 | 0.7654 | 0.8649 | 0.8889 | 0.8767 | 0.8696 | 0.8333 | 0.8511 | 0.6316 | 0.7895 | 0.7018 | 0.8808 | 0.9366 | 0.9078 |
| 0.7143 | 135.0 | 49140 | 2.3021 | 0.6708 | 0.7874 | 0.6693 | 0.6432 | 0.5278 | 0.9236 | 0.6717 | 1.0 | 0.1597 | 0.2754 | 0.8662 | 0.8542 | 0.8601 | 0.5430 | 0.7895 | 0.6434 | 1.0 | 0.6197 | 0.7652 |
| 0.1899 | 136.0 | 49504 | 1.6460 | 0.6515 | 0.8051 | 0.6490 | 0.6772 | 0.8767 | 0.4444 | 0.5899 | 0.96 | 0.6667 | 0.7869 | 0.8218 | 0.5764 | 0.6776 | 0.3775 | 0.8816 | 0.5286 | 0.9897 | 0.6761 | 0.8033 |
| 0.1565 | 137.0 | 49868 | 1.7804 | 0.7149 | 0.7747 | 0.7147 | 0.7063 | 0.7652 | 0.7014 | 0.7319 | 1.0 | 0.3611 | 0.5306 | 0.8561 | 0.7847 | 0.8188 | 0.5333 | 0.7895 | 0.6366 | 0.7189 | 0.9366 | 0.8135 |
| 0.1363 | 138.0 | 50232 | 3.4435 | 0.5041 | 0.6933 | 0.5017 | 0.4899 | 0.7843 | 0.2778 | 0.4103 | 0.3621 | 0.875 | 0.5122 | 0.8571 | 0.4583 | 0.5973 | 0.4630 | 0.6579 | 0.5435 | 1.0 | 0.2394 | 0.3864 |
| 0.0519 | 139.0 | 50596 | 0.8779 | 0.7975 | 0.8098 | 0.7984 | 0.7992 | 0.6703 | 0.8611 | 0.7538 | 0.8898 | 0.7292 | 0.8015 | 0.8819 | 0.7778 | 0.8266 | 0.7724 | 0.7368 | 0.7542 | 0.8344 | 0.8873 | 0.8601 |
| 0.4079 | 140.0 | 50960 | 1.7080 | 0.6612 | 0.7434 | 0.6618 | 0.6599 | 0.8018 | 0.6181 | 0.6980 | 0.9462 | 0.6111 | 0.7426 | 0.8676 | 0.4097 | 0.5566 | 0.6047 | 0.6842 | 0.6420 | 0.4965 | 0.9859 | 0.6604 |
| 0.1648 | 141.0 | 51324 | 2.5691 | 0.6598 | 0.7517 | 0.6587 | 0.6078 | 0.8235 | 0.4861 | 0.6114 | 1.0 | 0.0972 | 0.1772 | 0.7458 | 0.9167 | 0.8224 | 0.504 | 0.8289 | 0.6269 | 0.685 | 0.9648 | 0.8012 |
| 0.134 | 142.0 | 51688 | 1.5630 | 0.7700 | 0.8081 | 0.7738 | 0.7631 | 0.8976 | 0.7917 | 0.8413 | 0.7316 | 0.9653 | 0.8323 | 0.8727 | 0.6667 | 0.7559 | 0.9114 | 0.4737 | 0.6234 | 0.6273 | 0.9718 | 0.7624 |
| 0.1752 | 143.0 | 52052 | 1.2265 | 0.7755 | 0.8027 | 0.7761 | 0.7720 | 0.7419 | 0.7986 | 0.7692 | 0.9875 | 0.5486 | 0.7054 | 0.8582 | 0.7986 | 0.8273 | 0.7267 | 0.7697 | 0.7476 | 0.6990 | 0.9648 | 0.8107 |
| 0.1366 | 144.0 | 52416 | 2.4039 | 0.6129 | 0.7119 | 0.6101 | 0.5804 | 0.5780 | 0.6944 | 0.6309 | 0.6461 | 0.7986 | 0.7143 | 0.8684 | 0.6875 | 0.7674 | 0.4672 | 0.75 | 0.5758 | 1.0 | 0.1197 | 0.2138 |
| 0.4067 | 145.0 | 52780 | 1.7158 | 0.7176 | 0.8031 | 0.7165 | 0.7189 | 0.7714 | 0.75 | 0.7606 | 1.0 | 0.3681 | 0.5381 | 0.82 | 0.8542 | 0.8367 | 0.4757 | 0.8355 | 0.6062 | 0.9483 | 0.7746 | 0.8527 |
| 0.1673 | 146.0 | 53144 | 1.4298 | 0.7727 | 0.7929 | 0.7743 | 0.7668 | 0.8902 | 0.5069 | 0.6460 | 0.8639 | 0.8819 | 0.8729 | 0.8207 | 0.8264 | 0.8235 | 0.7324 | 0.6842 | 0.7075 | 0.6571 | 0.9718 | 0.7841 |
| 0.1853 | 147.0 | 53508 | 0.9835 | 0.7906 | 0.8068 | 0.7916 | 0.7900 | 0.9238 | 0.6736 | 0.7791 | 0.7619 | 0.8889 | 0.8205 | 0.875 | 0.6806 | 0.7656 | 0.6628 | 0.75 | 0.7037 | 0.8107 | 0.9648 | 0.8810 |
| 0.1954 | 148.0 | 53872 | 0.9705 | 0.7837 | 0.8044 | 0.7848 | 0.7806 | 0.7182 | 0.9028 | 0.8 | 0.8187 | 0.9722 | 0.8889 | 0.8182 | 0.8125 | 0.8153 | 0.6667 | 0.6447 | 0.6555 | 1.0 | 0.5915 | 0.7434 |
| 0.118 | 149.0 | 54236 | 0.6951 | 0.8402 | 0.8627 | 0.8403 | 0.8455 | 0.9274 | 0.7986 | 0.8582 | 0.9688 | 0.8611 | 0.9118 | 0.7674 | 0.9167 | 0.8354 | 0.6757 | 0.8224 | 0.7418 | 0.9744 | 0.8028 | 0.8803 |
| 0.1205 | 150.0 | 54600 | 0.8478 | 0.8609 | 0.8648 | 0.8628 | 0.8606 | 0.8 | 0.9444 | 0.8662 | 0.8832 | 0.8403 | 0.8612 | 0.85 | 0.9444 | 0.8947 | 0.8217 | 0.6974 | 0.7544 | 0.9692 | 0.8873 | 0.9265 |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.5.1+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
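For quick spot-checks of a checkpoint like this one, the standard `transformers` image-classification pipeline is enough. A minimal sketch follows; the repo id below is a placeholder, since the card excerpt does not show it, and `bean.png` stands in for any local grain image:

```python
from transformers import pipeline

# Placeholder repo id -- substitute the actual Hub id of this checkpoint.
classifier = pipeline("image-classification", model="your-namespace/typecoffee-64x64")

# Top-5 classes with scores, e.g. [{'label': 'mole_64x64', 'score': 0.93}, ...]
print(classifier("bean.png", top_k=5))
```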
[ "durariadorio_64x64", "mole_64x64", "quebrado_64x64", "riadorio_64x64", "riofechado_64x64" ]
Malharr11/finetuned-indian-food
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# finetuned-indian-food

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the indian_food_images dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2435
- Accuracy: 0.9373

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 4
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:------:|:----:|:---------------:|:--------:|
| 0.9196 | 0.3003 | 100 | 0.9078 | 0.8544 |
| 0.858 | 0.6006 | 200 | 0.5913 | 0.8831 |
| 0.709 | 0.9009 | 300 | 0.4833 | 0.8927 |
| 0.304 | 1.2012 | 400 | 0.3871 | 0.9160 |
| 0.3863 | 1.5015 | 500 | 0.3949 | 0.9033 |
| 0.4374 | 1.8018 | 600 | 0.3701 | 0.9033 |
| 0.2408 | 2.1021 | 700 | 0.3211 | 0.9150 |
| 0.2891 | 2.4024 | 800 | 0.3366 | 0.9075 |
| 0.1858 | 2.7027 | 900 | 0.2775 | 0.9352 |
| 0.2829 | 3.0030 | 1000 | 0.2767 | 0.9309 |
| 0.2698 | 3.3033 | 1100 | 0.2587 | 0.9352 |
| 0.2201 | 3.6036 | 1200 | 0.2465 | 0.9362 |
| 0.11 | 3.9039 | 1300 | 0.2435 | 0.9373 |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.5.1+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
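For inference without the pipeline wrapper, the processor and model can be loaded directly. A minimal sketch, assuming the checkpoint is public on the Hub and that `dosa.jpg` is a local image you supply:

```python
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

model_id = "Malharr11/finetuned-indian-food"
processor = AutoImageProcessor.from_pretrained(model_id)
model = AutoModelForImageClassification.from_pretrained(model_id)

image = Image.open("dosa.jpg").convert("RGB")
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# Report the three most likely dishes with their probabilities
probs = logits.softmax(dim=-1)[0]
top = probs.topk(3)
for score, idx in zip(top.values, top.indices):
    print(f"{model.config.id2label[idx.item()]}: {score.item():.3f}")
```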
[ "burger", "butter_naan", "kaathi_rolls", "kadai_paneer", "kulfi", "masala_dosa", "momos", "paani_puri", "pakode", "pav_bhaji", "pizza", "samosa", "chai", "chapati", "chole_bhature", "dal_makhani", "dhokla", "fried_rice", "idli", "jalebi" ]
Malharr11/finetuned-indian-food-9
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# finetuned-indian-food-9

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the indian_food_images_9 dataset.
It achieves the following results on the evaluation set:
- Loss: 0.3239
- Accuracy: 0.9227

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 4
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:------:|:----:|:---------------:|:--------:|
| 1.3678 | 0.2874 | 100 | 1.2110 | 0.8291 |
| 0.8714 | 0.5747 | 200 | 0.8336 | 0.8494 |
| 0.8788 | 0.8621 | 300 | 0.6678 | 0.8474 |
| 0.5413 | 1.1494 | 400 | 0.5275 | 0.8850 |
| 0.4618 | 1.4368 | 500 | 0.5324 | 0.8688 |
| 0.4924 | 1.7241 | 600 | 0.5178 | 0.8698 |
| 0.3667 | 2.0115 | 700 | 0.4564 | 0.8942 |
| 0.2604 | 2.2989 | 800 | 0.4188 | 0.8973 |
| 0.2148 | 2.5862 | 900 | 0.4108 | 0.8993 |
| 0.2383 | 2.8736 | 1000 | 0.3312 | 0.9145 |
| 0.1441 | 3.1609 | 1100 | 0.3467 | 0.9156 |
| 0.186 | 3.4483 | 1200 | 0.3284 | 0.9237 |
| 0.19 | 3.7356 | 1300 | 0.3239 | 0.9227 |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.5.1+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
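The hyperparameters listed above map directly onto `transformers.TrainingArguments`. A sketch of an equivalent configuration; `output_dir` is illustrative and not taken from the card:

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="finetuned-indian-food-9",  # illustrative path
    learning_rate=2e-4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    seed=42,
    optim="adamw_torch",          # OptimizerNames.ADAMW_TORCH
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    num_train_epochs=4,
    fp16=True,                    # "Native AMP" mixed-precision training
)
```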
[ "burger", "butter_naan", "idli", "jalebi", "kadai_paneer", "khaman", "kulfi", "masala_dosa", "meduvada", "mirchi_bajji", "momos", "paani_puri", "chai", "pakode", "pizza", "poha", "rasgulla", "samosa", "sandwitch", "vadapav", "chapati", "chole_bhature", "dal_makhani", "frankie", "fried_rice", "gajar_halwa", "gulab_jamun" ]
distill-lab/distill-n4_00-01_combined_cls_v1b3_siglip2_focal-loss
# Model Card for Model ID

<!-- Provide a quick summary of what the model is/does. -->

current batches:

- `nv3[v0] (1700) | nv4[v1-2k] (4000) | nv4[v1-210k] (b1-b3: 6000)`

training samples (throw / keep):

- `8929, 2784`

## Training

train command:

```sh
#!/bin/bash

# =================== BEGIN NOTES =======================
# nothing new in this one. dig previous scripts for details
# =================== END NOTES ==========================

# Define variables
BASE_MODEL="google/siglip2-large-patch16-512"
DATASET="distill-lab/COMBINE_nai-distill_00-01_eagle.library"
TASK="classification"
NUM_EPOCHS=10

# Run training command
python -m trainlib.hf_trainer.cli \
    --model_name_or_path $BASE_MODEL \
    --dataset_name $DATASET \
    --output_dir distill-n4_00-01_combined_cls_v1b3_siglip2_focal-loss \
    --remove_unused_columns False \
    --label_column_name star \
    --task $TASK \
    --do_train \
    --do_eval \
    --eval_strategy steps \
    --eval_steps 100 \
    --learning_rate 5e-6 \
    --num_train_epochs $NUM_EPOCHS \
    --per_device_train_batch_size 22 \
    --per_device_eval_batch_size 22 \
    --logging_strategy steps \
    --logging_steps 2 \
    --save_total_limit 1 \
    --seed 1337 \
    --lr_scheduler_type cosine \
    --dataloader_num_workers 16 \
    --ignore_mismatched_sizes True \
    --fp16 True # EXTRA ARGUMENT
```

## Eval

eval results: (~1.5% higher accuracy than v1b2, by adding 2000 samples)

```
wandb: Run summary:
wandb:                  eval/accuracy 0.7852
wandb:                        eval/f1 0.46247
wandb:                      eval/loss 0.23888
wandb:                 eval/precision 0.53352
wandb:                    eval/recall 0.40812
wandb:                   eval/roc_auc 0.78516
wandb:                   eval/runtime 19.6053
wandb:        eval/samples_per_second 105.431
wandb:          eval/steps_per_second 0.612
wandb:                     total_flos 1.744816776738767e+20
wandb:                    train/epoch 10.0
wandb:              train/global_step 670
wandb:                train/grad_norm 279129.6875
wandb:            train/learning_rate 0.0
wandb:                     train/loss 0.1785
wandb:                     train_loss 0.21612
wandb:                  train_runtime 1212.1372
wandb:       train_samples_per_second 96.631
wandb:         train_steps_per_second 0.553
```
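The run name says `focal-loss`, but the exact variant `trainlib` applies is not shown here. For reference, a common binary focal-loss formulation (Lin et al., 2017) looks like the sketch below; `alpha` and `gamma` are the usual defaults, not values recovered from this run:

```python
import torch
import torch.nn.functional as F

def binary_focal_loss(logits: torch.Tensor, targets: torch.Tensor,
                      alpha: float = 0.25, gamma: float = 2.0) -> torch.Tensor:
    # Standard BCE per sample, kept unreduced so it can be re-weighted
    bce = F.binary_cross_entropy_with_logits(logits, targets, reduction="none")
    p_t = torch.exp(-bce)  # model's probability for the true class
    # Down-weight easy examples so hard ones dominate the average
    return (alpha * (1.0 - p_t) ** gamma * bce).mean()
```

Focal loss is a sensible choice here given the skewed throw/keep split (8929 vs 2784 samples), since it keeps the majority class from swamping the gradient.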
[ "star_0", "star_1" ]
Malharr11/finetuned-indian-food-26
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# finetuned-indian-food-26

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the indian_food_images_26 dataset.
It achieves the following results on the evaluation set:
- Loss: 0.3562
- Accuracy: 0.9207

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 10
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:------:|:----:|:---------------:|:--------:|
| 0.2639 | 0.2874 | 100 | 0.6565 | 0.8372 |
| 0.2524 | 0.5747 | 200 | 0.5063 | 0.8678 |
| 0.4061 | 0.8621 | 300 | 0.4874 | 0.8759 |
| 0.2374 | 1.1494 | 400 | 0.5066 | 0.8769 |
| 0.2202 | 1.4368 | 500 | 0.5157 | 0.8708 |
| 0.2895 | 1.7241 | 600 | 0.5908 | 0.8494 |
| 0.2652 | 2.0115 | 700 | 0.5432 | 0.8688 |
| 0.1772 | 2.2989 | 800 | 0.5406 | 0.8667 |
| 0.165 | 2.5862 | 900 | 0.5264 | 0.8749 |
| 0.358 | 2.8736 | 1000 | 0.5183 | 0.8779 |
| 0.1573 | 3.1609 | 1100 | 0.5022 | 0.8840 |
| 0.2839 | 3.4483 | 1200 | 0.5130 | 0.8881 |
| 0.2695 | 3.7356 | 1300 | 0.4671 | 0.8891 |
| 0.2392 | 4.0230 | 1400 | 0.5282 | 0.8708 |
| 0.2909 | 4.3103 | 1500 | 0.4564 | 0.8932 |
| 0.1485 | 4.5977 | 1600 | 0.4200 | 0.9034 |
| 0.1445 | 4.8851 | 1700 | 0.4566 | 0.8922 |
| 0.1331 | 5.1724 | 1800 | 0.4032 | 0.9044 |
| 0.1931 | 5.4598 | 1900 | 0.4350 | 0.9044 |
| 0.1389 | 5.7471 | 2000 | 0.3991 | 0.9084 |
| 0.0952 | 6.0345 | 2100 | 0.4777 | 0.8881 |
| 0.0906 | 6.3218 | 2200 | 0.3946 | 0.9034 |
| 0.1537 | 6.6092 | 2300 | 0.4495 | 0.8962 |
| 0.1049 | 6.8966 | 2400 | 0.4333 | 0.9044 |
| 0.0593 | 7.1839 | 2500 | 0.4269 | 0.9054 |
| 0.154 | 7.4713 | 2600 | 0.3678 | 0.9135 |
| 0.124 | 7.7586 | 2700 | 0.3875 | 0.9176 |
| 0.1862 | 8.0460 | 2800 | 0.3923 | 0.9105 |
| 0.1579 | 8.3333 | 2900 | 0.3827 | 0.9156 |
| 0.1045 | 8.6207 | 3000 | 0.3829 | 0.9125 |
| 0.0069 | 8.9080 | 3100 | 0.3562 | 0.9207 |
| 0.0407 | 9.1954 | 3200 | 0.3833 | 0.9156 |
| 0.1204 | 9.4828 | 3300 | 0.3766 | 0.9176 |
| 0.1442 | 9.7701 | 3400 | 0.3734 | 0.9207 |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.5.1+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
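A quick way to confirm the label space this checkpoint was trained with is to read it from the config. A sketch, assuming the repo is public on the Hub:

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("Malharr11/finetuned-indian-food-26")
print(len(config.id2label))                  # expected: 27 dish classes
print(sorted(config.id2label.values())[:5])  # first few labels alphabetically
```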
[ "burger", "butter_naan", "idli", "jalebi", "kadai_paneer", "khaman", "kulfi", "masala_dosa", "meduvada", "mirchi_bajji", "momos", "paani_puri", "chai", "pakode", "pizza", "poha", "rasgulla", "samosa", "sandwitch", "vadapav", "chapati", "chole_bhature", "dal_makhani", "frankie", "fried_rice", "gajar_halwa", "gulab_jamun" ]
niko132/vit-base-oxford-iiit-pets
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# vit-base-oxford-iiit-pets

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the pcuenq/oxford-pets dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2248
- Accuracy: 0.9296

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 5

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.3856 | 1.0 | 370 | 0.2812 | 0.9283 |
| 0.2134 | 2.0 | 740 | 0.2208 | 0.9364 |
| 0.1724 | 3.0 | 1110 | 0.1981 | 0.9405 |
| 0.15 | 4.0 | 1480 | 0.1918 | 0.9378 |
| 0.1471 | 5.0 | 1850 | 0.1891 | 0.9391 |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.5.1+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
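The training data named above can be pulled straight from the Hub for reproduction. A sketch only: the split name and column layout of `pcuenq/oxford-pets` are assumptions to verify, not facts from the card:

```python
from datasets import load_dataset

ds = load_dataset("pcuenq/oxford-pets", split="train")
print(ds.column_names)  # inspect the schema before mapping it to the image processor
print(ds[0])            # one example; the card's label space covers 37 pet breeds
```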
[ "siamese", "birman", "shiba inu", "staffordshire bull terrier", "basset hound", "bombay", "japanese chin", "chihuahua", "german shorthaired", "pomeranian", "beagle", "english cocker spaniel", "american pit bull terrier", "ragdoll", "persian", "egyptian mau", "miniature pinscher", "sphynx", "maine coon", "keeshond", "yorkshire terrier", "havanese", "leonberger", "wheaten terrier", "american bulldog", "english setter", "boxer", "newfoundland", "bengal", "samoyed", "british shorthair", "great pyrenees", "abyssinian", "pug", "saint bernard", "russian blue", "scottish terrier" ]
ufal/vit-historical-page
# Image classification using fine-tuned ViT - for historical :bowtie: documents sorting

### Goal: solve a task of archive page images sorting (for their further content-based processing)

**Scope:** Processing of images, training and evaluation of ViT model, input file/directory processing, class 🏷️ (category) results of top N predictions output, predictions summarizing into a tabular format, HF 😊 hub support for the model

## Versions 🏁

There are currently 5 versions of the model available for download; all of them share the same set of categories, but differ in their data annotations and base models. The latest approved `v2.1` is considered the default and can be found in the `main` branch of the HF 😊 hub [^1] 🔗

| Version | Base | Pages | PDFs | Description |
|--------:|------------------------|:-----:|:--------:|:--------------------------------------------------------------------------|
| `v2.0` | `vit-base-patch16-224` | 10073 | **3896** | annotations with mistakes, more heterogeneous data |
| `v2.1` | `vit-base-patch16-224` | 11940 | **5002** | `main`: more diverse pages in each category, less annotation mistakes |
| `v2.2` | `vit-base-patch16-224` | 15855 | **5730** | same data as `v2.1` + some restored pages from `v2.0` |
| `v3.2` | `vit-base-patch16-384` | 15855 | **5730** | same data as `v2.2`, but a bit larger model base with higher resolution |
| `v5.2` | `vit-large-patch16-384` | 15855 | **5730** | same data as `v2.2`, but the largest model base with higher resolution |

## Model description 📇

🔲 Fine-tuned model repository: vit-historical-page [^1] 🔗

🔳 Base model repository: Google's **vit-base-patch16-224**, **vit-base-patch16-384**, and **vit-large-patch16-384** [^2] [^6] [^7] 🔗

### Data 📜

Training set of the model: **8950** images for `v2.0`

Training set of the model: **10745** images for `v2.1`

Training set of the model: **14565** images for `v2.2`, `v3.2` and `v5.2`

### Categories 🏷️

| Label️ | Description |
|----------:|:-----------------------------------------------------------------------------------------------------------------|
| `DRAW` | **📈 - drawings, maps, paintings, schematics, or graphics, potentially containing some text labels or captions** |
| `DRAW_L` | **📈📏 - drawings, etc but presented within a table-like layout or includes a legend formatted as a table** |
| `LINE_HW` | **✏️📏 - handwritten text organized in a tabular or form-like structure** |
| `LINE_P` | **📏 - printed text organized in a tabular or form-like structure** |
| `LINE_T` | **📏 - machine-typed text organized in a tabular or form-like structure** |
| `PHOTO` | **🌄 - photographs or photographic cutouts, potentially with text captions** |
| `PHOTO_L` | **🌄📏 - photos presented within a table-like layout or accompanied by tabular annotations** |
| `TEXT` | **📰 - mixtures of printed, handwritten, and/or typed text, potentially with minor graphical elements** |
| `TEXT_HW` | **✏️📄 - only handwritten text in paragraph or block form (non-tabular)** |
| `TEXT_P` | **📄 - only printed text in paragraph or block form (non-tabular)** |
| `TEXT_T` | **📄 - only machine-typed text in paragraph or block form (non-tabular)** |

Evaluation set: **1290** images (taken from `v2.2` annotations)

#### Data preprocessing

During training the following transforms were applied randomly with a 50% chance:

* transforms.ColorJitter(brightness=0.5)
* transforms.ColorJitter(contrast=0.5)
* transforms.ColorJitter(saturation=0.5)
* transforms.ColorJitter(hue=0.5)
* transforms.Lambda(lambda img: ImageEnhance.Sharpness(img).enhance(random.uniform(0.5, 1.5)))
* transforms.Lambda(lambda img: img.filter(ImageFilter.GaussianBlur(radius=random.uniform(0, 2))))

### Training Hyperparameters

* eval_strategy: "epoch"
* save_strategy: "epoch"
* learning_rate: 5e-5
* per_device_train_batch_size: 8
* per_device_eval_batch_size: 8
* num_train_epochs: 3
* warmup_ratio: 0.1
* logging_steps: 10
* load_best_model_at_end: True
* metric_for_best_model: "accuracy"

### Results 📊

**v2.0** Evaluation set's accuracy (**Top-3**): **95.58%**

![TOP-3 confusion matrix - trained ViT](https://github.com/ufal/atrium-page-classification/blob/main/result/plots/20250526-1147_model_v20_conf_mat_TOP-3.png?raw=true)

**v2.1** Evaluation set's accuracy (**Top-3**): **99.84%**

![TOP-3 confusion matrix - trained ViT](https://github.com/ufal/atrium-page-classification/blob/main/result/plots/20250526-1157_model_v21_conf_mat_TOP-3.png?raw=true)

**v2.2** Evaluation set's accuracy (**Top-3**): **100.00%**

![TOP-3 confusion matrix - trained ViT](https://github.com/ufal/atrium-page-classification/blob/main/result/plots/20250526-1201_model_v22_conf_mat_TOP-3.png?raw=true)

**v2.0** Evaluation set's accuracy (**Top-1**): **84.96%**

![TOP-1 confusion matrix - trained ViT](https://github.com/ufal/atrium-page-classification/blob/main/result/plots/20250526-1152_model_v20_conf_mat_TOP-1.png?raw=true)

**v2.1** Evaluation set's accuracy (**Top-1**): **96.36%**

![TOP-1 confusion matrix - trained ViT](https://github.com/ufal/atrium-page-classification/blob/main/result/plots/20250526-1156_model_v21_conf_mat_TOP-1.png?raw=true)

**v2.2** Evaluation set's accuracy (**Top-1**): **99.61%**

![TOP-1 confusion matrix - trained ViT](https://github.com/ufal/atrium-page-classification/blob/main/result/plots/20250526-1202_model_v22_conf_mat_TOP-1.png?raw=true)

#### Result tables

- **v2.0** Manually ✍ **checked** evaluation dataset results (TOP-3): [model_TOP-3_EVAL.csv](https://github.com/ufal/atrium-page-classification/blob/main/result/tables/20250526-1142_model_v20_TOP-3_EVAL.csv) 🔗
- **v2.0** Manually ✍ **checked** evaluation dataset results (TOP-1): [model_TOP-1_EVAL.csv](https://github.com/ufal/atrium-page-classification/blob/main/result/tables/20250526-1148_model_v20_TOP-1_EVAL.csv) 🔗
- **v2.1** Manually ✍ **checked** evaluation dataset results (TOP-3): [model_TOP-3_EVAL.csv](https://github.com/ufal/atrium-page-classification/blob/main/result/tables/20250526-1153_model_v21_TOP-3_EVAL.csv) 🔗
- **v2.1** Manually ✍ **checked** evaluation dataset results (TOP-1): [model_TOP-1_EVAL.csv](https://github.com/ufal/atrium-page-classification/blob/main/result/tables/20250526-1151_model_v21_TOP-1_EVAL.csv) 🔗
- **v2.2** Manually ✍ **checked** evaluation dataset results (TOP-3): [model_TOP-3_EVAL.csv](https://github.com/ufal/atrium-page-classification/blob/main/result/tables/20250526-1156_model_v22_TOP-3_EVAL.csv) 🔗
- **v2.2** Manually ✍ **checked** evaluation dataset results (TOP-1): [model_TOP-1_EVAL.csv](https://github.com/ufal/atrium-page-classification/blob/main/result/tables/20250526-1158_model_v22_TOP-1_EVAL.csv) 🔗

#### Table columns

- **FILE** - name of the file
- **PAGE** - number of the page
- **CLASS-N** - label of the category 🏷️, guess TOP-N
- **SCORE-N** - score of the category 🏷️, guess TOP-N
- **TRUE** - actual label of the category 🏷️

### Contacts 📧

For support write to 📧 [email protected] 📧

Official repository: UFAL [^3]

### Acknowledgements 🙏

- **Developed by** UFAL [^5] 👥
- **Funded by** ATRIUM [^4] 💰
- **Shared by** ATRIUM [^4] & UFAL [^5]
- **Model type:** fine-tuned ViT with a 224x224 [^2] 🔗 or 384x384 [^6] [^7] 🔗 resolution size

**©️ 2022 UFAL & ATRIUM**

[^1]: https://huggingface.co/k4tel/vit-historical-page
[^2]: https://huggingface.co/google/vit-base-patch16-224
[^3]: https://github.com/ufal/atrium-page-classification
[^4]: https://atrium-research.eu/
[^5]: https://ufal.mff.cuni.cz/home-page
[^6]: https://huggingface.co/google/vit-base-patch16-384
[^7]: https://huggingface.co/google/vit-large-patch16-384
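A sketch of the preprocessing stack described above, written with `torchvision`; each transform is wrapped in `RandomApply(p=0.5)` to model the stated 50% chance per transform. This mirrors the listed operations only and is not the project's exact training code:

```python
import random
from PIL import ImageEnhance, ImageFilter
from torchvision import transforms

train_augmentations = transforms.Compose([
    transforms.RandomApply([transforms.ColorJitter(brightness=0.5)], p=0.5),
    transforms.RandomApply([transforms.ColorJitter(contrast=0.5)], p=0.5),
    transforms.RandomApply([transforms.ColorJitter(saturation=0.5)], p=0.5),
    transforms.RandomApply([transforms.ColorJitter(hue=0.5)], p=0.5),
    transforms.RandomApply([transforms.Lambda(
        lambda img: ImageEnhance.Sharpness(img).enhance(random.uniform(0.5, 1.5)))], p=0.5),
    transforms.RandomApply([transforms.Lambda(
        lambda img: img.filter(ImageFilter.GaussianBlur(radius=random.uniform(0, 2))))], p=0.5),
])
```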
[ "label_0", "label_1", "label_2", "label_3", "label_4", "label_5", "label_6", "label_7", "label_8", "label_9", "label_10" ]
shahad-alh/arabichar-finetuned-v2
# Model Card for Model ID

This fine-tuned CNN model is designed to identify Arabic alphabet characters written by children.

## Model Details

### Model Description

This model focuses on adapting a pre-trained CNN. It is built from three convolutional layers with 32 filters, followed by max-pooling and batch normalization. Another set of three convolutional layers with 64 filters extracts deeper features, followed by another pooling and normalization step. The extracted features are passed through two fully connected layers with dropout, and the final softmax layer classifies the characters into 28 categories.

The base model was trained on AHCD, and I fine-tuned it on the Dhad-Hijja dataset collection.

- **License:** [MIT]
- **Finetuned from model [optional]:** [shahad-alh/arabichar-finetuned-v1]

## Uses

The model could be used by anyone interested in building an app, a model, or anything else that needs to identify children's handwritten characters.

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.

## Evaluation

- Training-set accuracy = 80%
- Validation-set accuracy = 72%
- Test-set accuracy = 69%

This is just a draft; I'm still working on the training.

## 🚀 Demo

Try it out live here:
[![Gradio demo](https://img.shields.io/badge/Gradio-Demo-blue)](https://huggingface.co/spaces/your-username/your-space-name)

## Model Card Contact

[More Information Needed]
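A sketch of the architecture described above. Only the filter counts, the pooling/batch-normalization placement, the two dropout-regularized dense layers, and the 28-way softmax come from the description; the framework (Keras), kernel size, input shape, dense widths, and dropout rate are assumptions for illustration:

```python
from tensorflow import keras
from tensorflow.keras import layers

model = keras.Sequential([
    layers.Input(shape=(32, 32, 1)),                     # assumed input size
    # First block: three conv layers with 32 filters, then pooling + batch norm
    layers.Conv2D(32, 3, activation="relu", padding="same"),
    layers.Conv2D(32, 3, activation="relu", padding="same"),
    layers.Conv2D(32, 3, activation="relu", padding="same"),
    layers.MaxPooling2D(),
    layers.BatchNormalization(),
    # Second block: three conv layers with 64 filters for deeper features
    layers.Conv2D(64, 3, activation="relu", padding="same"),
    layers.Conv2D(64, 3, activation="relu", padding="same"),
    layers.Conv2D(64, 3, activation="relu", padding="same"),
    layers.MaxPooling2D(),
    layers.BatchNormalization(),
    # Two fully connected layers with dropout, then the 28-way softmax head
    layers.Flatten(),
    layers.Dense(128, activation="relu"),
    layers.Dropout(0.5),
    layers.Dense(64, activation="relu"),
    layers.Dropout(0.5),
    layers.Dense(28, activation="softmax"),
])
```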
[ "alif", "ayen", "baa", "dal", "dhad", "faa", "ghayen", "h_aa", "haa", "jeem", "kaf", "khaa", "lam", "meem", "noon", "qaf", "raa", "sad", "seen", "sheen", "t_aa", "taa", "th_aa", "thaa", "thal", "waw", "yaa", "zay" ]
Master-Rapha7/mobilenetv2-typecoffee-5
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # mobilenetv2-typecoffee-5 This model is a fine-tuned version of [google/mobilenet_v2_1.0_224](https://huggingface.co/google/mobilenet_v2_1.0_224) on the Master-Rapha7/TypeCoffee_32x32 dataset. It achieves the following results on the evaluation set: - Loss: 0.4214 - Accuracy: 0.8520 - Precision: 0.8543 - Recall: 0.8531 - F1: 0.8530 - Precision Durariadorio 32x32: 0.8105 - Recall Durariadorio 32x32: 0.8542 - F1 Durariadorio 32x32: 0.8318 - Precision Mole 32x32: 0.9086 - Recall Mole 32x32: 0.8281 - F1 Mole 32x32: 0.8665 - Precision Quebrado 32x32: 0.8625 - Recall Quebrado 32x32: 0.9253 - F1 Quebrado 32x32: 0.8928 - Precision Riadorio 32x32: 0.7709 - Recall Riadorio 32x32: 0.7582 - F1 Riadorio 32x32: 0.7645 - Precision Riofechado 32x32: 0.9192 - Recall Riofechado 32x32: 0.8998 - F1 Riofechado 32x32: 0.9094 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 100.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | Recall | F1 | Precision Durariadorio 32x32 | Recall Durariadorio 32x32 | F1 Durariadorio 32x32 | Precision Mole 32x32 | Recall Mole 32x32 | F1 Mole 32x32 | Precision Quebrado 32x32 | Recall Quebrado 32x32 | F1 Quebrado 32x32 | Precision Riadorio 32x32 | Recall Riadorio 32x32 | F1 Riadorio 32x32 | Precision Riofechado 32x32 | Recall Riofechado 32x32 | F1 Riofechado 32x32 | |:-------------:|:-----:|:------:|:---------------:|:--------:|:---------:|:------:|:------:|:----------------------------:|:-------------------------:|:---------------------:|:--------------------:|:-----------------:|:-------------:|:------------------------:|:---------------------:|:-----------------:|:------------------------:|:---------------------:|:-----------------:|:--------------------------:|:-----------------------:|:-------------------:| | 1.1437 | 1.0 | 1453 | 0.9896 | 0.6055 | 0.6371 | 0.6049 | 0.6042 | 0.5118 | 0.6042 | 0.5541 | 0.6273 | 0.4705 | 0.5377 | 0.7538 | 0.8663 | 0.8061 | 0.4676 | 0.6283 | 0.5361 | 0.8248 | 0.4552 | 0.5866 | | 0.9483 | 2.0 | 2906 | 0.8851 | 0.6561 | 0.6812 | 0.6611 | 0.6352 | 0.6019 | 0.5590 | 0.5797 | 0.7471 | 0.6823 | 0.7132 | 0.7504 | 0.9132 | 0.8238 | 0.7647 | 0.2566 | 0.3842 | 0.5421 | 0.8946 | 0.6751 | | 0.8019 | 3.0 | 4359 | 0.7375 | 0.7133 | 0.7174 | 0.7151 | 0.7047 | 0.7552 | 0.5035 | 0.6042 | 0.7056 | 0.8281 | 0.7620 | 0.6929 | 0.9635 | 0.8061 | 0.6601 | 0.5493 | 0.5996 | 0.7732 | 0.7311 | 0.7516 | | 0.8397 | 4.0 | 5812 | 0.6973 | 0.7339 | 0.7599 | 0.7355 | 0.7352 | 0.5708 | 0.8403 | 0.6798 | 0.7906 | 0.7274 | 0.7577 | 0.8135 | 0.9010 | 0.8550 | 0.6988 | 0.5724 | 0.6293 | 0.9258 | 0.6362 | 0.7542 | | 0.8214 | 5.0 | 7265 | 0.6209 | 0.7639 | 0.7770 | 0.7653 | 0.7564 | 0.8522 | 0.5104 | 0.6384 | 0.6899 | 0.9271 | 0.7911 | 0.7723 | 0.9601 | 0.8560 | 0.7067 | 0.6382 | 0.6707 | 0.8637 | 0.7909 | 0.8257 | | 0.9074 | 6.0 | 8718 | 0.6062 | 0.7580 | 0.7878 | 0.7582 | 0.7618 | 0.7020 | 
0.8056 | 0.7502 | 0.9311 | 0.5868 | 0.7199 | 0.8555 | 0.8837 | 0.8693 | 0.5754 | 0.7401 | 0.6475 | 0.875 | 0.7750 | 0.8220 | | 0.8228 | 7.0 | 10171 | 0.6415 | 0.7621 | 0.7812 | 0.7628 | 0.7600 | 0.6774 | 0.7292 | 0.7023 | 0.9341 | 0.5417 | 0.6857 | 0.8528 | 0.9253 | 0.8876 | 0.6320 | 0.7286 | 0.6769 | 0.8096 | 0.8893 | 0.8476 | | 0.7683 | 8.0 | 11624 | 0.5603 | 0.7866 | 0.7937 | 0.7883 | 0.7826 | 0.7844 | 0.6441 | 0.7073 | 0.7361 | 0.9201 | 0.8179 | 0.7609 | 0.9670 | 0.8517 | 0.7807 | 0.6266 | 0.6953 | 0.9065 | 0.7838 | 0.8407 | | 0.6675 | 9.0 | 13077 | 0.7410 | 0.7188 | 0.7695 | 0.7180 | 0.7228 | 0.6933 | 0.7222 | 0.7075 | 0.9178 | 0.5816 | 0.7120 | 0.92 | 0.5990 | 0.7256 | 0.5342 | 0.8224 | 0.6477 | 0.7822 | 0.8647 | 0.8214 | | 0.7493 | 10.0 | 14530 | 0.5432 | 0.7928 | 0.8062 | 0.7941 | 0.7910 | 0.6846 | 0.8628 | 0.7634 | 0.9233 | 0.6267 | 0.7466 | 0.8119 | 0.9514 | 0.8761 | 0.7505 | 0.6826 | 0.7149 | 0.8607 | 0.8471 | 0.8539 | | 0.7794 | 11.0 | 15983 | 0.5717 | 0.7886 | 0.7925 | 0.7895 | 0.7878 | 0.7760 | 0.6858 | 0.7281 | 0.8260 | 0.8160 | 0.8210 | 0.7737 | 0.9618 | 0.8576 | 0.7044 | 0.7056 | 0.7050 | 0.8825 | 0.7786 | 0.8273 | | 0.6935 | 12.0 | 17436 | 0.5277 | 0.8055 | 0.8136 | 0.8069 | 0.8010 | 0.9144 | 0.5938 | 0.72 | 0.7840 | 0.9201 | 0.8466 | 0.8057 | 0.9288 | 0.8629 | 0.7465 | 0.7023 | 0.7237 | 0.8174 | 0.8893 | 0.8519 | | 0.6499 | 13.0 | 18889 | 0.4573 | 0.8231 | 0.8292 | 0.8236 | 0.8246 | 0.8 | 0.7917 | 0.7958 | 0.8783 | 0.8142 | 0.8450 | 0.8249 | 0.9323 | 0.8753 | 0.7189 | 0.7697 | 0.7434 | 0.9238 | 0.8102 | 0.8633 | | 0.6982 | 14.0 | 20342 | 0.6069 | 0.7818 | 0.7949 | 0.7828 | 0.7831 | 0.6640 | 0.8785 | 0.7564 | 0.8384 | 0.8108 | 0.8244 | 0.8993 | 0.6979 | 0.7859 | 0.7540 | 0.7007 | 0.7263 | 0.8188 | 0.8260 | 0.8224 | | 0.7184 | 15.0 | 21795 | 0.6539 | 0.7625 | 0.7959 | 0.7623 | 0.7562 | 0.9402 | 0.4913 | 0.6454 | 0.7606 | 0.8993 | 0.8242 | 0.7152 | 0.9635 | 0.8210 | 0.6490 | 0.7632 | 0.7014 | 0.9144 | 0.6942 | 0.7892 | | 0.6292 | 16.0 | 23248 | 0.4354 | 0.8434 | 0.8430 | 0.8448 | 0.8427 | 0.8450 | 0.7760 | 0.8090 | 0.8464 | 0.8993 | 0.8721 | 0.8744 | 0.8941 | 0.8841 | 0.8098 | 0.7352 | 0.7707 | 0.8395 | 0.9192 | 0.8775 | | 0.6507 | 17.0 | 24701 | 0.4339 | 0.8365 | 0.8390 | 0.8373 | 0.8375 | 0.8312 | 0.7778 | 0.8036 | 0.8845 | 0.8247 | 0.8535 | 0.8746 | 0.8837 | 0.8791 | 0.7387 | 0.7812 | 0.7594 | 0.8659 | 0.9192 | 0.8917 | | 0.6504 | 18.0 | 26154 | 0.4613 | 0.8293 | 0.8384 | 0.8304 | 0.8305 | 0.7278 | 0.9097 | 0.8086 | 0.9153 | 0.7691 | 0.8358 | 0.8600 | 0.9062 | 0.8825 | 0.7696 | 0.7253 | 0.7468 | 0.9194 | 0.8418 | 0.8789 | | 0.6098 | 19.0 | 27607 | 1.0089 | 0.6833 | 0.7793 | 0.6816 | 0.6783 | 0.5381 | 0.9201 | 0.6791 | 0.9423 | 0.5104 | 0.6622 | 0.8601 | 0.8003 | 0.8291 | 0.5604 | 0.7780 | 0.6515 | 0.9956 | 0.3989 | 0.5696 | | 0.6226 | 20.0 | 29060 | 0.4602 | 0.8355 | 0.8379 | 0.8368 | 0.8355 | 0.8109 | 0.8264 | 0.8186 | 0.8356 | 0.8646 | 0.8498 | 0.8316 | 0.9514 | 0.8874 | 0.775 | 0.7138 | 0.7432 | 0.9364 | 0.8278 | 0.8787 | | 0.5477 | 21.0 | 30513 | 0.6087 | 0.7924 | 0.8108 | 0.7940 | 0.7886 | 0.7119 | 0.875 | 0.7850 | 0.9544 | 0.6181 | 0.7503 | 0.7588 | 0.9774 | 0.8543 | 0.8333 | 0.6579 | 0.7353 | 0.7957 | 0.8418 | 0.8181 | | 0.6007 | 22.0 | 31966 | 0.4432 | 0.8410 | 0.8451 | 0.8439 | 0.8371 | 0.7990 | 0.8628 | 0.8297 | 0.8508 | 0.9010 | 0.8752 | 0.8647 | 0.9323 | 0.8972 | 0.8960 | 0.5954 | 0.7154 | 0.8148 | 0.9279 | 0.8677 | | 0.6439 | 23.0 | 33419 | 0.4214 | 0.8520 | 0.8543 | 0.8531 | 0.8530 | 0.8105 | 0.8542 | 0.8318 | 0.9086 | 0.8281 | 0.8665 | 0.8625 | 0.9253 | 
0.8928 | 0.7709 | 0.7582 | 0.7645 | 0.9192 | 0.8998 | 0.9094 | | 0.6058 | 24.0 | 34872 | 0.5593 | 0.8017 | 0.8086 | 0.8033 | 0.8012 | 0.7716 | 0.7917 | 0.7815 | 0.9172 | 0.7691 | 0.8366 | 0.7411 | 0.9392 | 0.8285 | 0.7934 | 0.6694 | 0.7261 | 0.8197 | 0.8471 | 0.8332 | | 0.6746 | 25.0 | 36325 | 0.4657 | 0.8365 | 0.8385 | 0.8392 | 0.8332 | 0.8243 | 0.8229 | 0.8236 | 0.8812 | 0.8889 | 0.8850 | 0.8315 | 0.9427 | 0.8836 | 0.8555 | 0.6135 | 0.7146 | 0.8 | 0.9279 | 0.8592 | | 0.5352 | 26.0 | 37778 | 0.4285 | 0.8413 | 0.8434 | 0.8427 | 0.8417 | 0.8551 | 0.8299 | 0.8423 | 0.9049 | 0.8420 | 0.8723 | 0.8715 | 0.8594 | 0.8654 | 0.7755 | 0.7385 | 0.7565 | 0.8100 | 0.9438 | 0.8718 | | 0.6651 | 27.0 | 39231 | 0.5568 | 0.8265 | 0.8425 | 0.8298 | 0.8233 | 0.6822 | 0.9427 | 0.7915 | 0.8915 | 0.8420 | 0.8661 | 0.8678 | 0.9115 | 0.8891 | 0.8952 | 0.5477 | 0.6796 | 0.8759 | 0.9051 | 0.8902 | | 0.5999 | 28.0 | 40684 | 0.6360 | 0.7838 | 0.8009 | 0.7852 | 0.7853 | 0.6978 | 0.8819 | 0.7791 | 0.7812 | 0.9115 | 0.8413 | 0.8810 | 0.7708 | 0.8222 | 0.6776 | 0.6464 | 0.6616 | 0.9667 | 0.7153 | 0.8222 | | 0.542 | 29.0 | 42137 | 0.4588 | 0.8317 | 0.8461 | 0.8318 | 0.8354 | 0.8878 | 0.7552 | 0.8161 | 0.9062 | 0.8385 | 0.8711 | 0.8757 | 0.8194 | 0.8466 | 0.6649 | 0.8388 | 0.7418 | 0.8958 | 0.9069 | 0.9013 | | 0.4775 | 30.0 | 43590 | 0.5173 | 0.8213 | 0.8209 | 0.8234 | 0.8209 | 0.8013 | 0.8611 | 0.8301 | 0.8042 | 0.8559 | 0.8293 | 0.8725 | 0.8316 | 0.8516 | 0.7590 | 0.6579 | 0.7048 | 0.8677 | 0.9104 | 0.8885 | | 0.5762 | 31.0 | 45043 | 0.5442 | 0.8117 | 0.8230 | 0.8124 | 0.8135 | 0.8436 | 0.7587 | 0.7989 | 0.9224 | 0.7639 | 0.8357 | 0.8729 | 0.8108 | 0.8407 | 0.6825 | 0.7812 | 0.7285 | 0.7938 | 0.9473 | 0.8638 | | 0.4745 | 32.0 | 46496 | 0.4626 | 0.8410 | 0.8445 | 0.8419 | 0.8425 | 0.8029 | 0.8767 | 0.8382 | 0.8957 | 0.8351 | 0.8643 | 0.8793 | 0.8351 | 0.8566 | 0.75 | 0.7697 | 0.7597 | 0.8944 | 0.8928 | 0.8936 | | 0.4433 | 33.0 | 47949 | 0.7783 | 0.7828 | 0.8055 | 0.7841 | 0.7732 | 0.9617 | 0.4792 | 0.6396 | 0.8282 | 0.8785 | 0.8526 | 0.7624 | 0.9635 | 0.8512 | 0.7729 | 0.6941 | 0.7314 | 0.7026 | 0.9051 | 0.7911 | | 0.5862 | 34.0 | 49402 | 0.5676 | 0.8348 | 0.8412 | 0.8380 | 0.8297 | 0.8079 | 0.8472 | 0.8271 | 0.7948 | 0.8941 | 0.8415 | 0.8441 | 0.9497 | 0.8938 | 0.9151 | 0.5674 | 0.7005 | 0.8439 | 0.9315 | 0.8855 | | 0.5729 | 35.0 | 50855 | 0.4472 | 0.8389 | 0.8379 | 0.8411 | 0.8373 | 0.8478 | 0.8316 | 0.8396 | 0.8733 | 0.9097 | 0.8912 | 0.8294 | 0.8611 | 0.8450 | 0.804 | 0.6612 | 0.7256 | 0.8349 | 0.9420 | 0.8852 | | 0.5992 | 36.0 | 52308 | 0.5261 | 0.8213 | 0.8282 | 0.8227 | 0.8202 | 0.8262 | 0.7674 | 0.7957 | 0.9464 | 0.7361 | 0.8281 | 0.8117 | 0.9427 | 0.8723 | 0.7635 | 0.7220 | 0.7422 | 0.7935 | 0.9455 | 0.8629 | | 0.4596 | 37.0 | 53761 | 0.4428 | 0.8540 | 0.8540 | 0.8557 | 0.8533 | 0.8512 | 0.8142 | 0.8323 | 0.8402 | 0.9219 | 0.8791 | 0.8491 | 0.9184 | 0.8824 | 0.8308 | 0.7188 | 0.7707 | 0.8988 | 0.9051 | 0.9019 | | 0.5097 | 38.0 | 55214 | 0.4856 | 0.8337 | 0.8394 | 0.8344 | 0.8363 | 0.8370 | 0.8472 | 0.8421 | 0.8818 | 0.8681 | 0.8749 | 0.8485 | 0.8264 | 0.8373 | 0.7072 | 0.7747 | 0.7394 | 0.9223 | 0.8559 | 0.8879 | | 0.4476 | 39.0 | 56667 | 0.6009 | 0.8024 | 0.8070 | 0.8050 | 0.7994 | 0.8419 | 0.7951 | 0.8179 | 0.7377 | 0.9375 | 0.8257 | 0.8745 | 0.75 | 0.8075 | 0.7700 | 0.6003 | 0.6747 | 0.8109 | 0.9420 | 0.8715 | | 0.4583 | 40.0 | 58120 | 0.5397 | 0.8361 | 0.8430 | 0.8391 | 0.8326 | 0.7434 | 0.8854 | 0.8082 | 0.8468 | 0.8924 | 0.8690 | 0.8401 | 0.9392 | 0.8869 | 0.8914 | 0.5806 | 0.7032 | 0.8934 | 0.8981 | 0.8957 
| | 0.5016 | 41.0 | 59573 | 0.5419 | 0.8224 | 0.8274 | 0.8237 | 0.8225 | 0.7983 | 0.8385 | 0.8180 | 0.8536 | 0.8906 | 0.8717 | 0.7729 | 0.9097 | 0.8357 | 0.7768 | 0.6924 | 0.7322 | 0.9353 | 0.7873 | 0.8550 | | 0.408 | 42.0 | 61026 | 0.5450 | 0.8238 | 0.8344 | 0.8248 | 0.8244 | 0.9122 | 0.7031 | 0.7941 | 0.7483 | 0.9497 | 0.8370 | 0.8669 | 0.8368 | 0.8516 | 0.7295 | 0.7451 | 0.7372 | 0.9150 | 0.8893 | 0.9020 | | 0.4116 | 43.0 | 62479 | 0.5567 | 0.8048 | 0.8198 | 0.8053 | 0.8086 | 0.7471 | 0.8924 | 0.8133 | 0.8900 | 0.7865 | 0.8350 | 0.8526 | 0.7830 | 0.8163 | 0.6671 | 0.7615 | 0.7112 | 0.9423 | 0.8032 | 0.8672 | | 0.5416 | 44.0 | 63932 | 0.5574 | 0.8127 | 0.8267 | 0.8129 | 0.8136 | 0.8704 | 0.7812 | 0.8234 | 0.8619 | 0.8993 | 0.8802 | 0.8836 | 0.6458 | 0.7462 | 0.6557 | 0.8174 | 0.7277 | 0.8618 | 0.9209 | 0.8904 | | 0.6373 | 45.0 | 65385 | 0.5181 | 0.8258 | 0.8389 | 0.8260 | 0.8298 | 0.8659 | 0.7847 | 0.8233 | 0.9075 | 0.8351 | 0.8698 | 0.8616 | 0.8108 | 0.8354 | 0.6636 | 0.8240 | 0.7351 | 0.8957 | 0.8752 | 0.8853 | | 0.4469 | 46.0 | 66838 | 0.4808 | 0.8361 | 0.8461 | 0.8368 | 0.8395 | 0.8210 | 0.8837 | 0.8512 | 0.9304 | 0.8351 | 0.8801 | 0.8776 | 0.7969 | 0.8353 | 0.6877 | 0.7895 | 0.7351 | 0.9141 | 0.8787 | 0.8961 | | 0.5169 | 47.0 | 68291 | 0.6467 | 0.8079 | 0.8294 | 0.8081 | 0.8104 | 0.9231 | 0.6667 | 0.7742 | 0.9183 | 0.8003 | 0.8553 | 0.7641 | 0.9392 | 0.8427 | 0.6514 | 0.7961 | 0.7165 | 0.8899 | 0.8383 | 0.8633 | | 0.4898 | 48.0 | 69744 | 0.5967 | 0.8120 | 0.8268 | 0.8124 | 0.8116 | 0.9197 | 0.6163 | 0.7380 | 0.8849 | 0.8542 | 0.8693 | 0.8452 | 0.8628 | 0.8540 | 0.6736 | 0.8043 | 0.7331 | 0.8105 | 0.9244 | 0.8637 | | 0.4576 | 49.0 | 71197 | 0.5606 | 0.8234 | 0.8350 | 0.8237 | 0.8267 | 0.8728 | 0.7743 | 0.8206 | 0.8928 | 0.8819 | 0.8873 | 0.8039 | 0.8681 | 0.8347 | 0.6770 | 0.7928 | 0.7303 | 0.9287 | 0.8014 | 0.8604 | | 0.455 | 50.0 | 72650 | 0.6599 | 0.8021 | 0.8166 | 0.8048 | 0.8014 | 0.6590 | 0.9462 | 0.7769 | 0.8919 | 0.8021 | 0.8446 | 0.8615 | 0.8420 | 0.8516 | 0.7941 | 0.5707 | 0.6641 | 0.8768 | 0.8629 | 0.8698 | | 0.4791 | 51.0 | 74103 | 0.4719 | 0.8334 | 0.8348 | 0.8345 | 0.8344 | 0.8217 | 0.8403 | 0.8309 | 0.875 | 0.875 | 0.875 | 0.8690 | 0.8177 | 0.8426 | 0.7422 | 0.7434 | 0.7428 | 0.8659 | 0.8963 | 0.8808 | | 0.4923 | 52.0 | 75556 | 0.5172 | 0.8289 | 0.8339 | 0.8302 | 0.8301 | 0.7482 | 0.9028 | 0.8183 | 0.8667 | 0.8351 | 0.8506 | 0.8785 | 0.8281 | 0.8525 | 0.7680 | 0.7188 | 0.7426 | 0.9079 | 0.8664 | 0.8867 | | 0.3447 | 53.0 | 77009 | 0.6166 | 0.8286 | 0.8385 | 0.8297 | 0.8266 | 0.9570 | 0.6562 | 0.7786 | 0.7920 | 0.9583 | 0.8672 | 0.8344 | 0.8837 | 0.8583 | 0.7533 | 0.7434 | 0.7483 | 0.8557 | 0.9069 | 0.8805 | | 0.3916 | 54.0 | 78462 | 0.5289 | 0.8320 | 0.8399 | 0.8328 | 0.8343 | 0.7988 | 0.8889 | 0.8414 | 0.9418 | 0.7865 | 0.8571 | 0.8444 | 0.8385 | 0.8415 | 0.7152 | 0.7681 | 0.7407 | 0.8996 | 0.8822 | 0.8909 | | 0.4174 | 55.0 | 79915 | 0.8007 | 0.7886 | 0.8067 | 0.7912 | 0.7883 | 0.6498 | 0.9340 | 0.7664 | 0.9432 | 0.7205 | 0.8169 | 0.8213 | 0.8854 | 0.8521 | 0.7446 | 0.5707 | 0.6462 | 0.8745 | 0.8453 | 0.8597 | | 0.3712 | 56.0 | 81368 | 0.6271 | 0.8138 | 0.8200 | 0.8151 | 0.8137 | 0.8671 | 0.7361 | 0.7962 | 0.8921 | 0.8472 | 0.8691 | 0.8369 | 0.8194 | 0.8281 | 0.7608 | 0.7220 | 0.7409 | 0.7431 | 0.9508 | 0.8342 | | 0.3353 | 57.0 | 82821 | 0.8486 | 0.7718 | 0.7907 | 0.7732 | 0.7700 | 0.8481 | 0.6493 | 0.7355 | 0.9196 | 0.7153 | 0.8047 | 0.6983 | 0.9444 | 0.8030 | 0.7834 | 0.6661 | 0.72 | 0.7042 | 0.8910 | 0.7867 | | 0.4091 | 58.0 | 84274 | 0.7401 | 0.8069 | 0.8199 | 
0.8107 | 0.7987 | 0.8852 | 0.75 | 0.8120 | 0.7347 | 0.9566 | 0.8311 | 0.8365 | 0.9062 | 0.87 | 0.8721 | 0.4934 | 0.6303 | 0.7711 | 0.9473 | 0.8502 | | 0.4316 | 59.0 | 85727 | 0.6575 | 0.7976 | 0.8139 | 0.7972 | 0.8009 | 0.8346 | 0.7708 | 0.8014 | 0.8569 | 0.8837 | 0.8701 | 0.8252 | 0.7951 | 0.8099 | 0.6391 | 0.8125 | 0.7154 | 0.9135 | 0.7241 | 0.8078 | | 0.352 | 60.0 | 87180 | 0.7022 | 0.8138 | 0.8279 | 0.8144 | 0.8153 | 0.8683 | 0.7552 | 0.8078 | 0.9367 | 0.7188 | 0.8134 | 0.8586 | 0.8542 | 0.8564 | 0.6737 | 0.7878 | 0.7263 | 0.8024 | 0.9561 | 0.8725 | | 0.3877 | 61.0 | 88633 | 0.6978 | 0.8179 | 0.8279 | 0.8210 | 0.8142 | 0.7105 | 0.9288 | 0.8051 | 0.8995 | 0.8542 | 0.8762 | 0.8232 | 0.8889 | 0.8548 | 0.8608 | 0.5493 | 0.6707 | 0.8454 | 0.8840 | 0.8643 | | 0.4239 | 62.0 | 90086 | 0.5458 | 0.8272 | 0.8368 | 0.8274 | 0.8294 | 0.8318 | 0.7986 | 0.8149 | 0.8694 | 0.8785 | 0.8739 | 0.8176 | 0.8872 | 0.8510 | 0.7086 | 0.7961 | 0.7498 | 0.9567 | 0.7768 | 0.8574 | | 0.4038 | 63.0 | 91539 | 0.5327 | 0.8327 | 0.8416 | 0.8337 | 0.8345 | 0.7674 | 0.9167 | 0.8354 | 0.9470 | 0.7760 | 0.8531 | 0.8322 | 0.8524 | 0.8422 | 0.7443 | 0.7516 | 0.7480 | 0.9168 | 0.8717 | 0.8937 | | 0.3743 | 64.0 | 92992 | 0.5202 | 0.8413 | 0.8468 | 0.8422 | 0.8436 | 0.8630 | 0.8420 | 0.8524 | 0.9331 | 0.8472 | 0.8881 | 0.8595 | 0.8281 | 0.8435 | 0.7149 | 0.7796 | 0.7459 | 0.8638 | 0.9139 | 0.8881 | | 0.3162 | 65.0 | 94445 | 0.7327 | 0.8272 | 0.8298 | 0.8287 | 0.8243 | 0.8787 | 0.6788 | 0.7659 | 0.8429 | 0.9132 | 0.8767 | 0.8047 | 0.9444 | 0.8690 | 0.8007 | 0.7072 | 0.7511 | 0.8218 | 0.8998 | 0.8591 | | 0.3394 | 66.0 | 95898 | 0.5431 | 0.8317 | 0.8341 | 0.8328 | 0.8331 | 0.8442 | 0.8090 | 0.8262 | 0.8797 | 0.8889 | 0.8843 | 0.8616 | 0.8212 | 0.8409 | 0.7172 | 0.7467 | 0.7317 | 0.8676 | 0.8981 | 0.8826 | | 0.4806 | 67.0 | 97351 | 0.5874 | 0.8210 | 0.8339 | 0.8212 | 0.8231 | 0.7866 | 0.8576 | 0.8206 | 0.8720 | 0.875 | 0.8735 | 0.8423 | 0.8715 | 0.8567 | 0.6971 | 0.7796 | 0.7360 | 0.9716 | 0.7223 | 0.8286 | | 0.4937 | 68.0 | 98804 | 0.5646 | 0.8182 | 0.8194 | 0.8203 | 0.8169 | 0.7892 | 0.8646 | 0.8252 | 0.7918 | 0.9045 | 0.8444 | 0.8669 | 0.7917 | 0.8276 | 0.7923 | 0.6464 | 0.7120 | 0.8569 | 0.8946 | 0.8753 | | 0.2899 | 69.0 | 100257 | 0.6118 | 0.8341 | 0.8351 | 0.8359 | 0.8334 | 0.7955 | 0.8576 | 0.8254 | 0.9006 | 0.8021 | 0.8485 | 0.8497 | 0.9028 | 0.8754 | 0.7872 | 0.6875 | 0.7340 | 0.8424 | 0.9297 | 0.8839 | | 0.2905 | 70.0 | 101710 | 0.6764 | 0.8134 | 0.8268 | 0.8145 | 0.8091 | 0.9659 | 0.5903 | 0.7328 | 0.8165 | 0.9271 | 0.8683 | 0.7765 | 0.9288 | 0.8458 | 0.7646 | 0.7319 | 0.7479 | 0.8105 | 0.8946 | 0.8505 | | 0.3629 | 71.0 | 103163 | 0.5410 | 0.8317 | 0.8309 | 0.8341 | 0.8301 | 0.8035 | 0.8733 | 0.8369 | 0.8660 | 0.9201 | 0.8923 | 0.8074 | 0.8663 | 0.8358 | 0.7860 | 0.6283 | 0.6984 | 0.8917 | 0.8822 | 0.8869 | | 0.4107 | 72.0 | 104616 | 0.6758 | 0.8086 | 0.8358 | 0.8082 | 0.8141 | 0.8083 | 0.8490 | 0.8281 | 0.9246 | 0.8733 | 0.8982 | 0.8521 | 0.7899 | 0.8198 | 0.6182 | 0.8257 | 0.7070 | 0.9756 | 0.7030 | 0.8172 | | 0.4228 | 73.0 | 106069 | 0.6898 | 0.8041 | 0.8110 | 0.8062 | 0.8038 | 0.7178 | 0.9184 | 0.8058 | 0.8481 | 0.9115 | 0.8787 | 0.8852 | 0.7361 | 0.8038 | 0.7084 | 0.6234 | 0.6632 | 0.8953 | 0.8418 | 0.8678 | | 0.4379 | 74.0 | 107522 | 0.6787 | 0.8186 | 0.8245 | 0.8209 | 0.8176 | 0.7285 | 0.9132 | 0.8105 | 0.9030 | 0.8247 | 0.8621 | 0.8707 | 0.7951 | 0.8312 | 0.7930 | 0.6365 | 0.7062 | 0.8274 | 0.9350 | 0.8779 | | 0.4287 | 75.0 | 108975 | 0.7383 | 0.8138 | 0.8175 | 0.8170 | 0.8089 | 0.8211 | 0.8524 | 0.8365 | 0.7476 
| 0.9566 | 0.8393 | 0.8509 | 0.8420 | 0.8464 | 0.8184 | 0.5411 | 0.6515 | 0.8495 | 0.8928 | 0.8706 | | 0.3734 | 76.0 | 110428 | 0.6164 | 0.8258 | 0.8353 | 0.8265 | 0.8283 | 0.8884 | 0.7326 | 0.8030 | 0.9057 | 0.8507 | 0.8774 | 0.8163 | 0.8715 | 0.8430 | 0.6805 | 0.7812 | 0.7274 | 0.8854 | 0.8963 | 0.8908 | | 0.3053 | 77.0 | 111881 | 0.6833 | 0.8127 | 0.8267 | 0.8137 | 0.8128 | 0.7661 | 0.8872 | 0.8222 | 0.9717 | 0.6562 | 0.7834 | 0.8270 | 0.8715 | 0.8487 | 0.7028 | 0.7467 | 0.7241 | 0.8658 | 0.9069 | 0.8858 | | 0.4056 | 78.0 | 113334 | 0.7220 | 0.8059 | 0.8178 | 0.8070 | 0.8036 | 0.9381 | 0.6319 | 0.7552 | 0.7586 | 0.9601 | 0.8475 | 0.8474 | 0.8194 | 0.8332 | 0.7285 | 0.7237 | 0.7261 | 0.8166 | 0.8998 | 0.8562 | | 0.2861 | 79.0 | 114787 | 0.5705 | 0.8403 | 0.8435 | 0.8416 | 0.8413 | 0.7876 | 0.9010 | 0.8405 | 0.8973 | 0.8194 | 0.8566 | 0.8535 | 0.8698 | 0.8616 | 0.7625 | 0.7286 | 0.7452 | 0.9167 | 0.8893 | 0.9028 | | 0.4598 | 80.0 | 116240 | 0.5696 | 0.8386 | 0.8384 | 0.8405 | 0.8380 | 0.8630 | 0.8420 | 0.8524 | 0.8858 | 0.8889 | 0.8873 | 0.8479 | 0.8420 | 0.8449 | 0.7759 | 0.6891 | 0.7300 | 0.8193 | 0.9402 | 0.8756 | | 0.2818 | 81.0 | 117693 | 0.5117 | 0.8379 | 0.8401 | 0.8391 | 0.8383 | 0.9008 | 0.7882 | 0.8407 | 0.8859 | 0.9028 | 0.8942 | 0.7981 | 0.8715 | 0.8332 | 0.7705 | 0.7401 | 0.7550 | 0.8453 | 0.8928 | 0.8684 | | 0.3172 | 82.0 | 119146 | 0.6467 | 0.8172 | 0.8296 | 0.8176 | 0.8197 | 0.9146 | 0.7066 | 0.7973 | 0.8741 | 0.8559 | 0.8649 | 0.8235 | 0.8507 | 0.8369 | 0.6676 | 0.7961 | 0.7262 | 0.8681 | 0.8787 | 0.8734 | | 0.2857 | 83.0 | 120599 | 0.5935 | 0.8320 | 0.8378 | 0.8340 | 0.8308 | 0.7820 | 0.8906 | 0.8328 | 0.9409 | 0.7743 | 0.8495 | 0.8291 | 0.9010 | 0.8636 | 0.8276 | 0.6711 | 0.7411 | 0.8095 | 0.9332 | 0.8669 | | 0.4051 | 84.0 | 122052 | 0.6718 | 0.8151 | 0.8209 | 0.8168 | 0.8151 | 0.7852 | 0.8316 | 0.8078 | 0.9283 | 0.7413 | 0.8243 | 0.8616 | 0.8646 | 0.8631 | 0.7366 | 0.6990 | 0.7173 | 0.7926 | 0.9473 | 0.8631 | | 0.5564 | 85.0 | 123505 | 0.6895 | 0.8024 | 0.8172 | 0.8040 | 0.8012 | 0.7017 | 0.9149 | 0.7943 | 0.9540 | 0.6476 | 0.7715 | 0.8133 | 0.8698 | 0.8406 | 0.7615 | 0.6826 | 0.7199 | 0.8555 | 0.9051 | 0.8796 | | 0.304 | 86.0 | 124958 | 0.8515 | 0.7897 | 0.8019 | 0.7919 | 0.7857 | 0.9134 | 0.6406 | 0.7531 | 0.7182 | 0.9514 | 0.8185 | 0.7644 | 0.8958 | 0.8249 | 0.7752 | 0.6069 | 0.6808 | 0.8382 | 0.8647 | 0.8512 | | 0.3907 | 87.0 | 126411 | 1.1472 | 0.7570 | 0.7973 | 0.7568 | 0.7534 | 0.9427 | 0.4566 | 0.6152 | 0.9198 | 0.7569 | 0.8305 | 0.7908 | 0.8663 | 0.8268 | 0.5835 | 0.8043 | 0.6763 | 0.7496 | 0.8998 | 0.8179 | | 0.3848 | 88.0 | 127864 | 0.8595 | 0.7959 | 0.8235 | 0.7956 | 0.7967 | 0.9459 | 0.5764 | 0.7163 | 0.8328 | 0.9253 | 0.8766 | 0.8234 | 0.8420 | 0.8326 | 0.6170 | 0.8240 | 0.7056 | 0.8986 | 0.8102 | 0.8521 | | 0.3333 | 89.0 | 129317 | 0.7752 | 0.8028 | 0.8178 | 0.8053 | 0.8003 | 0.8670 | 0.6788 | 0.7614 | 0.8944 | 0.8819 | 0.8881 | 0.8381 | 0.8628 | 0.8503 | 0.8162 | 0.6135 | 0.7005 | 0.6734 | 0.9895 | 0.8014 | | 0.3305 | 90.0 | 130770 | 0.7524 | 0.7976 | 0.8101 | 0.7986 | 0.7987 | 0.8301 | 0.7378 | 0.7812 | 0.7148 | 0.9444 | 0.8138 | 0.8388 | 0.8490 | 0.8438 | 0.7169 | 0.6957 | 0.7062 | 0.9499 | 0.7663 | 0.8482 | | 0.3058 | 91.0 | 132223 | 0.7231 | 0.8234 | 0.8256 | 0.8246 | 0.8242 | 0.8617 | 0.7899 | 0.8243 | 0.8177 | 0.8958 | 0.8550 | 0.8585 | 0.8108 | 0.8339 | 0.7292 | 0.7352 | 0.7322 | 0.8608 | 0.8910 | 0.8756 | | 0.2857 | 92.0 | 133676 | 0.6262 | 0.8227 | 0.8272 | 0.8241 | 0.8232 | 0.8810 | 0.7708 | 0.8222 | 0.8868 | 0.8837 | 0.8852 | 0.8563 | 0.7865 
| 0.8199 | 0.7182 | 0.7253 | 0.7218 | 0.7939 | 0.9543 | 0.8667 | | 0.3236 | 93.0 | 135129 | 0.6963 | 0.8014 | 0.8212 | 0.8016 | 0.8051 | 0.7284 | 0.9219 | 0.8138 | 0.9145 | 0.7795 | 0.8416 | 0.8615 | 0.7778 | 0.8175 | 0.6643 | 0.7681 | 0.7124 | 0.9372 | 0.7610 | 0.8400 | | 0.3231 | 94.0 | 136582 | 0.8142 | 0.7948 | 0.8094 | 0.7966 | 0.7944 | 0.7996 | 0.7691 | 0.7841 | 0.9622 | 0.7066 | 0.8148 | 0.8297 | 0.8628 | 0.8460 | 0.7495 | 0.6743 | 0.7100 | 0.7059 | 0.9701 | 0.8172 | | 0.434 | 95.0 | 138035 | 0.6152 | 0.8241 | 0.8246 | 0.8265 | 0.8227 | 0.7687 | 0.9115 | 0.8340 | 0.8569 | 0.8837 | 0.8701 | 0.8485 | 0.8559 | 0.8522 | 0.7678 | 0.6201 | 0.6861 | 0.8813 | 0.8612 | 0.8711 | | 0.2914 | 96.0 | 139488 | 0.6123 | 0.8255 | 0.8282 | 0.8268 | 0.8262 | 0.7705 | 0.8802 | 0.8217 | 0.8643 | 0.8958 | 0.8798 | 0.8507 | 0.8212 | 0.8357 | 0.7566 | 0.7105 | 0.7328 | 0.8987 | 0.8260 | 0.8608 | | 0.5311 | 97.0 | 140941 | 0.6176 | 0.8389 | 0.8486 | 0.8394 | 0.8424 | 0.8904 | 0.7899 | 0.8372 | 0.9052 | 0.8785 | 0.8916 | 0.8499 | 0.8455 | 0.8477 | 0.6835 | 0.8026 | 0.7383 | 0.9142 | 0.8805 | 0.8970 | | 0.335 | 98.0 | 142394 | 0.6316 | 0.8258 | 0.8374 | 0.8264 | 0.8293 | 0.8675 | 0.8299 | 0.8483 | 0.9259 | 0.7812 | 0.8475 | 0.8545 | 0.8160 | 0.8348 | 0.6671 | 0.7944 | 0.7252 | 0.8721 | 0.9104 | 0.8908 | | 0.2365 | 99.0 | 143847 | 0.6399 | 0.8131 | 0.8284 | 0.8132 | 0.8170 | 0.8489 | 0.8385 | 0.8437 | 0.8905 | 0.8611 | 0.8756 | 0.7934 | 0.8333 | 0.8129 | 0.6566 | 0.7862 | 0.7156 | 0.9529 | 0.7469 | 0.8374 | | 0.4297 | 100.0 | 145300 | 0.7762 | 0.8059 | 0.8219 | 0.8061 | 0.8072 | 0.9204 | 0.6823 | 0.7836 | 0.8418 | 0.8681 | 0.8547 | 0.7765 | 0.9288 | 0.8458 | 0.6695 | 0.7796 | 0.7204 | 0.9014 | 0.7715 | 0.8314 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
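The aggregate Precision/Recall/F1 columns in the table above match the macro average of the per-class values, and both can be reproduced from raw predictions with scikit-learn. A minimal sketch; `y_true`/`y_pred` are placeholders and the class names come from this model's label set:

```python
from sklearn.metrics import precision_recall_fscore_support

class_names = ["durariadorio_32x32", "mole_32x32", "quebrado_32x32",
               "riadorio_32x32", "riofechado_32x32"]

# Placeholders for evaluation-set labels and model predictions
y_true = [0, 1, 2, 3, 4, 0, 3]
y_pred = [0, 1, 2, 4, 4, 1, 3]

# Per-class rows, matching the "Precision/Recall/F1 <class>" columns
p, r, f1, _ = precision_recall_fscore_support(
    y_true, y_pred, labels=list(range(len(class_names))), zero_division=0)
for name, pi, ri, fi in zip(class_names, p, r, f1):
    print(f"{name}: precision={pi:.4f} recall={ri:.4f} f1={fi:.4f}")

# Aggregate columns: the macro average of the per-class scores
pm, rm, fm, _ = precision_recall_fscore_support(
    y_true, y_pred, average="macro", zero_division=0)
print(f"macro: precision={pm:.4f} recall={rm:.4f} f1={fm:.4f}")
```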
[ "durariadorio_32x32", "mole_32x32", "quebrado_32x32", "riadorio_32x32", "riofechado_32x32" ]
Master-Rapha7/mobilenetv2-typecoffee-6
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # mobilenetv2-typecoffee-6 This model is a fine-tuned version of [google/mobilenet_v2_1.0_224](https://huggingface.co/google/mobilenet_v2_1.0_224) on the Master-Rapha7/TypeCoffee_16x16 dataset. It achieves the following results on the evaluation set: - Loss: 0.9977 - Accuracy: 0.6484 - Precision: 0.6521 - Recall: 0.6499 - F1: 0.6481 - Precision Durariadorio 16x16: 0.6117 - Recall Durariadorio 16x16: 0.6476 - F1 Durariadorio 16x16: 0.6291 - Precision Mole 16x16: 0.6444 - Recall Mole 16x16: 0.7439 - F1 Mole 16x16: 0.6906 - Precision Quebrado 16x16: 0.7168 - Recall Quebrado 16x16: 0.7635 - F1 Quebrado 16x16: 0.7394 - Precision Riadorio 16x16: 0.5405 - Recall Riadorio 16x16: 0.5049 - F1 Riadorio 16x16: 0.5221 - Precision Riofechado 16x16: 0.7474 - Recall Riofechado 16x16: 0.5896 - F1 Riofechado 16x16: 0.6591 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - num_epochs: 100.0 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | Recall | F1 | Precision Durariadorio 16x16 | Recall Durariadorio 16x16 | F1 Durariadorio 16x16 | Precision Mole 16x16 | Recall Mole 16x16 | F1 Mole 16x16 | Precision Quebrado 16x16 | Recall Quebrado 16x16 | F1 Quebrado 16x16 | Precision Riadorio 16x16 | Recall Riadorio 16x16 | F1 Riadorio 16x16 | Precision Riofechado 16x16 | Recall Riofechado 16x16 | F1 Riofechado 16x16 | |:-------------:|:-----:|:------:|:---------------:|:--------:|:---------:|:------:|:------:|:----------------------------:|:-------------------------:|:---------------------:|:--------------------:|:-----------------:|:-------------:|:------------------------:|:---------------------:|:-----------------:|:------------------------:|:---------------------:|:-----------------:|:--------------------------:|:-----------------------:|:-------------------:| | 1.2625 | 1.0 | 5812 | 1.6692 | 0.4037 | 0.4782 | 0.4022 | 0.3558 | 0.5077 | 0.2014 | 0.2884 | 0.7417 | 0.1159 | 0.2005 | 0.4020 | 0.9197 | 0.5595 | 0.3520 | 0.5103 | 0.4166 | 0.3875 | 0.2638 | 0.3139 | | 1.2046 | 2.0 | 11624 | 1.4500 | 0.4525 | 0.5101 | 0.4517 | 0.4112 | 0.5068 | 0.3728 | 0.4296 | 0.7453 | 0.1029 | 0.1808 | 0.4531 | 0.9284 | 0.6090 | 0.3914 | 0.5016 | 0.4397 | 0.4540 | 0.3529 | 0.3971 | | 1.1274 | 3.0 | 17436 | 1.2716 | 0.5298 | 0.5536 | 0.5309 | 0.5182 | 0.5601 | 0.3924 | 0.4615 | 0.7089 | 0.4227 | 0.5296 | 0.4842 | 0.9106 | 0.6322 | 0.5134 | 0.4260 | 0.4656 | 0.5015 | 0.5026 | 0.5021 | | 1.138 | 4.0 | 23248 | 1.1629 | 0.5511 | 0.6036 | 0.5514 | 0.5411 | 0.4085 | 0.6619 | 0.5052 | 0.6667 | 0.5130 | 0.5798 | 0.6363 | 0.8286 | 0.7198 | 0.4929 | 0.4716 | 0.4820 | 0.8137 | 0.2818 | 0.4187 | | 1.1274 | 5.0 | 29060 | 1.8812 | 0.4208 | 0.5119 | 0.4213 | 0.3963 | 0.5736 | 0.3433 | 0.4295 | 0.6998 | 0.2478 | 0.3660 | 0.3307 | 0.9709 | 0.4933 | 0.5429 | 0.3302 | 0.4106 | 0.4125 | 0.2142 | 0.2820 | | 1.1279 | 6.0 | 34872 | 1.1897 | 0.5340 | 0.6094 | 0.5350 | 0.5403 | 
0.3720 | 0.8077 | 0.5094 | 0.7060 | 0.4627 | 0.5590 | 0.7809 | 0.5647 | 0.6554 | 0.4942 | 0.4182 | 0.4530 | 0.6939 | 0.4219 | 0.5247 | | 1.058 | 7.0 | 40684 | 1.5140 | 0.5188 | 0.5621 | 0.5174 | 0.4943 | 0.7164 | 0.2687 | 0.3908 | 0.6128 | 0.2878 | 0.3916 | 0.5492 | 0.9006 | 0.6823 | 0.4179 | 0.6427 | 0.5065 | 0.5144 | 0.4873 | 0.5005 | | 1.0996 | 8.0 | 46496 | 1.2113 | 0.5762 | 0.6049 | 0.5799 | 0.5530 | 0.5131 | 0.5456 | 0.5288 | 0.4731 | 0.8624 | 0.6110 | 0.7035 | 0.8589 | 0.7735 | 0.5741 | 0.2134 | 0.3112 | 0.7610 | 0.4192 | 0.5406 | | 1.0587 | 9.0 | 52308 | 2.0438 | 0.4199 | 0.4449 | 0.4234 | 0.3784 | 0.4612 | 0.2452 | 0.3202 | 0.5010 | 0.2144 | 0.3003 | 0.3869 | 0.8563 | 0.5330 | 0.4523 | 0.1501 | 0.2254 | 0.4231 | 0.6510 | 0.5129 | | 0.9988 | 10.0 | 58120 | 1.2635 | 0.5622 | 0.6269 | 0.5646 | 0.5670 | 0.3884 | 0.8273 | 0.5286 | 0.68 | 0.4353 | 0.5308 | 0.8403 | 0.6302 | 0.7202 | 0.5170 | 0.3503 | 0.4176 | 0.7087 | 0.5799 | 0.6379 | | 1.0754 | 11.0 | 63932 | 1.2551 | 0.5402 | 0.5648 | 0.5416 | 0.5280 | 0.6612 | 0.3668 | 0.4718 | 0.6002 | 0.5877 | 0.5939 | 0.4749 | 0.9080 | 0.6236 | 0.4877 | 0.3980 | 0.4383 | 0.6001 | 0.4473 | 0.5126 | | 0.9792 | 12.0 | 69744 | 1.1130 | 0.5909 | 0.6336 | 0.5910 | 0.5967 | 0.5380 | 0.6450 | 0.5867 | 0.6287 | 0.7018 | 0.6632 | 0.8552 | 0.4922 | 0.6248 | 0.4332 | 0.5773 | 0.4950 | 0.7130 | 0.5386 | 0.6137 | | 0.9902 | 13.0 | 75556 | 1.7618 | 0.5069 | 0.5646 | 0.5062 | 0.4692 | 0.6817 | 0.1897 | 0.2968 | 0.7108 | 0.2305 | 0.3481 | 0.5603 | 0.9193 | 0.6963 | 0.4503 | 0.5851 | 0.5089 | 0.4196 | 0.6067 | 0.4961 | | 1.0104 | 14.0 | 81368 | 1.2619 | 0.5685 | 0.5930 | 0.5680 | 0.5674 | 0.5790 | 0.4627 | 0.5144 | 0.6687 | 0.3837 | 0.4876 | 0.7634 | 0.7548 | 0.7591 | 0.4355 | 0.6197 | 0.5115 | 0.5186 | 0.6194 | 0.5645 | | 1.0061 | 15.0 | 87180 | 1.2753 | 0.5459 | 0.6480 | 0.5439 | 0.5399 | 0.4603 | 0.7070 | 0.5576 | 0.7841 | 0.2144 | 0.3367 | 0.8868 | 0.5851 | 0.7050 | 0.4004 | 0.7109 | 0.5123 | 0.7084 | 0.5022 | 0.5877 | | 1.0244 | 16.0 | 92992 | 1.1749 | 0.5976 | 0.6072 | 0.6005 | 0.5860 | 0.6103 | 0.4779 | 0.5360 | 0.6754 | 0.5799 | 0.6240 | 0.5897 | 0.8772 | 0.7053 | 0.6251 | 0.3565 | 0.4540 | 0.5354 | 0.7112 | 0.6109 | | 0.9428 | 17.0 | 98804 | 1.5114 | 0.5323 | 0.5746 | 0.5342 | 0.5212 | 0.5866 | 0.4544 | 0.5121 | 0.7265 | 0.3863 | 0.5044 | 0.4402 | 0.9384 | 0.5993 | 0.5061 | 0.3557 | 0.4178 | 0.6135 | 0.5364 | 0.5724 | | 1.01 | 18.0 | 104616 | 1.2251 | 0.6045 | 0.6491 | 0.6042 | 0.5962 | 0.7151 | 0.3954 | 0.5092 | 0.7972 | 0.3889 | 0.5228 | 0.7216 | 0.8359 | 0.7746 | 0.4595 | 0.6641 | 0.5431 | 0.5520 | 0.7366 | 0.6311 | | 0.9718 | 19.0 | 110428 | 1.5229 | 0.5230 | 0.5801 | 0.5265 | 0.4906 | 0.5040 | 0.5972 | 0.5467 | 0.8532 | 0.2144 | 0.3427 | 0.5845 | 0.8724 | 0.7000 | 0.5310 | 0.2397 | 0.3303 | 0.4277 | 0.7090 | 0.5335 | | 1.0278 | 20.0 | 116240 | 1.8706 | 0.4949 | 0.6150 | 0.4966 | 0.4757 | 0.4317 | 0.6020 | 0.5028 | 0.8457 | 0.1380 | 0.2373 | 0.8845 | 0.5152 | 0.6511 | 0.5237 | 0.4091 | 0.4594 | 0.3893 | 0.8187 | 0.5277 | | 0.9218 | 21.0 | 122052 | 2.4841 | 0.4389 | 0.4934 | 0.4423 | 0.3747 | 0.4871 | 0.2452 | 0.3262 | 0.3765 | 0.7951 | 0.5110 | 0.4594 | 0.9084 | 0.6102 | 0.4320 | 0.0822 | 0.1382 | 0.7123 | 0.1804 | 0.2879 | | 0.9428 | 22.0 | 127864 | 1.3792 | 0.5750 | 0.6286 | 0.5790 | 0.5715 | 0.5966 | 0.5521 | 0.5735 | 0.7476 | 0.5039 | 0.6020 | 0.8167 | 0.6710 | 0.7367 | 0.5645 | 0.2841 | 0.3780 | 0.4176 | 0.8837 | 0.5672 | | 0.9655 | 23.0 | 133676 | 1.0168 | 0.6582 | 0.6850 | 0.6617 | 0.6480 | 0.5303 | 0.7409 | 0.6181 | 0.6220 | 0.7769 | 0.6909 | 
0.7292 | 0.8542 | 0.7867 | 0.7668 | 0.3339 | 0.4652 | 0.7770 | 0.6027 | 0.6789 | | 0.9555 | 24.0 | 139488 | 1.3301 | 0.5775 | 0.6266 | 0.5812 | 0.5469 | 0.7714 | 0.2344 | 0.3595 | 0.5999 | 0.6918 | 0.6426 | 0.6177 | 0.8485 | 0.7149 | 0.6728 | 0.3018 | 0.4167 | 0.4712 | 0.8292 | 0.6009 | | 0.9223 | 25.0 | 145300 | 0.9977 | 0.6484 | 0.6521 | 0.6499 | 0.6481 | 0.6117 | 0.6476 | 0.6291 | 0.6444 | 0.7439 | 0.6906 | 0.7168 | 0.7635 | 0.7394 | 0.5405 | 0.5049 | 0.5221 | 0.7474 | 0.5896 | 0.6591 | | 0.9173 | 26.0 | 151112 | 1.1396 | 0.6089 | 0.6132 | 0.6098 | 0.6064 | 0.6331 | 0.4427 | 0.5211 | 0.6615 | 0.6862 | 0.6736 | 0.6317 | 0.7743 | 0.6958 | 0.4870 | 0.5304 | 0.5078 | 0.6527 | 0.6155 | 0.6335 | | 0.9415 | 27.0 | 156924 | 2.3761 | 0.4914 | 0.5752 | 0.4919 | 0.4544 | 0.3927 | 0.6246 | 0.4822 | 0.8830 | 0.0720 | 0.1332 | 0.5918 | 0.8173 | 0.6865 | 0.4004 | 0.4498 | 0.4237 | 0.6083 | 0.4956 | 0.5462 | | 0.8774 | 28.0 | 162736 | 2.6208 | 0.4556 | 0.4789 | 0.4593 | 0.4250 | 0.6447 | 0.2331 | 0.3424 | 0.4919 | 0.4631 | 0.4771 | 0.4246 | 0.8030 | 0.5555 | 0.3879 | 0.1587 | 0.2253 | 0.4452 | 0.6387 | 0.5247 | | 0.9192 | 29.0 | 168548 | 1.4115 | 0.5582 | 0.6661 | 0.5546 | 0.5623 | 0.5501 | 0.5786 | 0.5640 | 0.7143 | 0.3971 | 0.5105 | 0.8507 | 0.5786 | 0.6887 | 0.3844 | 0.8392 | 0.5273 | 0.8309 | 0.3797 | 0.5212 | | 0.9385 | 30.0 | 174360 | 1.7991 | 0.5242 | 0.6013 | 0.5258 | 0.4951 | 0.8682 | 0.1801 | 0.2983 | 0.6005 | 0.6471 | 0.6229 | 0.4334 | 0.9523 | 0.5957 | 0.5297 | 0.3701 | 0.4357 | 0.5747 | 0.4794 | 0.5227 | | 0.8657 | 31.0 | 180172 | 1.5598 | 0.5604 | 0.6076 | 0.5600 | 0.5487 | 0.7050 | 0.1940 | 0.3043 | 0.5674 | 0.6502 | 0.6060 | 0.7655 | 0.7001 | 0.7314 | 0.3876 | 0.6147 | 0.4754 | 0.6124 | 0.6409 | 0.6263 | | 0.9113 | 32.0 | 185984 | 1.5645 | 0.5471 | 0.5585 | 0.5502 | 0.5393 | 0.5583 | 0.4154 | 0.4764 | 0.6045 | 0.5651 | 0.5841 | 0.6806 | 0.6910 | 0.6858 | 0.5007 | 0.3121 | 0.3845 | 0.4483 | 0.7673 | 0.5660 | | 0.7984 | 33.0 | 191796 | 1.9436 | 0.5234 | 0.5926 | 0.5261 | 0.4854 | 0.7592 | 0.1437 | 0.2416 | 0.7091 | 0.4783 | 0.5713 | 0.4905 | 0.9327 | 0.6429 | 0.5586 | 0.3273 | 0.4128 | 0.4456 | 0.7485 | 0.5587 | | 0.8127 | 34.0 | 197608 | 1.5944 | 0.5472 | 0.6128 | 0.5480 | 0.5344 | 0.6432 | 0.4327 | 0.5174 | 0.4922 | 0.7852 | 0.6051 | 0.9132 | 0.2878 | 0.4376 | 0.4700 | 0.5160 | 0.4920 | 0.5452 | 0.7182 | 0.6198 | | 0.8669 | 35.0 | 203420 | 1.3820 | 0.6149 | 0.6440 | 0.6154 | 0.6036 | 0.6784 | 0.5100 | 0.5823 | 0.7929 | 0.3607 | 0.4958 | 0.5846 | 0.8984 | 0.7083 | 0.5163 | 0.5876 | 0.5496 | 0.6478 | 0.7204 | 0.6822 | | 0.8443 | 36.0 | 209232 | 1.5135 | 0.5782 | 0.5985 | 0.5781 | 0.5678 | 0.6336 | 0.4414 | 0.5203 | 0.5498 | 0.7431 | 0.6320 | 0.648 | 0.8086 | 0.7194 | 0.4598 | 0.5456 | 0.4991 | 0.7014 | 0.3516 | 0.4684 | | 0.8108 | 37.0 | 215044 | 1.4274 | 0.6031 | 0.6483 | 0.6056 | 0.6011 | 0.5206 | 0.7300 | 0.6078 | 0.6865 | 0.5920 | 0.6357 | 0.8774 | 0.5 | 0.6370 | 0.6460 | 0.4120 | 0.5031 | 0.5110 | 0.7941 | 0.6219 | | 0.8155 | 38.0 | 220856 | 2.1082 | 0.5718 | 0.5900 | 0.5727 | 0.5611 | 0.5689 | 0.5373 | 0.5527 | 0.7209 | 0.3173 | 0.4406 | 0.5964 | 0.8312 | 0.6945 | 0.4600 | 0.5132 | 0.4851 | 0.6037 | 0.6646 | 0.6327 | | 0.8587 | 39.0 | 226668 | 1.2546 | 0.6413 | 0.6524 | 0.6442 | 0.6351 | 0.5574 | 0.6549 | 0.6023 | 0.5898 | 0.7426 | 0.6574 | 0.7458 | 0.7986 | 0.7713 | 0.7196 | 0.3799 | 0.4973 | 0.6491 | 0.6449 | 0.6470 | | 0.8124 | 40.0 | 232480 | 1.8312 | 0.5579 | 0.5858 | 0.5629 | 0.5373 | 0.4888 | 0.6150 | 0.5447 | 0.6586 | 0.5165 | 0.5789 | 0.7529 | 0.6997 | 0.7253 | 0.5702 | 
0.1587 | 0.2483 | 0.4585 | 0.8248 | 0.5894 | | 0.8602 | 41.0 | 238292 | 1.9507 | 0.5134 | 0.5892 | 0.5146 | 0.5000 | 0.5973 | 0.3902 | 0.4720 | 0.6989 | 0.2196 | 0.3342 | 0.7692 | 0.5816 | 0.6624 | 0.4921 | 0.4873 | 0.4897 | 0.3885 | 0.8942 | 0.5417 | | 0.8136 | 42.0 | 244104 | 1.2960 | 0.6210 | 0.6213 | 0.6227 | 0.6174 | 0.6458 | 0.5564 | 0.5978 | 0.6721 | 0.6680 | 0.6700 | 0.6059 | 0.8142 | 0.6948 | 0.5570 | 0.4663 | 0.5076 | 0.6257 | 0.6084 | 0.6170 | | 0.8146 | 43.0 | 249916 | 1.5908 | 0.5647 | 0.5820 | 0.5687 | 0.5432 | 0.5705 | 0.4214 | 0.4848 | 0.6037 | 0.6150 | 0.6093 | 0.5778 | 0.8668 | 0.6934 | 0.6651 | 0.2360 | 0.3484 | 0.4929 | 0.7041 | 0.5799 | | 0.8045 | 44.0 | 255728 | 2.1165 | 0.5435 | 0.6027 | 0.5436 | 0.5250 | 0.6968 | 0.375 | 0.4876 | 0.8010 | 0.2812 | 0.4163 | 0.4908 | 0.9158 | 0.6391 | 0.4781 | 0.5440 | 0.5089 | 0.5469 | 0.6018 | 0.5730 | | 0.8172 | 45.0 | 261540 | 2.4596 | 0.5096 | 0.6248 | 0.5128 | 0.5039 | 0.7288 | 0.2496 | 0.3718 | 0.6504 | 0.5048 | 0.5684 | 0.8274 | 0.5660 | 0.6722 | 0.5714 | 0.3109 | 0.4027 | 0.3458 | 0.9328 | 0.5045 | | 0.7635 | 46.0 | 267352 | 1.6403 | 0.5456 | 0.6158 | 0.5429 | 0.5529 | 0.5380 | 0.3806 | 0.4459 | 0.6343 | 0.4757 | 0.5437 | 0.8389 | 0.6237 | 0.7155 | 0.3719 | 0.7685 | 0.5013 | 0.6959 | 0.4662 | 0.5584 | | 0.7885 | 47.0 | 273164 | 1.7048 | 0.5754 | 0.6149 | 0.5775 | 0.5724 | 0.4429 | 0.6671 | 0.5324 | 0.7703 | 0.3741 | 0.5037 | 0.7741 | 0.6888 | 0.7290 | 0.5489 | 0.4157 | 0.4731 | 0.5380 | 0.7419 | 0.6237 | | 0.8135 | 48.0 | 278976 | 1.3005 | 0.6211 | 0.6418 | 0.6230 | 0.6218 | 0.6381 | 0.5938 | 0.6151 | 0.6928 | 0.5755 | 0.6287 | 0.8042 | 0.6098 | 0.6937 | 0.5398 | 0.5016 | 0.5200 | 0.5342 | 0.8341 | 0.6512 | | 0.8136 | 49.0 | 284788 | 1.7161 | 0.5690 | 0.5809 | 0.5705 | 0.5596 | 0.5968 | 0.4536 | 0.5154 | 0.6844 | 0.4028 | 0.5071 | 0.5826 | 0.8116 | 0.6783 | 0.5104 | 0.4650 | 0.4867 | 0.5306 | 0.7195 | 0.6108 | | 0.7742 | 50.0 | 290600 | 1.5161 | 0.5997 | 0.6067 | 0.6009 | 0.6006 | 0.5444 | 0.6380 | 0.5875 | 0.6864 | 0.5330 | 0.6000 | 0.6999 | 0.7613 | 0.7293 | 0.4729 | 0.4910 | 0.4817 | 0.6299 | 0.5812 | 0.6046 | | 0.7774 | 51.0 | 296412 | 4.2904 | 0.4461 | 0.5894 | 0.4508 | 0.4202 | 0.6654 | 0.1493 | 0.2439 | 0.5924 | 0.4896 | 0.5361 | 0.8191 | 0.5443 | 0.6540 | 0.5688 | 0.1291 | 0.2105 | 0.3013 | 0.9416 | 0.4566 | | 0.7166 | 52.0 | 302224 | 2.6086 | 0.5367 | 0.5850 | 0.5397 | 0.5122 | 0.6281 | 0.3555 | 0.4540 | 0.4136 | 0.9210 | 0.5709 | 0.6617 | 0.8251 | 0.7344 | 0.4680 | 0.2348 | 0.3127 | 0.7534 | 0.3622 | 0.4892 | | 0.7684 | 53.0 | 308036 | 2.5523 | 0.5268 | 0.5602 | 0.5295 | 0.4903 | 0.6517 | 0.1697 | 0.2693 | 0.5046 | 0.7331 | 0.5978 | 0.4800 | 0.9227 | 0.6315 | 0.5375 | 0.2919 | 0.3784 | 0.6270 | 0.5299 | 0.5744 | | 0.7749 | 54.0 | 313848 | 2.1679 | 0.5323 | 0.6005 | 0.5360 | 0.5274 | 0.5914 | 0.4184 | 0.4901 | 0.7419 | 0.4280 | 0.5428 | 0.7818 | 0.6671 | 0.7199 | 0.5097 | 0.2693 | 0.3524 | 0.3777 | 0.8973 | 0.5316 | | 0.7422 | 55.0 | 319660 | 1.3644 | 0.6202 | 0.6819 | 0.6187 | 0.6235 | 0.5310 | 0.7170 | 0.6102 | 0.7858 | 0.4666 | 0.5855 | 0.8068 | 0.7088 | 0.7546 | 0.4655 | 0.7290 | 0.5682 | 0.8206 | 0.4719 | 0.5992 | | 0.7326 | 56.0 | 325472 | 1.6948 | 0.6008 | 0.6421 | 0.6013 | 0.5895 | 0.6777 | 0.4180 | 0.5170 | 0.7943 | 0.3485 | 0.4845 | 0.7396 | 0.8286 | 0.7816 | 0.4917 | 0.5958 | 0.5388 | 0.5074 | 0.8156 | 0.6256 | | 0.7648 | 57.0 | 331284 | 1.9124 | 0.5460 | 0.5785 | 0.5464 | 0.5449 | 0.5254 | 0.5074 | 0.5162 | 0.7337 | 0.3420 | 0.4665 | 0.7054 | 0.6923 | 0.6988 | 0.4079 | 0.5362 | 0.4633 | 0.5201 | 0.6541 | 
0.5794 | | 0.7768 | 58.0 | 337096 | 2.0001 | 0.5515 | 0.6271 | 0.5547 | 0.5314 | 0.5096 | 0.5998 | 0.5510 | 0.8704 | 0.2448 | 0.3821 | 0.7121 | 0.7739 | 0.7417 | 0.6209 | 0.3178 | 0.4205 | 0.4226 | 0.8371 | 0.5616 | | 0.7521 | 59.0 | 342908 | 2.1328 | 0.5616 | 0.5853 | 0.5636 | 0.5562 | 0.4727 | 0.6059 | 0.5311 | 0.5094 | 0.7261 | 0.5988 | 0.7370 | 0.7470 | 0.7420 | 0.4509 | 0.3458 | 0.3914 | 0.7561 | 0.3933 | 0.5175 | | 0.7693 | 60.0 | 348720 | 2.0178 | 0.5387 | 0.5614 | 0.5399 | 0.5341 | 0.6972 | 0.3407 | 0.4577 | 0.56 | 0.6562 | 0.6043 | 0.5339 | 0.7240 | 0.6146 | 0.3921 | 0.4301 | 0.4102 | 0.6238 | 0.5487 | 0.5838 | | 0.7369 | 61.0 | 354532 | 1.8310 | 0.5869 | 0.6164 | 0.5900 | 0.5759 | 0.6740 | 0.4110 | 0.5106 | 0.7007 | 0.5273 | 0.6018 | 0.6516 | 0.8424 | 0.7348 | 0.5999 | 0.3581 | 0.4485 | 0.4558 | 0.8108 | 0.5836 | | 0.7791 | 62.0 | 360344 | 3.3122 | 0.4796 | 0.5204 | 0.4814 | 0.4547 | 0.6131 | 0.2552 | 0.3604 | 0.6397 | 0.2912 | 0.4002 | 0.4378 | 0.8867 | 0.5862 | 0.4395 | 0.3495 | 0.3894 | 0.4718 | 0.6242 | 0.5374 | | 0.7385 | 63.0 | 366156 | 2.7403 | 0.5317 | 0.5487 | 0.5335 | 0.5204 | 0.5388 | 0.5091 | 0.5235 | 0.6591 | 0.5195 | 0.5811 | 0.4790 | 0.9002 | 0.6253 | 0.4569 | 0.3396 | 0.3896 | 0.6097 | 0.3990 | 0.4824 | | 0.6977 | 64.0 | 371968 | 3.8466 | 0.4547 | 0.5072 | 0.4577 | 0.4241 | 0.6136 | 0.1840 | 0.2831 | 0.5676 | 0.2843 | 0.3788 | 0.5798 | 0.7739 | 0.6629 | 0.4322 | 0.2492 | 0.3161 | 0.3426 | 0.7972 | 0.4793 | | 0.7026 | 65.0 | 377780 | 2.2445 | 0.5336 | 0.5846 | 0.5334 | 0.5219 | 0.6372 | 0.4779 | 0.5461 | 0.6531 | 0.4518 | 0.5341 | 0.4459 | 0.9167 | 0.6 | 0.4843 | 0.5074 | 0.4956 | 0.7028 | 0.3134 | 0.4335 | | 0.7319 | 66.0 | 383592 | 1.8517 | 0.5921 | 0.6097 | 0.5940 | 0.5934 | 0.5156 | 0.6020 | 0.5555 | 0.5821 | 0.7292 | 0.6474 | 0.8411 | 0.5629 | 0.6745 | 0.4964 | 0.4276 | 0.4595 | 0.6131 | 0.6484 | 0.6303 | | 0.7037 | 67.0 | 389404 | 1.6035 | 0.6197 | 0.6541 | 0.6205 | 0.6211 | 0.6533 | 0.4874 | 0.5583 | 0.7281 | 0.5347 | 0.6166 | 0.8408 | 0.6510 | 0.7339 | 0.5427 | 0.5925 | 0.5665 | 0.5053 | 0.8367 | 0.6301 | | 0.7307 | 68.0 | 395216 | 2.4152 | 0.5647 | 0.6316 | 0.5692 | 0.5429 | 0.5066 | 0.5955 | 0.5475 | 0.8107 | 0.3681 | 0.5063 | 0.6879 | 0.8168 | 0.7468 | 0.7131 | 0.2188 | 0.3348 | 0.4398 | 0.8468 | 0.5789 | | 0.731 | 69.0 | 401028 | 3.0934 | 0.4891 | 0.5599 | 0.4921 | 0.4684 | 0.6102 | 0.4193 | 0.4970 | 0.7122 | 0.3147 | 0.4365 | 0.3881 | 0.9666 | 0.5538 | 0.5608 | 0.2294 | 0.3256 | 0.5280 | 0.5303 | 0.5291 | | 0.7365 | 70.0 | 406840 | 1.7706 | 0.5989 | 0.6174 | 0.6003 | 0.5913 | 0.6586 | 0.4722 | 0.5501 | 0.5229 | 0.8168 | 0.6376 | 0.6442 | 0.7960 | 0.7121 | 0.5197 | 0.4404 | 0.4767 | 0.7416 | 0.4763 | 0.5801 | | 0.7129 | 71.0 | 412652 | 1.9947 | 0.5514 | 0.6603 | 0.5481 | 0.5586 | 0.7073 | 0.3555 | 0.4731 | 0.7332 | 0.4175 | 0.5321 | 0.7925 | 0.6797 | 0.7318 | 0.3516 | 0.8244 | 0.4929 | 0.7169 | 0.4636 | 0.5630 | | 0.6914 | 72.0 | 418464 | 2.5763 | 0.5167 | 0.5818 | 0.5157 | 0.5044 | 0.4784 | 0.6866 | 0.5639 | 0.7268 | 0.3325 | 0.4562 | 0.5528 | 0.7183 | 0.6248 | 0.3971 | 0.5559 | 0.4633 | 0.7541 | 0.2853 | 0.4140 | | 0.6815 | 73.0 | 424276 | 2.3497 | 0.5551 | 0.5953 | 0.5563 | 0.5521 | 0.6806 | 0.3043 | 0.4205 | 0.6536 | 0.5846 | 0.6172 | 0.7610 | 0.6480 | 0.7000 | 0.4260 | 0.4844 | 0.4533 | 0.4553 | 0.7603 | 0.5695 | | 0.7438 | 74.0 | 430088 | 3.6554 | 0.4889 | 0.6087 | 0.4886 | 0.4610 | 0.6236 | 0.3207 | 0.4236 | 0.8442 | 0.1011 | 0.1806 | 0.7997 | 0.6359 | 0.7084 | 0.3633 | 0.5818 | 0.4473 | 0.4127 | 0.8033 | 0.5453 | | 0.6709 | 75.0 | 435900 | 
2.0066 | 0.6008 | 0.6045 | 0.6033 | 0.5884 | 0.6725 | 0.3867 | 0.4910 | 0.6082 | 0.6797 | 0.6419 | 0.6225 | 0.8372 | 0.7140 | 0.5517 | 0.3968 | 0.4616 | 0.5675 | 0.7160 | 0.6332 | | 0.76 | 76.0 | 441712 | 1.7663 | 0.5992 | 0.6356 | 0.5986 | 0.5949 | 0.6654 | 0.4566 | 0.5416 | 0.7895 | 0.3989 | 0.5300 | 0.7035 | 0.8021 | 0.7495 | 0.4595 | 0.6669 | 0.5441 | 0.5599 | 0.6686 | 0.6094 | | 0.7148 | 77.0 | 447524 | 2.0687 | 0.6083 | 0.6330 | 0.6080 | 0.6120 | 0.6335 | 0.5056 | 0.5624 | 0.6311 | 0.6593 | 0.6449 | 0.7473 | 0.7522 | 0.7497 | 0.4352 | 0.6188 | 0.5110 | 0.7179 | 0.5040 | 0.5922 | | 0.6471 | 78.0 | 453336 | 3.5390 | 0.4843 | 0.5407 | 0.4854 | 0.4555 | 0.7602 | 0.1623 | 0.2675 | 0.5483 | 0.5004 | 0.5233 | 0.4311 | 0.9206 | 0.5872 | 0.4446 | 0.3746 | 0.4066 | 0.5194 | 0.4693 | 0.4931 | | 0.7424 | 79.0 | 459148 | 2.3235 | 0.5606 | 0.6087 | 0.5637 | 0.5556 | 0.4784 | 0.7075 | 0.5708 | 0.7296 | 0.4262 | 0.5381 | 0.8071 | 0.5629 | 0.6633 | 0.5565 | 0.3302 | 0.4145 | 0.4721 | 0.7915 | 0.5914 | | 0.7054 | 80.0 | 464960 | 3.6877 | 0.4990 | 0.5770 | 0.5002 | 0.4718 | 0.3735 | 0.8186 | 0.5129 | 0.7128 | 0.0894 | 0.1589 | 0.7956 | 0.6489 | 0.7148 | 0.4430 | 0.3960 | 0.4182 | 0.5601 | 0.5483 | 0.5541 | | 0.6304 | 81.0 | 470772 | 2.7415 | 0.5360 | 0.5735 | 0.5394 | 0.5115 | 0.4136 | 0.7409 | 0.5309 | 0.5405 | 0.6545 | 0.5921 | 0.6376 | 0.7904 | 0.7058 | 0.5196 | 0.1854 | 0.2733 | 0.7564 | 0.3257 | 0.4554 | | 0.7155 | 82.0 | 476584 | 2.7197 | 0.5313 | 0.5558 | 0.5320 | 0.5323 | 0.5025 | 0.4735 | 0.4876 | 0.6630 | 0.3698 | 0.4748 | 0.7162 | 0.6714 | 0.6931 | 0.3879 | 0.4910 | 0.4334 | 0.5092 | 0.6545 | 0.5728 | | 0.6031 | 83.0 | 482396 | 2.0149 | 0.5929 | 0.6357 | 0.5956 | 0.5856 | 0.6568 | 0.4154 | 0.5089 | 0.5173 | 0.8168 | 0.6335 | 0.8628 | 0.5595 | 0.6788 | 0.6258 | 0.3906 | 0.4810 | 0.5158 | 0.7959 | 0.6259 | | 0.6239 | 84.0 | 488208 | 2.4324 | 0.5538 | 0.6084 | 0.5545 | 0.5352 | 0.4871 | 0.6745 | 0.5657 | 0.8627 | 0.2509 | 0.3887 | 0.5444 | 0.8724 | 0.6704 | 0.5 | 0.4766 | 0.488 | 0.6478 | 0.4982 | 0.5633 | | 0.6656 | 85.0 | 494020 | 2.6047 | 0.5341 | 0.5676 | 0.5330 | 0.5389 | 0.5842 | 0.4080 | 0.4804 | 0.6188 | 0.5087 | 0.5584 | 0.7051 | 0.6693 | 0.6867 | 0.3702 | 0.6164 | 0.4626 | 0.5597 | 0.4627 | 0.5066 | | 0.6743 | 86.0 | 499832 | 2.3569 | 0.5811 | 0.5956 | 0.5854 | 0.5653 | 0.5293 | 0.4944 | 0.5112 | 0.6458 | 0.6055 | 0.625 | 0.6936 | 0.7713 | 0.7304 | 0.6217 | 0.2447 | 0.3511 | 0.4875 | 0.8112 | 0.6090 | | 0.6588 | 87.0 | 505644 | 2.1204 | 0.5947 | 0.6213 | 0.5971 | 0.5896 | 0.5594 | 0.6254 | 0.5906 | 0.7598 | 0.4036 | 0.5272 | 0.7491 | 0.7465 | 0.7478 | 0.5464 | 0.4235 | 0.4772 | 0.4919 | 0.7862 | 0.6052 | | 0.6914 | 88.0 | 511456 | 3.1284 | 0.5051 | 0.5568 | 0.5039 | 0.4926 | 0.6088 | 0.3134 | 0.4138 | 0.7347 | 0.2921 | 0.4180 | 0.5543 | 0.7756 | 0.6465 | 0.3783 | 0.6118 | 0.4676 | 0.5078 | 0.5268 | 0.5171 | | 0.6262 | 89.0 | 517268 | 2.3910 | 0.5641 | 0.6059 | 0.5673 | 0.5319 | 0.7953 | 0.2192 | 0.3437 | 0.5283 | 0.7548 | 0.6215 | 0.5409 | 0.8607 | 0.6643 | 0.5945 | 0.3039 | 0.4022 | 0.5707 | 0.6980 | 0.6280 | | 0.6209 | 90.0 | 523080 | 3.3476 | 0.4826 | 0.5516 | 0.4862 | 0.4752 | 0.3511 | 0.7886 | 0.4859 | 0.5591 | 0.4353 | 0.4895 | 0.8627 | 0.4336 | 0.5771 | 0.4466 | 0.1838 | 0.2604 | 0.5385 | 0.5896 | 0.5629 | | 0.6806 | 91.0 | 528892 | 3.0868 | 0.5251 | 0.5438 | 0.5264 | 0.5099 | 0.6458 | 0.2595 | 0.3703 | 0.6048 | 0.4722 | 0.5303 | 0.5530 | 0.8312 | 0.6641 | 0.4041 | 0.4297 | 0.4165 | 0.5116 | 0.6396 | 0.5685 | | 0.6452 | 92.0 | 534704 | 2.9336 | 0.5532 | 0.6052 | 0.5524 | 0.5364 
| 0.6882 | 0.3468 | 0.4612 | 0.7733 | 0.3095 | 0.4420 | 0.5319 | 0.8728 | 0.6610 | 0.4517 | 0.6308 | 0.5264 | 0.5809 | 0.6023 | 0.5914 | | 0.7151 | 93.0 | 540516 | 2.4665 | 0.5535 | 0.6065 | 0.5529 | 0.5360 | 0.5988 | 0.4696 | 0.5264 | 0.8445 | 0.2452 | 0.3801 | 0.5560 | 0.8615 | 0.6759 | 0.4449 | 0.6110 | 0.5149 | 0.5881 | 0.5773 | 0.5826 | | 0.6112 | 94.0 | 546328 | 2.5413 | 0.5656 | 0.5860 | 0.5660 | 0.5434 | 0.6813 | 0.2218 | 0.3346 | 0.6448 | 0.5469 | 0.5918 | 0.5683 | 0.8989 | 0.6964 | 0.5101 | 0.5395 | 0.5244 | 0.5252 | 0.6229 | 0.5699 | | 0.6329 | 95.0 | 552140 | 2.4825 | 0.5741 | 0.5935 | 0.5754 | 0.5659 | 0.5737 | 0.5187 | 0.5448 | 0.7532 | 0.4332 | 0.5500 | 0.5380 | 0.8919 | 0.6711 | 0.5457 | 0.4544 | 0.4958 | 0.5568 | 0.5790 | 0.5677 | | 0.6281 | 96.0 | 557952 | 1.9398 | 0.5998 | 0.6073 | 0.6039 | 0.5876 | 0.5117 | 0.6654 | 0.5785 | 0.6928 | 0.5638 | 0.6217 | 0.7149 | 0.7726 | 0.7426 | 0.5688 | 0.2636 | 0.3602 | 0.5482 | 0.7542 | 0.6349 | | 0.6549 | 97.0 | 563764 | 2.9523 | 0.5368 | 0.5574 | 0.5376 | 0.5173 | 0.6224 | 0.3455 | 0.4443 | 0.5288 | 0.7083 | 0.6056 | 0.5107 | 0.8789 | 0.6460 | 0.4900 | 0.4235 | 0.4543 | 0.6353 | 0.3319 | 0.4360 | | 0.6468 | 98.0 | 569576 | 2.7905 | 0.5503 | 0.5699 | 0.5503 | 0.5437 | 0.6375 | 0.3984 | 0.4904 | 0.6687 | 0.5590 | 0.6090 | 0.5370 | 0.8433 | 0.6562 | 0.4346 | 0.5271 | 0.4764 | 0.5717 | 0.4236 | 0.4866 | | 0.6366 | 99.0 | 575388 | 2.2962 | 0.5737 | 0.6114 | 0.5757 | 0.5667 | 0.5676 | 0.4501 | 0.5021 | 0.7789 | 0.3885 | 0.5184 | 0.7234 | 0.7717 | 0.7467 | 0.5370 | 0.4383 | 0.4827 | 0.4499 | 0.8301 | 0.5836 | | 0.6384 | 100.0 | 581200 | 1.8486 | 0.6184 | 0.6367 | 0.6194 | 0.6204 | 0.5979 | 0.5512 | 0.5736 | 0.5639 | 0.7756 | 0.6530 | 0.8543 | 0.6081 | 0.7104 | 0.5330 | 0.5312 | 0.5321 | 0.6344 | 0.6308 | 0.6326 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu118 - Datasets 3.3.2 - Tokenizers 0.21.0
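For quick inference with this checkpoint, the `image-classification` pipeline applies the resizing and normalization saved with the model's image processor. A minimal sketch, assuming the model id above is publicly available on the Hub; `bean.png` is a placeholder path to a coffee-bean crop like the training data:

```python
from transformers import pipeline

# Assumption: the checkpoint is public under this Hub id
classifier = pipeline("image-classification",
                      model="Master-Rapha7/mobilenetv2-typecoffee-6")

# "bean.png" is a placeholder image path; the processor rescales it to the
# model's expected input size automatically
print(classifier("bean.png"))
```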
[ "durariadorio_16x16", "mole_16x16", "quebrado_16x16", "riadorio_16x16", "riofechado_16x16" ]
Ky0145/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0155 - Accuracy: 0.9933 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 192 - eval_batch_size: 192 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 768 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:------:|:----:|:---------------:|:--------:| | 0.0344 | 1.0 | 58 | 0.0204 | 0.9921 | | 0.018 | 2.0 | 116 | 0.0150 | 0.9947 | | 0.0123 | 2.9524 | 171 | 0.0155 | 0.9933 | ### Framework versions - Transformers 4.47.0 - Pytorch 2.5.1+cu121 - Datasets 3.3.1 - Tokenizers 0.21.0
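The hyperparameters above can be expressed as a `TrainingArguments` sketch; `output_dir` is a placeholder and any option the card does not list is left at its default:

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="swin-tiny-patch4-window7-224-finetuned-eurosat",  # placeholder
    learning_rate=5e-5,
    per_device_train_batch_size=192,
    per_device_eval_batch_size=192,
    gradient_accumulation_steps=4,  # 192 * 4 = 768 total train batch size
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=3,
    seed=42,
)
```

The reported total train batch size of 768 is consistent with 192 per device times 4 accumulation steps on a single device.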
[ "cartoon", "photo" ]
lewisnjue/my_awesome_food_model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # my_awesome_food_model This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.6181 - Accuracy: 0.915 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 2.721 | 1.0 | 63 | 2.5158 | 0.865 | | 1.8368 | 2.0 | 126 | 1.7752 | 0.891 | | 1.6317 | 2.96 | 186 | 1.6181 | 0.915 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
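The label set travels with the checkpoint, so the mapping from class index to food name can be read back from the model config. A sketch, assuming the checkpoint is public on the Hub:

```python
from transformers import AutoImageProcessor, AutoModelForImageClassification

model_id = "lewisnjue/my_awesome_food_model"  # assumes a public checkpoint
processor = AutoImageProcessor.from_pretrained(model_id)
model = AutoModelForImageClassification.from_pretrained(model_id)

# id2label holds the food class names listed below this card
print(model.config.num_labels)   # number of food classes
print(model.config.id2label[0])  # name of class index 0
```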
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
Ky0145/swin-tiny-patch4-window7-224-finetuned-eurosat-finetuned-eurosat_2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat-finetuned-eurosat_2 This model is a fine-tuned version of [Ky0145/swin-tiny-patch4-window7-224-finetuned-eurosat](https://huggingface.co/Ky0145/swin-tiny-patch4-window7-224-finetuned-eurosat) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0080 - Accuracy: 0.9968 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 192 - eval_batch_size: 192 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 768 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 2 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:------:|:----:|:---------------:|:--------:| | 0.0144 | 1.0 | 63 | 0.0118 | 0.9955 | | 0.0102 | 1.9799 | 124 | 0.0080 | 0.9968 | ### Framework versions - Transformers 4.47.0 - Pytorch 2.5.1+cu121 - Datasets 3.3.1 - Tokenizers 0.21.0
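Unlike the first EuroSAT run, this model starts from an already fine-tuned checkpoint rather than the original `microsoft/swin-tiny-patch4-window7-224` weights. A sketch of that second stage, assuming the base checkpoint is public and with a placeholder dataset path:

```python
from datasets import load_dataset
from transformers import AutoImageProcessor, AutoModelForImageClassification

# Resume from the previously fine-tuned checkpoint named in the card
base = "Ky0145/swin-tiny-patch4-window7-224-finetuned-eurosat"
processor = AutoImageProcessor.from_pretrained(base)
model = AutoModelForImageClassification.from_pretrained(base)

# "path/to/images" is a placeholder for the imagefolder dataset used here
dataset = load_dataset("imagefolder", data_dir="path/to/images")
```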
[ "cartoon", "photo" ]
corranm/square_run_square_run_first_vote_full_pic_25
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # square_run_square_run_first_vote_full_pic_25 This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.8536 - F1 Macro: 0.0897 - F1 Micro: 0.2273 - F1 Weighted: 0.1159 - Precision Macro: 0.1014 - Precision Micro: 0.2273 - Precision Weighted: 0.1202 - Recall Macro: 0.1612 - Recall Micro: 0.2273 - Recall Weighted: 0.2273 - Accuracy: 0.2273 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_BNB with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 Macro | F1 Micro | F1 Weighted | Precision Macro | Precision Micro | Precision Weighted | Recall Macro | Recall Micro | Recall Weighted | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:|:--------:|:-----------:|:---------------:|:---------------:|:------------------:|:------------:|:------------:|:---------------:|:--------:| | 2.0541 | 1.0 | 58 | 1.9294 | 0.0745 | 0.1742 | 0.0883 | 0.0677 | 0.1742 | 0.0775 | 0.1392 | 0.1742 | 0.1742 | 0.1742 | | 1.899 | 2.0 | 116 | 2.0056 | 0.0914 | 0.1667 | 0.1071 | 0.1733 | 0.1667 | 0.2233 | 0.1507 | 0.1667 | 0.1667 | 0.1667 | | 1.8937 | 3.0 | 174 | 1.8754 | 0.0736 | 0.2197 | 0.0978 | 0.1254 | 0.2197 | 0.1492 | 0.1565 | 0.2197 | 0.2197 | 0.2197 | | 1.6804 | 4.0 | 232 | 1.8873 | 0.1517 | 0.2424 | 0.1844 | 0.1762 | 0.2424 | 0.2120 | 0.1913 | 0.2424 | 0.2424 | 0.2424 | | 1.9232 | 5.0 | 290 | 1.8961 | 0.1135 | 0.2197 | 0.1375 | 0.1588 | 0.2197 | 0.1867 | 0.1798 | 0.2197 | 0.2197 | 0.2197 | | 1.9641 | 6.0 | 348 | 1.8811 | 0.1590 | 0.2576 | 0.1993 | 0.1760 | 0.2576 | 0.2156 | 0.2014 | 0.2576 | 0.2576 | 0.2576 | | 2.0347 | 7.0 | 406 | 1.9128 | 0.1464 | 0.25 | 0.1728 | 0.2850 | 0.25 | 0.3156 | 0.2085 | 0.25 | 0.25 | 0.25 | | 1.6055 | 8.0 | 464 | 1.8785 | 0.0897 | 0.1894 | 0.1134 | 0.1093 | 0.1894 | 0.1335 | 0.1456 | 0.1894 | 0.1894 | 0.1894 | | 1.7139 | 9.0 | 522 | 1.8898 | 0.1370 | 0.2273 | 0.1573 | 0.1198 | 0.2273 | 0.1335 | 0.1915 | 0.2273 | 0.2273 | 0.2273 | | 1.6365 | 10.0 | 580 | 2.0175 | 0.1712 | 0.2197 | 0.1920 | 0.2880 | 0.2197 | 0.3202 | 0.2121 | 0.2197 | 0.2197 | 0.2197 | | 1.8532 | 11.0 | 638 | 1.9556 | 0.1660 | 0.2273 | 0.1964 | 0.1554 | 0.2273 | 0.1836 | 0.1913 | 0.2273 | 0.2273 | 0.2273 | | 1.2227 | 12.0 | 696 | 2.0035 | 0.2204 | 0.2576 | 0.2473 | 0.2152 | 0.2576 | 0.2460 | 0.2339 | 0.2576 | 0.2576 | 0.2576 | | 1.6314 | 13.0 | 754 | 2.2851 | 0.1228 | 0.1515 | 0.1330 | 0.1522 | 0.1515 | 0.1744 | 0.1455 | 0.1515 | 0.1515 | 0.1515 | | 1.4037 | 14.0 | 812 | 2.4487 | 0.2087 | 0.2045 | 0.2052 | 0.2603 | 0.2045 | 0.2314 | 0.1982 | 0.2045 | 0.2045 | 0.2045 | | 1.1671 | 15.0 | 870 | 2.3369 | 0.2516 | 0.2576 | 0.2536 | 0.2666 | 0.2576 | 0.2603 | 0.2534 | 0.2576 | 0.2576 | 0.2576 | | 1.2144 | 16.0 | 928 | 2.5458 | 0.2020 | 0.25 | 0.2314 | 0.2126 | 
0.25 | 0.2378 | 0.2129 | 0.25 | 0.25 | 0.25 | | 0.7161 | 17.0 | 986 | 2.8359 | 0.1935 | 0.2273 | 0.2223 | 0.2512 | 0.2273 | 0.2767 | 0.1989 | 0.2273 | 0.2273 | 0.2273 | | 0.7918 | 18.0 | 1044 | 2.9960 | 0.1857 | 0.25 | 0.2207 | 0.2161 | 0.25 | 0.2431 | 0.2032 | 0.25 | 0.25 | 0.25 | | 0.3944 | 19.0 | 1102 | 3.1088 | 0.2928 | 0.2955 | 0.2922 | 0.3139 | 0.2955 | 0.3033 | 0.2887 | 0.2955 | 0.2955 | 0.2955 | | 0.1982 | 20.0 | 1160 | 3.2756 | 0.2663 | 0.2652 | 0.2559 | 0.2978 | 0.2652 | 0.2843 | 0.2714 | 0.2652 | 0.2652 | 0.2652 | | 0.282 | 21.0 | 1218 | 3.3012 | 0.2658 | 0.2803 | 0.2746 | 0.2849 | 0.2803 | 0.2807 | 0.2630 | 0.2803 | 0.2803 | 0.2803 | | 0.6119 | 22.0 | 1276 | 3.3820 | 0.2684 | 0.2879 | 0.2772 | 0.2949 | 0.2879 | 0.3025 | 0.2745 | 0.2879 | 0.2879 | 0.2879 | | 0.0598 | 23.0 | 1334 | 3.4127 | 0.2546 | 0.2652 | 0.2601 | 0.2570 | 0.2652 | 0.2616 | 0.2602 | 0.2652 | 0.2652 | 0.2652 | | 0.0219 | 24.0 | 1392 | 3.6584 | 0.2391 | 0.2576 | 0.2488 | 0.2711 | 0.2576 | 0.2541 | 0.2377 | 0.2576 | 0.2576 | 0.2576 | | 0.2059 | 25.0 | 1450 | 3.6858 | 0.2517 | 0.2652 | 0.2600 | 0.2717 | 0.2652 | 0.2684 | 0.2488 | 0.2652 | 0.2652 | 0.2652 | | 0.5641 | 26.0 | 1508 | 3.8254 | 0.2395 | 0.2576 | 0.2515 | 0.2455 | 0.2576 | 0.2537 | 0.2406 | 0.2576 | 0.2576 | 0.2576 | | 0.3301 | 27.0 | 1566 | 3.9264 | 0.2529 | 0.2727 | 0.2655 | 0.2580 | 0.2727 | 0.2670 | 0.2550 | 0.2727 | 0.2727 | 0.2727 | | 0.2333 | 28.0 | 1624 | 4.0348 | 0.2007 | 0.2197 | 0.2100 | 0.1977 | 0.2197 | 0.2053 | 0.2077 | 0.2197 | 0.2197 | 0.2197 | | 0.1264 | 29.0 | 1682 | 4.0264 | 0.2224 | 0.2348 | 0.2295 | 0.2281 | 0.2348 | 0.2308 | 0.2225 | 0.2348 | 0.2348 | 0.2348 | | 0.0029 | 30.0 | 1740 | 4.0181 | 0.2304 | 0.2424 | 0.2376 | 0.2351 | 0.2424 | 0.2380 | 0.2304 | 0.2424 | 0.2424 | 0.2424 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu124 - Datasets 3.3.1 - Tokenizers 0.21.0
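The card reports each metric under macro, micro, and weighted averaging. In single-label classification the micro-averaged precision, recall, and F1 all reduce to plain accuracy, which is why the micro columns and the Accuracy column agree row for row above. A sketch of the three schemes with scikit-learn; `y_true`/`y_pred` are placeholders:

```python
from sklearn.metrics import f1_score, precision_score, recall_score

# Placeholders for evaluation-set labels and predictions (7 classes here)
y_true = [0, 1, 2, 2, 3, 4, 5, 6]
y_pred = [0, 1, 2, 3, 3, 4, 5, 0]

for avg in ("macro", "micro", "weighted"):
    print(avg,
          precision_score(y_true, y_pred, average=avg, zero_division=0),
          recall_score(y_true, y_pred, average=avg, zero_division=0),
          f1_score(y_true, y_pred, average=avg, zero_division=0))
```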
[ "-", "0", "1", "2", "3", "4", "5" ]
corranm/square_run_square_run_first_vote_full_pic_25_age_gender_double_check
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # square_run_square_run_first_vote_full_pic_25_age_gender_double_check This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 1.8779 - F1 Macro: 0.1462 - F1 Micro: 0.2727 - F1 Weighted: 0.1862 - Precision Macro: 0.1210 - Precision Micro: 0.2727 - Precision Weighted: 0.1558 - Recall Macro: 0.2214 - Recall Micro: 0.2727 - Recall Weighted: 0.2727 - Accuracy: 0.2727 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Use OptimizerNames.ADAMW_BNB with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 Macro | F1 Micro | F1 Weighted | Precision Macro | Precision Micro | Precision Weighted | Recall Macro | Recall Micro | Recall Weighted | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:|:--------:|:-----------:|:---------------:|:---------------:|:------------------:|:------------:|:------------:|:---------------:|:--------:| | 1.9984 | 1.0 | 58 | 1.9048 | 0.0813 | 0.2121 | 0.1005 | 0.1353 | 0.2121 | 0.1566 | 0.1633 | 0.2121 | 0.2121 | 0.2121 | | 1.9544 | 2.0 | 116 | 1.9687 | 0.0786 | 0.1818 | 0.0854 | 0.0563 | 0.1818 | 0.0614 | 0.1690 | 0.1818 | 0.1818 | 0.1818 | | 1.9357 | 3.0 | 174 | 1.8873 | 0.1234 | 0.2197 | 0.1573 | 0.0993 | 0.2197 | 0.1272 | 0.1735 | 0.2197 | 0.2197 | 0.2197 | | 1.8482 | 4.0 | 232 | 1.8944 | 0.1334 | 0.2348 | 0.1712 | 0.1448 | 0.2348 | 0.1770 | 0.1753 | 0.2348 | 0.2348 | 0.2348 | | 1.8804 | 5.0 | 290 | 1.9047 | 0.1417 | 0.2273 | 0.1739 | 0.1270 | 0.2273 | 0.1589 | 0.1901 | 0.2273 | 0.2273 | 0.2273 | | 1.772 | 6.0 | 348 | 1.9212 | 0.0984 | 0.1591 | 0.1172 | 0.0860 | 0.1591 | 0.1038 | 0.1339 | 0.1591 | 0.1591 | 0.1591 | | 2.0324 | 7.0 | 406 | 2.0659 | 0.1718 | 0.1894 | 0.1870 | 0.2037 | 0.1894 | 0.2178 | 0.1737 | 0.1894 | 0.1894 | 0.1894 | | 1.5526 | 8.0 | 464 | 1.9661 | 0.1335 | 0.2197 | 0.1600 | 0.1270 | 0.2197 | 0.1564 | 0.1822 | 0.2197 | 0.2197 | 0.2197 | | 1.8192 | 9.0 | 522 | 2.0842 | 0.1261 | 0.2121 | 0.1558 | 0.1107 | 0.2121 | 0.1363 | 0.1703 | 0.2121 | 0.2121 | 0.2121 | | 1.6192 | 10.0 | 580 | 2.1918 | 0.1604 | 0.2197 | 0.1925 | 0.2040 | 0.2197 | 0.2344 | 0.1870 | 0.2197 | 0.2197 | 0.2197 | | 1.4988 | 11.0 | 638 | 2.2040 | 0.1677 | 0.2197 | 0.2019 | 0.1822 | 0.2197 | 0.2118 | 0.1799 | 0.2197 | 0.2197 | 0.2197 | | 0.8077 | 12.0 | 696 | 2.2833 | 0.1875 | 0.2197 | 0.2155 | 0.1876 | 0.2197 | 0.2164 | 0.1925 | 0.2197 | 0.2197 | 0.2197 | | 0.8889 | 13.0 | 754 | 2.5556 | 0.1482 | 0.1818 | 0.1681 | 0.2320 | 0.1818 | 0.2746 | 0.1676 | 0.1818 | 0.1818 | 0.1818 | | 1.1727 | 14.0 | 812 | 2.4706 | 0.1735 | 0.1970 | 0.1885 | 0.1760 | 0.1970 | 0.1911 | 0.1802 | 0.1970 | 0.1970 | 0.1970 | | 1.2074 | 15.0 | 870 | 2.5722 | 0.1823 | 0.2273 | 0.2159 | 0.1953 | 0.2273 | 0.2250 | 0.1899 | 0.2273 | 0.2273 | 0.2273 | | 0.6141 | 16.0 | 928 | 2.7573 | 
0.1482 | 0.2045 | 0.1815 | 0.1513 | 0.2045 | 0.1792 | 0.1626 | 0.2045 | 0.2045 | 0.2045 | | 0.1625 | 17.0 | 986 | 2.9300 | 0.1506 | 0.1970 | 0.1782 | 0.1535 | 0.1970 | 0.1767 | 0.1620 | 0.1970 | 0.1970 | 0.1970 | | 0.274 | 18.0 | 1044 | 2.9748 | 0.1920 | 0.2273 | 0.2269 | 0.2315 | 0.2273 | 0.2633 | 0.1905 | 0.2273 | 0.2273 | 0.2273 | | 0.276 | 19.0 | 1102 | 3.2485 | 0.1822 | 0.2424 | 0.2117 | 0.2296 | 0.2424 | 0.2520 | 0.2041 | 0.2424 | 0.2424 | 0.2424 | | 0.0902 | 20.0 | 1160 | 3.3161 | 0.2049 | 0.2121 | 0.2057 | 0.2054 | 0.2121 | 0.2120 | 0.2200 | 0.2121 | 0.2121 | 0.2121 | | 0.0152 | 21.0 | 1218 | 3.6108 | 0.1833 | 0.1894 | 0.1926 | 0.1912 | 0.1894 | 0.2011 | 0.1805 | 0.1894 | 0.1894 | 0.1894 | | 0.184 | 22.0 | 1276 | 3.7744 | 0.2043 | 0.2348 | 0.2199 | 0.2286 | 0.2348 | 0.2407 | 0.2130 | 0.2348 | 0.2348 | 0.2348 | | 0.0195 | 23.0 | 1334 | 3.9045 | 0.1368 | 0.1818 | 0.1671 | 0.1293 | 0.1818 | 0.1571 | 0.1482 | 0.1818 | 0.1818 | 0.1818 | | 0.0565 | 24.0 | 1392 | 3.8040 | 0.1602 | 0.1894 | 0.1868 | 0.1592 | 0.1894 | 0.1853 | 0.1622 | 0.1894 | 0.1894 | 0.1894 | | 0.2566 | 25.0 | 1450 | 3.9869 | 0.1929 | 0.2121 | 0.2008 | 0.1900 | 0.2121 | 0.1953 | 0.2009 | 0.2121 | 0.2121 | 0.2121 | | 0.005 | 26.0 | 1508 | 4.0096 | 0.1816 | 0.2273 | 0.2120 | 0.1854 | 0.2273 | 0.2129 | 0.1922 | 0.2273 | 0.2273 | 0.2273 | | 0.0156 | 27.0 | 1566 | 4.0930 | 0.1740 | 0.2121 | 0.2021 | 0.1716 | 0.2121 | 0.1987 | 0.1820 | 0.2121 | 0.2121 | 0.2121 | | 0.0062 | 28.0 | 1624 | 4.1002 | 0.2042 | 0.2273 | 0.2190 | 0.2041 | 0.2273 | 0.2169 | 0.2095 | 0.2273 | 0.2273 | 0.2273 | | 0.0998 | 29.0 | 1682 | 4.1345 | 0.2011 | 0.2273 | 0.2182 | 0.2031 | 0.2273 | 0.2191 | 0.2079 | 0.2273 | 0.2273 | 0.2273 | | 0.0034 | 30.0 | 1740 | 4.1492 | 0.2018 | 0.2273 | 0.2190 | 0.2050 | 0.2273 | 0.2213 | 0.2079 | 0.2273 | 0.2273 | 0.2273 | ### Framework versions - Transformers 4.49.0 - Pytorch 2.6.0+cu124 - Datasets 3.3.1 - Tokenizers 0.21.0
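`OptimizerNames.ADAMW_BNB` is the 8-bit AdamW implementation from bitsandbytes, selected through the `optim` argument of `TrainingArguments`. A sketch of that choice (requires `pip install bitsandbytes`; `output_dir` is a placeholder, other values follow the card):

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="square_run_first_vote",  # placeholder
    optim="adamw_bnb_8bit",              # OptimizerNames.ADAMW_BNB
    learning_rate=1e-4,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=30,
    seed=42,
)
```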
[ "-", "0", "1", "2", "3", "4", "5" ]
GBoula/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the None dataset. It achieves the following results on the evaluation set: - Loss: 0.0129 - Accuracy: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:------:|:----:|:---------------:|:--------:| | No log | 1.0 | 9 | 0.0129 | 1.0 | | 0.0676 | 2.0 | 18 | 0.0120 | 0.9917 | | 0.0166 | 2.7059 | 24 | 0.0117 | 0.9917 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
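With 9 optimizer steps in the first epoch at an effective batch size of 128, the training-set size can be bracketed. A back-of-the-envelope sketch, assuming single-device training:

```python
# Effective batch size from the card
train_batch_size = 32
grad_accum_steps = 4
total_batch = train_batch_size * grad_accum_steps  # 128, as reported

# 9 optimizer steps per epoch brackets the training-set size
steps_per_epoch = 9
max_examples = steps_per_epoch * total_batch             # 1152
min_examples = (steps_per_epoch - 1) * total_batch + 1   # 1025
print(total_batch, min_examples, max_examples)
```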
[ "cloudy", "desert", "green_area", "water" ]
Aya-Ch/brain-tumor-classifier
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # brain-tumor-classifier This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.2279 - Accuracy: 0.9121 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:------:|:----:|:---------------:|:--------:| | 0.5843 | 0.9655 | 21 | 0.4451 | 0.8078 | | 0.3136 | 1.9655 | 42 | 0.2622 | 0.9023 | | 0.2411 | 2.9655 | 63 | 0.2279 | 0.9121 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
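For inference without the pipeline wrapper, the image processor and model can be driven directly. A sketch, assuming the checkpoint is public; `scan.png` is a placeholder path to an MRI slice:

```python
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

model_id = "Aya-Ch/brain-tumor-classifier"  # assumes a public checkpoint
processor = AutoImageProcessor.from_pretrained(model_id)
model = AutoModelForImageClassification.from_pretrained(model_id)

image = Image.open("scan.png").convert("RGB")  # placeholder image path
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits
print(model.config.id2label[logits.argmax(-1).item()])
```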
[ "1", "2", "3" ]
ricardoSLabs/jaffe_V2_50
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # jaffe_V2_50 This model is a fine-tuned version of [microsoft/beit-base-patch16-224-pt22k-ft22k](https://huggingface.co/microsoft/beit-base-patch16-224-pt22k-ft22k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.7217 - Accuracy: 0.8 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 1 | 1.9638 | 0.2333 | | No log | 2.0 | 2 | 1.7893 | 0.3333 | | No log | 3.0 | 3 | 1.8759 | 0.1667 | | No log | 4.0 | 4 | 1.6759 | 0.3667 | | No log | 5.0 | 5 | 1.5139 | 0.5 | | No log | 6.0 | 6 | 1.4280 | 0.5667 | | No log | 7.0 | 7 | 1.3688 | 0.5667 | | No log | 8.0 | 8 | 1.2819 | 0.6 | | No log | 9.0 | 9 | 1.1884 | 0.6 | | 1.5329 | 10.0 | 10 | 1.1448 | 0.6 | | 1.5329 | 11.0 | 11 | 1.0732 | 0.7 | | 1.5329 | 12.0 | 12 | 0.9793 | 0.7333 | | 1.5329 | 13.0 | 13 | 0.8830 | 0.7333 | | 1.5329 | 14.0 | 14 | 0.8366 | 0.7667 | | 1.5329 | 15.0 | 15 | 0.8027 | 0.7333 | | 1.5329 | 16.0 | 16 | 0.7952 | 0.7333 | | 1.5329 | 17.0 | 17 | 0.7746 | 0.7333 | | 1.5329 | 18.0 | 18 | 0.7571 | 0.7667 | | 1.5329 | 19.0 | 19 | 0.7256 | 0.7667 | | 0.5232 | 20.0 | 20 | 0.7217 | 0.8 | ### Framework versions - Transformers 4.47.0 - Pytorch 2.5.1+cu121 - Datasets 3.3.1 - Tokenizers 0.21.0
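Because every epoch here is a single optimizer step, the run amounts to 20 scheduler steps with 10% warmup. A standalone sketch of that linear-warmup schedule (a dummy one-parameter optimizer stands in for the real model):

```python
import torch
from transformers import get_linear_schedule_with_warmup

# Dummy parameter so the schedule can be stepped in isolation
optimizer = torch.optim.AdamW([torch.nn.Parameter(torch.zeros(1))],
                              lr=5e-5, betas=(0.9, 0.999), eps=1e-8)

total_steps = 20                       # one step per epoch, 20 epochs
warmup_steps = int(0.1 * total_steps)  # warmup_ratio 0.1 -> 2 steps
scheduler = get_linear_schedule_with_warmup(optimizer, warmup_steps, total_steps)

for _ in range(total_steps):
    optimizer.step()
    scheduler.step()
    print(scheduler.get_last_lr()[0])
```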
[ "an", "di", "fe", "ha", "ne", "sa", "su" ]
Aya-Ch/swin-tiny-patch4-window7-224-finetuned-brain-tumor
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-brain-tumor This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.2026 - Accuracy: 0.9414 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:------:|:----:|:---------------:|:--------:| | 0.6275 | 0.9655 | 21 | 0.3692 | 0.8534 | | 0.3569 | 1.9655 | 42 | 0.2435 | 0.8990 | | 0.2822 | 2.9655 | 63 | 0.2026 | 0.9414 | ### Framework versions - Transformers 4.48.3 - Pytorch 2.5.1+cu124 - Datasets 3.3.2 - Tokenizers 0.21.0
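The Accuracy column can be reproduced with the `evaluate` library; `predictions` and `references` below are placeholders for the evaluation-set outputs and labels:

```python
import evaluate

accuracy = evaluate.load("accuracy")
print(accuracy.compute(predictions=[0, 1, 2, 2], references=[0, 1, 2, 1]))
# -> {'accuracy': 0.75}
```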
[ "1", "2", "3" ]
ricardoSLabs/jaffe_V2_20_1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # jaffe_V2_20_1 This model is a fine-tuned version of [WinKawaks/vit-tiny-patch16-224](https://huggingface.co/WinKawaks/vit-tiny-patch16-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.5014 - Accuracy: 0.4333 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 1 | 2.4111 | 0.1333 | | No log | 2.0 | 2 | 2.0318 | 0.1667 | | No log | 3.0 | 3 | 2.0433 | 0.1667 | | No log | 4.0 | 4 | 2.0298 | 0.1 | | No log | 5.0 | 5 | 1.9400 | 0.2 | | No log | 6.0 | 6 | 1.8599 | 0.2667 | | No log | 7.0 | 7 | 1.7569 | 0.2333 | | No log | 8.0 | 8 | 1.8134 | 0.1333 | | No log | 9.0 | 9 | 1.6201 | 0.3667 | | 1.971 | 10.0 | 10 | 1.6919 | 0.2667 | | 1.971 | 11.0 | 11 | 1.6849 | 0.2667 | | 1.971 | 12.0 | 12 | 1.6344 | 0.3333 | | 1.971 | 13.0 | 13 | 1.6805 | 0.3 | | 1.971 | 14.0 | 14 | 1.5784 | 0.4667 | | 1.971 | 15.0 | 15 | 1.5104 | 0.4667 | | 1.971 | 16.0 | 16 | 1.4978 | 0.4333 | | 1.971 | 17.0 | 17 | 1.5169 | 0.4 | | 1.971 | 18.0 | 18 | 1.4790 | 0.4 | | 1.971 | 19.0 | 19 | 1.4223 | 0.6 | | 1.5094 | 20.0 | 20 | 1.5014 | 0.4333 | ### Framework versions - Transformers 4.47.0 - Pytorch 2.5.1+cu121 - Datasets 3.3.1 - Tokenizers 0.21.0
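Validation accuracy in this run is volatile, and the final checkpoint (0.4333) scores below the epoch-19 one (0.6). The card does not indicate that best-checkpoint selection was used; keeping the best epoch is a small `Trainer` change, sketched here as a suggestion rather than the author's configuration:

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="jaffe_V2_20_1",     # placeholder
    eval_strategy="epoch",
    save_strategy="epoch",
    load_best_model_at_end=True,    # restore the best checkpoint when done
    metric_for_best_model="accuracy",
    num_train_epochs=20,
)
```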
[ "an", "di", "fe", "ha", "ne", "sa", "su" ]
ricardoSLabs/jaffe_V2_100_1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # jaffe_V2_100_1 This model is a fine-tuned version of [WinKawaks/vit-tiny-patch16-224](https://huggingface.co/WinKawaks/vit-tiny-patch16-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.6446 - Accuracy: 0.7333 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 100 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 1 | 2.2977 | 0.1333 | | No log | 2.0 | 2 | 2.0741 | 0.1 | | No log | 3.0 | 3 | 2.0275 | 0.2333 | | No log | 4.0 | 4 | 2.1234 | 0.0667 | | No log | 5.0 | 5 | 2.0852 | 0.0333 | | No log | 6.0 | 6 | 2.0259 | 0.1667 | | No log | 7.0 | 7 | 2.0362 | 0.2667 | | No log | 8.0 | 8 | 2.0153 | 0.2333 | | No log | 9.0 | 9 | 1.7472 | 0.3333 | | 1.9971 | 10.0 | 10 | 1.9598 | 0.2 | | 1.9971 | 11.0 | 11 | 1.9367 | 0.3333 | | 1.9971 | 12.0 | 12 | 1.8312 | 0.3333 | | 1.9971 | 13.0 | 13 | 1.7299 | 0.3 | | 1.9971 | 14.0 | 14 | 1.6306 | 0.4333 | | 1.9971 | 15.0 | 15 | 1.5377 | 0.4333 | | 1.9971 | 16.0 | 16 | 1.4326 | 0.5333 | | 1.9971 | 17.0 | 17 | 1.5047 | 0.4 | | 1.9971 | 18.0 | 18 | 1.4929 | 0.4333 | | 1.9971 | 19.0 | 19 | 1.5326 | 0.4 | | 1.5087 | 20.0 | 20 | 1.5017 | 0.4667 | | 1.5087 | 21.0 | 21 | 1.4978 | 0.4333 | | 1.5087 | 22.0 | 22 | 1.2678 | 0.5667 | | 1.5087 | 23.0 | 23 | 1.2538 | 0.5 | | 1.5087 | 24.0 | 24 | 1.2526 | 0.5 | | 1.5087 | 25.0 | 25 | 1.3660 | 0.4 | | 1.5087 | 26.0 | 26 | 1.3206 | 0.5 | | 1.5087 | 27.0 | 27 | 1.2053 | 0.4333 | | 1.5087 | 28.0 | 28 | 1.1457 | 0.6333 | | 1.5087 | 29.0 | 29 | 1.0761 | 0.6 | | 1.007 | 30.0 | 30 | 1.1556 | 0.5 | | 1.007 | 31.0 | 31 | 1.0172 | 0.6333 | | 1.007 | 32.0 | 32 | 1.1851 | 0.5333 | | 1.007 | 33.0 | 33 | 1.0535 | 0.5333 | | 1.007 | 34.0 | 34 | 1.1161 | 0.5333 | | 1.007 | 35.0 | 35 | 0.9928 | 0.5667 | | 1.007 | 36.0 | 36 | 0.9970 | 0.7 | | 1.007 | 37.0 | 37 | 1.1090 | 0.4667 | | 1.007 | 38.0 | 38 | 0.9536 | 0.6 | | 1.007 | 39.0 | 39 | 1.2752 | 0.5 | | 0.6664 | 40.0 | 40 | 0.8948 | 0.6667 | | 0.6664 | 41.0 | 41 | 0.8891 | 0.6667 | | 0.6664 | 42.0 | 42 | 0.8382 | 0.6333 | | 0.6664 | 43.0 | 43 | 0.7498 | 0.7 | | 0.6664 | 44.0 | 44 | 0.8668 | 0.6667 | | 0.6664 | 45.0 | 45 | 1.1427 | 0.6667 | | 0.6664 | 46.0 | 46 | 0.8066 | 0.6 | | 0.6664 | 47.0 | 47 | 0.9161 | 0.6333 | | 0.6664 | 48.0 | 48 | 0.8266 | 0.6 | | 0.6664 | 49.0 | 49 | 0.9943 | 0.5667 | | 0.469 | 50.0 | 50 | 0.6892 | 0.6333 | | 0.469 | 51.0 | 51 | 0.7529 | 0.7667 | | 0.469 | 52.0 | 52 | 0.9834 | 0.5333 | | 0.469 | 53.0 | 53 | 0.8994 | 0.6 | | 0.469 | 54.0 | 54 | 0.6394 | 0.7667 | | 0.469 | 55.0 | 55 | 0.6854 | 0.7 | | 0.469 | 56.0 | 56 | 0.6051 | 0.8 | | 0.469 | 57.0 | 57 | 0.8493 | 0.6667 | | 0.469 | 58.0 | 58 | 0.6897 | 0.7333 | | 0.469 | 59.0 | 59 | 0.6698 | 0.6667 | | 0.3604 | 60.0 | 60 | 0.6562 | 
0.7667 | | 0.3604 | 61.0 | 61 | 0.7638 | 0.6 | | 0.3604 | 62.0 | 62 | 0.6217 | 0.7333 | | 0.3604 | 63.0 | 63 | 0.7635 | 0.7 | | 0.3604 | 64.0 | 64 | 0.7777 | 0.7667 | | 0.3604 | 65.0 | 65 | 0.6505 | 0.8 | | 0.3604 | 66.0 | 66 | 0.6469 | 0.7333 | | 0.3604 | 67.0 | 67 | 0.7266 | 0.7333 | | 0.3604 | 68.0 | 68 | 0.7613 | 0.6667 | | 0.3604 | 69.0 | 69 | 0.4647 | 0.8 | | 0.2726 | 70.0 | 70 | 0.6390 | 0.7 | | 0.2726 | 71.0 | 71 | 0.6155 | 0.7333 | | 0.2726 | 72.0 | 72 | 0.6113 | 0.8 | | 0.2726 | 73.0 | 73 | 0.5648 | 0.8 | | 0.2726 | 74.0 | 74 | 0.7042 | 0.7 | | 0.2726 | 75.0 | 75 | 0.6263 | 0.8333 | | 0.2726 | 76.0 | 76 | 0.7464 | 0.7333 | | 0.2726 | 77.0 | 77 | 0.7640 | 0.6333 | | 0.2726 | 78.0 | 78 | 0.7129 | 0.7667 | | 0.2726 | 79.0 | 79 | 0.7362 | 0.7333 | | 0.2157 | 80.0 | 80 | 0.7122 | 0.7667 | | 0.2157 | 81.0 | 81 | 0.5565 | 0.7333 | | 0.2157 | 82.0 | 82 | 0.6734 | 0.7667 | | 0.2157 | 83.0 | 83 | 0.6057 | 0.7 | | 0.2157 | 84.0 | 84 | 0.5287 | 0.7667 | | 0.2157 | 85.0 | 85 | 0.7490 | 0.7333 | | 0.2157 | 86.0 | 86 | 0.5841 | 0.7333 | | 0.2157 | 87.0 | 87 | 0.5641 | 0.7667 | | 0.2157 | 88.0 | 88 | 0.8243 | 0.6667 | | 0.2157 | 89.0 | 89 | 0.5287 | 0.7667 | | 0.1946 | 90.0 | 90 | 1.0455 | 0.7 | | 0.1946 | 91.0 | 91 | 0.6091 | 0.7333 | | 0.1946 | 92.0 | 92 | 0.5152 | 0.7667 | | 0.1946 | 93.0 | 93 | 0.5850 | 0.8 | | 0.1946 | 94.0 | 94 | 0.5806 | 0.7333 | | 0.1946 | 95.0 | 95 | 0.6017 | 0.7667 | | 0.1946 | 96.0 | 96 | 0.5606 | 0.7667 | | 0.1946 | 97.0 | 97 | 0.5931 | 0.7667 | | 0.1946 | 98.0 | 98 | 0.5299 | 0.7667 | | 0.1946 | 99.0 | 99 | 0.7117 | 0.7333 | | 0.1647 | 100.0 | 100 | 0.6446 | 0.7333 | ### Framework versions - Transformers 4.47.0 - Pytorch 2.5.1+cu121 - Datasets 3.3.1 - Tokenizers 0.21.0
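With 100 noisy eval points, a plot is easier to read than the table. A sketch that mimics `trainer.state.log_history`; the two rows shown are the first and last entries from the table above, standing in for the full history:

```python
import matplotlib.pyplot as plt

# Placeholder for trainer.state.log_history after a Trainer run;
# values taken from the first and last rows of the table above
log_history = [
    {"epoch": 1.0, "eval_loss": 2.2977, "eval_accuracy": 0.1333},
    {"epoch": 100.0, "eval_loss": 0.6446, "eval_accuracy": 0.7333},
]

epochs = [e["epoch"] for e in log_history if "eval_accuracy" in e]
acc = [e["eval_accuracy"] for e in log_history if "eval_accuracy" in e]

plt.plot(epochs, acc, marker="o")
plt.xlabel("epoch")
plt.ylabel("eval accuracy")
plt.savefig("jaffe_v2_100_accuracy.png")
```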
[ "an", "di", "fe", "ha", "ne", "sa", "su" ]
ricardoSLabs/jaffe_V2_200_1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# jaffe_V2_200_1

This model is a fine-tuned version of [WinKawaks/vit-tiny-patch16-224](https://huggingface.co/WinKawaks/vit-tiny-patch16-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.3747
- Accuracy: 0.9

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 200

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 1 | 2.4997 | 0.0667 |
| No log | 2.0 | 2 | 2.6037 | 0.1 |
| No log | 3.0 | 3 | 2.3924 | 0.0667 |
| No log | 4.0 | 4 | 2.3152 | 0.1 |
| No log | 5.0 | 5 | 2.1146 | 0.1667 |
| No log | 6.0 | 6 | 2.1610 | 0.2333 |
| No log | 7.0 | 7 | 2.1346 | 0.1333 |
| No log | 8.0 | 8 | 2.1400 | 0.1 |
| No log | 9.0 | 9 | 2.1422 | 0.0667 |
| 2.3217 | 10.0 | 10 | 2.0948 | 0.1333 |
| 2.3217 | 11.0 | 11 | 2.0994 | 0.2 |
| 2.3217 | 12.0 | 12 | 1.8570 | 0.3333 |
| 2.3217 | 13.0 | 13 | 1.9750 | 0.2667 |
| 2.3217 | 14.0 | 14 | 1.8089 | 0.3 |
| 2.3217 | 15.0 | 15 | 1.8738 | 0.3 |
| 2.3217 | 16.0 | 16 | 1.7751 | 0.3333 |
| 2.3217 | 17.0 | 17 | 1.7744 | 0.2 |
| 2.3217 | 18.0 | 18 | 1.7998 | 0.3333 |
| 2.3217 | 19.0 | 19 | 1.7048 | 0.2667 |
| 1.798 | 20.0 | 20 | 1.6367 | 0.4 |
| 1.798 | 21.0 | 21 | 1.6092 | 0.3 |
| 1.798 | 22.0 | 22 | 1.5605 | 0.3667 |
| 1.798 | 23.0 | 23 | 1.4219 | 0.5 |
| 1.798 | 24.0 | 24 | 1.5037 | 0.4 |
| 1.798 | 25.0 | 25 | 1.3966 | 0.4333 |
| 1.798 | 26.0 | 26 | 1.4327 | 0.4 |
| 1.798 | 27.0 | 27 | 1.3484 | 0.4 |
| 1.798 | 28.0 | 28 | 1.3958 | 0.4 |
| 1.798 | 29.0 | 29 | 1.2789 | 0.4667 |
| 1.1133 | 30.0 | 30 | 1.2002 | 0.4333 |
| 1.1133 | 31.0 | 31 | 1.1080 | 0.4667 |
| 1.1133 | 32.0 | 32 | 0.9814 | 0.6 |
| 1.1133 | 33.0 | 33 | 1.0498 | 0.5667 |
| 1.1133 | 34.0 | 34 | 0.9709 | 0.6333 |
| 1.1133 | 35.0 | 35 | 0.9985 | 0.5333 |
| 1.1133 | 36.0 | 36 | 0.8779 | 0.6667 |
| 1.1133 | 37.0 | 37 | 0.7959 | 0.7 |
| 1.1133 | 38.0 | 38 | 0.7583 | 0.7 |
| 1.1133 | 39.0 | 39 | 1.0074 | 0.5667 |
| 0.5945 | 40.0 | 40 | 0.6441 | 0.6667 |
| 0.5945 | 41.0 | 41 | 0.7701 | 0.6667 |
| 0.5945 | 42.0 | 42 | 0.8433 | 0.6667 |
| 0.5945 | 43.0 | 43 | 0.7998 | 0.6667 |
| 0.5945 | 44.0 | 44 | 0.7087 | 0.7 |
| 0.5945 | 45.0 | 45 | 0.5793 | 0.8333 |
| 0.5945 | 46.0 | 46 | 0.5024 | 0.8 |
| 0.5945 | 47.0 | 47 | 0.8088 | 0.7 |
| 0.5945 | 48.0 | 48 | 0.7690 | 0.7 |
| 0.5945 | 49.0 | 49 | 0.8561 | 0.6667 |
| 0.3008 | 50.0 | 50 | 0.4728 | 0.8667 |
| 0.3008 | 51.0 | 51 | 0.5935 | 0.6667 |
| 0.3008 | 52.0 | 52 | 0.3772 | 0.9 |
| 0.3008 | 53.0 | 53 | 0.6337 | 0.6333 |
| 0.3008 | 54.0 | 54 | 0.6097 | 0.7 |
| 0.3008 | 55.0 | 55 | 0.4838 | 0.8333 |
| 0.3008 | 56.0 | 56 | 0.5487 | 0.8333 |
| 0.3008 | 57.0 | 57 | 0.5395 | 0.8 |
| 0.3008 | 58.0 | 58 | 0.5078 | 0.7667 |
| 0.3008 | 59.0 | 59 | 0.4211 | 0.8 |
| 0.1792 | 60.0 | 60 | 0.4578 | 0.8333 |
| 0.1792 | 61.0 | 61 | 0.4603 | 0.8333 |
| 0.1792 | 62.0 | 62 | 0.2765 | 0.9 |
| 0.1792 | 63.0 | 63 | 0.6634 | 0.7333 |
| 0.1792 | 64.0 | 64 | 0.3247 | 0.9 |
| 0.1792 | 65.0 | 65 | 0.6290 | 0.6667 |
| 0.1792 | 66.0 | 66 | 0.5741 | 0.8 |
| 0.1792 | 67.0 | 67 | 0.3994 | 0.8333 |
| 0.1792 | 68.0 | 68 | 0.4273 | 0.8333 |
| 0.1792 | 69.0 | 69 | 0.4240 | 0.7333 |
| 0.1158 | 70.0 | 70 | 0.4269 | 0.8333 |
| 0.1158 | 71.0 | 71 | 0.4764 | 0.8333 |
| 0.1158 | 72.0 | 72 | 0.3892 | 0.8667 |
| 0.1158 | 73.0 | 73 | 0.5258 | 0.8 |
| 0.1158 | 74.0 | 74 | 0.3253 | 0.8333 |
| 0.1158 | 75.0 | 75 | 0.5055 | 0.7667 |
| 0.1158 | 76.0 | 76 | 0.6183 | 0.7667 |
| 0.1158 | 77.0 | 77 | 0.3801 | 0.9 |
| 0.1158 | 78.0 | 78 | 0.5568 | 0.7333 |
| 0.1158 | 79.0 | 79 | 0.3794 | 0.8333 |
| 0.0936 | 80.0 | 80 | 0.2896 | 0.9 |
| 0.0936 | 81.0 | 81 | 0.5924 | 0.7667 |
| 0.0936 | 82.0 | 82 | 0.5123 | 0.8333 |
| 0.0936 | 83.0 | 83 | 0.6333 | 0.8 |
| 0.0936 | 84.0 | 84 | 0.4452 | 0.7333 |
| 0.0936 | 85.0 | 85 | 0.4296 | 0.8333 |
| 0.0936 | 86.0 | 86 | 0.3000 | 0.8667 |
| 0.0936 | 87.0 | 87 | 0.3882 | 0.8667 |
| 0.0936 | 88.0 | 88 | 0.5478 | 0.7333 |
| 0.0936 | 89.0 | 89 | 0.3075 | 0.8667 |
| 0.0473 | 90.0 | 90 | 0.5298 | 0.8 |
| 0.0473 | 91.0 | 91 | 0.6640 | 0.7333 |
| 0.0473 | 92.0 | 92 | 0.4580 | 0.8333 |
| 0.0473 | 93.0 | 93 | 0.5458 | 0.7333 |
| 0.0473 | 94.0 | 94 | 0.4686 | 0.8333 |
| 0.0473 | 95.0 | 95 | 0.2982 | 0.8333 |
| 0.0473 | 96.0 | 96 | 0.4537 | 0.8333 |
| 0.0473 | 97.0 | 97 | 0.3308 | 0.8667 |
| 0.0473 | 98.0 | 98 | 0.4839 | 0.8 |
| 0.0473 | 99.0 | 99 | 0.4554 | 0.8 |
| 0.0443 | 100.0 | 100 | 0.2150 | 0.9667 |
| 0.0443 | 101.0 | 101 | 0.3185 | 0.9333 |
| 0.0443 | 102.0 | 102 | 0.2575 | 0.9 |
| 0.0443 | 103.0 | 103 | 0.3313 | 0.8667 |
| 0.0443 | 104.0 | 104 | 0.4836 | 0.8333 |
| 0.0443 | 105.0 | 105 | 0.3910 | 0.8667 |
| 0.0443 | 106.0 | 106 | 0.5569 | 0.8333 |
| 0.0443 | 107.0 | 107 | 0.4688 | 0.8667 |
| 0.0443 | 108.0 | 108 | 0.2292 | 0.9333 |
| 0.0443 | 109.0 | 109 | 0.4958 | 0.8 |
| 0.0353 | 110.0 | 110 | 0.3628 | 0.9 |
| 0.0353 | 111.0 | 111 | 0.6191 | 0.7333 |
| 0.0353 | 112.0 | 112 | 0.5096 | 0.8 |
| 0.0353 | 113.0 | 113 | 0.3478 | 0.9 |
| 0.0353 | 114.0 | 114 | 0.3585 | 0.8667 |
| 0.0353 | 115.0 | 115 | 0.3859 | 0.8 |
| 0.0353 | 116.0 | 116 | 0.3952 | 0.8333 |
| 0.0353 | 117.0 | 117 | 0.4491 | 0.8333 |
| 0.0353 | 118.0 | 118 | 0.4710 | 0.8 |
| 0.0353 | 119.0 | 119 | 0.5375 | 0.8 |
| 0.0292 | 120.0 | 120 | 0.6853 | 0.8333 |
| 0.0292 | 121.0 | 121 | 0.4836 | 0.8 |
| 0.0292 | 122.0 | 122 | 0.5246 | 0.8 |
| 0.0292 | 123.0 | 123 | 0.4446 | 0.8667 |
| 0.0292 | 124.0 | 124 | 0.4238 | 0.8 |
| 0.0292 | 125.0 | 125 | 0.3543 | 0.8333 |
| 0.0292 | 126.0 | 126 | 0.2007 | 0.9333 |
| 0.0292 | 127.0 | 127 | 0.2274 | 0.9333 |
| 0.0292 | 128.0 | 128 | 0.3778 | 0.8333 |
| 0.0292 | 129.0 | 129 | 0.4544 | 0.8333 |
| 0.0296 | 130.0 | 130 | 0.2613 | 0.8667 |
| 0.0296 | 131.0 | 131 | 0.3248 | 0.9 |
| 0.0296 | 132.0 | 132 | 0.4552 | 0.8 |
| 0.0296 | 133.0 | 133 | 0.4356 | 0.8333 |
| 0.0296 | 134.0 | 134 | 0.3427 | 0.9 |
| 0.0296 | 135.0 | 135 | 0.1513 | 1.0 |
| 0.0296 | 136.0 | 136 | 0.3139 | 0.8333 |
| 0.0296 | 137.0 | 137 | 0.3094 | 0.9 |
| 0.0296 | 138.0 | 138 | 0.3401 | 0.8667 |
| 0.0296 | 139.0 | 139 | 0.4339 | 0.9333 |
| 0.0178 | 140.0 | 140 | 0.2465 | 0.9 |
| 0.0178 | 141.0 | 141 | 0.4604 | 0.8667 |
| 0.0178 | 142.0 | 142 | 0.4860 | 0.8 |
| 0.0178 | 143.0 | 143 | 0.3710 | 0.8333 |
| 0.0178 | 144.0 | 144 | 0.4719 | 0.8333 |
| 0.0178 | 145.0 | 145 | 0.3030 | 0.9333 |
| 0.0178 | 146.0 | 146 | 0.6212 | 0.7667 |
| 0.0178 | 147.0 | 147 | 0.2716 | 0.9 |
| 0.0178 | 148.0 | 148 | 0.4297 | 0.8333 |
| 0.0178 | 149.0 | 149 | 0.3456 | 0.8333 |
| 0.0103 | 150.0 | 150 | 0.4718 | 0.8667 |
| 0.0103 | 151.0 | 151 | 0.3841 | 0.8333 |
| 0.0103 | 152.0 | 152 | 0.4124 | 0.9333 |
| 0.0103 | 153.0 | 153 | 0.2595 | 0.9333 |
| 0.0103 | 154.0 | 154 | 0.2666 | 0.8667 |
| 0.0103 | 155.0 | 155 | 0.4872 | 0.7333 |
| 0.0103 | 156.0 | 156 | 0.4039 | 0.8333 |
| 0.0103 | 157.0 | 157 | 0.3004 | 0.8667 |
| 0.0103 | 158.0 | 158 | 0.3021 | 0.9 |
| 0.0103 | 159.0 | 159 | 0.4477 | 0.9 |
| 0.0075 | 160.0 | 160 | 0.3548 | 0.9333 |
| 0.0075 | 161.0 | 161 | 0.2648 | 0.9333 |
| 0.0075 | 162.0 | 162 | 0.3269 | 0.9333 |
| 0.0075 | 163.0 | 163 | 0.5231 | 0.8 |
| 0.0075 | 164.0 | 164 | 0.2841 | 0.8667 |
| 0.0075 | 165.0 | 165 | 0.3145 | 0.9 |
| 0.0075 | 166.0 | 166 | 0.4291 | 0.8667 |
| 0.0075 | 167.0 | 167 | 0.5396 | 0.8333 |
| 0.0075 | 168.0 | 168 | 0.3873 | 0.9 |
| 0.0075 | 169.0 | 169 | 0.3150 | 0.9333 |
| 0.0062 | 170.0 | 170 | 0.3809 | 0.9 |
| 0.0062 | 171.0 | 171 | 0.2062 | 0.9 |
| 0.0062 | 172.0 | 172 | 0.3242 | 0.8667 |
| 0.0062 | 173.0 | 173 | 0.3500 | 0.9 |
| 0.0062 | 174.0 | 174 | 0.2784 | 0.9 |
| 0.0062 | 175.0 | 175 | 0.2553 | 0.8667 |
| 0.0062 | 176.0 | 176 | 0.4475 | 0.9 |
| 0.0062 | 177.0 | 177 | 0.3598 | 0.9333 |
| 0.0062 | 178.0 | 178 | 0.3488 | 0.8333 |
| 0.0062 | 179.0 | 179 | 0.2966 | 0.8333 |
| 0.0056 | 180.0 | 180 | 0.4635 | 0.8 |
| 0.0056 | 181.0 | 181 | 0.2402 | 0.9 |
| 0.0056 | 182.0 | 182 | 0.3984 | 0.8667 |
| 0.0056 | 183.0 | 183 | 0.2032 | 0.9 |
| 0.0056 | 184.0 | 184 | 0.2633 | 0.8333 |
| 0.0056 | 185.0 | 185 | 0.3015 | 0.9333 |
| 0.0056 | 186.0 | 186 | 0.3774 | 0.9 |
| 0.0056 | 187.0 | 187 | 0.5716 | 0.8333 |
| 0.0056 | 188.0 | 188 | 0.3961 | 0.8667 |
| 0.0056 | 189.0 | 189 | 0.3915 | 0.9 |
| 0.0048 | 190.0 | 190 | 0.3788 | 0.8333 |
| 0.0048 | 191.0 | 191 | 0.4823 | 0.8667 |
| 0.0048 | 192.0 | 192 | 0.3158 | 0.8667 |
| 0.0048 | 193.0 | 193 | 0.2184 | 0.8667 |
| 0.0048 | 194.0 | 194 | 0.3363 | 0.8667 |
| 0.0048 | 195.0 | 195 | 0.3996 | 0.9 |
| 0.0048 | 196.0 | 196 | 0.2263 | 0.8333 |
| 0.0048 | 197.0 | 197 | 0.4634 | 0.8333 |
| 0.0048 | 198.0 | 198 | 0.3492 | 0.8667 |
| 0.0048 | 199.0 | 199 | 0.3086 | 0.9 |
| 0.0034 | 200.0 | 200 | 0.3747 | 0.9 |

### Framework versions

- Transformers 4.47.0
- Pytorch 2.5.1+cu121
- Datasets 3.3.1
- Tokenizers 0.21.0
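Since the usage sections above are still empty, a quick inference check can be run along these lines (a minimal sketch; the Hub repo id and image path are placeholders, not stated in this card):

```python
from transformers import pipeline

# Hypothetical repo id -- substitute the actual Hub path of this fine-tune.
classifier = pipeline("image-classification", model="your-username/jaffe_V2_200_1")

# Prints the top label/score pairs for a face image (path is a placeholder).
print(classifier("face.jpg"))
```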
[ "an", "di", "fe", "ha", "ne", "sa", "su" ]
RobertoSonic/swinv2-tiny-patch4-window8-256-dmae-humeda-DAV60
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# swinv2-tiny-patch4-window8-256-dmae-humeda-DAV60

This model is a fine-tuned version of [microsoft/swinv2-tiny-patch4-window8-256](https://huggingface.co/microsoft/swinv2-tiny-patch4-window8-256) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.5934
- Accuracy: 0.8333

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 4e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 64
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 40
- mixed_precision_training: Native AMP

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-------:|:----:|:---------------:|:--------:|
| 1.5779 | 0.9630 | 13 | 1.5147 | 0.3509 |
| 1.532 | 1.9630 | 26 | 1.3726 | 0.5395 |
| 1.459 | 2.9630 | 39 | 0.9852 | 0.6184 |
| 1.0357 | 3.9630 | 52 | 0.8558 | 0.6140 |
| 0.9678 | 4.9630 | 65 | 0.7997 | 0.6623 |
| 0.9602 | 5.9630 | 78 | 0.7789 | 0.6711 |
| 0.7664 | 6.9630 | 91 | 0.8890 | 0.6184 |
| 0.8537 | 7.9630 | 104 | 0.6860 | 0.7105 |
| 0.7566 | 8.9630 | 117 | 0.6138 | 0.7588 |
| 0.7025 | 9.9630 | 130 | 0.5207 | 0.7939 |
| 0.6081 | 10.9630 | 143 | 0.5644 | 0.7763 |
| 0.631 | 11.9630 | 156 | 0.5859 | 0.7544 |
| 0.6163 | 12.9630 | 169 | 0.7182 | 0.7105 |
| 0.5743 | 13.9630 | 182 | 0.5643 | 0.7763 |
| 0.5752 | 14.9630 | 195 | 0.5028 | 0.7939 |
| 0.461 | 15.9630 | 208 | 0.5465 | 0.7807 |
| 0.4145 | 16.9630 | 221 | 0.5868 | 0.7719 |
| 0.4065 | 17.9630 | 234 | 0.5470 | 0.7807 |
| 0.501 | 18.9630 | 247 | 0.5406 | 0.7939 |
| 0.4374 | 19.9630 | 260 | 0.5534 | 0.7939 |
| 0.4614 | 20.9630 | 273 | 0.5485 | 0.8158 |
| 0.3836 | 21.9630 | 286 | 0.6217 | 0.7851 |
| 0.4474 | 22.9630 | 299 | 0.6069 | 0.7763 |
| 0.3893 | 23.9630 | 312 | 0.5981 | 0.7939 |
| 0.3548 | 24.9630 | 325 | 0.6003 | 0.7895 |
| 0.3454 | 25.9630 | 338 | 0.5897 | 0.8114 |
| 0.2857 | 26.9630 | 351 | 0.6031 | 0.8158 |
| 0.3282 | 27.9630 | 364 | 0.6140 | 0.7763 |
| 0.3088 | 28.9630 | 377 | 0.5934 | 0.8333 |
| 0.2943 | 29.9630 | 390 | 0.6545 | 0.7895 |
| 0.2857 | 30.9630 | 403 | 0.6423 | 0.7851 |
| 0.2882 | 31.9630 | 416 | 0.6610 | 0.7939 |
| 0.3342 | 32.9630 | 429 | 0.6815 | 0.7807 |
| 0.2679 | 33.9630 | 442 | 0.6481 | 0.8114 |
| 0.2756 | 34.9630 | 455 | 0.6518 | 0.8158 |
| 0.2733 | 35.9630 | 468 | 0.6514 | 0.8026 |
| 0.2749 | 36.9630 | 481 | 0.6525 | 0.8070 |
| 0.2416 | 37.9630 | 494 | 0.6447 | 0.8202 |
| 0.2766 | 38.9630 | 507 | 0.6359 | 0.8202 |
| 0.2528 | 39.9630 | 520 | 0.6481 | 0.8158 |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.5.1+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
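For readers who want to reproduce this setup, the listed hyperparameters map onto `TrainingArguments` roughly as follows (a sketch assuming a standard `Trainer` run; the output directory is a placeholder):

```python
from transformers import TrainingArguments

# Mirrors the hyperparameters listed above; the effective batch size is
# 16 * 4 gradient-accumulation steps = 64, matching total_train_batch_size.
args = TrainingArguments(
    output_dir="swinv2-tiny-dmae",   # placeholder path
    learning_rate=4e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    gradient_accumulation_steps=4,
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=40,
    seed=42,
    fp16=True,                       # "Native AMP" mixed precision
)
```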
[ "avanzada", "avanzada humeda", "leve", "moderada", "no dmae" ]
Anjnay/vit-base-oxford-iiit-pets
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# vit-base-oxford-iiit-pets

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2232
- Accuracy: 0.9463
- F1: 0.9468
- Precision: 0.9486
- Recall: 0.9463

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0005
- train_batch_size: 64
- eval_batch_size: 128
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 128
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 5
- mixed_precision_training: Native AMP

### Training results

### Framework versions

- Transformers 4.49.0
- Pytorch 2.6.0+cpu
- Datasets 3.3.2
- Tokenizers 0.21.0
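Because this card reports accuracy, F1, precision, and recall together, a `compute_metrics` function along these lines would produce that metric set during evaluation (a sketch using the `evaluate` library; weighted averaging is an assumption, since the card does not state the averaging mode):

```python
import numpy as np
import evaluate

accuracy = evaluate.load("accuracy")
f1 = evaluate.load("f1")
precision = evaluate.load("precision")
recall = evaluate.load("recall")

def compute_metrics(eval_pred):
    logits, labels = eval_pred
    preds = np.argmax(logits, axis=-1)
    return {
        "accuracy": accuracy.compute(predictions=preds, references=labels)["accuracy"],
        # Weighted averaging is assumed for the multi-class metrics.
        "f1": f1.compute(predictions=preds, references=labels, average="weighted")["f1"],
        "precision": precision.compute(predictions=preds, references=labels, average="weighted")["precision"],
        "recall": recall.compute(predictions=preds, references=labels, average="weighted")["recall"],
    }
```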
[ "0", "1", "2", "3", "4", "5" ]
prithivMLmods/Deepfake-Quality-Classifier-SigLIP2
![13.png](https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/K1tNeqC5aS_eZ37OYyhhb.png)

# **Deepfake-Quality-Classifier-SigLIP2**

> **Deepfake-Quality-Classifier-SigLIP2** is an image classification vision-language encoder model fine-tuned from **google/siglip2-base-patch16-224** for a single-label classification task. It is designed to assess the quality of deepfake images using the **SiglipForImageClassification** architecture.

```python
Classification Report:
                       precision    recall  f1-score   support

    Issue In Deepfake     0.8350    0.7870    0.8103      3750
High Quality Deepfake     0.8025    0.8500    0.8257      3750

             accuracy                         0.8185      7500
            macro avg     0.8188    0.8185    0.8180      7500
         weighted avg     0.8188    0.8185    0.8180      7500
```

The model categorizes images into two classes:
- **Class 0:** "Issue In Deepfake" – indicating that the deepfake image has noticeable flaws or inconsistencies.
- **Class 1:** "High Quality Deepfake" – indicating that the deepfake image is of high quality and appears more realistic.

# **Run with Transformers🤗**

```python
!pip install -q transformers torch pillow gradio
```

```python
import gradio as gr
from transformers import AutoImageProcessor
from transformers import SiglipForImageClassification
from transformers.image_utils import load_image
from PIL import Image
import torch

# Load model and processor
model_name = "prithivMLmods/Deepfake-Quality-Classifier-SigLIP2"
model = SiglipForImageClassification.from_pretrained(model_name)
processor = AutoImageProcessor.from_pretrained(model_name)

def deepfake_detection(image):
    """Predicts deepfake probability scores for an image."""
    image = Image.fromarray(image).convert("RGB")
    inputs = processor(images=image, return_tensors="pt")

    with torch.no_grad():
        outputs = model(**inputs)
        logits = outputs.logits
        probs = torch.nn.functional.softmax(logits, dim=1).squeeze().tolist()

    labels = {0: "Issue In Deepfake", 1: "High Quality Deepfake"}
    predictions = {labels[i]: round(probs[i], 3) for i in range(len(probs))}

    return predictions

# Create Gradio interface
iface = gr.Interface(
    fn=deepfake_detection,
    inputs=gr.Image(type="numpy"),
    outputs=gr.Label(label="Prediction Scores"),
    title="Deepfake Quality Detection",
    description="Upload an image to check its deepfake probability scores."
)

# Launch the app
if __name__ == "__main__":
    iface.launch()
```

# **Intended Use:**

The **Deepfake-Quality-Classifier-SigLIP2** model is designed to evaluate the quality of deepfake images. It helps distinguish between high-quality deepfakes and those with noticeable issues. Potential use cases include:

- **Deepfake Quality Assessment:** Identifying whether a generated deepfake meets high-quality standards or contains artifacts and inconsistencies.
- **Content Moderation:** Assisting in filtering low-quality deepfake images in digital media platforms.
- **Forensic Analysis:** Supporting researchers and analysts in assessing the credibility of synthetic images.
- **Deepfake Model Benchmarking:** Helping developers compare and improve deepfake generation models.
[ "issue in deepfake", "high quality deepfake" ]
prithivMLmods/Deepfake-Quality-Classifier2-SigLIP2
![14.png](https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/IgM9DtsT7Pn8IfELuXmta.png)

# **Deepfake-Quality-Classifier2-SigLIP2**

> **Deepfake-Quality-Classifier2-SigLIP2** is an image classification vision-language encoder model fine-tuned from **google/siglip2-base-patch16-224** for a single-label classification task. It is designed to assess the quality of deepfake images using the **SiglipForImageClassification** architecture.

```python
Classification Report:
                       precision    recall  f1-score   support

    Issue In Deepfake     0.8352    0.7800    0.8067      5000
High Quality Deepfake     0.7951    0.8500    0.8217      5000

             accuracy                         0.8245     10000
            macro avg     0.8152    0.8245    0.8142     10000
         weighted avg     0.8152    0.8245    0.8142     10000
```

The model categorizes images into two classes:
- **Class 0:** "Issue In Deepfake" – indicating that the deepfake image has noticeable flaws or inconsistencies.
- **Class 1:** "High Quality Deepfake" – indicating that the deepfake image is of high quality and appears more realistic.

# **Run with Transformers🤗**

```python
!pip install -q transformers torch pillow gradio
```

```python
import gradio as gr
from transformers import AutoImageProcessor
from transformers import SiglipForImageClassification
from transformers.image_utils import load_image
from PIL import Image
import torch

# Load model and processor
model_name = "prithivMLmods/Deepfake-Quality-Classifier2-SigLIP2"
model = SiglipForImageClassification.from_pretrained(model_name)
processor = AutoImageProcessor.from_pretrained(model_name)

def deepfake_detection(image):
    """Predicts deepfake probability scores for an image."""
    image = Image.fromarray(image).convert("RGB")
    inputs = processor(images=image, return_tensors="pt")

    with torch.no_grad():
        outputs = model(**inputs)
        logits = outputs.logits
        probs = torch.nn.functional.softmax(logits, dim=1).squeeze().tolist()

    labels = {"0": "Issue In Deepfake", "1": "High Quality Deepfake"}
    predictions = {labels[str(i)]: round(probs[i], 3) for i in range(len(probs))}

    return predictions

# Create Gradio interface
iface = gr.Interface(
    fn=deepfake_detection,
    inputs=gr.Image(type="numpy"),
    outputs=gr.Label(label="Prediction Scores"),
    title="Deepfake Quality Detection",
    description="Upload an image to check its deepfake probability scores."
)

# Launch the app
if __name__ == "__main__":
    iface.launch()
```

# **Intended Use:**

The **Deepfake-Quality-Classifier2-SigLIP2** model is designed to evaluate the quality of deepfake images. It helps distinguish between high-quality deepfakes and those with noticeable issues. Potential use cases include:

- **Deepfake Quality Assessment:** Identifying whether a generated deepfake meets high-quality standards or contains artifacts and inconsistencies.
- **Content Moderation:** Assisting in filtering low-quality deepfake images in digital media platforms.
- **Forensic Analysis:** Supporting researchers and analysts in assessing the credibility of synthetic images.
- **Deepfake Model Benchmarking:** Helping developers compare and improve deepfake generation models.
[ "issue in deepfake", "high quality deepfake" ]
Joshhhhhhhhhh/vit-base-oxford-iiit-pets
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# vit-base-oxford-iiit-pets

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the pcuenq/oxford-pets dataset.
It achieves the following results on the evaluation set:
- Loss: 0.2292
- Accuracy: 0.9269

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 5

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.377 | 1.0 | 370 | 0.2943 | 0.9269 |
| 0.2115 | 2.0 | 740 | 0.2292 | 0.9296 |
| 0.1563 | 3.0 | 1110 | 0.2062 | 0.9405 |
| 0.1437 | 4.0 | 1480 | 0.1965 | 0.9418 |
| 0.1214 | 5.0 | 1850 | 0.1949 | 0.9405 |

### Framework versions

- Transformers 4.46.3
- Pytorch 2.4.1+cu121
- Datasets 3.1.0
- Tokenizers 0.20.3
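To check which of the 37 pet breeds each output index corresponds to, the label mapping stored in the config can be inspected (a minimal sketch; it assumes the fine-tune is available on the Hub under this repo id):

```python
from transformers import AutoConfig

# Assumes the fine-tune was pushed to the Hub under this repo id.
config = AutoConfig.from_pretrained("Joshhhhhhhhhh/vit-base-oxford-iiit-pets")
for idx, name in sorted(config.id2label.items()):
    print(idx, name)
```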
[ "siamese", "birman", "shiba inu", "staffordshire bull terrier", "basset hound", "bombay", "japanese chin", "chihuahua", "german shorthaired", "pomeranian", "beagle", "english cocker spaniel", "american pit bull terrier", "ragdoll", "persian", "egyptian mau", "miniature pinscher", "sphynx", "maine coon", "keeshond", "yorkshire terrier", "havanese", "leonberger", "wheaten terrier", "american bulldog", "english setter", "boxer", "newfoundland", "bengal", "samoyed", "british shorthair", "great pyrenees", "abyssinian", "pug", "saint bernard", "russian blue", "scottish terrier" ]
paacamo/image-classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# image-classification

This model is a fine-tuned version of [microsoft/resnet-50](https://huggingface.co/microsoft/resnet-50) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.8185
- Accuracy: 0.8203

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 16
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 10

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 1.0965 | 1.0 | 65 | 1.0814 | 0.5113 |
| 1.0585 | 2.0 | 130 | 1.0459 | 0.6466 |
| 1.0026 | 3.0 | 195 | 0.9979 | 0.7068 |
| 0.9557 | 4.0 | 260 | 0.9329 | 0.8120 |
| 0.896 | 5.0 | 325 | 0.8637 | 0.7820 |
| 0.8539 | 6.0 | 390 | 0.8104 | 0.8271 |
| 0.8085 | 7.0 | 455 | 0.7348 | 0.7744 |
| 0.7525 | 8.0 | 520 | 0.7049 | 0.8120 |
| 0.7449 | 9.0 | 585 | 0.6939 | 0.8195 |
| 0.7167 | 10.0 | 650 | 0.6809 | 0.8271 |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.5.1+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
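A hedged example of running this fine-tuned ResNet-50 on a bean-leaf photo (the image path is a placeholder; the class names printed via `id2label` are the three labels listed for this model):

```python
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

# Assumes the fine-tune is available on the Hub under this repo id.
model_name = "paacamo/image-classification"
processor = AutoImageProcessor.from_pretrained(model_name)
model = AutoModelForImageClassification.from_pretrained(model_name)

image = Image.open("leaf.jpg").convert("RGB")  # placeholder image path
inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Softmax over the three bean-disease classes and print each score.
probs = logits.softmax(dim=-1).squeeze()
for i, p in enumerate(probs.tolist()):
    print(model.config.id2label[i], round(p, 3))
```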
[ "angular_leaf_spot", "bean_rust", "healthy" ]
prithivMLmods/Gym-Workout-Classifier-SigLIP2
![sdfvdfv.png](https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/kJ1YXUFjmOahI6LmpI--x.png)

# **Gym-Workout-Classifier-SigLIP2**

> **Gym-Workout-Classifier-SigLIP2** is an image classification vision-language encoder model fine-tuned from **google/siglip2-base-patch16-224** for a single-label classification task. It is designed to classify gym workout exercises using the **SiglipForImageClassification** architecture.

```py
Classification Report:
                     precision    recall  f1-score   support

barbell biceps curl     0.9613    0.9574    0.9593       493
bench press             0.9402    0.9359    0.9381       437
chest fly machine       0.9694    0.9484    0.9588       368
deadlift                0.9833    0.9542    0.9685       371
decline bench press     0.9884    0.9499    0.9688       359
hammer curl             0.9917    0.9398    0.9651       382
hip thrust              0.9692    0.9717    0.9705       389
incline bench press     0.9297    0.9588    0.9440       510
lat pulldown            0.9607    0.9735    0.9670       452
lateral raises          0.9539    0.9814    0.9674       590
leg extension           0.9573    0.9854    0.9712       410
leg raises              0.9939    0.9109    0.9506       359
plank                   0.9828    0.9856    0.9842       695
pull up                 0.9882    0.9744    0.9813       430
push up                 0.9382    0.9762    0.9568       420
romanian deadlift       0.9617    0.9716    0.9667       388
russian twist           0.8702    0.9918    0.9270       365
shoulder press          0.9499    0.9525    0.9512       358
squat                   0.9761    0.9441    0.9598       519
t bar row               0.9806    0.9743    0.9774       467
tricep dips             0.9834    0.9713    0.9773       488
tricep pushdown         0.9837    0.9657    0.9746       437

accuracy                                    0.9638      9687
macro avg               0.9643    0.9625    0.9630      9687
weighted avg            0.9647    0.9638    0.9639      9687
```

![download.png](https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/czlXLMN1O6q2yL4364ySm.png)

The model categorizes images into 22 workout classes:
- **Class 0:** "barbell biceps curl"
- **Class 1:** "bench press"
- **Class 2:** "chest fly machine"
- **Class 3:** "deadlift"
- **Class 4:** "decline bench press"
- **Class 5:** "hammer curl"
- **Class 6:** "hip thrust"
- **Class 7:** "incline bench press"
- **Class 8:** "lat pulldown"
- **Class 9:** "lateral raises"
- **Class 10:** "leg extension"
- **Class 11:** "leg raises"
- **Class 12:** "plank"
- **Class 13:** "pull up"
- **Class 14:** "push up"
- **Class 15:** "romanian deadlift"
- **Class 16:** "russian twist"
- **Class 17:** "shoulder press"
- **Class 18:** "squat"
- **Class 19:** "t bar row"
- **Class 20:** "tricep dips"
- **Class 21:** "tricep pushdown"

# **Dataset ID2LABEL**

```py
from datasets import load_dataset

# Load the dataset
dataset = load_dataset("YOUR-DATASET-HERE")

# Extract unique labels
labels = dataset["train"].features["label"].names

# Create id2label mapping
id2label = {str(i): label for i, label in enumerate(labels)}

# Print the mapping
print(id2label)
```

# **Run with Transformers🤗**

```python
!pip install -q transformers torch pillow gradio
```

```python
import gradio as gr
from transformers import AutoImageProcessor
from transformers import SiglipForImageClassification
from transformers.image_utils import load_image
from PIL import Image
import torch

# Load model and processor
model_name = "prithivMLmods/Gym-Workout-Classifier-SigLIP2"
model = SiglipForImageClassification.from_pretrained(model_name)
processor = AutoImageProcessor.from_pretrained(model_name)

def workout_classification(image):
    """Predicts workout exercise classification for an image."""
    image = Image.fromarray(image).convert("RGB")
    inputs = processor(images=image, return_tensors="pt")

    with torch.no_grad():
        outputs = model(**inputs)
        logits = outputs.logits
        probs = torch.nn.functional.softmax(logits, dim=1).squeeze().tolist()

    labels = {
        "0": "barbell biceps curl", "1": "bench press", "2": "chest fly machine",
        "3": "deadlift", "4": "decline bench press", "5": "hammer curl",
        "6": "hip thrust", "7": "incline bench press", "8": "lat pulldown",
        "9": "lateral raises", "10": "leg extension", "11": "leg raises",
        "12": "plank", "13": "pull up", "14": "push up",
        "15": "romanian deadlift", "16": "russian twist", "17": "shoulder press",
        "18": "squat", "19": "t bar row", "20": "tricep dips",
        "21": "tricep pushdown"
    }
    predictions = {labels[str(i)]: round(probs[i], 3) for i in range(len(probs))}

    return predictions

# Create Gradio interface
iface = gr.Interface(
    fn=workout_classification,
    inputs=gr.Image(type="numpy"),
    outputs=gr.Label(label="Prediction Scores"),
    title="Gym Workout Classification",
    description="Upload an image to classify the workout exercise."
)

# Launch the app
if __name__ == "__main__":
    iface.launch()
```

# **Intended Use:**

The **Gym-Workout-Classifier-SigLIP2** model is designed to classify different gym exercises based on images. Potential use cases include:

- **Workout Tracking:** Identifying exercises performed during a workout session.
- **Personal Training Assistance:** Helping trainers analyze and correct exercise form.
- **Gym Activity Monitoring:** Automating exercise logging and analysis in fitness apps.
- **AI-Powered Fitness Coaching:** Supporting AI-based fitness programs with real-time workout recognition.
[ "barbell biceps curl", "bench press", "chest fly machine", "deadlift", "decline bench press", "hammer curl", "hip thrust", "incline bench press", "lat pulldown", "lateral raises", "leg extension", "leg raises", "plank", "pull up", "push up", "romanian deadlift", "russian twist", "shoulder press", "squat", "t bar row", "tricep dips", "tricep pushdown" ]
shivrajbadu/food_cv_transformer_img_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should
probably proofread and complete it, then remove this comment. -->

# shivrajbadu/food_cv_transformer_img_classifier

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Train Loss: 0.4980
- Validation Loss: 0.4167
- Train Accuracy: 0.912
- Epoch: 1

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 8000, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01}
- training_precision: float32

### Training results

| Train Loss | Validation Loss | Train Accuracy | Epoch |
|:----------:|:---------------:|:--------------:|:-----:|
| 0.6866 | 0.5529 | 0.9 | 0 |
| 0.4980 | 0.4167 | 0.912 | 1 |

### Framework versions

- Transformers 4.48.3
- TensorFlow 2.18.0
- Datasets 3.3.2
- Tokenizers 0.21.0
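The optimizer block above is the serialized form of the AdamWeightDecay optimizer that `transformers` builds for Keras training; it can be recreated roughly like this (a sketch; `num_train_steps` is set to match the 8000 decay steps in the serialized config):

```python
from transformers import create_optimizer

# Recreates the AdamWeightDecay + linear PolynomialDecay schedule described above.
optimizer, lr_schedule = create_optimizer(
    init_lr=3e-5,
    num_train_steps=8000,   # matches decay_steps in the serialized config
    num_warmup_steps=0,     # assumption: no warmup appears in the config
    weight_decay_rate=0.01,
)
```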
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
prithivMLmods/Facial-Emotion-Detection-SigLIP2
![fsedfs.png](https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/IDBZcJQvQ2UvmczGMYS-W.png)

# **Facial-Emotion-Detection-SigLIP2**

> **Facial-Emotion-Detection-SigLIP2** is an image classification vision-language encoder model fine-tuned from **google/siglip2-base-patch16-224** for a single-label classification task. It is designed to classify different facial emotions using the **SiglipForImageClassification** architecture.

```py
Classification Report:
              precision    recall  f1-score   support

      Ahegao     0.9916    0.9801    0.9858      1205
       Angry     0.8633    0.7502    0.8028      1313
       Happy     0.9494    0.9684    0.9588      3740
     Neutral     0.7635    0.8781    0.8168      4027
         Sad     0.8595    0.7794    0.8175      3934
    Surprise     0.9025    0.8104    0.8540      1234

    accuracy                         0.8665     15453
   macro avg     0.8883    0.8611    0.8726     15453
weighted avg     0.8703    0.8665    0.8663     15453
```

![download.png](https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/j29921aYUCg9a5ZqXQ8P2.png)

The model categorizes images into 6 facial emotion classes:
- Class 0: "Ahegao"
- Class 1: "Angry"
- Class 2: "Happy"
- Class 3: "Neutral"
- Class 4: "Sad"
- Class 5: "Surprise"

```python
!pip install -q transformers torch pillow gradio
```

```python
import gradio as gr
from transformers import AutoImageProcessor
from transformers import SiglipForImageClassification
from transformers.image_utils import load_image
from PIL import Image
import torch

# Load model and processor
model_name = "prithivMLmods/Facial-Emotion-Detection-SigLIP2"
model = SiglipForImageClassification.from_pretrained(model_name)
processor = AutoImageProcessor.from_pretrained(model_name)

def emotion_classification(image):
    """Predicts facial emotion classification for an image."""
    image = Image.fromarray(image).convert("RGB")
    inputs = processor(images=image, return_tensors="pt")

    with torch.no_grad():
        outputs = model(**inputs)
        logits = outputs.logits
        probs = torch.nn.functional.softmax(logits, dim=1).squeeze().tolist()

    labels = {
        "0": "Ahegao", "1": "Angry", "2": "Happy",
        "3": "Neutral", "4": "Sad", "5": "Surprise"
    }
    predictions = {labels[str(i)]: round(probs[i], 3) for i in range(len(probs))}

    return predictions

# Create Gradio interface
iface = gr.Interface(
    fn=emotion_classification,
    inputs=gr.Image(type="numpy"),
    outputs=gr.Label(label="Prediction Scores"),
    title="Facial Emotion Detection",
    description="Upload an image to classify the facial emotion."
)

# Launch the app
if __name__ == "__main__":
    iface.launch()
```

# **Intended Use:**

The **Facial-Emotion-Detection-SigLIP2** model is designed to classify different facial emotions based on images. Potential use cases include:

- **Mental Health Monitoring:** Detecting emotional states for well-being analysis.
- **Human-Computer Interaction:** Enhancing user experience by recognizing emotions.
- **Security & Surveillance:** Identifying suspicious or aggressive behaviors.
- **AI-Powered Assistants:** Supporting AI-based emotion recognition for various applications.
[ "ahegao", "angry", "happy", "neutral", "sad", "surprise" ]
rmezapi/dementia-vit
This project was intended to test the limits of the ViT on a challenging dementia dataset. The data used can be found on Hugging Face at: https://huggingface.co/datasets/Falah/Alzheimer_MRI.

The project closely follows these tutorials:
- https://www.youtube.com/watch?v=r88L_yLJ4CE&ab_channel=code_your_own_AI
- https://www.youtube.com/watch?v=qU7wO02urYU&ab_channel=JamesBriggs

I modified the code presented in the videos and tuned all parameters to optimize performance, using mostly the same libraries and tools. This is a practice project for myself as I return to coding and designing ML models after dedicating time to AI/ML theory (model architectures, transfer learning).

![image/png](https://cdn-uploads.huggingface.co/production/uploads/65d291d26130ef7be00c9753/1K_D874KlBp8HWq45NFjq.png)

![image/png](https://cdn-uploads.huggingface.co/production/uploads/65d291d26130ef7be00c9753/n1FICtrHxqievgXt0vdXk.png)

![image/png](https://cdn-uploads.huggingface.co/production/uploads/65d291d26130ef7be00c9753/YxJjXB3KH5DwufYvaSC2X.png)

![image/png](https://cdn-uploads.huggingface.co/production/uploads/65d291d26130ef7be00c9753/nUvQVvbiBUhTSSxR1KK2y.png)
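For readers wanting to retrace the experiment, a minimal fine-tuning skeleton on the linked dataset might look like this (a sketch only; the base checkpoint and all hyperparameters here are assumptions, not the exact values used in this project):

```python
import torch
from datasets import load_dataset
from transformers import (AutoImageProcessor, AutoModelForImageClassification,
                          Trainer, TrainingArguments)

dataset = load_dataset("Falah/Alzheimer_MRI")
labels = dataset["train"].features["label"].names

checkpoint = "google/vit-base-patch16-224-in21k"  # assumed base checkpoint
processor = AutoImageProcessor.from_pretrained(checkpoint)
model = AutoModelForImageClassification.from_pretrained(
    checkpoint, num_labels=len(labels), ignore_mismatched_sizes=True
)

def transform(batch):
    # Convert each MRI slice to RGB and produce ViT pixel values on the fly.
    batch["pixel_values"] = [
        processor(img.convert("RGB"), return_tensors="pt")["pixel_values"][0]
        for img in batch["image"]
    ]
    return batch

def collate_fn(examples):
    return {
        "pixel_values": torch.stack([ex["pixel_values"] for ex in examples]),
        "labels": torch.tensor([ex["label"] for ex in examples]),
    }

dataset = dataset.with_transform(transform)

trainer = Trainer(
    model=model,
    args=TrainingArguments(output_dir="dementia-vit", remove_unused_columns=False),
    train_dataset=dataset["train"],
    data_collator=collate_fn,
)
# trainer.train()  # uncomment to run the fine-tune
```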
[ "label_0", "label_1", "label_2", "label_3" ]
parasparani/Swinv2_tiny_Finetuned_ESP
# Model Card for Model ID

<!-- Provide a quick summary of what the model is/does. -->

## Model Details

### Model Description

<!-- Provide a longer summary of what this model is. -->

This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated.

- **Developed by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Model type:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
- **Finetuned from model [optional]:** [More Information Needed]

### Model Sources [optional]

<!-- Provide the basic links for the model. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->

### Direct Use

<!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->

[More Information Needed]

### Downstream Use [optional]

<!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.

## How to Get Started with the Model

Use the code below to get started with the model.

[More Information Needed]

## Training Details

### Training Data

<!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->

[More Information Needed]

### Training Procedure

<!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->

#### Preprocessing [optional]

[More Information Needed]

#### Training Hyperparameters

- **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->

#### Speeds, Sizes, Times [optional]

<!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->

[More Information Needed]

## Evaluation

<!-- This section describes the evaluation protocols and provides the results. -->

### Testing Data, Factors & Metrics

#### Testing Data

<!-- This should link to a Dataset Card if possible. -->

[More Information Needed]

#### Factors

<!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->

[More Information Needed]

#### Metrics

<!-- These are the evaluation metrics being used, ideally with a description of why. -->

[More Information Needed]

### Results

[More Information Needed]

#### Summary

## Model Examination [optional]

<!-- Relevant interpretability work for the model goes here -->

[More Information Needed]

## Environmental Impact

<!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->

Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).

- **Hardware Type:** [More Information Needed]
- **Hours used:** [More Information Needed]
- **Cloud Provider:** [More Information Needed]
- **Compute Region:** [More Information Needed]
- **Carbon Emitted:** [More Information Needed]

## Technical Specifications [optional]

### Model Architecture and Objective

[More Information Needed]

### Compute Infrastructure

[More Information Needed]

#### Hardware

[More Information Needed]

#### Software

[More Information Needed]

## Citation [optional]

<!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Model Card Authors [optional]

[More Information Needed]

## Model Card Contact

[More Information Needed]
[ "0", "1" ]
prithivMLmods/Augmented-Waste-Classifier-SigLIP2
![awsdawd.png](https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/mRNCplvIRLYqoqRT2JKpP.png)

# Augmented-Waste-Classifier-SigLIP2

> **Augmented-Waste-Classifier-SigLIP2** is an image classification vision-language encoder model fine-tuned from **google/siglip2-base-patch16-224** for a single-label classification task. It is designed to classify waste types using the **SiglipForImageClassification** architecture.

```py
Classification Report:
              precision    recall  f1-score   support

     Battery     0.9987    0.9987    0.9987      3840
  Biological     0.9998    0.9960    0.9979      4036
   Cardboard     0.9956    0.9909    0.9932      3628
     Clothes     0.9957    0.9914    0.9935      5336
       Glass     0.9800    0.9914    0.9856      4048
       Metal     0.9892    0.9965    0.9929      3136
       Paper     0.9937    0.9891    0.9914      4308
     Plastic     0.9865    0.9798    0.9831      3568
       Shoes     0.9876    0.9990    0.9933      3990
       Trash     1.0000    0.9939    0.9970      2796

    accuracy                         0.9926     38686
   macro avg     0.9927    0.9927    0.9927     38686
weighted avg     0.9926    0.9926    0.9926     38686
```

![download.png](https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/0lXpKNyqS0i8ZjTRr42gl.png)

The model categorizes images into 10 waste classes:
- Class 0: "Battery"
- Class 1: "Biological"
- Class 2: "Cardboard"
- Class 3: "Clothes"
- Class 4: "Glass"
- Class 5: "Metal"
- Class 6: "Paper"
- Class 7: "Plastic"
- Class 8: "Shoes"
- Class 9: "Trash"

```python
!pip install -q transformers torch pillow gradio
```

```python
import gradio as gr
from transformers import AutoImageProcessor
from transformers import SiglipForImageClassification
from transformers.image_utils import load_image
from PIL import Image
import torch

# Load model and processor
model_name = "prithivMLmods/Augmented-Waste-Classifier-SigLIP2"
model = SiglipForImageClassification.from_pretrained(model_name)
processor = AutoImageProcessor.from_pretrained(model_name)

def waste_classification(image):
    """Predicts waste classification for an image."""
    image = Image.fromarray(image).convert("RGB")
    inputs = processor(images=image, return_tensors="pt")

    with torch.no_grad():
        outputs = model(**inputs)
        logits = outputs.logits
        probs = torch.nn.functional.softmax(logits, dim=1).squeeze().tolist()

    labels = {
        "0": "Battery", "1": "Biological", "2": "Cardboard", "3": "Clothes",
        "4": "Glass", "5": "Metal", "6": "Paper", "7": "Plastic",
        "8": "Shoes", "9": "Trash"
    }
    predictions = {labels[str(i)]: round(probs[i], 3) for i in range(len(probs))}

    return predictions

# Create Gradio interface
iface = gr.Interface(
    fn=waste_classification,
    inputs=gr.Image(type="numpy"),
    outputs=gr.Label(label="Prediction Scores"),
    title="Augmented Waste Classification",
    description="Upload an image to classify the type of waste."
)

# Launch the app
if __name__ == "__main__":
    iface.launch()
```

# Intended Use:

The **Augmented-Waste-Classifier-SigLIP2** model is designed to classify different types of waste based on images. Potential use cases include:

- **Waste Management:** Identifying and categorizing waste materials for proper disposal.
- **Recycling Assistance:** Helping users determine recyclable materials.
- **Environmental Monitoring:** Automating waste classification for smart cities.
- **AI-Powered Sustainability Solutions:** Supporting AI-based waste sorting systems to improve recycling efficiency.
[ "battery", "biological", "cardboard", "clothes", "glass", "metal", "paper", "plastic", "shoes", "trash" ]
zicheng1225/vit-base-beans
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# vit-base-beans

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 1337
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 5.0

### Training results

### Framework versions

- Transformers 4.50.0.dev0
- Pytorch 2.6.0+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
[ "angular_leaf_spot", "bean_rust", "healthy" ]
ricardoSLabs/fer_plus_V2_50_1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# fer_plus_V2_50_1

This model is a fine-tuned version of [WinKawaks/vit-tiny-patch16-224](https://huggingface.co/WinKawaks/vit-tiny-patch16-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- eval_loss: 0.5401
- eval_accuracy: 0.8112
- eval_runtime: 46.8732
- eval_samples_per_second: 121.135
- eval_steps_per_second: 3.797
- epoch: 10.0
- step: 1780

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 50

### Framework versions

- Transformers 4.47.0
- Pytorch 2.5.1+cu121
- Datasets 3.3.1
- Tokenizers 0.21.0
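As a sanity check on the throughput figures above, the implied evaluation-set size can be derived directly from the reported numbers (a worked check, not data from the card itself):

```python
# Values reported in this card's evaluation block.
eval_runtime = 46.8732          # seconds
samples_per_second = 121.135
steps_per_second = 3.797
batch_size = 32                 # eval_batch_size from the hyperparameters

n_samples = eval_runtime * samples_per_second   # ~5678 images
n_steps = eval_runtime * steps_per_second       # ~178 batches
print(round(n_samples), round(n_steps), round(n_steps * batch_size))  # roughly consistent
```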
[ "anger", "contempt", "disgust", "fear", "happiness", "neutral", "sadness", "surprise" ]
vkushwahaa/vit-Facial-Expression-Recognition
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# vit-Facial-Expression-Recognition

This model is a fine-tuned version of [motheecreator/vit-Facial-Expression-Recognition](https://huggingface.co/motheecreator/vit-Facial-Expression-Recognition) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.5369
- Accuracy: 0.8095

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 3e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 8
- total_train_batch_size: 256
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: cosine
- lr_scheduler_warmup_steps: 1000
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:------:|:----:|:---------------:|:--------:|
| 0.8526 | 0.2077 | 100 | 0.6490 | 0.7761 |
| 0.8116 | 0.4155 | 200 | 0.6106 | 0.7860 |
| 0.7759 | 0.6232 | 300 | 0.5918 | 0.7908 |
| 0.7708 | 0.8310 | 400 | 0.5883 | 0.7904 |
| 0.75 | 1.0374 | 500 | 0.5749 | 0.7971 |
| 0.7496 | 1.2451 | 600 | 0.5737 | 0.7976 |
| 0.7601 | 1.4529 | 700 | 0.5698 | 0.7971 |
| 0.7516 | 1.6606 | 800 | 0.5692 | 0.7991 |
| 0.7359 | 1.8683 | 900 | 0.5740 | 0.7947 |
| 0.6968 | 2.0748 | 1000 | 0.5738 | 0.7953 |
| 0.6854 | 2.2825 | 1100 | 0.5617 | 0.7980 |
| 0.6992 | 2.4903 | 1200 | 0.5566 | 0.8030 |
| 0.6609 | 2.6980 | 1300 | 0.5495 | 0.8035 |
| 0.664 | 2.9057 | 1400 | 0.5370 | 0.8101 |

### Framework versions

- Transformers 4.47.0
- Pytorch 2.5.1+cu121
- Datasets 3.3.1
- Tokenizers 0.21.0
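The cosine scheduler with 1000 warmup steps listed above corresponds to `get_cosine_schedule_with_warmup`; a standalone sketch follows (the total step count is an estimate read off the training table, roughly 1445 steps for 3 epochs, and the toy parameter exists only so the snippet runs on its own):

```python
import torch
from transformers import get_cosine_schedule_with_warmup

# Toy parameter so the snippet is self-contained; in training this is the model's optimizer.
optimizer = torch.optim.AdamW([torch.nn.Parameter(torch.zeros(1))], lr=3e-5)

scheduler = get_cosine_schedule_with_warmup(
    optimizer,
    num_warmup_steps=1000,    # from the card
    num_training_steps=1445,  # estimated: ~482 steps/epoch * 3 epochs
)
```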
[ "angry", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
jjiw/densenet161-onnx
# DenseNet161 Kiwi Leaf Classification Model

This is a DenseNet161-based model that classifies kiwi leaf diseases.

## Model description

- Task: image classification
- Classes: 잎_점무늬병 (leaf spot disease), 잎_정상 (healthy leaf), 잎_총채벌레 (thrips damage)
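Since this repository ships the model in ONNX form, inference typically goes through `onnxruntime` (a sketch only; the file name, 224x224 input size, and ImageNet-style normalization are assumptions not stated in the card):

```python
import numpy as np
import onnxruntime as ort
from PIL import Image

session = ort.InferenceSession("densenet161.onnx")  # assumed file name

# Assumed DenseNet-style preprocessing: 224x224, ImageNet mean/std, NCHW float32.
img = np.asarray(Image.open("leaf.jpg").convert("RGB").resize((224, 224)), dtype=np.float32) / 255.0
img = (img - [0.485, 0.456, 0.406]) / [0.229, 0.224, 0.225]
img = img.transpose(2, 0, 1)[None].astype(np.float32)

input_name = session.get_inputs()[0].name
logits = session.run(None, {input_name: img})[0]

classes = ["잎_점무늬병", "잎_정상", "잎_총채벌레"]
print(classes[int(np.argmax(logits))])
```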
[ "잎_점무늬병", "잎_정상", "잎_총채벌레" ]
SynchoPass/food_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should
probably proofread and complete it, then remove this comment. -->

# SynchoPass/food_classifier

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Train Loss: 0.3965
- Validation Loss: 0.3180
- Train Accuracy: 0.926
- Epoch: 4

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 20000, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01}
- training_precision: float32

### Training results

| Train Loss | Validation Loss | Train Accuracy | Epoch |
|:----------:|:---------------:|:--------------:|:-----:|
| 2.7774 | 1.6598 | 0.815 | 0 |
| 1.2065 | 0.7856 | 0.907 | 1 |
| 0.6840 | 0.5208 | 0.913 | 2 |
| 0.4886 | 0.4015 | 0.919 | 3 |
| 0.3965 | 0.3180 | 0.926 | 4 |

### Framework versions

- Transformers 4.48.3
- TensorFlow 2.18.0
- Datasets 3.3.2
- Tokenizers 0.21.0
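Because this checkpoint was trained with Keras/TensorFlow, inference can stay in TF as well (a sketch; it assumes the checkpoint was pushed to the Hub under this repo id, and the image path is a placeholder):

```python
import tensorflow as tf
from PIL import Image
from transformers import AutoImageProcessor, TFAutoModelForImageClassification

model_name = "SynchoPass/food_classifier"  # assumes the checkpoint is on the Hub
processor = AutoImageProcessor.from_pretrained(model_name)
model = TFAutoModelForImageClassification.from_pretrained(model_name)

image = Image.open("dish.jpg")  # placeholder image path
inputs = processor(images=image, return_tensors="tf")
logits = model(**inputs).logits

# Pick the highest-scoring of the 101 food classes.
pred = int(tf.argmax(logits, axis=-1)[0])
print(model.config.id2label[pred])
```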
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
fitrisalamah10/vit-base-oxford-iiit-pets
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# vit-base-oxford-iiit-pets

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the pcuenq/oxford-pets dataset.
It achieves the following results on the evaluation set:
- Loss: 0.4398
- Accuracy: 0.9296

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
- num_epochs: 1

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.4855 | 1.0 | 370 | 0.4115 | 0.9283 |

### Framework versions

- Transformers 4.48.3
- Pytorch 2.5.1+cu124
- Datasets 3.3.2
- Tokenizers 0.21.0
[ "siamese", "birman", "shiba inu", "staffordshire bull terrier", "basset hound", "bombay", "japanese chin", "chihuahua", "german shorthaired", "pomeranian", "beagle", "english cocker spaniel", "american pit bull terrier", "ragdoll", "persian", "egyptian mau", "miniature pinscher", "sphynx", "maine coon", "keeshond", "yorkshire terrier", "havanese", "leonberger", "wheaten terrier", "american bulldog", "english setter", "boxer", "newfoundland", "bengal", "samoyed", "british shorthair", "great pyrenees", "abyssinian", "pug", "saint bernard", "russian blue", "scottish terrier" ]
SynchoPass/food_image_classification
<!-- This model card has been generated automatically according to the information Keras had access to. You should
probably proofread and complete it, then remove this comment. -->

# SynchoPass/food_image_classification

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Train Loss: 0.7709
- Validation Loss: 0.6785
- Train Accuracy: 0.8213
- Epoch: 4

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 202000, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01}
- training_precision: float32

### Training results

| Train Loss | Validation Loss | Train Accuracy | Epoch |
|:----------:|:---------------:|:--------------:|:-----:|
| 2.9035 | 1.6030 | 0.7173 | 0 |
| 1.3379 | 0.9149 | 0.7897 | 1 |
| 0.9824 | 0.7740 | 0.8057 | 2 |
| 0.8481 | 0.7310 | 0.8097 | 3 |
| 0.7709 | 0.6785 | 0.8213 | 4 |

### Framework versions

- Transformers 4.48.3
- TensorFlow 2.18.0
- Datasets 3.3.2
- Tokenizers 0.21.0
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
wolf1729/vit-Facial-Expression-Recognition
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# vit-Facial-Expression-Recognition

This model is a fine-tuned version of [motheecreator/vit-Facial-Expression-Recognition](https://huggingface.co/motheecreator/vit-Facial-Expression-Recognition) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.5465
- Accuracy: 0.8040

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 3e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 8
- total_train_batch_size: 256
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: cosine
- lr_scheduler_warmup_steps: 1000
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:------:|:----:|:---------------:|:--------:|
| 0.8497 | 0.2077 | 100 | 0.6552 | 0.7754 |
| 0.7899 | 0.4155 | 200 | 0.6210 | 0.7806 |
| 0.7952 | 0.6232 | 300 | 0.6013 | 0.7876 |
| 0.7838 | 0.8310 | 400 | 0.5912 | 0.7891 |
| 0.7701 | 1.0374 | 500 | 0.5825 | 0.7920 |
| 0.75 | 1.2451 | 600 | 0.5841 | 0.7927 |
| 0.743 | 1.4529 | 700 | 0.5807 | 0.7916 |
| 0.7443 | 1.6606 | 800 | 0.5793 | 0.7939 |
| 0.7298 | 1.8683 | 900 | 0.5776 | 0.7938 |
| 0.7024 | 2.0748 | 1000 | 0.5859 | 0.7903 |
| 0.6962 | 2.2825 | 1100 | 0.5854 | 0.7910 |
| 0.6824 | 2.4903 | 1200 | 0.5603 | 0.8014 |
| 0.6653 | 2.6980 | 1300 | 0.5544 | 0.8016 |
| 0.6614 | 2.9057 | 1400 | 0.5468 | 0.8041 |

### Framework versions

- Transformers 4.47.0
- Pytorch 2.5.1+cu121
- Datasets 3.3.1
- Tokenizers 0.21.0
[ "angry", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
prithivMLmods/AI-vs-Deepfake-vs-Real-v2.0
![bXfKBT3LQkbeLzPCBHTGT.png](https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/Hav19ftsb_5u76rmEa7OL.png)

# **AI-vs-Deepfake-vs-Real-v2.0**

> **AI-vs-Deepfake-vs-Real-v2.0** is an image classification vision-language encoder model fine-tuned from `google/siglip2-base-patch16-224` for a single-label classification task. It is designed to distinguish AI-generated images, deepfake images, and real images using the `SiglipForImageClassification` architecture.

```py
"label2id": {
    "Artificial": 0,
    "Deepfake": 1,
    "Real": 2
},
```

```py
"log_history": [
    {
        "epoch": 1.0,
        "eval_accuracy": 0.9915991599159916,
        "eval_loss": 0.0240725576877594,
        "eval_model_preparation_time": 0.0023,
        "eval_runtime": 248.0631,
        "eval_samples_per_second": 40.308,
        "eval_steps_per_second": 5.039,
        "step": 313
    }
]
```

The model categorizes images into three classes:
- **Class 0:** "AI" – The image is fully AI-generated, created by machine learning models.
- **Class 1:** "Deepfake" – The image is a manipulated deepfake, where real content has been altered.
- **Class 2:** "Real" – The image is an authentic, unaltered photograph.

# **Run with Transformers🤗**

```python
!pip install -q transformers torch pillow gradio
```

```python
import gradio as gr
from transformers import AutoImageProcessor, SiglipForImageClassification
from PIL import Image
import torch

# Load model and processor
model_name = "prithivMLmods/AI-vs-Deepfake-vs-Real-v2.0"
model = SiglipForImageClassification.from_pretrained(model_name)
processor = AutoImageProcessor.from_pretrained(model_name)

def image_classification(image):
    """Classifies an image as AI-generated, deepfake, or real."""
    image = Image.fromarray(image).convert("RGB")
    inputs = processor(images=image, return_tensors="pt")

    with torch.no_grad():
        outputs = model(**inputs)
        logits = outputs.logits
        probs = torch.nn.functional.softmax(logits, dim=1).squeeze().tolist()

    labels = model.config.id2label
    predictions = {labels[i]: round(probs[i], 3) for i in range(len(probs))}

    return predictions

# Create Gradio interface
iface = gr.Interface(
    fn=image_classification,
    inputs=gr.Image(type="numpy"),
    outputs=gr.Label(label="Classification Result"),
    title="AI vs Deepfake vs Real Image Classification",
    description="Upload an image to determine whether it is AI-generated, a deepfake, or a real image."
)

# Launch the app
if __name__ == "__main__":
    iface.launch()
```

# **Intended Use**

The **AI-vs-Deepfake-vs-Real-v2.0** model is designed to classify images into three categories: **AI-generated, deepfake, or real**. It helps in identifying whether an image is fully synthetic, altered through deepfake techniques, or an unaltered real image.

### Potential Use Cases:
- **Deepfake Detection:** Identifying manipulated deepfake content in media.
- **AI-Generated Image Identification:** Distinguishing AI-generated images from real or deepfake images.
- **Content Verification:** Supporting fact-checking and digital forensics in assessing image authenticity.
- **Social Media and News Filtering:** Helping platforms flag AI-generated or deepfake content.
[ "artificial", "deepfake", "real" ]
prithivMLmods/Bird-Species-Classifier-526
![5.png](https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/GhCH6eTGur3RKo9tbKiWm.png)

# **Bird-Species-Classifier-526**

> **Bird-Species-Classifier-526** is an image classification vision-language encoder model fine-tuned from **google/siglip2-base-patch16-224** for a single-label classification task. It is designed to classify different bird species using the **SiglipForImageClassification** architecture.

```python
Prediction metrics:
{'test_loss': 0.5863810181617737, 'test_model_preparation_time': 0.0029,
 'test_accuracy': 0.8992497193832338, 'test_runtime': 1005.4898,
 'test_samples_per_second': 84.173, 'test_steps_per_second': 10.522}

Accuracy: 0.8992
F1 Score: 0.8985
```

```py
labels_list = [
    'ABBOTTS BABBLER', 'ABBOTTS BOOBY', 'ABYSSINIAN GROUND HORNBILL', 'AFRICAN CROWNED CRANE',
    'AFRICAN EMERALD CUCKOO', 'AFRICAN FIREFINCH', 'AFRICAN OYSTER CATCHER', 'AFRICAN PIED HORNBILL',
    'AFRICAN PYGMY GOOSE', 'ALBATROSS', 'ALBERTS TOWHEE', 'ALEXANDRINE PARAKEET', 'ALPINE CHOUGH',
    'ALTAMIRA YELLOWTHROAT', 'AMERICAN AVOCET', 'AMERICAN BITTERN', 'AMERICAN COOT', 'AMERICAN DIPPER',
    'AMERICAN FLAMINGO', 'AMERICAN GOLDFINCH', 'AMERICAN KESTREL', 'AMERICAN PIPIT', 'AMERICAN REDSTART',
    'AMERICAN ROBIN', 'AMERICAN WIGEON', 'AMETHYST WOODSTAR', 'ANDEAN GOOSE', 'ANDEAN LAPWING',
    'ANDEAN SISKIN', 'ANHINGA', 'ANIANIAU', 'ANNAS HUMMINGBIRD', 'ANTBIRD', 'ANTILLEAN EUPHONIA',
    'APAPANE', 'APOSTLEBIRD', 'ARARIPE MANAKIN', 'ASHY STORM PETREL', 'ASHY THRUSHBIRD',
    'ASIAN CRESTED IBIS', 'ASIAN DOLLARD BIRD', 'ASIAN GREEN BEE EATER', 'ASIAN OPENBILL STORK',
    'AUCKLAND SHAQ', 'AUSTRAL CANASTERO', 'AUSTRALASIAN FIGBIRD', 'AVADAVAT', 'AZARAS SPINETAIL',
    'AZURE BREASTED PITTA', 'AZURE JAY', 'AZURE TANAGER', 'AZURE TIT', 'BAIKAL TEAL', 'BALD EAGLE',
    'BALD IBIS', 'BALI STARLING', 'BALTIMORE ORIOLE', 'BANANAQUIT', 'BAND TAILED GUAN', 'BANDED BROADBILL',
    'BANDED PITA', 'BANDED STILT', 'BAR-TAILED GODWIT', 'BARN OWL', 'BARN SWALLOW', 'BARRED PUFFBIRD',
    'BARROWS GOLDENEYE', 'BAY-BREASTED WARBLER', 'BEARDED BARBET', 'BEARDED BELLBIRD', 'BEARDED REEDLING',
    'BELTED KINGFISHER', 'BIRD OF PARADISE', 'BLACK AND YELLOW BROADBILL', 'BLACK BAZA',
    'BLACK BREASTED PUFFBIRD', 'BLACK COCKATO', 'BLACK FACED SPOONBILL', 'BLACK FRANCOLIN',
    'BLACK HEADED CAIQUE', 'BLACK NECKED STILT', 'BLACK SKIMMER', 'BLACK SWAN', 'BLACK TAIL CRAKE',
    'BLACK THROATED BUSHTIT', 'BLACK THROATED HUET', 'BLACK THROATED WARBLER', 'BLACK VENTED SHEARWATER',
    'BLACK VULTURE', 'BLACK-CAPPED CHICKADEE', 'BLACK-NECKED GREBE', 'BLACK-THROATED SPARROW',
    'BLACKBURNIAM WARBLER', 'BLONDE CRESTED WOODPECKER', 'BLOOD PHEASANT', 'BLUE COAU', 'BLUE DACNIS',
    'BLUE GRAY GNATCATCHER', 'BLUE GROSBEAK', 'BLUE GROUSE', 'BLUE HERON', 'BLUE MALKOHA',
    'BLUE THROATED PIPING GUAN', 'BLUE THROATED TOUCANET', 'BOBOLINK', 'BORNEAN BRISTLEHEAD',
    'BORNEAN LEAFBIRD', 'BORNEAN PHEASANT', 'BRANDT CORMARANT', 'BREWERS BLACKBIRD', 'BROWN CREPPER',
    'BROWN HEADED COWBIRD', 'BROWN NOODY', 'BROWN THRASHER', 'BUFFLEHEAD', 'BULWERS PHEASANT',
    'BURCHELLS COURSER', 'BUSH TURKEY', 'CAATINGA CACHOLOTE', 'CABOTS TRAGOPAN', 'CACTUS WREN',
    'CALIFORNIA CONDOR', 'CALIFORNIA GULL', 'CALIFORNIA QUAIL', 'CAMPO FLICKER', 'CANARY', 'CANVASBACK',
    'CAPE GLOSSY STARLING', 'CAPE LONGCLAW', 'CAPE MAY WARBLER', 'CAPE ROCK THRUSH', 'CAPPED HERON',
    'CAPUCHINBIRD', 'CARMINE BEE-EATER', 'CASPIAN TERN', 'CASSOWARY', 'CEDAR WAXWING', 'CERULEAN WARBLER',
    'CHARA DE COLLAR', 'CHATTERING LORY', 'CHESTNET BELLIED EUPHONIA', 'CHESTNUT WINGED CUCKOO',
    'CHINESE BAMBOO PARTRIDGE', 'CHINESE POND HERON', 'CHIPPING SPARROW', 'CHUCAO TAPACULO',
    'CHUKAR PARTRIDGE', 'CINNAMON ATTILA', 'CINNAMON FLYCATCHER', 'CINNAMON TEAL', 'CLARKS GREBE',
    'CLARKS NUTCRACKER', 'COCK OF THE ROCK', 'COCKATOO', 'COLLARED ARACARI', 'COLLARED CRESCENTCHEST',
    'COMMON FIRECREST', 'COMMON GRACKLE', 'COMMON HOUSE MARTIN', 'COMMON IORA', 'COMMON LOON',
    'COMMON POORWILL', 'COMMON STARLING', 'COPPERSMITH BARBET', 'COPPERY TAILED COUCAL', 'CRAB PLOVER',
    'CRANE HAWK', 'CREAM COLORED WOODPECKER', 'CRESTED AUKLET', 'CRESTED CARACARA', 'CRESTED COUA',
    'CRESTED FIREBACK', 'CRESTED KINGFISHER', 'CRESTED NUTHATCH', 'CRESTED OROPENDOLA',
    'CRESTED SERPENT EAGLE', 'CRESTED SHRIKETIT', 'CRESTED WOOD PARTRIDGE', 'CRIMSON CHAT',
    'CRIMSON SUNBIRD', 'CROW', 'CUBAN TODY', 'CUBAN TROGON', 'CURL CRESTED ARACURI', 'D-ARNAUDS BARBET',
    'DALMATIAN PELICAN', 'DARJEELING WOODPECKER', 'DARK EYED JUNCO', 'DAURIAN REDSTART', 'DEMOISELLE CRANE',
    'DOUBLE BARRED FINCH', 'DOUBLE BRESTED CORMARANT', 'DOUBLE EYED FIG PARROT', 'DOWNY WOODPECKER',
    'DUNLIN', 'DUSKY LORY', 'DUSKY ROBIN', 'EARED PITA', 'EASTERN BLUEBIRD', 'EASTERN BLUEBONNET',
    'EASTERN GOLDEN WEAVER', 'EASTERN MEADOWLARK', 'EASTERN ROSELLA', 'EASTERN TOWEE',
    'EASTERN WIP POOR WILL', 'EASTERN YELLOW ROBIN', 'ECUADORIAN HILLSTAR', 'EGYPTIAN GOOSE',
    'ELEGANT TROGON', 'ELLIOTS PHEASANT', 'EMERALD TANAGER', 'EMPEROR PENGUIN', 'EMU', 'ENGGANO MYNA',
    'EURASIAN BULLFINCH', 'EURASIAN GOLDEN ORIOLE', 'EURASIAN MAGPIE', 'EUROPEAN GOLDFINCH',
    'EUROPEAN TURTLE DOVE', 'EVENING GROSBEAK', 'FAIRY BLUEBIRD', 'FAIRY PENGUIN', 'FAIRY TERN',
    'FAN TAILED WIDOW', 'FASCIATED WREN', 'FIERY MINIVET', 'FIORDLAND PENGUIN', 'FIRE TAILLED MYZORNIS',
    'FLAME BOWERBIRD', 'FLAME TANAGER', 'FOREST WAGTAIL', 'FRIGATE', 'FRILL BACK PIGEON', 'GAMBELS QUAIL',
    'GANG GANG COCKATOO', 'GILA WOODPECKER', 'GILDED FLICKER', 'GLOSSY IBIS', 'GO AWAY BIRD',
    'GOLD WING WARBLER', 'GOLDEN BOWER BIRD', 'GOLDEN CHEEKED WARBLER', 'GOLDEN CHLOROPHONIA',
    'GOLDEN EAGLE', 'GOLDEN PARAKEET', 'GOLDEN PHEASANT', 'GOLDEN PIPIT', 'GOULDIAN FINCH', 'GRANDALA',
    'GRAY CATBIRD', 'GRAY KINGBIRD', 'GRAY PARTRIDGE', 'GREAT ARGUS', 'GREAT GRAY OWL', 'GREAT JACAMAR',
    'GREAT KISKADEE', 'GREAT POTOO', 'GREAT TINAMOU', 'GREAT XENOPS', 'GREATER PEWEE',
    'GREATER PRAIRIE CHICKEN', 'GREATOR SAGE GROUSE', 'GREEN BROADBILL', 'GREEN JAY', 'GREEN MAGPIE',
    'GREEN WINGED DOVE', 'GREY CUCKOOSHRIKE', 'GREY HEADED CHACHALACA', 'GREY HEADED FISH EAGLE',
    'GREY PLOVER', 'GROVED BILLED ANI', 'GUINEA TURACO', 'GUINEAFOWL', 'GURNEYS PITTA', 'GYRFALCON',
    'HAMERKOP', 'HARLEQUIN DUCK', 'HARLEQUIN QUAIL', 'HARPY EAGLE', 'HAWAIIAN GOOSE', 'HAWFINCH',
    'HELMET VANGA', 'HEPATIC TANAGER', 'HIMALAYAN BLUETAIL', 'HIMALAYAN MONAL', 'HOATZIN',
    'HOODED MERGANSER', 'HOOPOES', 'HORNED GUAN', 'HORNED LARK', 'HORNED SUNGEM', 'HOUSE FINCH',
    'HOUSE SPARROW', 'HYACINTH MACAW', 'IBERIAN MAGPIE', 'IBISBILL', 'IMPERIAL SHAQ', 'INCA TERN',
    'INDIAN BUSTARD', 'INDIAN PITTA', 'INDIAN ROLLER', 'INDIAN VULTURE', 'INDIGO BUNTING',
    'INDIGO FLYCATCHER', 'INLAND DOTTEREL', 'IVORY BILLED ARACARI', 'IVORY GULL', 'IWI', 'JABIRU',
    'JACK SNIPE', 'JACOBIN PIGEON', 'JANDAYA PARAKEET', 'JAPANESE ROBIN', 'JAVA SPARROW',
    'JOCOTOCO ANTPITTA', 'KAGU', 'KAKAPO', 'KILLDEAR', 'KING EIDER', 'KING VULTURE', 'KIWI',
    'KNOB BILLED DUCK', 'KOOKABURRA', 'LARK BUNTING', 'LAUGHING GULL', 'LAZULI BUNTING', 'LESSER ADJUTANT',
    'LILAC ROLLER', 'LIMPKIN', 'LITTLE AUK', 'LOGGERHEAD SHRIKE', 'LONG-EARED OWL', 'LOONEY BIRDS',
    'LUCIFER HUMMINGBIRD', 'MAGPIE GOOSE', 'MALABAR HORNBILL', 'MALACHITE KINGFISHER',
    'MALAGASY WHITE EYE', 'MALEO', 'MALLARD DUCK', 'MANDRIN DUCK', 'MANGROVE CUCKOO', 'MARABOU STORK',
    'MASKED BOBWHITE', 'MASKED BOOBY', 'MASKED LAPWING', 'MCKAYS BUNTING', 'MERLIN', 'MIKADO PHEASANT',
    'MILITARY MACAW', 'MOURNING DOVE', 'MYNA', 'NICOBAR PIGEON', 'NOISY FRIARBIRD',
    'NORTHERN BEARDLESS TYRANNULET', 'NORTHERN CARDINAL', 'NORTHERN FLICKER', 'NORTHERN FULMAR',
    'NORTHERN GANNET', 'NORTHERN GOSHAWK', 'NORTHERN JACANA', 'NORTHERN MOCKINGBIRD', 'NORTHERN PARULA',
    'NORTHERN RED BISHOP', 'NORTHERN SHOVELER', 'OCELLATED TURKEY', 'OILBIRD', 'OKINAWA RAIL',
    'ORANGE BREASTED TROGON', 'ORANGE BRESTED BUNTING', 'ORIENTAL BAY OWL', 'ORNATE HAWK EAGLE', 'OSPREY',
    'OSTRICH', 'OVENBIRD', 'OYSTER CATCHER', 'PAINTED BUNTING', 'PALILA', 'PALM NUT VULTURE',
    'PARADISE TANAGER', 'PARAKETT AUKLET', 'PARAKETT AUKLET', 'PARUS MAJOR', 'PATAGONIAN SIERRA FINCH',
    'PEACOCK', 'PEREGRINE FALCON', 'PHAINOPEPLA', 'PHILIPPINE EAGLE', 'PINK ROBIN', 'PLUSH CRESTED JAY',
    'POMARINE JAEGER', 'PUFFIN', 'PUNA TEAL', 'PURPLE FINCH', 'PURPLE GALLINULE', 'PURPLE MARTIN',
    'PURPLE SWAMPHEN', 'PYGMY KINGFISHER', 'PYRRHULOXIA', 'QUETZAL', 'RAINBOW LORIKEET', 'RAZORBILL',
    'RED BEARDED BEE EATER', 'RED BELLIED PITTA', 'RED BILLED TROPICBIRD', 'RED BROWED FINCH',
    'RED CROSSBILL', 'RED FACED CORMORANT', 'RED FACED WARBLER', 'RED FODY', 'RED HEADED DUCK',
    'RED HEADED WOODPECKER', 'RED KNOT', 'RED LEGGED HONEYCREEPER', 'RED NAPED TROGON',
    'RED SHOULDERED HAWK', 'RED TAILED HAWK', 'RED TAILED THRUSH', 'RED WINGED BLACKBIRD',
    'RED WISKERED BULBUL', 'REGENT BOWERBIRD', 'RING-NECKED PHEASANT', 'ROADRUNNER', 'ROCK DOVE',
    'ROSE BREASTED COCKATOO', 'ROSE BREASTED GROSBEAK', 'ROSEATE SPOONBILL', 'ROSY FACED LOVEBIRD',
    'ROUGH LEG BUZZARD', 'ROYAL FLYCATCHER', 'RUBY CROWNED KINGLET', 'RUBY THROATED HUMMINGBIRD',
    'RUDDY SHELDUCK', 'RUDY KINGFISHER', 'RUFOUS KINGFISHER', 'RUFOUS TREPE', 'RUFUOS MOTMOT',
    'SAMATRAN THRUSH', 'SAND MARTIN', 'SANDHILL CRANE', 'SATYR TRAGOPAN', 'SAYS PHOEBE',
    'SCARLET CROWNED FRUIT DOVE', 'SCARLET FACED LIOCICHLA', 'SCARLET IBIS', 'SCARLET MACAW',
    'SCARLET TANAGER', 'SHOEBILL', 'SHORT BILLED DOWITCHER', 'SMITHS LONGSPUR', 'SNOW GOOSE',
    'SNOW PARTRIDGE', 'SNOWY EGRET', 'SNOWY OWL', 'SNOWY PLOVER', 'SNOWY SHEATHBILL', 'SORA',
    'SPANGLED COTINGA', 'SPLENDID WREN', 'SPOON BILED SANDPIPER', 'SPOTTED CATBIRD',
    'SPOTTED WHISTLING DUCK', 'SQUACCO HERON', 'SRI LANKA BLUE MAGPIE', 'STEAMER DUCK',
    'STORK BILLED KINGFISHER', 'STRIATED CARACARA', 'STRIPED OWL', 'STRIPPED MANAKIN', 'STRIPPED SWALLOW',
    'SUNBITTERN', 'SUPERB STARLING', 'SURF SCOTER', 'SWINHOES PHEASANT', 'TAILORBIRD', 'TAIWAN MAGPIE',
    'TAKAHE', 'TASMANIAN HEN', 'TAWNY FROGMOUTH', 'TEAL DUCK', 'TIT MOUSE', 'TOUCHAN',
    'TOWNSENDS WARBLER', 'TREE SWALLOW', 'TRICOLORED BLACKBIRD', 'TROPICAL KINGBIRD', 'TRUMPTER SWAN',
    'TURKEY VULTURE', 'TURQUOISE MOTMOT', 'UMBRELLA BIRD', 'VARIED THRUSH', 'VEERY',
    'VENEZUELIAN TROUPIAL', 'VERDIN', 'VERMILION FLYCATHER', 'VICTORIA CROWNED PIGEON',
    'VIOLET BACKED STARLING', 'VIOLET CUCKOO', 'VIOLET GREEN SWALLOW', 'VIOLET TURACO', 'VISAYAN HORNBILL',
    'VULTURINE GUINEAFOWL', 'WALL CREAPER', 'WATTLED CURASSOW', 'WATTLED LAPWING', 'WHIMBREL',
    'WHITE BREASTED WATERHEN', 'WHITE BROWED CRAKE', 'WHITE CHEEKED TURACO', 'WHITE CRESTED HORNBILL',
    'WHITE EARED HUMMINGBIRD', 'WHITE NECKED RAVEN', 'WHITE TAILED TROPIC', 'WHITE THROATED BEE EATER',
    'WILD TURKEY', 'WILLOW PTARMIGAN', 'WILSONS BIRD OF PARADISE', 'WOOD DUCK', 'WOOD THRUSH',
    'WOODLAND KINGFISHER', 'WRENTIT', 'YELLOW BELLIED FLOWERPECKER', 'YELLOW BREASTED CHAT',
    'YELLOW CACIQUE', 'YELLOW HEADED BLACKBIRD', 'ZEBRA DOVE'
]
```

# **Run with Transformers**🤗
```python
!pip install -q transformers torch pillow gradio
```

```python
import gradio as gr
from transformers import AutoImageProcessor, SiglipForImageClassification
from PIL import Image
import torch

# Load model and processor with the new bird classifier name
model_name = "prithivMLmods/Bird-Species-Classifier-526"
model = SiglipForImageClassification.from_pretrained(model_name)
processor = AutoImageProcessor.from_pretrained(model_name)

def bird_classification(image):
    """Predicts bird species classification for an image."""
    image = Image.fromarray(image).convert("RGB")
    inputs = processor(images=image, return_tensors="pt")

    with torch.no_grad():
        outputs = model(**inputs)
        logits = outputs.logits
        probs = torch.nn.functional.softmax(logits, dim=1).squeeze().tolist()

    # The 526 species labels ship with the model config, so they do not need
    # to be hard-coded here; they match labels_list above.
    labels = model.config.id2label
    predictions = {labels[i]: round(probs[i], 3) for i in range(len(probs))}

    return predictions

# Create Gradio interface for the bird species classifier
iface = gr.Interface(
    fn=bird_classification,
    inputs=gr.Image(type="numpy"),
    outputs=gr.Label(label="Prediction Scores"),
    title="Bird Species Classifier",
    description="Upload an image to classify the bird species."
)

if __name__ == "__main__":
    iface.launch()
```

# **Intended Use:**

The **Bird-Species-Classifier-526** model is designed to classify different bird species based on images. Potential use cases include:

- **Wildlife Conservation:** Identifying and tracking bird species for ecological research.
- **Birdwatching & Education:** Assisting enthusiasts and researchers in species identification.
- **Environmental Monitoring:** Detecting changes in bird populations as indicators of ecosystem health.
- **AI-Powered Assistants:** Supporting AI-based species recognition for various applications.
[ "abbotts babbler", "abbotts booby", "abyssinian ground hornbill", "african crowned crane", "african emerald cuckoo", "african firefinch", "african oyster catcher", "african pied hornbill", "african pygmy goose", "albatross", "alberts towhee", "alexandrine parakeet", "alpine chough", "altamira yellowthroat", "american avocet", "american bittern", "american coot", "american dipper", "american flamingo", "american goldfinch", "american kestrel", "american pipit", "american redstart", "american robin", "american wigeon", "amethyst woodstar", "andean goose", "andean lapwing", "andean siskin", "anhinga", "anianiau", "annas hummingbird", "antbird", "antillean euphonia", "apapane", "apostlebird", "araripe manakin", "ashy storm petrel", "ashy thrushbird", "asian crested ibis", "asian dollard bird", "asian green bee eater", "asian openbill stork", "auckland shaq", "austral canastero", "australasian figbird", "avadavat", "azaras spinetail", "azure breasted pitta", "azure jay", "azure tanager", "azure tit", "baikal teal", "bald eagle", "bald ibis", "bali starling", "baltimore oriole", "bananaquit", "band tailed guan", "banded broadbill", "banded pita", "banded stilt", "bar-tailed godwit", "barn owl", "barn swallow", "barred puffbird", "barrows goldeneye", "bay-breasted warbler", "bearded barbet", "bearded bellbird", "bearded reedling", "belted kingfisher", "bird of paradise", "black and yellow broadbill", "black baza", "black breasted puffbird", "black cockato", "black faced spoonbill", "black francolin", "black headed caique", "black necked stilt", "black skimmer", "black swan", "black tail crake", "black throated bushtit", "black throated huet", "black throated warbler", "black vented shearwater", "black vulture", "black-capped chickadee", "black-necked grebe", "black-throated sparrow", "blackburniam warbler", "blonde crested woodpecker", "blood pheasant", "blue coau", "blue dacnis", "blue gray gnatcatcher", "blue grosbeak", "blue grouse", "blue heron", "blue malkoha", "blue throated piping guan", "blue throated toucanet", "bobolink", "bornean bristlehead", "bornean leafbird", "bornean pheasant", "brandt cormarant", "brewers blackbird", "brown crepper", "brown headed cowbird", "brown noody", "brown thrasher", "bufflehead", "bulwers pheasant", "burchells courser", "bush turkey", "caatinga cacholote", "cabots tragopan", "cactus wren", "california condor", "california gull", "california quail", "campo flicker", "canary", "canvasback", "cape glossy starling", "cape longclaw", "cape may warbler", "cape rock thrush", "capped heron", "capuchinbird", "carmine bee-eater", "caspian tern", "cassowary", "cedar waxwing", "cerulean warbler", "chara de collar", "chattering lory", "chestnet bellied euphonia", "chestnut winged cuckoo", "chinese bamboo partridge", "chinese pond heron", "chipping sparrow", "chucao tapaculo", "chukar partridge", "cinnamon attila", "cinnamon flycatcher", "cinnamon teal", "clarks grebe", "clarks nutcracker", "cock of the rock", "cockatoo", "collared aracari", "collared crescentchest", "common firecrest", "common grackle", "common house martin", "common iora", "common loon", "common poorwill", "common starling", "coppersmith barbet", "coppery tailed coucal", "crab plover", "crane hawk", "cream colored woodpecker", "crested auklet", "crested caracara", "crested coua", "crested fireback", "crested kingfisher", "crested nuthatch", "crested oropendola", "crested serpent eagle", "crested shriketit", "crested wood partridge", "crimson chat", "crimson sunbird", "crow", "cuban tody", "cuban 
trogon", "curl crested aracuri", "d-arnauds barbet", "dalmatian pelican", "darjeeling woodpecker", "dark eyed junco", "daurian redstart", "demoiselle crane", "double barred finch", "double brested cormarant", "double eyed fig parrot", "downy woodpecker", "dunlin", "dusky lory", "dusky robin", "eared pita", "eastern bluebird", "eastern bluebonnet", "eastern golden weaver", "eastern meadowlark", "eastern rosella", "eastern towee", "eastern wip poor will", "eastern yellow robin", "ecuadorian hillstar", "egyptian goose", "elegant trogon", "elliots pheasant", "emerald tanager", "emperor penguin", "emu", "enggano myna", "eurasian bullfinch", "eurasian golden oriole", "eurasian magpie", "european goldfinch", "european turtle dove", "evening grosbeak", "fairy bluebird", "fairy penguin", "fairy tern", "fan tailed widow", "fasciated wren", "fiery minivet", "fiordland penguin", "fire tailled myzornis", "flame bowerbird", "flame tanager", "forest wagtail", "frigate", "frill back pigeon", "gambels quail", "gang gang cockatoo", "gila woodpecker", "gilded flicker", "glossy ibis", "go away bird", "gold wing warbler", "golden bower bird", "golden cheeked warbler", "golden chlorophonia", "golden eagle", "golden parakeet", "golden pheasant", "golden pipit", "gouldian finch", "grandala", "gray catbird", "gray kingbird", "gray partridge", "great argus", "great gray owl", "great jacamar", "great kiskadee", "great potoo", "great tinamou", "great xenops", "greater pewee", "greater prairie chicken", "greator sage grouse", "green broadbill", "green jay", "green magpie", "green winged dove", "grey cuckooshrike", "grey headed chachalaca", "grey headed fish eagle", "grey plover", "groved billed ani", "guinea turaco", "guineafowl", "gurneys pitta", "gyrfalcon", "hamerkop", "harlequin duck", "harlequin quail", "harpy eagle", "hawaiian goose", "hawfinch", "helmet vanga", "hepatic tanager", "himalayan bluetail", "himalayan monal", "hoatzin", "hooded merganser", "hoopoes", "horned guan", "horned lark", "horned sungem", "house finch", "house sparrow", "hyacinth macaw", "iberian magpie", "ibisbill", "imperial shaq", "inca tern", "indian bustard", "indian pitta", "indian roller", "indian vulture", "indigo bunting", "indigo flycatcher", "inland dotterel", "ivory billed aracari", "ivory gull", "iwi", "jabiru", "jack snipe", "jacobin pigeon", "jandaya parakeet", "japanese robin", "java sparrow", "jocotoco antpitta", "kagu", "kakapo", "killdear", "king eider", "king vulture", "kiwi", "knob billed duck", "kookaburra", "lark bunting", "laughing gull", "lazuli bunting", "lesser adjutant", "lilac roller", "limpkin", "little auk", "loggerhead shrike", "long-eared owl", "looney birds", "lucifer hummingbird", "magpie goose", "malabar hornbill", "malachite kingfisher", "malagasy white eye", "maleo", "mallard duck", "mandrin duck", "mangrove cuckoo", "marabou stork", "masked bobwhite", "masked booby", "masked lapwing", "mckays bunting", "merlin", "mikado pheasant", "military macaw", "mourning dove", "myna", "nicobar pigeon", "noisy friarbird", "northern beardless tyrannulet", "northern cardinal", "northern flicker", "northern fulmar", "northern gannet", "northern goshawk", "northern jacana", "northern mockingbird", "northern parula", "northern red bishop", "northern shoveler", "ocellated turkey", "oilbird", "okinawa rail", "orange breasted trogon", "orange brested bunting", "oriental bay owl", "ornate hawk eagle", "osprey", "ostrich", "ovenbird", "oyster catcher", "painted bunting", "palila", "palm nut vulture", "paradise tanager", 
"parakett auklet", "parakett auklet", "parus major", "patagonian sierra finch", "peacock", "peregrine falcon", "phainopepla", "philippine eagle", "pink robin", "plush crested jay", "pomarine jaeger", "puffin", "puna teal", "purple finch", "purple gallinule", "purple martin", "purple swamphen", "pygmy kingfisher", "pyrrhuloxia", "quetzal", "rainbow lorikeet", "razorbill", "red bearded bee eater", "red bellied pitta", "red billed tropicbird", "red browed finch", "red crossbill", "red faced cormorant", "red faced warbler", "red fody", "red headed duck", "red headed woodpecker", "red knot", "red legged honeycreeper", "red naped trogon", "red shouldered hawk", "red tailed hawk", "red tailed thrush", "red winged blackbird", "red wiskered bulbul", "regent bowerbird", "ring-necked pheasant", "roadrunner", "rock dove", "rose breasted cockatoo", "rose breasted grosbeak", "roseate spoonbill", "rosy faced lovebird", "rough leg buzzard", "royal flycatcher", "ruby crowned kinglet", "ruby throated hummingbird", "ruddy shelduck", "rudy kingfisher", "rufous kingfisher", "rufous trepe", "rufuos motmot", "samatran thrush", "sand martin", "sandhill crane", "satyr tragopan", "says phoebe", "scarlet crowned fruit dove", "scarlet faced liocichla", "scarlet ibis", "scarlet macaw", "scarlet tanager", "shoebill", "short billed dowitcher", "smiths longspur", "snow goose", "snow partridge", "snowy egret", "snowy owl", "snowy plover", "snowy sheathbill", "sora", "spangled cotinga", "splendid wren", "spoon biled sandpiper", "spotted catbird", "spotted whistling duck", "squacco heron", "sri lanka blue magpie", "steamer duck", "stork billed kingfisher", "striated caracara", "striped owl", "stripped manakin", "stripped swallow", "sunbittern", "superb starling", "surf scoter", "swinhoes pheasant", "tailorbird", "taiwan magpie", "takahe", "tasmanian hen", "tawny frogmouth", "teal duck", "tit mouse", "touchan", "townsends warbler", "tree swallow", "tricolored blackbird", "tropical kingbird", "trumpter swan", "turkey vulture", "turquoise motmot", "umbrella bird", "varied thrush", "veery", "venezuelian troupial", "verdin", "vermilion flycather", "victoria crowned pigeon", "violet backed starling", "violet cuckoo", "violet green swallow", "violet turaco", "visayan hornbill", "vulturine guineafowl", "wall creaper", "wattled curassow", "wattled lapwing", "whimbrel", "white breasted waterhen", "white browed crake", "white cheeked turaco", "white crested hornbill", "white eared hummingbird", "white necked raven", "white tailed tropic", "white throated bee eater", "wild turkey", "willow ptarmigan", "wilsons bird of paradise", "wood duck", "wood thrush", "woodland kingfisher", "wrentit", "yellow bellied flowerpecker", "yellow breasted chat", "yellow cacique", "yellow headed blackbird", "zebra dove" ]