Columns: model_id (string, length 7–105), model_card (string, length 1–130k), model_labels (list, length 2–80k)
IsaacMwesigwa/footballer-recognition-2
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 5.661193370819092 f1_macro: 0.014131400288297163 f1_micro: 0.03746085280655264 f1_weighted: 0.014145017633792991 precision_macro: 0.015760162960355265 precision_micro: 0.03746085280655264 precision_weighted: 0.015775349819387167 recall_macro: 0.03742478941034898 recall_micro: 0.03746085280655264 recall_weighted: 0.03746085280655264 accuracy: 0.03746085280655264
[ "aaron long", "aaron mooy", "aaron ramsdale", "aaron ramsey", "abde ezzalzouli", "abdelhamid sabiri", "abdelkarim hassan", "abderrazak hamdallah", "abdou diallo", "abdul fatawu issahaku", "abdul manaf nurudeen", "abdulaziz hatem", "abdulelah al-amri", "abdulellah al-malki", "abdullah madu", "abdullah otayf", "abdulrahman al-aboud", "abolfazl jalali", "achraf dari", "achraf hakimi", "adam davies", "adrien rabiot", "agustín canobbio", "ahmad nourollahi", "ahmed alaaeldin", "ahmed reda tagnaouti", "ajdin hrustic", "akram afif", "alan franco", "alejandro balde", "aleksandar mitrović", "alex sandro", "alex telles", "alexander bah", "alexander djiku", "alexander domínguez", "alexis mac allister", "alexis vega", "alfred gomis", "alfredo talavera", "ali abdi", "ali al-bulaihi", "ali al-hassan", "ali assadalla", "ali gholizadeh", "ali karimi", "ali maâloul", "alidu seidu", "alireza beiranvand", "alireza jahanbakhsh", "alisson", "alistair johnston", "almoez ali", "alphonse areola", "alphonso davies", "amadou onana", "amir abedzadeh", "anass zaroury", "andreas christensen", "andreas cornelius", "andreas skov olsen", "andrej kramarić", "andrew redmayne", "andries noppert", "andrija živković", "andré ayew ", "andré onana", "andré silva", "andré-frank zambo anguissa", "andrés guardado ", "anis ben slimane", "ansu fati", "ante budimir", "anthony contreras", "anthony hernández", "antoine griezmann", "antoine semenyo", "antonee robinson", "antonio rüdiger", "antony", "antónio silva", "ao tanaka", "ardon jashari", "arkadiusz milik", "armel bella-kotchap", "arthur theate", "artur jędrzejczyk", "assim madibo", "atiba hutchinson ", "aurélien tchouaméni", "awer mabil", "axel disasi", "axel witsel", "ayase ueda", "aymen dahmen", "aymen mathlouthi", "aymeric laporte", "ayrton preciado", "aziz behich", "azzedine ounahi", "aïssa laïdouni", "baba rahman", "badr benoun", "bailey wright", "bamba dieng", "bartosz bereszyński", "bassam al-rawi", "bechir ben saïd", "ben cabango", "ben davies", "ben white", "benjamin pavard", "bernardo silva", "bilal el khannous", "bilel ifa", "borna barišić", "borna sosa", "boualem khoukhi", "boulaye dia", "brandon aguilera", "breel embolo", "bremer", "brenden aaronson", "brennan johnson", "bruno fernandes", "bruno guimarães", "bruno petković", "bryan mbeumo", "bryan oviedo", "bryan ruiz ", "bukayo saka", "callum wilson", "cameron carter-vickers", "cameron devlin", "carlos gruezo", "carlos martínez", "carlos rodríguez", "carlos soler", "casemiro", "celso borges", "charles de ketelaere", "cheikhou kouyaté", "cho gue-sung", "cho yu-min", "chris gunter", "chris mepham", "christian bassogog", "christian eriksen", "christian fassnacht", "christian günter", "christian nørgaard", "christian pulisic", "christopher wooh", "cody gakpo", "collins fai", "connor roberts", "conor coady", "conor gallagher", "craig goodwin", "cristian roldan", "cristian romero", "cristiano ronaldo ", "cyle larin", "césar azpilicueta", "césar montes", "daichi kamada", "daizen maeda", "daley blind", "damian szymański", "dani alves", "dani carvajal", "dani olmo", "daniel afriyie", "daniel amartey", "daniel chacón", "daniel james", "daniel schmidt", "daniel wass", "daniel-kofi kyereh", "danilo", "danilo pereira", "danny vukovic", "danny ward", "darko lazović", "darwin núñez", "david raum", "david raya", "david wotherspoon", "davy klaassen", "dayne st. 
clair", "dayot upamecano", "deandre yedlin", "declan rice", "dejan lovren", "denis odoi", "denis zakaria", "denzel dumfries", "derek cornelius", "devis epassy", "diego godín ", "diego palacios", "diogo costa", "diogo dalot", "djibril sow", "djorkaeff reasco", "domagoj vida", "dominik livaković", "douglas lópez", "dries mertens", "dušan tadić ", "dušan vlahović", "dylan bronn", "dylan levitt", "eden hazard ", "ederson", "edimilson fernandes", "edinson cavani", "edson álvarez", "eduardo camavinga", "ehsan hajsafi ", "eiji kawashima", "elisha owusu", "ellyes skhiri", "emiliano martínez", "enner valencia ", "enzo ebosse", "enzo fernández", "eray cömert", "eric dier", "eric garcía", "eric maxim choupo-moting", "esteban alvarado", "ethan ampadu", "ethan horvath", "exequiel palacios", "fabian frei", "fabian rieder", "fabian schär", "fabinho", "facundo pellistri", "facundo torres", "famara diédhiou", "federico valverde", "ferjani sassi", "fernando muslera", "ferran torres", "filip kostić", "filip mladenović", "filip đuričić", "firas al-buraikan", "fodé ballo-touré", "formose mendy", "fran karačić", "francisco calvo", "franco armani", "fred", "frederik rønnow", "frenkie de jong", "félix torres", "gabriel jesus", "gabriel martinelli", "gaku shibasaki", "garang kuol", "gareth bale ", "gavi", "gaël ondoua", "georges-kévin nkoudou", "gerardo arteaga", "germán pezzella", "gerson torres", "gerónimo rulli", "ghailene chaalali", "gideon mensah", "giorgian de arrascaeta", "giovanni reyna", "gonzalo montiel", "gonzalo plata", "gonçalo ramos", "granit xhaka ", "gregor kobel", "grzegorz krychowiak", "guido rodríguez", "guillermo ochoa", "guillermo varela", "haitham asiri", "haji wright", "hakim ziyech", "hannibal mejbri", "hans vanaken", "haris seferovic", "harry kane ", "harry maguire", "harry souttar", "harry wilson", "hassan al-haydos ", "hassan al-tambakti", "hattan bahebri", "henry martín", "hernán galíndez", "hidemasa morita", "hiroki ito", "hiroki sakai", "hirving lozano", "homam ahmed", "hong chul", "hossein hosseini", "hossein kanaanizadegan", "hugo guillamón", "hugo lloris ", "hwang hee-chan", "hwang in-beom", "hwang ui-jo", "héctor herrera", "héctor moreno", "ibrahim danlad", "ibrahima konaté", "idrissa gueye", "iké ugbo", "ilias chair", "iliman ndiaye", "ismaeel mohammad", "ismail jakobs", "ismaël koné", "ismaïla sarr", "issam jebali", "ivan ilić", "ivan perišić", "ivica ivušić", "ivo grbić", "iñaki williams", "jack grealish", "jackson irvine", "jackson porozo", "jakub kamiński", "jakub kiwior", "jamal musiala", "james maddison", "james pantemis", "jamie maclaren", "jan bednarek", "jan vertonghen", "jason cummings", "jassem gaber", "jawad el yamiq", "jean-charles castelletto", "jean-pierre nsame", "jens stryger larsen", "jeong woo-yeong", "jeremie frimpong", "jeremy sarmiento", "jerome ngom mbekeli", "jesper lindstrøm", "jesús ferreira", "jesús gallardo", "jewison bennette", "jo hyeon-woo", "joachim andersen", "joakim mæhle", "joe allen", "joe morrell", "joe rodon", "joe scally", "joel campbell", "joel king", "joel waterman", "johan venegas", "johan vásquez", "john stones", "jonas hofmann", "jonas omlin", "jonas wind", "jonathan david", "jonathan osorio", "jonny williams", "jordan ayew", "jordan henderson", "jordan morris", "jordan pickford", "jordan veretout", "jordi alba", "jorge sánchez", "joseph aidoo", "josh sargent", "joshua kimmich", "josip juranović", "josip stanišić", "josip šutalo", "josé cifuentes", "josé giménez", "josé luis rodríguez", "josé sá", "joão cancelo", "joão félix", "joão 
mário", "joão palhinha", "joško gvardiol", "juan foyth", "juan pablo vargas", "jude bellingham", "jules koundé", "julian brandt", "julián álvarez", "jung woo-young", "junior hoilett", "junya ito", "jurriën timber", "justin bijlow", "jérémy doku", "kai havertz", "kalidou koulibaly ", "kalvin phillips", "kamal miller", "kamal sowah", "kamaldeen sulemana", "kamil glik", "kamil grabara", "kamil grosicki", "kaoru mitoma", "karim adeyemi", "karim ansarifard", "karim benzema", "karim boudiaf", "karl toko ekambi", "karol świderski", "kasper dolberg", "kasper schmeichel", "keanu baccus", "kellyn acosta", "kendall waston", "kenneth taylor", "kevin de bruyne", "kevin rodríguez", "kevin trapp", "kevin álvarez", "keylor navas", "keysher fuller", "khalid muneer", "kieffer moore", "kieran trippier", "kim jin-su", "kim min-jae", "kim moon-hwan", "kim seung-gyu", "kim tae-hwan", "kim young-gwon", "kingsley coman", "ko itakura", "koen casteels", "koke", "kristijan jakić", "krystian bielik", "krzysztof piątek", "krépin diatta", "kwon chang-hoon", "kwon kyung-won", "kye rowles", "kyle walker", "kylian mbappé", "lautaro martínez", "lawrence ati-zigi", "leander dendoncker", "leandro paredes", "leandro trossard", "lee jae-sung", "lee kang-in", "leon goretzka", "leroy sané", "liam fraser", "liam millar", "lionel messi ", "lisandro martínez", "lovro majer", "loïs openda", "luca de la torre", "lucas cavallini", "lucas hernandez", "lucas paquetá", "lucas torreira", "luis chávez", "luis romo", "luis suárez", "luka jović", "luka modrić ", "luka sučić", "lukas klostermann", "luke shaw", "luuk de jong", "majid hosseini", "mamadou loum", "manuel akanji", "manuel neuer ", "manuel ugarte", "marc-andré ter stegen", "marcelo brozović", "marco asensio", "marcos acuña", "marcos llorente", "marcus rashford", "marcus thuram", "mario götze", "mario pašalić", "mark harris", "mark-anthony kaye", "marko dmitrović", "marko grujić", "marko livaja", "marquinhos", "marten de roon", "martin boyle", "martin braithwaite", "martin erlić", "martin hongla", "martín cáceres", "mason mount", "mateo kovačić", "mateusz wieteska", "matheus nunes", "mathew leckie", "mathew ryan ", "mathias jensen", "mathías olivera", "matt turner", "matteo guendouzi", "matthew smith", "matthias ginter", "matthijs de ligt", "matty cash", "matías vecino", "matías viña", "maxi gómez", "maya yoshida ", "mehdi taremi", "mehdi torabi", "memphis depay", "meshaal barsham", "michael estrada", "michał skóraś", "michel aebischer", "michy batshuayi", "miki yamane", "mikkel damsgaard", "milad mohammadi", "milan borjan", "miloš degenek", "miloš veljković", "mislav oršić", "mitchell duke", "mohamed ali ben romdhane", "mohamed dräger", "mohamed kanno", "mohammed al-breik", "mohammed al-owais", "mohammed al-rubaie", "mohammed kudus", "mohammed muntari", "mohammed salisu", "mohammed waad", "moisés caicedo", "moisés ramírez", "montassar talbi", "morteza pouraliganji", "mostafa meshaal", "mouez hassen", "moumi ngamaleu", "moustapha name", "munir mohamedi", "musab kheder", "na sang-ho", "nader ghandri", "nahuel molina", "naif al-hadhrami", "nampalys mendy", "nasser al-dawsari", "nathan aké", "nathaniel atkinson", "nawaf al-abed", "nawaf al-aqidi", "nayef aguerd", "naïm sliti", "neco williams", "nemanja gudelj", "nemanja maksimović", "nemanja radonjić", "neymar", "nick pope", "niclas füllkrug", "nico elvedi", "nico schlotterbeck", "nico williams", "nicola zalewski", "nicolas jackson", "nicolas nkoulou", "nicolás otamendi", "nicolás tagliafico", "nicolás de la cruz", "niklas süle", 
"nikola milenković", "nikola vlašić", "noa lang", "noah okafor", "nouhou tolo", "noussair mazraoui", "nuno mendes", "néstor araujo", "oliver christensen", "olivier giroud", "olivier mbaizo", "olivier ntcham", "orbelín pineda", "osman bukari", "otávio", "ousmane dembélé", "pablo sarabia", "paik seung-ho", "pape abou cissé", "pape gueye", "pape matar sarr", "papu gómez", "pathé ciss", "patrick sequeira", "pau torres", "paulo dybala", "payam niazmand", "pedri", "pedro", "pepe", "pervis estupiñán", "phil foden", "philipp köhn", "piero hincapié", "pierre kunde", "pierre-emile højbjerg", "piotr zieliński", "predrag rajković", "przemysław frankowski", "rafael leão", "raheem sterling", "ramin rezaeian", "randal kolo muani", "raphaël guerreiro", "raphaël varane", "raphinha", "rasmus kristensen", "raúl jiménez", "remko pasveer", "remo freuler", "renato steffen", "ricardo horta", "ricardo rodriguez", "richarlison", "richie laryea", "riley mcgree", "ritsu dōan", "riyadh sharahili", "roan wilson", "robert arboleda", "robert gumny", "robert lewandowski ", "robert skov", "robert sánchez", "roberto alvarado", "rodolfo cota", "rodri", "rodrigo bentancur", "rodrigo de paul", "rodrygo", "rogelio funes mori", "romain saïss ", "romario ibarra", "romelu lukaku", "ronald araújo", "rouzbeh cheshmi", "ruben vargas", "rubin colwill", "rui patrício", "ró-ró", "rónald matarrita", "rúben dias", "rúben neves", "saad al-sheeb", "sadegh moharrami", "sadio mané", "saeid ezatolahi", "saleh al-shehri", "salem al-dawsari", "salem al-hajri", "salis abdul samed", "salman al-faraj ", "sam adekugbe", "saman ghoddos", "sami al-najei", "samuel gouet", "samuel piette", "sardar azmoun", "saud abdulhamid", "saša lukić", "sean johnson", "sebas méndez", "sebastian szymański", "sebastián coates", "sebastián sosa", "seifeddine jaziri", "selim amallah", "seny dieng", "serge gnabry", "sergej milinković-savić", "sergio busquets ", "sergio rochet", "sergiño dest", "shaq moore", "shogo taniguchi", "shojae khalilzadeh", "shuto machino", "shūichi gonda", "silvan widmer", "simon kjær ", "simon mignolet", "simon ngapandouetnbu", "sofiane boufal", "sofyan amrabat", "son heung-min ", "son jun-ho", "song bum-keun", "song min-kyu", "sorba thomas", "souaibou marou", "srđan babić", "stefan mitrović", "stefan de vrij", "stephen eustáquio", "steve mandanda", "steven berghuis", "steven bergwijn", "steven vitória", "strahinja eraković", "strahinja pavlović", "sultan al-ghannam", "szymon żurkowski", "taha yassine khenissi", "tajon buchanan", "takefusa kubo", "takehiro tomiyasu", "takuma asano", "takumi minamino", "tarek salman", "tariq lamptey", "teun koopmeiners", "theo hernandez", "thiago almada", "thiago silva ", "thibaut courtois", "thilo kehrer", "thomas delaney", "thomas deng", "thomas meunier", "thomas müller", "thomas partey", "thorgan hazard", "tim ream", "timothy castagne", "timothy weah", "toby alderweireld", "tom lockyer", "trent alexander-arnold", "tyler adams", "tyrell malacia", "unai simón", "uriel antuna", "uroš račić", "vahid amiri", "vanja milinković-savić", "victor nelsson", "vincent aboubakar ", "vincent janssen", "vinícius júnior", "virgil van dijk ", "vitinha", "wahbi khazri", "wajdi kechrida", "walid cheddira", "walker zimmerman", "wataru endo", "wayne hennessey", "weston mckennie", "weverton", "william carvalho", "william pacho", "william saliba", "wojciech szczęsny", "wout faes", "wout weghorst", "xavi simons", "xavier arreaga", "xherdan shaqiri", "yahia attiyat allah", "yahya jabrane", "yann sommer", "yannick carrasco", "yasser 
al-shahrani", "yassine bounou", "yassine meriah", "yeltsin tejeda", "yeremy pino", "yoon jong-gyu", "youri tielemans", "yousef hassan", "youssef en-nesyri", "youssef msakni ", "youssouf fofana", "youssouf sabaly", "youssoufa moukoko", "youstin salas", "yuki soma", "yunus musah", "yussuf poulsen", "yuto nagatomo", "zakaria aboukhlal", "zeno debast", "álvaro morata", "álvaro zamora", "ángel correa", "ángel di maría", "ángel mena", "ángelo preciado", "éder militão", "édouard mendy", "érick gutiérrez", "éverton ribeiro", "óscar duarte", "i̇lkay gündoğan", "łukasz skorupski" ]
Amadeus99/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - eval_loss: 1.1938 - eval_accuracy: 0.6375 - eval_runtime: 2.3845 - eval_samples_per_second: 67.099 - eval_steps_per_second: 1.258 - epoch: 38.7 - step: 387 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 64 - eval_batch_size: 64 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 50 ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
sruthis/feb7th
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # feb7th This model is a fine-tuned version of [facebook/deit-base-distilled-patch16-224](https://huggingface.co/facebook/deit-base-distilled-patch16-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0464 - Accuracy: 0.9899 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 8 - seed: 1234 - gradient_accumulation_steps: 10 - total_train_batch_size: 160 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 0.97 | 12 | 0.0598 | 0.9798 | | No log | 1.94 | 24 | 0.0480 | 0.9879 | | No log | 2.98 | 37 | 0.0531 | 0.9838 | | No log | 3.95 | 49 | 0.0456 | 0.9899 | | No log | 4.84 | 60 | 0.0464 | 0.9899 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "2-4d_upper", "bacterial_spot_upper", "cold_injury_upper", "early_blight_upper", "healthy_upperside", "little_leaf_upperside", "nutritional_disorder_upper", "spider_mite_upper", "tsw_upper", "tylc_upper" ]
MichalGas/vit-base-patch16-224-in21k-finetuned-mgasior-07-02-2024
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-in21k-finetuned-mgasior-07-02-2024 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.8842 - F1: 0.7717 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 32 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 | |:-------------:|:-----:|:----:|:---------------:|:------:| | 1.731 | 0.98 | 35 | 1.6748 | 0.3386 | | 1.5196 | 1.99 | 71 | 1.4890 | 0.4173 | | 1.3727 | 2.99 | 107 | 1.2938 | 0.5276 | | 1.2194 | 4.0 | 143 | 1.1519 | 0.6457 | | 1.1538 | 4.98 | 178 | 1.0544 | 0.6693 | | 1.0379 | 5.99 | 214 | 0.9852 | 0.7165 | | 1.0232 | 6.99 | 250 | 0.9439 | 0.7323 | | 0.9586 | 8.0 | 286 | 0.9136 | 0.7480 | | 0.9374 | 8.98 | 321 | 0.8946 | 0.7638 | | 0.96 | 9.79 | 350 | 0.8842 | 0.7717 | ### Framework versions - Transformers 4.36.1 - Pytorch 2.1.2+cu121 - Datasets 2.15.0 - Tokenizers 0.15.0
[ "bipolars", "clippers", "graspers", "hooks", "irrigators", "scissorss" ]
jvbjkbjkbfjis/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.1135 - Accuracy: 0.9703 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.1503 | 1.0 | 57 | 0.2204 | 0.9381 | | 0.1349 | 2.0 | 114 | 0.1394 | 0.9567 | | 0.0552 | 3.0 | 171 | 0.1430 | 0.9678 | | 0.0722 | 4.0 | 228 | 0.1568 | 0.9629 | | 0.0523 | 5.0 | 285 | 0.1135 | 0.9703 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "tench, tinca tinca", "goldfish, carassius auratus", "great white shark, white shark, man-eater, man-eating shark, carcharodon carcharias", "tiger shark, galeocerdo cuvieri", "hammerhead, hammerhead shark", "electric ray, crampfish, numbfish, torpedo", "stingray", "cock", "hen", "ostrich, struthio camelus", "brambling, fringilla montifringilla", "goldfinch, carduelis carduelis", "house finch, linnet, carpodacus mexicanus", "junco, snowbird", "indigo bunting, indigo finch, indigo bird, passerina cyanea", "robin, american robin, turdus migratorius", "bulbul", "jay", "magpie", "chickadee", "water ouzel, dipper", "kite", "bald eagle, american eagle, haliaeetus leucocephalus", "vulture", "great grey owl, great gray owl, strix nebulosa", "european fire salamander, salamandra salamandra", "common newt, triturus vulgaris", "eft", "spotted salamander, ambystoma maculatum", "axolotl, mud puppy, ambystoma mexicanum", "bullfrog, rana catesbeiana", "tree frog, tree-frog", "tailed frog, bell toad, ribbed toad, tailed toad, ascaphus trui", "loggerhead, loggerhead turtle, caretta caretta", "leatherback turtle, leatherback, leathery turtle, dermochelys coriacea", "mud turtle", "terrapin", "box turtle, box tortoise", "banded gecko", "common iguana, iguana, iguana iguana", "american chameleon, anole, anolis carolinensis", "whiptail, whiptail lizard", "agama", "frilled lizard, chlamydosaurus kingi", "alligator lizard", "gila monster, heloderma suspectum", "green lizard, lacerta viridis", "african chameleon, chamaeleo chamaeleon", "komodo dragon, komodo lizard, dragon lizard, giant lizard, varanus komodoensis", "african crocodile, nile crocodile, crocodylus niloticus", "american alligator, alligator mississipiensis", "triceratops", "thunder snake, worm snake, carphophis amoenus", "ringneck snake, ring-necked snake, ring snake", "hognose snake, puff adder, sand viper", "green snake, grass snake", "king snake, kingsnake", "garter snake, grass snake", "water snake", "vine snake", "night snake, hypsiglena torquata", "boa constrictor, constrictor constrictor", "rock python, rock snake, python sebae", "indian cobra, naja naja", "green mamba", "sea snake", "horned viper, cerastes, sand viper, horned asp, cerastes cornutus", "diamondback, diamondback rattlesnake, crotalus adamanteus", "sidewinder, horned rattlesnake, crotalus cerastes", "trilobite", "harvestman, daddy longlegs, phalangium opilio", "scorpion", "black and gold garden spider, argiope aurantia", "barn spider, araneus cavaticus", "garden spider, aranea diademata", "black widow, latrodectus mactans", "tarantula", "wolf spider, hunting spider", "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse, partridge, bonasa umbellus", "prairie chicken, prairie grouse, prairie fowl", "peacock", "quail", "partridge", "african grey, african gray, psittacus erithacus", "macaw", "sulphur-crested cockatoo, kakatoe galerita, cacatua galerita", "lorikeet", "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "drake", "red-breasted merganser, mergus serrator", "goose", "black swan, cygnus atratus", "tusker", "echidna, spiny anteater, anteater", "platypus, duckbill, duckbilled platypus, duck-billed platypus, ornithorhynchus anatinus", "wallaby, brush kangaroo", "koala, koala bear, kangaroo bear, native bear, phascolarctos cinereus", "wombat", "jellyfish", "sea anemone, anemone", "brain coral", "flatworm, platyhelminth", "nematode, nematode worm, roundworm", "conch", "snail", "slug", "sea slug, nudibranch", "chiton, coat-of-mail shell, sea 
cradle, polyplacophore", "chambered nautilus, pearly nautilus, nautilus", "dungeness crab, cancer magister", "rock crab, cancer irroratus", "fiddler crab", "king crab, alaska crab, alaskan king crab, alaska king crab, paralithodes camtschatica", "american lobster, northern lobster, maine lobster, homarus americanus", "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish", "crayfish, crawfish, crawdad, crawdaddy", "hermit crab", "isopod", "white stork, ciconia ciconia", "black stork, ciconia nigra", "spoonbill", "flamingo", "little blue heron, egretta caerulea", "american egret, great white heron, egretta albus", "bittern", "crane", "limpkin, aramus pictus", "european gallinule, porphyrio porphyrio", "american coot, marsh hen, mud hen, water hen, fulica americana", "bustard", "ruddy turnstone, arenaria interpres", "red-backed sandpiper, dunlin, erolia alpina", "redshank, tringa totanus", "dowitcher", "oystercatcher, oyster catcher", "pelican", "king penguin, aptenodytes patagonica", "albatross, mollymawk", "grey whale, gray whale, devilfish, eschrichtius gibbosus, eschrichtius robustus", "killer whale, killer, orca, grampus, sea wolf, orcinus orca", "dugong, dugong dugon", "sea lion", "chihuahua", "japanese spaniel", "maltese dog, maltese terrier, maltese", "pekinese, pekingese, peke", "shih-tzu", "blenheim spaniel", "papillon", "toy terrier", "rhodesian ridgeback", "afghan hound, afghan", "basset, basset hound", "beagle", "bloodhound, sleuthhound", "bluetick", "black-and-tan coonhound", "walker hound, walker foxhound", "english foxhound", "redbone", "borzoi, russian wolfhound", "irish wolfhound", "italian greyhound", "whippet", "ibizan hound, ibizan podenco", "norwegian elkhound, elkhound", "otterhound, otter hound", "saluki, gazelle hound", "scottish deerhound, deerhound", "weimaraner", "staffordshire bullterrier, staffordshire bull terrier", "american staffordshire terrier, staffordshire terrier, american pit bull terrier, pit bull terrier", "bedlington terrier", "border terrier", "kerry blue terrier", "irish terrier", "norfolk terrier", "norwich terrier", "yorkshire terrier", "wire-haired fox terrier", "lakeland terrier", "sealyham terrier, sealyham", "airedale, airedale terrier", "cairn, cairn terrier", "australian terrier", "dandie dinmont, dandie dinmont terrier", "boston bull, boston terrier", "miniature schnauzer", "giant schnauzer", "standard schnauzer", "scotch terrier, scottish terrier, scottie", "tibetan terrier, chrysanthemum dog", "silky terrier, sydney silky", "soft-coated wheaten terrier", "west highland white terrier", "lhasa, lhasa apso", "flat-coated retriever", "curly-coated retriever", "golden retriever", "labrador retriever", "chesapeake bay retriever", "german short-haired pointer", "vizsla, hungarian pointer", "english setter", "irish setter, red setter", "gordon setter", "brittany spaniel", "clumber, clumber spaniel", "english springer, english springer spaniel", "welsh springer spaniel", "cocker spaniel, english cocker spaniel, cocker", "sussex spaniel", "irish water spaniel", "kuvasz", "schipperke", "groenendael", "malinois", "briard", "kelpie", "komondor", "old english sheepdog, bobtail", "shetland sheepdog, shetland sheep dog, shetland", "collie", "border collie", "bouvier des flandres, bouviers des flandres", "rottweiler", "german shepherd, german shepherd dog, german police dog, alsatian", "doberman, doberman pinscher", "miniature pinscher", "greater swiss mountain dog", "bernese mountain dog", "appenzeller", "entlebucher", "boxer", "bull 
mastiff", "tibetan mastiff", "french bulldog", "great dane", "saint bernard, st bernard", "eskimo dog, husky", "malamute, malemute, alaskan malamute", "siberian husky", "dalmatian, coach dog, carriage dog", "affenpinscher, monkey pinscher, monkey dog", "basenji", "pug, pug-dog", "leonberg", "newfoundland, newfoundland dog", "great pyrenees", "samoyed, samoyede", "pomeranian", "chow, chow chow", "keeshond", "brabancon griffon", "pembroke, pembroke welsh corgi", "cardigan, cardigan welsh corgi", "toy poodle", "miniature poodle", "standard poodle", "mexican hairless", "timber wolf, grey wolf, gray wolf, canis lupus", "white wolf, arctic wolf, canis lupus tundrarum", "red wolf, maned wolf, canis rufus, canis niger", "coyote, prairie wolf, brush wolf, canis latrans", "dingo, warrigal, warragal, canis dingo", "dhole, cuon alpinus", "african hunting dog, hyena dog, cape hunting dog, lycaon pictus", "hyena, hyaena", "red fox, vulpes vulpes", "kit fox, vulpes macrotis", "arctic fox, white fox, alopex lagopus", "grey fox, gray fox, urocyon cinereoargenteus", "tabby, tabby cat", "tiger cat", "persian cat", "siamese cat, siamese", "egyptian cat", "cougar, puma, catamount, mountain lion, painter, panther, felis concolor", "lynx, catamount", "leopard, panthera pardus", "snow leopard, ounce, panthera uncia", "jaguar, panther, panthera onca, felis onca", "lion, king of beasts, panthera leo", "tiger, panthera tigris", "cheetah, chetah, acinonyx jubatus", "brown bear, bruin, ursus arctos", "american black bear, black bear, ursus americanus, euarctos americanus", "ice bear, polar bear, ursus maritimus, thalarctos maritimus", "sloth bear, melursus ursinus, ursus ursinus", "mongoose", "meerkat, mierkat", "tiger beetle", "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle", "ground beetle, carabid beetle", "long-horned beetle, longicorn, longicorn beetle", "leaf beetle, chrysomelid", "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant, emmet, pismire", "grasshopper, hopper", "cricket", "walking stick, walkingstick, stick insect", "cockroach, roach", "mantis, mantid", "cicada, cicala", "leafhopper", "lacewing, lacewing fly", "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", "damselfly", "admiral", "ringlet, ringlet butterfly", "monarch, monarch butterfly, milkweed butterfly, danaus plexippus", "cabbage butterfly", "sulphur butterfly, sulfur butterfly", "lycaenid, lycaenid butterfly", "starfish, sea star", "sea urchin", "sea cucumber, holothurian", "wood rabbit, cottontail, cottontail rabbit", "hare", "angora, angora rabbit", "hamster", "porcupine, hedgehog", "fox squirrel, eastern fox squirrel, sciurus niger", "marmot", "beaver", "guinea pig, cavia cobaya", "sorrel", "zebra", "hog, pig, grunter, squealer, sus scrofa", "wild boar, boar, sus scrofa", "warthog", "hippopotamus, hippo, river horse, hippopotamus amphibius", "ox", "water buffalo, water ox, asiatic buffalo, bubalus bubalis", "bison", "ram, tup", "bighorn, bighorn sheep, cimarron, rocky mountain bighorn, rocky mountain sheep, ovis canadensis", "ibex, capra ibex", "hartebeest", "impala, aepyceros melampus", "gazelle", "arabian camel, dromedary, camelus dromedarius", "llama", "weasel", "mink", "polecat, fitch, foulmart, foumart, mustela putorius", "black-footed ferret, ferret, mustela nigripes", "otter", "skunk, polecat, wood pussy", "badger", "armadillo", "three-toed sloth, ai, bradypus tridactylus", "orangutan, orang, orangutang, pongo pygmaeus", "gorilla, 
gorilla gorilla", "chimpanzee, chimp, pan troglodytes", "gibbon, hylobates lar", "siamang, hylobates syndactylus, symphalangus syndactylus", "guenon, guenon monkey", "patas, hussar monkey, erythrocebus patas", "baboon", "macaque", "langur", "colobus, colobus monkey", "proboscis monkey, nasalis larvatus", "marmoset", "capuchin, ringtail, cebus capucinus", "howler monkey, howler", "titi, titi monkey", "spider monkey, ateles geoffroyi", "squirrel monkey, saimiri sciureus", "madagascar cat, ring-tailed lemur, lemur catta", "indri, indris, indri indri, indri brevicaudatus", "indian elephant, elephas maximus", "african elephant, loxodonta africana", "lesser panda, red panda, panda, bear cat, cat bear, ailurus fulgens", "giant panda, panda, panda bear, coon bear, ailuropoda melanoleuca", "barracouta, snoek", "eel", "coho, cohoe, coho salmon, blue jack, silver salmon, oncorhynchus kisutch", "rock beauty, holocanthus tricolor", "anemone fish", "sturgeon", "gar, garfish, garpike, billfish, lepisosteus osseus", "lionfish", "puffer, pufferfish, blowfish, globefish", "abacus", "abaya", "academic gown, academic robe, judge's robe", "accordion, piano accordion, squeeze box", "acoustic guitar", "aircraft carrier, carrier, flattop, attack aircraft carrier", "airliner", "airship, dirigible", "altar", "ambulance", "amphibian, amphibious vehicle", "analog clock", "apiary, bee house", "apron", "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin", "assault rifle, assault gun", "backpack, back pack, knapsack, packsack, rucksack, haversack", "bakery, bakeshop, bakehouse", "balance beam, beam", "balloon", "ballpoint, ballpoint pen, ballpen, biro", "band aid", "banjo", "bannister, banister, balustrade, balusters, handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel, cask", "barrow, garden cart, lawn cart, wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "bathing cap, swimming cap", "bath towel", "bathtub, bathing tub, bath, tub", "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon", "beacon, lighthouse, beacon light, pharos", "beaker", "bearskin, busby, shako", "beer bottle", "beer glass", "bell cote, bell cot", "bib", "bicycle-built-for-two, tandem bicycle, tandem", "bikini, two-piece", "binder, ring-binder", "binoculars, field glasses, opera glasses", "birdhouse", "boathouse", "bobsled, bobsleigh, bob", "bolo tie, bolo, bola tie, bola", "bonnet, poke bonnet", "bookcase", "bookshop, bookstore, bookstall", "bottlecap", "bow", "bow tie, bow-tie, bowtie", "brass, memorial tablet, plaque", "brassiere, bra, bandeau", "breakwater, groin, groyne, mole, bulwark, seawall, jetty", "breastplate, aegis, egis", "broom", "bucket, pail", "buckle", "bulletproof vest", "bullet train, bullet", "butcher shop, meat market", "cab, hack, taxi, taxicab", "caldron, cauldron", "candle, taper, wax light", "cannon", "canoe", "can opener, tin opener", "cardigan", "car mirror", "carousel, carrousel, merry-go-round, roundabout, whirligig", "carpenter's kit, tool kit", "carton", "car wheel", "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, atm", "cassette", "cassette player", "castle", "catamaran", "cd player", "cello, violoncello", "cellular telephone, cellular phone, cellphone, cell, mobile phone", "chain", "chainlink fence", "chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour", "chain saw, chainsaw", "chest", "chiffonier, 
commode", "chime, bell, gong", "china cabinet, china closet", "christmas stocking", "church, church building", "cinema, movie theater, movie theatre, movie house, picture palace", "cleaver, meat cleaver, chopper", "cliff dwelling", "cloak", "clog, geta, patten, sabot", "cocktail shaker", "coffee mug", "coffeepot", "coil, spiral, volute, whorl, helix", "combination lock", "computer keyboard, keypad", "confectionery, confectionary, candy store", "container ship, containership, container vessel", "convertible", "corkscrew, bottle screw", "cornet, horn, trumpet, trump", "cowboy boot", "cowboy hat, ten-gallon hat", "cradle", "crane", "crash helmet", "crate", "crib, cot", "crock pot", "croquet ball", "crutch", "cuirass", "dam, dike, dyke", "desk", "desktop computer", "dial telephone, dial phone", "diaper, nappy, napkin", "digital clock", "digital watch", "dining table, board", "dishrag, dishcloth", "dishwasher, dish washer, dishwashing machine", "disk brake, disc brake", "dock, dockage, docking facility", "dogsled, dog sled, dog sleigh", "dome", "doormat, welcome mat", "drilling platform, offshore rig", "drum, membranophone, tympan", "drumstick", "dumbbell", "dutch oven", "electric fan, blower", "electric guitar", "electric locomotive", "entertainment center", "envelope", "espresso maker", "face powder", "feather boa, boa", "file, file cabinet, filing cabinet", "fireboat", "fire engine, fire truck", "fire screen, fireguard", "flagpole, flagstaff", "flute, transverse flute", "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster", "freight car", "french horn, horn", "frying pan, frypan, skillet", "fur coat", "garbage truck, dustcart", "gasmask, respirator, gas helmet", "gas pump, gasoline pump, petrol pump, island dispenser", "goblet", "go-kart", "golf ball", "golfcart, golf cart", "gondola", "gong, tam-tam", "gown", "grand piano, grand", "greenhouse, nursery, glasshouse", "grille, radiator grille", "grocery store, grocery, food market, market", "guillotine", "hair slide", "hair spray", "half track", "hammer", "hamper", "hand blower, blow dryer, blow drier, hair dryer, hair drier", "hand-held computer, hand-held microcomputer", "handkerchief, hankie, hanky, hankey", "hard disc, hard disk, fixed disk", "harmonica, mouth organ, harp, mouth harp", "harp", "harvester, reaper", "hatchet", "holster", "home theater, home theatre", "honeycomb", "hook, claw", "hoopskirt, crinoline", "horizontal bar, high bar", "horse cart, horse-cart", "hourglass", "ipod", "iron, smoothing iron", "jack-o'-lantern", "jean, blue jean, denim", "jeep, landrover", "jersey, t-shirt, tee shirt", "jigsaw puzzle", "jinrikisha, ricksha, rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat, laboratory coat", "ladle", "lampshade, lamp shade", "laptop, laptop computer", "lawn mower, mower", "lens cap, lens cover", "letter opener, paper knife, paperknife", "library", "lifeboat", "lighter, light, igniter, ignitor", "limousine, limo", "liner, ocean liner", "lipstick, lip rouge", "loafer", "lotion", "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system", "loupe, jeweler's loupe", "lumbermill, sawmill", "magnetic compass", "mailbag, postbag", "mailbox, letter box", "maillot", "maillot, tank suit", "manhole cover", "maraca", "marimba, xylophone", "mask", "matchstick", "maypole", "maze, labyrinth", "measuring cup", "medicine chest, medicine cabinet", "megalith, megalithic structure", "microphone, mike", "microwave, microwave oven", "military uniform", "milk can", "minibus", 
"miniskirt, mini", "minivan", "missile", "mitten", "mixing bowl", "mobile home, manufactured home", "model t", "modem", "monastery", "monitor", "moped", "mortar", "mortarboard", "mosque", "mosquito net", "motor scooter, scooter", "mountain bike, all-terrain bike, off-roader", "mountain tent", "mouse, computer mouse", "mousetrap", "moving van", "muzzle", "nail", "neck brace", "necklace", "nipple", "notebook, notebook computer", "obelisk", "oboe, hautboy, hautbois", "ocarina, sweet potato", "odometer, hodometer, mileometer, milometer", "oil filter", "organ, pipe organ", "oscilloscope, scope, cathode-ray oscilloscope, cro", "overskirt", "oxcart", "oxygen mask", "packet", "paddle, boat paddle", "paddlewheel, paddle wheel", "padlock", "paintbrush", "pajama, pyjama, pj's, jammies", "palace", "panpipe, pandean pipe, syrinx", "paper towel", "parachute, chute", "parallel bars, bars", "park bench", "parking meter", "passenger car, coach, carriage", "patio, terrace", "pay-phone, pay-station", "pedestal, plinth, footstall", "pencil box, pencil case", "pencil sharpener", "perfume, essence", "petri dish", "photocopier", "pick, plectrum, plectron", "pickelhaube", "picket fence, paling", "pickup, pickup truck", "pier", "piggy bank, penny bank", "pill bottle", "pillow", "ping-pong ball", "pinwheel", "pirate, pirate ship", "pitcher, ewer", "plane, carpenter's plane, woodworking plane", "planetarium", "plastic bag", "plate rack", "plow, plough", "plunger, plumber's helper", "polaroid camera, polaroid land camera", "pole", "police van, police wagon, paddy wagon, patrol wagon, wagon, black maria", "poncho", "pool table, billiard table, snooker table", "pop bottle, soda bottle", "pot, flowerpot", "potter's wheel", "power drill", "prayer rug, prayer mat", "printer", "prison, prison house", "projectile, missile", "projector", "puck, hockey puck", "punching bag, punch bag, punching ball, punchball", "purse", "quill, quill pen", "quilt, comforter, comfort, puff", "racer, race car, racing car", "racket, racquet", "radiator", "radio, wireless", "radio telescope, radio reflector", "rain barrel", "recreational vehicle, rv, r.v.", "reel", "reflex camera", "refrigerator, icebox", "remote control, remote", "restaurant, eating house, eating place, eatery", "revolver, six-gun, six-shooter", "rifle", "rocking chair, rocker", "rotisserie", "rubber eraser, rubber, pencil eraser", "rugby ball", "rule, ruler", "running shoe", "safe", "safety pin", "saltshaker, salt shaker", "sandal", "sarong", "sax, saxophone", "scabbard", "scale, weighing machine", "school bus", "schooner", "scoreboard", "screen, crt screen", "screw", "screwdriver", "seat belt, seatbelt", "sewing machine", "shield, buckler", "shoe shop, shoe-shop, shoe store", "shoji", "shopping basket", "shopping cart", "shovel", "shower cap", "shower curtain", "ski", "ski mask", "sleeping bag", "slide rule, slipstick", "sliding door", "slot, one-armed bandit", "snorkel", "snowmobile", "snowplow, snowplough", "soap dispenser", "soccer ball", "sock", "solar dish, solar collector, solar furnace", "sombrero", "soup bowl", "space bar", "space heater", "space shuttle", "spatula", "speedboat", "spider web, spider's web", "spindle", "sports car, sport car", "spotlight, spot", "stage", "steam locomotive", "steel arch bridge", "steel drum", "stethoscope", "stole", "stone wall", "stopwatch, stop watch", "stove", "strainer", "streetcar, tram, tramcar, trolley, trolley car", "stretcher", "studio couch, day bed", "stupa, tope", "submarine, pigboat, sub, u-boat", "suit, suit of clothes", 
"sundial", "sunglass", "sunglasses, dark glasses, shades", "sunscreen, sunblock, sun blocker", "suspension bridge", "swab, swob, mop", "sweatshirt", "swimming trunks, bathing trunks", "swing", "switch, electric switch, electrical switch", "syringe", "table lamp", "tank, army tank, armored combat vehicle, armoured combat vehicle", "tape player", "teapot", "teddy, teddy bear", "television, television system", "tennis ball", "thatch, thatched roof", "theater curtain, theatre curtain", "thimble", "thresher, thrasher, threshing machine", "throne", "tile roof", "toaster", "tobacco shop, tobacconist shop, tobacconist", "toilet seat", "torch", "totem pole", "tow truck, tow car, wrecker", "toyshop", "tractor", "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi", "tray", "trench coat", "tricycle, trike, velocipede", "trimaran", "tripod", "triumphal arch", "trolleybus, trolley coach, trackless trolley", "trombone", "tub, vat", "turnstile", "typewriter keyboard", "umbrella", "unicycle, monocycle", "upright, upright piano", "vacuum, vacuum cleaner", "vase", "vault", "velvet", "vending machine", "vestment", "viaduct", "violin, fiddle", "volleyball", "waffle iron", "wall clock", "wallet, billfold, notecase, pocketbook", "wardrobe, closet, press", "warplane, military plane", "washbasin, handbasin, washbowl, lavabo, wash-hand basin", "washer, automatic washer, washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle", "wig", "window screen", "window shade", "windsor tie", "wine bottle", "wing", "wok", "wooden spoon", "wool, woolen, woollen", "worm fence, snake fence, snake-rail fence, virginia fence", "wreck", "yawl", "yurt", "web site, website, internet site, site", "comic book", "crossword puzzle, crossword", "street sign", "traffic light, traffic signal, stoplight", "book jacket, dust cover, dust jacket, dust wrapper", "menu", "plate", "guacamole", "consomme", "hot pot, hotpot", "trifle", "ice cream, icecream", "ice lolly, lolly, lollipop, popsicle", "french loaf", "bagel, beigel", "pretzel", "cheeseburger", "hotdog, hot dog, red hot", "mashed potato", "head cabbage", "broccoli", "cauliflower", "zucchini, courgette", "spaghetti squash", "acorn squash", "butternut squash", "cucumber, cuke", "artichoke, globe artichoke", "bell pepper", "cardoon", "mushroom", "granny smith", "strawberry", "orange", "lemon", "fig", "pineapple, ananas", "banana", "jackfruit, jak, jack", "custard apple", "pomegranate", "hay", "carbonara", "chocolate sauce, chocolate syrup", "dough", "meat loaf, meatloaf", "pizza, pizza pie", "potpie", "burrito", "red wine", "espresso", "cup", "eggnog", "alp", "bubble", "cliff, drop, drop-off", "coral reef", "geyser", "lakeside, lakeshore", "promontory, headland, head, foreland", "sandbar, sand bar", "seashore, coast, seacoast, sea-coast", "valley, vale", "volcano", "ballplayer, baseball player", "groom, bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper, yellow lady-slipper, cypripedium calceolus, cypripedium parviflorum", "corn", "acorn", "hip, rose hip, rosehip", "buckeye, horse chestnut, conker", "coral fungus", "agaric", "gyromitra", "stinkhorn, carrion fungus", "earthstar", "hen-of-the-woods, hen of the woods, polyporus frondosus, grifola frondosa", "bolete", "ear, spike, capitulum", "toilet tissue, toilet paper, bathroom tissue" ]
ivnvan/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2259 - Accuracy: 0.5625 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: polynomial - num_epochs: 25 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.8751 | 1.0 | 20 | 1.7512 | 0.3 | | 1.3825 | 2.0 | 40 | 1.4946 | 0.425 | | 1.1532 | 3.0 | 60 | 1.3387 | 0.45 | | 0.9865 | 4.0 | 80 | 1.3469 | 0.4562 | | 0.8767 | 5.0 | 100 | 1.2275 | 0.55 | | 0.7586 | 6.0 | 120 | 1.2560 | 0.5062 | | 0.5985 | 7.0 | 140 | 1.2596 | 0.5062 | | 0.5052 | 8.0 | 160 | 1.3010 | 0.5687 | | 0.4243 | 9.0 | 180 | 1.2613 | 0.5563 | | 0.387 | 10.0 | 200 | 1.2750 | 0.5312 | | 0.3529 | 11.0 | 220 | 1.3103 | 0.55 | | 0.218 | 12.0 | 240 | 1.1832 | 0.55 | | 0.2428 | 13.0 | 260 | 1.2527 | 0.5563 | | 0.2399 | 14.0 | 280 | 1.4836 | 0.5375 | | 0.218 | 15.0 | 300 | 1.4056 | 0.4875 | | 0.1784 | 16.0 | 320 | 1.3879 | 0.5563 | | 0.2021 | 17.0 | 340 | 1.4346 | 0.5375 | | 0.1342 | 18.0 | 360 | 1.4666 | 0.4813 | | 0.1499 | 19.0 | 380 | 1.4104 | 0.5687 | | 0.1032 | 20.0 | 400 | 1.5402 | 0.525 | | 0.1214 | 21.0 | 420 | 1.4114 | 0.55 | | 0.153 | 22.0 | 440 | 1.5887 | 0.525 | | 0.1276 | 23.0 | 460 | 1.4588 | 0.5188 | | 0.1114 | 24.0 | 480 | 1.4866 | 0.5312 | | 0.1305 | 25.0 | 500 | 1.4203 | 0.5687 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
arpanl/Fine-Tuned_Model2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # Fine-Tuned_Model2 This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 100 ### Training results ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "aeroplane", "blackboard", "clouds", "coins", "cycles", "deer", "desk", "dogs", "dogsledge", "door-frame", "factory", "fireman", "boat", "firetruck", "food", "helicopter", "horse", "horsepipe", "instrument", "ladder", "lake", "landscape", "machinaries", "books", "mountains", "painting", "people", "pole", "railway", "river", "road", "ship", "ski", "sky", "bridge", "snow", "stairs", "table", "telephone", "tree", "water", "building", "car", "chair", "children", "church" ]
arpanl/Fine-Tuned_Model3
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # Fine-Tuned_Model3 This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.7362 - Accuracy: 0.608 - F1: 0.5096 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | |:-------------:|:-----:|:----:|:---------------:|:--------:|:------:| | 3.2255 | 5.0 | 20 | 1.9574 | 0.512 | 0.3083 | | 1.3773 | 10.0 | 40 | 0.8854 | 0.584 | 0.4617 | | 0.869 | 15.0 | 60 | 0.7880 | 0.608 | 0.4795 | | 0.7966 | 20.0 | 80 | 0.7732 | 0.6 | 0.4846 | | 0.8458 | 25.0 | 100 | 0.7795 | 0.576 | 0.4112 | | 0.8135 | 30.0 | 120 | 0.7362 | 0.608 | 0.5096 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "aeroplane", "blackboard", "clouds", "coins", "cycles", "deer", "desk", "dogs", "dogsledge", "door-frame", "factory", "fireman", "boat", "firetruck", "food", "horse", "horsepipe", "instrument", "ladder", "lake", "landscape", "machinaries", "mountains", "books", "painting", "people", "pole", "railway", "river", "road", "ship", "ski", "sky", "snow", "bridge", "stairs", "telephone", "tree", "water", "building", "car", "chair", "children", "church" ]
yangswei/visual-emotion-classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.1599 - Accuracy: 0.5813 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 13 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.8887 | 0.35 | | No log | 2.0 | 80 | 1.5494 | 0.425 | | No log | 3.0 | 120 | 1.4015 | 0.5188 | | No log | 4.0 | 160 | 1.2919 | 0.55 | | No log | 5.0 | 200 | 1.2205 | 0.5813 | | No log | 6.0 | 240 | 1.2246 | 0.575 | | No log | 7.0 | 280 | 1.2053 | 0.5312 | | No log | 8.0 | 320 | 1.1487 | 0.5687 | | No log | 9.0 | 360 | 1.1727 | 0.5437 | | No log | 10.0 | 400 | 1.1459 | 0.55 | | No log | 11.0 | 440 | 1.1313 | 0.5813 | | No log | 12.0 | 480 | 1.0990 | 0.6062 | | 1.1138 | 13.0 | 520 | 1.1020 | 0.6188 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
rendy-k/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.1874 - Accuracy: 0.9517 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 52 | 0.2941 | 0.9227 | | No log | 2.0 | 104 | 0.2064 | 0.9517 | | No log | 3.0 | 156 | 0.2221 | 0.9372 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "angular_leaf_spot", "bean_rust", "healthy" ]
citradiani/emotion_model_1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # emotion_model_1 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.5356 - Accuracy: 0.4437 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 2.0785 | 1.0 | 10 | 2.0617 | 0.125 | | 2.0054 | 2.0 | 20 | 1.9826 | 0.275 | | 1.8694 | 3.0 | 30 | 1.8516 | 0.325 | | 1.7212 | 4.0 | 40 | 1.7082 | 0.3812 | | 1.6101 | 5.0 | 50 | 1.6297 | 0.4375 | | 1.5409 | 6.0 | 60 | 1.5981 | 0.4188 | | 1.4801 | 7.0 | 70 | 1.5526 | 0.4437 | | 1.433 | 8.0 | 80 | 1.5574 | 0.4813 | | 1.4056 | 9.0 | 90 | 1.5094 | 0.5062 | | 1.3797 | 10.0 | 100 | 1.5232 | 0.4688 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
rendy-k/face_emotion_recognizer
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # face_emotion_recognizer This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.7251 - Accuracy: 0.4188 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.9125 | 0.4125 | | No log | 2.0 | 80 | 1.7183 | 0.4188 | | No log | 3.0 | 120 | 1.6596 | 0.4125 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
stray-light/dit-base-finetuned-rvlcdip-finetuned-custom-first
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # dit-base-finetuned-rvlcdip-finetuned-custom-first This model is a fine-tuned version of [microsoft/dit-base-finetuned-rvlcdip](https://huggingface.co/microsoft/dit-base-finetuned-rvlcdip) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0567 - Accuracy: 0.9949 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.3686 | 1.0 | 79 | 0.2356 | 0.9746 | | 0.0891 | 2.0 | 158 | 0.0792 | 0.9936 | | 0.0652 | 3.0 | 237 | 0.0567 | 0.9949 | ### Framework versions - Transformers 4.38.0.dev0 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "ben_forms", "degraded_nid", "eng_forms", "eng_online", "nid", "others", "pass_photos" ]
am-infoweb/MRR_image_classification_dit_29_jan-finetuned
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # MRR_image_classification_dit_29_jan-finetuned-eurosat This model is a fine-tuned version of [microsoft/dit-large](https://huggingface.co/microsoft/dit-large) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.4995 - Accuracy: 0.8250 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 32 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.0588 | 1.0 | 175 | 0.8931 | 0.6622 | | 0.7206 | 2.0 | 351 | 0.6266 | 0.7774 | | 0.6833 | 2.99 | 525 | 0.4995 | 0.8250 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "billing", "diagnostics", "lab reports", "office visit", "operative report", "orders", "other", "physical_therapy_(ptot)" ]
papayalovers/emotion_image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # emotion_image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.3343 - Accuracy: 0.5875 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0005 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 5 - total_train_batch_size: 160 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 0.87 | 4 | 2.0221 | 0.1 | | No log | 1.96 | 9 | 1.6982 | 0.25 | | No log | 2.83 | 13 | 1.7868 | 0.225 | | No log | 3.91 | 18 | 1.6731 | 0.2625 | | No log | 5.0 | 23 | 1.6196 | 0.175 | | No log | 5.87 | 27 | 1.5399 | 0.3 | | No log | 6.96 | 32 | 1.5348 | 0.375 | | No log | 7.83 | 36 | 1.6157 | 0.3125 | | No log | 8.91 | 41 | 1.4275 | 0.45 | | No log | 10.0 | 46 | 1.3832 | 0.425 | | No log | 10.87 | 50 | 1.4440 | 0.425 | | No log | 11.96 | 55 | 1.5841 | 0.4375 | | No log | 12.83 | 59 | 1.4398 | 0.4625 | | No log | 13.91 | 64 | 1.4413 | 0.475 | | No log | 15.0 | 69 | 1.3143 | 0.5375 | | No log | 15.87 | 73 | 1.3667 | 0.5625 | | No log | 16.96 | 78 | 1.4028 | 0.5 | | No log | 17.83 | 82 | 1.4485 | 0.5375 | | No log | 18.91 | 87 | 1.9334 | 0.3875 | | No log | 20.0 | 92 | 1.4611 | 0.55 | | No log | 20.87 | 96 | 1.3279 | 0.5875 | | No log | 21.96 | 101 | 1.6526 | 0.45 | | No log | 22.83 | 105 | 1.4921 | 0.4875 | | No log | 23.91 | 110 | 1.3962 | 0.5875 | | No log | 25.0 | 115 | 1.7038 | 0.4375 | | No log | 25.87 | 119 | 1.5210 | 0.55 | | No log | 26.09 | 120 | 1.5141 | 0.5125 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
muhnatha/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.3535 - Accuracy: 0.5437 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.5549 | 0.45 | | No log | 2.0 | 80 | 1.4415 | 0.4875 | | No log | 3.0 | 120 | 1.3983 | 0.4688 | | No log | 4.0 | 160 | 1.3540 | 0.5437 | | No log | 5.0 | 200 | 1.3569 | 0.5 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
ghermoso/vit-eGTZANplus
# Vision Transformer (ViT) for Music Genre Classification ## Model Overview - **Model Name:** [ghermoso/vit-eGTZANplus](https://huggingface.co/ghermoso/vit-eGTZANplus) - **Task:** Image Classification - **Dataset:** [egtzan_plus](https://huggingface.co/datasets/ghermoso/egtzan_plus) - **Model Architecture:** [Vision Transformer (ViT)](https://huggingface.co/docs/transformers/model_doc/vit) - **Finetuned from model:** This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the [egtzan_plus](https://huggingface.co/datasets/ghermoso/egtzan_plus) dataset. It achieves the following results on the evaluation set: - Loss: 0.8358 - Accuracy: 0.7460
[ "afro", "classical", "reggae", "rock", "country", "disco", "electro", "jazz", "latin", "metal", "pop", "rap" ]
miifta-hs/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.1919 - Accuracy: 0.9609 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 65 | 0.1913 | 0.9609 | | No log | 2.0 | 130 | 0.2691 | 0.9141 | | No log | 3.0 | 195 | 0.2559 | 0.9219 | | No log | 4.0 | 260 | 0.2436 | 0.9219 | | No log | 5.0 | 325 | 0.2779 | 0.9219 | | No log | 6.0 | 390 | 0.2464 | 0.9297 | | No log | 7.0 | 455 | 0.1589 | 0.9609 | | 0.1457 | 8.0 | 520 | 0.0968 | 0.9766 | | 0.1457 | 9.0 | 585 | 0.2216 | 0.9453 | | 0.1457 | 10.0 | 650 | 0.1490 | 0.9609 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.1 - Tokenizers 0.15.2
[ "angular_leaf_spot", "bean_rust", "healthy" ]
malifiahm/emotion_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # emotion_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.1249 - Accuracy: 0.6188 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 15 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.8344 | 0.3 | | No log | 2.0 | 80 | 1.5609 | 0.4375 | | No log | 3.0 | 120 | 1.4819 | 0.4562 | | No log | 4.0 | 160 | 1.3477 | 0.5188 | | No log | 5.0 | 200 | 1.2618 | 0.5813 | | No log | 6.0 | 240 | 1.1946 | 0.5813 | | No log | 7.0 | 280 | 1.1800 | 0.5875 | | No log | 8.0 | 320 | 1.1921 | 0.5625 | | No log | 9.0 | 360 | 1.1274 | 0.6 | | No log | 10.0 | 400 | 1.0886 | 0.65 | | No log | 11.0 | 440 | 1.0750 | 0.6125 | | No log | 12.0 | 480 | 1.1349 | 0.575 | | 1.0832 | 13.0 | 520 | 1.0841 | 0.5875 | | 1.0832 | 14.0 | 560 | 1.1195 | 0.5813 | | 1.0832 | 15.0 | 600 | 1.0865 | 0.6188 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
basavaakash002/autotrain-vhhui-btea8
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 1.5556857585906982 f1_macro: 0.2 f1_micro: 0.2631578947368421 f1_weighted: 0.21052631578947367 precision_macro: 0.27272727272727276 precision_micro: 0.2631578947368421 precision_weighted: 0.2535885167464115 recall_macro: 0.22666666666666666 recall_micro: 0.2631578947368421 recall_weighted: 0.2631578947368421 accuracy: 0.2631578947368421
[ "belly", "burping", "discomfort", "hungry", "tired" ]
basavaakash002/autotrain-z7maa-wyroe
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 1.574344515800476 f1_macro: 0.08333333333333334 f1_micro: 0.2631578947368421 f1_weighted: 0.10964912280701755 precision_macro: 0.05263157894736842 precision_micro: 0.2631578947368421 precision_weighted: 0.06925207756232686 recall_macro: 0.2 recall_micro: 0.2631578947368421 recall_weighted: 0.2631578947368421 accuracy: 0.2631578947368421
[ "belly", "burping", "discomfort", "hungry", "tired" ]
tferreira6/autotrain-vhcyp-17adu
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 3.527578115463257 f1_macro: 0.23136507936507936 f1_micro: 0.30666666666666664 f1_weighted: 0.23136507936507936 precision_macro: 0.20333333333333334 precision_micro: 0.30666666666666664 precision_weighted: 0.20333333333333334 recall_macro: 0.30666666666666664 recall_micro: 0.30666666666666664 recall_weighted: 0.30666666666666664 accuracy: 0.30666666666666664
[ "real adonis", "real african giant swallowtail", "real american snoot", "real an 88", "real appollo", "real atala", "real banded orange heliconian", "real banded peacock", "real beckers white", "real black hairstreak", "real blue morpho", "real blue spotted crow", "real brown siproeta", "real cabbage white", "real cairns birdwing", "real checquered skipper", "real chestnut", "real cleopatra", "real clodius parnassian", "real clouded sulphur", "real common banded awl", "real common wood-nymph", "real copper tail", "real crecent", "real crimson patch", "real danaid eggfly", "real eastern coma", "real eastern dapple white", "real eastern pine elfin", "real elbowed pierrot", "real gold banded", "real great eggfly", "real great jay", "real green celled cattleheart", "real grey hairstreak", "real indra swallow", "real iphiclus sister", "real julia", "real large marble", "real malachite", "real mangrove skipper", "real mestra", "real metalmark", "real milberts tortoiseshell", "real monarch", "real mourning cloak", "real orange oakleaf", "real orange tip", "real orchard swallow", "real painted lady", "real paper kite", "real peacock", "real pine white", "real pipevine swallow", "real popinjay", "real purple hairstreak", "real purplish copper", "real question mark", "real red admiral", "real red cracker", "real red postman", "real red spotted purple", "real scarce swallow", "real silver spot skipper", "real sleepy orange", "real sootywing", "real southern dogface", "real straited queen", "real tropical leafwing", "real two barred flasher", "real ulyses", "real viceroy", "real wood satyr", "real yellow swallow tail", "real zebra long wing" ]
tferreira6/autotrain-lhscs-6ppe1
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 3.4111719131469727 f1_macro: 0.34247619047619043 f1_micro: 0.42666666666666675 f1_weighted: 0.34247619047619043 precision_macro: 0.3122222222222222 precision_micro: 0.4266666666666667 precision_weighted: 0.3122222222222222 recall_macro: 0.4266666666666667 recall_micro: 0.4266666666666667 recall_weighted: 0.4266666666666667 accuracy: 0.4266666666666667
[ "real adonis", "real african giant swallowtail", "real american snoot", "real an 88", "real appollo", "real atala", "real banded orange heliconian", "real banded peacock", "real beckers white", "real black hairstreak", "real blue morpho", "real blue spotted crow", "real brown siproeta", "real cabbage white", "real cairns birdwing", "real checquered skipper", "real chestnut", "real cleopatra", "real clodius parnassian", "real clouded sulphur", "real common banded awl", "real common wood-nymph", "real copper tail", "real crecent", "real crimson patch", "real danaid eggfly", "real eastern coma", "real eastern dapple white", "real eastern pine elfin", "real elbowed pierrot", "real gold banded", "real great eggfly", "real great jay", "real green celled cattleheart", "real grey hairstreak", "real indra swallow", "real iphiclus sister", "real julia", "real large marble", "real malachite", "real mangrove skipper", "real mestra", "real metalmark", "real milberts tortoiseshell", "real monarch", "real mourning cloak", "real orange oakleaf", "real orange tip", "real orchard swallow", "real painted lady", "real paper kite", "real peacock", "real pine white", "real pipevine swallow", "real popinjay", "real purple hairstreak", "real purplish copper", "real question mark", "real red admiral", "real red cracker", "real red postman", "real red spotted purple", "real scarce swallow", "real silver spot skipper", "real sleepy orange", "real sootywing", "real southern dogface", "real straited queen", "real tropical leafwing", "real two barred flasher", "real ulyses", "real viceroy", "real wood satyr", "real yellow swallow tail", "real zebra long wing" ]
tferreira6/autotrain-xrwes-v4giy
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: nan f1_macro: 0.0003508771929824561 f1_micro: 0.013333333333333336 f1_weighted: 0.0003508771929824561 precision_macro: 0.00017777777777777779 precision_micro: 0.013333333333333334 precision_weighted: 0.00017777777777777779 recall_macro: 0.013333333333333334 recall_micro: 0.013333333333333334 recall_weighted: 0.013333333333333334 accuracy: 0.013333333333333334
[ "real adonis", "real african giant swallowtail", "real american snoot", "real an 88", "real appollo", "real atala", "real banded orange heliconian", "real banded peacock", "real beckers white", "real black hairstreak", "real blue morpho", "real blue spotted crow", "real brown siproeta", "real cabbage white", "real cairns birdwing", "real checquered skipper", "real chestnut", "real cleopatra", "real clodius parnassian", "real clouded sulphur", "real common banded awl", "real common wood-nymph", "real copper tail", "real crecent", "real crimson patch", "real danaid eggfly", "real eastern coma", "real eastern dapple white", "real eastern pine elfin", "real elbowed pierrot", "real gold banded", "real great eggfly", "real great jay", "real green celled cattleheart", "real grey hairstreak", "real indra swallow", "real iphiclus sister", "real julia", "real large marble", "real malachite", "real mangrove skipper", "real mestra", "real metalmark", "real milberts tortoiseshell", "real monarch", "real mourning cloak", "real orange oakleaf", "real orange tip", "real orchard swallow", "real painted lady", "real paper kite", "real peacock", "real pine white", "real pipevine swallow", "real popinjay", "real purple hairstreak", "real purplish copper", "real question mark", "real red admiral", "real red cracker", "real red postman", "real red spotted purple", "real scarce swallow", "real silver spot skipper", "real sleepy orange", "real sootywing", "real southern dogface", "real straited queen", "real tropical leafwing", "real two barred flasher", "real ulyses", "real viceroy", "real wood satyr", "real yellow swallow tail", "real zebra long wing" ]
ppriatiningtyaz/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.6440 - Accuracy: 0.4437 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.9509 | 0.3312 | | No log | 2.0 | 80 | 1.7328 | 0.375 | | No log | 3.0 | 120 | 1.6260 | 0.4562 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
JinJung/food_classifier
<!-- This model card has been generated automatically according to the information Keras had access to. You should probably proofread and complete it, then remove this comment. --> # JinJung/food_classifier This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Train Loss: 2.8490 - Validation Loss: 1.7203 - Train Accuracy: 0.793 - Epoch: 0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': {'module': 'keras.optimizers.schedules', 'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 3e-05, 'decay_steps': 4000, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}, 'registered_name': None}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-08, 'amsgrad': False, 'weight_decay_rate': 0.01} - training_precision: float32 ### Training results | Train Loss | Validation Loss | Train Accuracy | Epoch | |:----------:|:---------------:|:--------------:|:-----:| | 2.8490 | 1.7203 | 0.793 | 0 | ### Framework versions - Transformers 4.35.2 - TensorFlow 2.15.0 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
Cithan/vit-emotions-fp16
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-emotions-fp16 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.3051 - Accuracy: 0.9287 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 25 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 50 | 1.7679 | 0.3862 | | No log | 2.0 | 100 | 1.4584 | 0.5375 | | No log | 3.0 | 150 | 1.3209 | 0.5162 | | No log | 4.0 | 200 | 1.1580 | 0.62 | | No log | 5.0 | 250 | 0.9946 | 0.7275 | | No log | 6.0 | 300 | 0.8519 | 0.7887 | | No log | 7.0 | 350 | 0.7374 | 0.8325 | | No log | 8.0 | 400 | 0.7250 | 0.815 | | No log | 9.0 | 450 | 0.5821 | 0.88 | | 1.1152 | 10.0 | 500 | 0.5239 | 0.8838 | | 1.1152 | 11.0 | 550 | 0.5121 | 0.8712 | | 1.1152 | 12.0 | 600 | 0.4444 | 0.9038 | | 1.1152 | 13.0 | 650 | 0.3894 | 0.9137 | | 1.1152 | 14.0 | 700 | 0.3956 | 0.9137 | | 1.1152 | 15.0 | 750 | 0.3806 | 0.91 | | 1.1152 | 16.0 | 800 | 0.3328 | 0.9375 | | 1.1152 | 17.0 | 850 | 0.3076 | 0.9287 | | 1.1152 | 18.0 | 900 | 0.3026 | 0.9363 | | 1.1152 | 19.0 | 950 | 0.2388 | 0.96 | | 0.3752 | 20.0 | 1000 | 0.2892 | 0.935 | | 0.3752 | 21.0 | 1050 | 0.2539 | 0.9413 | | 0.3752 | 22.0 | 1100 | 0.2299 | 0.9525 | | 0.3752 | 23.0 | 1150 | 0.2131 | 0.9575 | | 0.3752 | 24.0 | 1200 | 0.2300 | 0.9525 | | 0.3752 | 25.0 | 1250 | 0.2393 | 0.9537 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
kerwinnl/vit-emotions-fp16
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-emotions-fp16 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.2406 - Accuracy: 0.9487 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 50 | 1.1462 | 0.605 | | No log | 2.0 | 100 | 0.9580 | 0.7175 | | No log | 3.0 | 150 | 0.9513 | 0.68 | | No log | 4.0 | 200 | 0.7491 | 0.7937 | | No log | 5.0 | 250 | 0.6535 | 0.8387 | | No log | 6.0 | 300 | 0.5758 | 0.87 | | No log | 7.0 | 350 | 0.5042 | 0.8688 | | No log | 8.0 | 400 | 0.4789 | 0.8775 | | No log | 9.0 | 450 | 0.3602 | 0.9337 | | 0.7188 | 10.0 | 500 | 0.3826 | 0.905 | | 0.7188 | 11.0 | 550 | 0.3889 | 0.8938 | | 0.7188 | 12.0 | 600 | 0.3590 | 0.9137 | | 0.7188 | 13.0 | 650 | 0.2929 | 0.92 | | 0.7188 | 14.0 | 700 | 0.2881 | 0.9213 | | 0.7188 | 15.0 | 750 | 0.2652 | 0.9363 | | 0.7188 | 16.0 | 800 | 0.2611 | 0.9463 | | 0.7188 | 17.0 | 850 | 0.2267 | 0.9537 | | 0.7188 | 18.0 | 900 | 0.2394 | 0.95 | | 0.7188 | 19.0 | 950 | 0.1969 | 0.9525 | | 0.2754 | 20.0 | 1000 | 0.2419 | 0.9413 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
IsaacMwesigwa/footballer-recognition-5
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 1.2659317351795386e+24 f1_macro: 2.895499120347367e-06 f1_micro: 0.0012045290291496024 f1_weighted: 2.8982892905428356e-06 precision_macro: 1.4494934165458512e-06 precision_micro: 0.0012045290291496024 precision_weighted: 1.4508901820640839e-06 recall_macro: 0.0012033694344163659 recall_micro: 0.0012045290291496024 recall_weighted: 0.0012045290291496024 accuracy: 0.0012045290291496024
[ "aaron long", "aaron mooy", "aaron ramsdale", "aaron ramsey", "abde ezzalzouli", "abdelhamid sabiri", "abdelkarim hassan", "abderrazak hamdallah", "abdou diallo", "abdul fatawu issahaku", "abdul manaf nurudeen", "abdulaziz hatem", "abdulelah al-amri", "abdulellah al-malki", "abdullah madu", "abdullah otayf", "abdulrahman al-aboud", "abolfazl jalali", "achraf dari", "achraf hakimi", "adam davies", "adrien rabiot", "agustín canobbio", "ahmad nourollahi", "ahmed alaaeldin", "ahmed reda tagnaouti", "ajdin hrustic", "akram afif", "alan franco", "alejandro balde", "aleksandar mitrović", "alex sandro", "alex telles", "alexander bah", "alexander djiku", "alexander domínguez", "alexis mac allister", "alexis vega", "alfred gomis", "alfredo talavera", "ali abdi", "ali al-bulaihi", "ali al-hassan", "ali assadalla", "ali gholizadeh", "ali karimi", "ali maâloul", "alidu seidu", "alireza beiranvand", "alireza jahanbakhsh", "alisson", "alistair johnston", "almoez ali", "alphonse areola", "alphonso davies", "amadou onana", "amir abedzadeh", "anass zaroury", "andreas christensen", "andreas cornelius", "andreas skov olsen", "andrej kramarić", "andrew redmayne", "andries noppert", "andrija živković", "andré ayew ", "andré onana", "andré silva", "andré-frank zambo anguissa", "andrés guardado ", "anis ben slimane", "ansu fati", "ante budimir", "anthony contreras", "anthony hernández", "antoine griezmann", "antoine semenyo", "antonee robinson", "antonio rüdiger", "antony", "antónio silva", "ao tanaka", "ardon jashari", "arkadiusz milik", "armel bella-kotchap", "arthur theate", "artur jędrzejczyk", "assim madibo", "atiba hutchinson ", "aurélien tchouaméni", "awer mabil", "axel disasi", "axel witsel", "ayase ueda", "aymen dahmen", "aymen mathlouthi", "aymeric laporte", "ayrton preciado", "aziz behich", "azzedine ounahi", "aïssa laïdouni", "baba rahman", "badr benoun", "bailey wright", "bamba dieng", "bartosz bereszyński", "bassam al-rawi", "bechir ben saïd", "ben cabango", "ben davies", "ben white", "benjamin pavard", "bernardo silva", "bilal el khannous", "bilel ifa", "borna barišić", "borna sosa", "boualem khoukhi", "boulaye dia", "brandon aguilera", "breel embolo", "bremer", "brenden aaronson", "brennan johnson", "bruno fernandes", "bruno guimarães", "bruno petković", "bryan mbeumo", "bryan oviedo", "bryan ruiz ", "bukayo saka", "callum wilson", "cameron carter-vickers", "cameron devlin", "carlos gruezo", "carlos martínez", "carlos rodríguez", "carlos soler", "casemiro", "celso borges", "charles de ketelaere", "cheikhou kouyaté", "cho gue-sung", "cho yu-min", "chris gunter", "chris mepham", "christian bassogog", "christian eriksen", "christian fassnacht", "christian günter", "christian nørgaard", "christian pulisic", "christopher wooh", "cody gakpo", "collins fai", "connor roberts", "conor coady", "conor gallagher", "craig goodwin", "cristian roldan", "cristian romero", "cristiano ronaldo ", "cyle larin", "césar azpilicueta", "césar montes", "daichi kamada", "daizen maeda", "daley blind", "damian szymański", "dani alves", "dani carvajal", "dani olmo", "daniel afriyie", "daniel amartey", "daniel chacón", "daniel james", "daniel schmidt", "daniel wass", "daniel-kofi kyereh", "danilo", "danilo pereira", "danny vukovic", "danny ward", "darko lazović", "darwin núñez", "david raum", "david raya", "david wotherspoon", "davy klaassen", "dayne st. 
clair", "dayot upamecano", "deandre yedlin", "declan rice", "dejan lovren", "denis odoi", "denis zakaria", "denzel dumfries", "derek cornelius", "devis epassy", "diego godín ", "diego palacios", "diogo costa", "diogo dalot", "djibril sow", "djorkaeff reasco", "domagoj vida", "dominik livaković", "douglas lópez", "dries mertens", "dušan tadić ", "dušan vlahović", "dylan bronn", "dylan levitt", "eden hazard ", "ederson", "edimilson fernandes", "edinson cavani", "edson álvarez", "eduardo camavinga", "ehsan hajsafi ", "eiji kawashima", "elisha owusu", "ellyes skhiri", "emiliano martínez", "enner valencia ", "enzo ebosse", "enzo fernández", "eray cömert", "eric dier", "eric garcía", "eric maxim choupo-moting", "esteban alvarado", "ethan ampadu", "ethan horvath", "exequiel palacios", "fabian frei", "fabian rieder", "fabian schär", "fabinho", "facundo pellistri", "facundo torres", "famara diédhiou", "federico valverde", "ferjani sassi", "fernando muslera", "ferran torres", "filip kostić", "filip mladenović", "filip đuričić", "firas al-buraikan", "fodé ballo-touré", "formose mendy", "fran karačić", "francisco calvo", "franco armani", "fred", "frederik rønnow", "frenkie de jong", "félix torres", "gabriel jesus", "gabriel martinelli", "gaku shibasaki", "garang kuol", "gareth bale ", "gavi", "gaël ondoua", "georges-kévin nkoudou", "gerardo arteaga", "germán pezzella", "gerson torres", "gerónimo rulli", "ghailene chaalali", "gideon mensah", "giorgian de arrascaeta", "giovanni reyna", "gonzalo montiel", "gonzalo plata", "gonçalo ramos", "granit xhaka ", "gregor kobel", "grzegorz krychowiak", "guido rodríguez", "guillermo ochoa", "guillermo varela", "haitham asiri", "haji wright", "hakim ziyech", "hannibal mejbri", "hans vanaken", "haris seferovic", "harry kane ", "harry maguire", "harry souttar", "harry wilson", "hassan al-haydos ", "hassan al-tambakti", "hattan bahebri", "henry martín", "hernán galíndez", "hidemasa morita", "hiroki ito", "hiroki sakai", "hirving lozano", "homam ahmed", "hong chul", "hossein hosseini", "hossein kanaanizadegan", "hugo guillamón", "hugo lloris ", "hwang hee-chan", "hwang in-beom", "hwang ui-jo", "héctor herrera", "héctor moreno", "ibrahim danlad", "ibrahima konaté", "idrissa gueye", "iké ugbo", "ilias chair", "iliman ndiaye", "ismaeel mohammad", "ismail jakobs", "ismaël koné", "ismaïla sarr", "issam jebali", "ivan ilić", "ivan perišić", "ivica ivušić", "ivo grbić", "iñaki williams", "jack grealish", "jackson irvine", "jackson porozo", "jakub kamiński", "jakub kiwior", "jamal musiala", "james maddison", "james pantemis", "jamie maclaren", "jan bednarek", "jan vertonghen", "jason cummings", "jassem gaber", "jawad el yamiq", "jean-charles castelletto", "jean-pierre nsame", "jens stryger larsen", "jeong woo-yeong", "jeremie frimpong", "jeremy sarmiento", "jerome ngom mbekeli", "jesper lindstrøm", "jesús ferreira", "jesús gallardo", "jewison bennette", "jo hyeon-woo", "joachim andersen", "joakim mæhle", "joe allen", "joe morrell", "joe rodon", "joe scally", "joel campbell", "joel king", "joel waterman", "johan venegas", "johan vásquez", "john stones", "jonas hofmann", "jonas omlin", "jonas wind", "jonathan david", "jonathan osorio", "jonny williams", "jordan ayew", "jordan henderson", "jordan morris", "jordan pickford", "jordan veretout", "jordi alba", "jorge sánchez", "joseph aidoo", "josh sargent", "joshua kimmich", "josip juranović", "josip stanišić", "josip šutalo", "josé cifuentes", "josé giménez", "josé luis rodríguez", "josé sá", "joão cancelo", "joão félix", "joão 
mário", "joão palhinha", "joško gvardiol", "juan foyth", "juan pablo vargas", "jude bellingham", "jules koundé", "julian brandt", "julián álvarez", "jung woo-young", "junior hoilett", "junya ito", "jurriën timber", "justin bijlow", "jérémy doku", "kai havertz", "kalidou koulibaly ", "kalvin phillips", "kamal miller", "kamal sowah", "kamaldeen sulemana", "kamil glik", "kamil grabara", "kamil grosicki", "kaoru mitoma", "karim adeyemi", "karim ansarifard", "karim benzema", "karim boudiaf", "karl toko ekambi", "karol świderski", "kasper dolberg", "kasper schmeichel", "keanu baccus", "kellyn acosta", "kendall waston", "kenneth taylor", "kevin de bruyne", "kevin rodríguez", "kevin trapp", "kevin álvarez", "keylor navas", "keysher fuller", "khalid muneer", "kieffer moore", "kieran trippier", "kim jin-su", "kim min-jae", "kim moon-hwan", "kim seung-gyu", "kim tae-hwan", "kim young-gwon", "kingsley coman", "ko itakura", "koen casteels", "koke", "kristijan jakić", "krystian bielik", "krzysztof piątek", "krépin diatta", "kwon chang-hoon", "kwon kyung-won", "kye rowles", "kyle walker", "kylian mbappé", "lautaro martínez", "lawrence ati-zigi", "leander dendoncker", "leandro paredes", "leandro trossard", "lee jae-sung", "lee kang-in", "leon goretzka", "leroy sané", "liam fraser", "liam millar", "lionel messi ", "lisandro martínez", "lovro majer", "loïs openda", "luca de la torre", "lucas cavallini", "lucas hernandez", "lucas paquetá", "lucas torreira", "luis chávez", "luis romo", "luis suárez", "luka jović", "luka modrić ", "luka sučić", "lukas klostermann", "luke shaw", "luuk de jong", "majid hosseini", "mamadou loum", "manuel akanji", "manuel neuer ", "manuel ugarte", "marc-andré ter stegen", "marcelo brozović", "marco asensio", "marcos acuña", "marcos llorente", "marcus rashford", "marcus thuram", "mario götze", "mario pašalić", "mark harris", "mark-anthony kaye", "marko dmitrović", "marko grujić", "marko livaja", "marquinhos", "marten de roon", "martin boyle", "martin braithwaite", "martin erlić", "martin hongla", "martín cáceres", "mason mount", "mateo kovačić", "mateusz wieteska", "matheus nunes", "mathew leckie", "mathew ryan ", "mathias jensen", "mathías olivera", "matt turner", "matteo guendouzi", "matthew smith", "matthias ginter", "matthijs de ligt", "matty cash", "matías vecino", "matías viña", "maxi gómez", "maya yoshida ", "mehdi taremi", "mehdi torabi", "memphis depay", "meshaal barsham", "michael estrada", "michał skóraś", "michel aebischer", "michy batshuayi", "miki yamane", "mikkel damsgaard", "milad mohammadi", "milan borjan", "miloš degenek", "miloš veljković", "mislav oršić", "mitchell duke", "mohamed ali ben romdhane", "mohamed dräger", "mohamed kanno", "mohammed al-breik", "mohammed al-owais", "mohammed al-rubaie", "mohammed kudus", "mohammed muntari", "mohammed salisu", "mohammed waad", "moisés caicedo", "moisés ramírez", "montassar talbi", "morteza pouraliganji", "mostafa meshaal", "mouez hassen", "moumi ngamaleu", "moustapha name", "munir mohamedi", "musab kheder", "na sang-ho", "nader ghandri", "nahuel molina", "naif al-hadhrami", "nampalys mendy", "nasser al-dawsari", "nathan aké", "nathaniel atkinson", "nawaf al-abed", "nawaf al-aqidi", "nayef aguerd", "naïm sliti", "neco williams", "nemanja gudelj", "nemanja maksimović", "nemanja radonjić", "neymar", "nick pope", "niclas füllkrug", "nico elvedi", "nico schlotterbeck", "nico williams", "nicola zalewski", "nicolas jackson", "nicolas nkoulou", "nicolás otamendi", "nicolás tagliafico", "nicolás de la cruz", "niklas süle", 
"nikola milenković", "nikola vlašić", "noa lang", "noah okafor", "nouhou tolo", "noussair mazraoui", "nuno mendes", "néstor araujo", "oliver christensen", "olivier giroud", "olivier mbaizo", "olivier ntcham", "orbelín pineda", "osman bukari", "otávio", "ousmane dembélé", "pablo sarabia", "paik seung-ho", "pape abou cissé", "pape gueye", "pape matar sarr", "papu gómez", "pathé ciss", "patrick sequeira", "pau torres", "paulo dybala", "payam niazmand", "pedri", "pedro", "pepe", "pervis estupiñán", "phil foden", "philipp köhn", "piero hincapié", "pierre kunde", "pierre-emile højbjerg", "piotr zieliński", "predrag rajković", "przemysław frankowski", "rafael leão", "raheem sterling", "ramin rezaeian", "randal kolo muani", "raphaël guerreiro", "raphaël varane", "raphinha", "rasmus kristensen", "raúl jiménez", "remko pasveer", "remo freuler", "renato steffen", "ricardo horta", "ricardo rodriguez", "richarlison", "richie laryea", "riley mcgree", "ritsu dōan", "riyadh sharahili", "roan wilson", "robert arboleda", "robert gumny", "robert lewandowski ", "robert skov", "robert sánchez", "roberto alvarado", "rodolfo cota", "rodri", "rodrigo bentancur", "rodrigo de paul", "rodrygo", "rogelio funes mori", "romain saïss ", "romario ibarra", "romelu lukaku", "ronald araújo", "rouzbeh cheshmi", "ruben vargas", "rubin colwill", "rui patrício", "ró-ró", "rónald matarrita", "rúben dias", "rúben neves", "saad al-sheeb", "sadegh moharrami", "sadio mané", "saeid ezatolahi", "saleh al-shehri", "salem al-dawsari", "salem al-hajri", "salis abdul samed", "salman al-faraj ", "sam adekugbe", "saman ghoddos", "sami al-najei", "samuel gouet", "samuel piette", "sardar azmoun", "saud abdulhamid", "saša lukić", "sean johnson", "sebas méndez", "sebastian szymański", "sebastián coates", "sebastián sosa", "seifeddine jaziri", "selim amallah", "seny dieng", "serge gnabry", "sergej milinković-savić", "sergio busquets ", "sergio rochet", "sergiño dest", "shaq moore", "shogo taniguchi", "shojae khalilzadeh", "shuto machino", "shūichi gonda", "silvan widmer", "simon kjær ", "simon mignolet", "simon ngapandouetnbu", "sofiane boufal", "sofyan amrabat", "son heung-min ", "son jun-ho", "song bum-keun", "song min-kyu", "sorba thomas", "souaibou marou", "srđan babić", "stefan mitrović", "stefan de vrij", "stephen eustáquio", "steve mandanda", "steven berghuis", "steven bergwijn", "steven vitória", "strahinja eraković", "strahinja pavlović", "sultan al-ghannam", "szymon żurkowski", "taha yassine khenissi", "tajon buchanan", "takefusa kubo", "takehiro tomiyasu", "takuma asano", "takumi minamino", "tarek salman", "tariq lamptey", "teun koopmeiners", "theo hernandez", "thiago almada", "thiago silva ", "thibaut courtois", "thilo kehrer", "thomas delaney", "thomas deng", "thomas meunier", "thomas müller", "thomas partey", "thorgan hazard", "tim ream", "timothy castagne", "timothy weah", "toby alderweireld", "tom lockyer", "trent alexander-arnold", "tyler adams", "tyrell malacia", "unai simón", "uriel antuna", "uroš račić", "vahid amiri", "vanja milinković-savić", "victor nelsson", "vincent aboubakar ", "vincent janssen", "vinícius júnior", "virgil van dijk ", "vitinha", "wahbi khazri", "wajdi kechrida", "walid cheddira", "walker zimmerman", "wataru endo", "wayne hennessey", "weston mckennie", "weverton", "william carvalho", "william pacho", "william saliba", "wojciech szczęsny", "wout faes", "wout weghorst", "xavi simons", "xavier arreaga", "xherdan shaqiri", "yahia attiyat allah", "yahya jabrane", "yann sommer", "yannick carrasco", "yasser 
al-shahrani", "yassine bounou", "yassine meriah", "yeltsin tejeda", "yeremy pino", "yoon jong-gyu", "youri tielemans", "yousef hassan", "youssef en-nesyri", "youssef msakni ", "youssouf fofana", "youssouf sabaly", "youssoufa moukoko", "youstin salas", "yuki soma", "yunus musah", "yussuf poulsen", "yuto nagatomo", "zakaria aboukhlal", "zeno debast", "álvaro morata", "álvaro zamora", "ángel correa", "ángel di maría", "ángel mena", "ángelo preciado", "éder militão", "édouard mendy", "érick gutiérrez", "éverton ribeiro", "óscar duarte", "i̇lkay gündoğan", "łukasz skorupski" ]
am-infoweb/MRR_image_classification_dit_29_jan_small75-finetuned
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # MRR_image_classification_dit_29_jan_small75-finetuned-eurosat This model is a fine-tuned version of [microsoft/dit-large](https://huggingface.co/microsoft/dit-large) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.5785 - Accuracy: 0.4756 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 32 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.8795 | 0.98 | 10 | 1.6437 | 0.3049 | | 1.6681 | 1.95 | 20 | 1.6446 | 0.4146 | | 1.5603 | 2.93 | 30 | 1.5785 | 0.4756 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "billing", "diagnostics", "lab reports", "office visit", "operative report", "orders", "physical_therapy_(ptot)" ]
aziznurrohman/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2076 - Accuracy: 0.5312 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.8919 | 0.375 | | No log | 2.0 | 80 | 1.5790 | 0.3625 | | No log | 3.0 | 120 | 1.4930 | 0.45 | | No log | 4.0 | 160 | 1.3281 | 0.5188 | | No log | 5.0 | 200 | 1.2732 | 0.5687 | | No log | 6.0 | 240 | 1.2483 | 0.5687 | | No log | 7.0 | 280 | 1.2356 | 0.5625 | | No log | 8.0 | 320 | 1.1672 | 0.6 | | No log | 9.0 | 360 | 1.1776 | 0.5938 | | No log | 10.0 | 400 | 1.1561 | 0.5813 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
RivanAji/results
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # results This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.9450 - Accuracy: 0.3125 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 80 | 2.0363 | 0.2375 | | No log | 2.0 | 160 | 1.9738 | 0.3063 | | No log | 3.0 | 240 | 1.9450 | 0.3125 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.2
[ "label_0", "label_1", "label_2", "label_3", "label_4", "label_5", "label_6", "label_7" ]
quocviethere/ueh-vdr-vit
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # ueh-vdr-vit This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on UEH Visual Dish Recognition (UEH-VDR) dataset. It achieves the following results on the evaluation set: - Loss: 0.4856 - Accuracy: 0.9296 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 197 | 0.8112 | 0.8943 | | No log | 2.0 | 394 | 0.5428 | 0.9220 | | 0.9 | 3.0 | 591 | 0.4856 | 0.9296 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "bánh chưng", "bánh mì", "bánh tét", "bánh tráng", "bánh xèo", "bún", "cơm tấm", "gỏi cuốn", "phở" ]
ikbalms/vit-emotions-fp16
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-emotions-fp16 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2405 - Accuracy: 0.6438 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 50 | 1.7496 | 0.3962 | | No log | 2.0 | 100 | 1.5077 | 0.5275 | | No log | 3.0 | 150 | 1.3827 | 0.5587 | | No log | 4.0 | 200 | 1.2849 | 0.62 | | No log | 5.0 | 250 | 1.2359 | 0.6362 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
Rifqiakmals/model
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # model This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.4897 - Accuracy: 0.6 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 25 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 80 | 1.7001 | 0.325 | | No log | 2.0 | 160 | 1.4642 | 0.4875 | | No log | 3.0 | 240 | 1.3522 | 0.4625 | | No log | 4.0 | 320 | 1.3493 | 0.4688 | | No log | 5.0 | 400 | 1.2052 | 0.55 | | No log | 6.0 | 480 | 1.2267 | 0.5563 | | 1.2917 | 7.0 | 560 | 1.1744 | 0.6062 | | 1.2917 | 8.0 | 640 | 1.2969 | 0.5437 | | 1.2917 | 9.0 | 720 | 1.2519 | 0.5687 | | 1.2917 | 10.0 | 800 | 1.3108 | 0.5125 | | 1.2917 | 11.0 | 880 | 1.2725 | 0.5875 | | 1.2917 | 12.0 | 960 | 1.3437 | 0.55 | | 0.5002 | 13.0 | 1040 | 1.3790 | 0.5375 | | 0.5002 | 14.0 | 1120 | 1.3432 | 0.625 | | 0.5002 | 15.0 | 1200 | 1.4395 | 0.55 | | 0.5002 | 16.0 | 1280 | 1.3672 | 0.5875 | | 0.5002 | 17.0 | 1360 | 1.3928 | 0.575 | | 0.5002 | 18.0 | 1440 | 1.3016 | 0.5875 | | 0.2523 | 19.0 | 1520 | 1.4815 | 0.5625 | | 0.2523 | 20.0 | 1600 | 1.3394 | 0.6062 | | 0.2523 | 21.0 | 1680 | 1.3450 | 0.5938 | | 0.2523 | 22.0 | 1760 | 1.3924 | 0.6312 | | 0.2523 | 23.0 | 1840 | 1.4664 | 0.5813 | | 0.2523 | 24.0 | 1920 | 1.2635 | 0.65 | | 0.1723 | 25.0 | 2000 | 1.4154 | 0.5625 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
eecspatents/vit-base-patch16-224-finetuned-flower
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-finetuned-flower This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results ### Framework versions - Transformers 4.24.0 - Pytorch 2.1.0+cu121 - Datasets 2.7.1 - Tokenizers 0.13.3
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
IsaacMwesigwa/footballer-recognition-4
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 4.787786483764648 f1_macro: 0.1332837581377327 f1_micro: 0.1617682486147916 f1_weighted: 0.13341219346236555 precision_macro: 0.15686758830215874 precision_micro: 0.1617682486147916 precision_weighted: 0.15701874955323286 recall_macro: 0.16161251504211793 recall_micro: 0.1617682486147916 recall_weighted: 0.1617682486147916 accuracy: 0.1617682486147916
[ "aaron long", "aaron mooy", "aaron ramsdale", "aaron ramsey", "abde ezzalzouli", "abdelhamid sabiri", "abdelkarim hassan", "abderrazak hamdallah", "abdou diallo", "abdul fatawu issahaku", "abdul manaf nurudeen", "abdulaziz hatem", "abdulelah al-amri", "abdulellah al-malki", "abdullah madu", "abdullah otayf", "abdulrahman al-aboud", "abolfazl jalali", "achraf dari", "achraf hakimi", "adam davies", "adrien rabiot", "agustín canobbio", "ahmad nourollahi", "ahmed alaaeldin", "ahmed reda tagnaouti", "ajdin hrustic", "akram afif", "alan franco", "alejandro balde", "aleksandar mitrović", "alex sandro", "alex telles", "alexander bah", "alexander djiku", "alexander domínguez", "alexis mac allister", "alexis vega", "alfred gomis", "alfredo talavera", "ali abdi", "ali al-bulaihi", "ali al-hassan", "ali assadalla", "ali gholizadeh", "ali karimi", "ali maâloul", "alidu seidu", "alireza beiranvand", "alireza jahanbakhsh", "alisson", "alistair johnston", "almoez ali", "alphonse areola", "alphonso davies", "amadou onana", "amir abedzadeh", "anass zaroury", "andreas christensen", "andreas cornelius", "andreas skov olsen", "andrej kramarić", "andrew redmayne", "andries noppert", "andrija živković", "andré ayew ", "andré onana", "andré silva", "andré-frank zambo anguissa", "andrés guardado ", "anis ben slimane", "ansu fati", "ante budimir", "anthony contreras", "anthony hernández", "antoine griezmann", "antoine semenyo", "antonee robinson", "antonio rüdiger", "antony", "antónio silva", "ao tanaka", "ardon jashari", "arkadiusz milik", "armel bella-kotchap", "arthur theate", "artur jędrzejczyk", "assim madibo", "atiba hutchinson ", "aurélien tchouaméni", "awer mabil", "axel disasi", "axel witsel", "ayase ueda", "aymen dahmen", "aymen mathlouthi", "aymeric laporte", "ayrton preciado", "aziz behich", "azzedine ounahi", "aïssa laïdouni", "baba rahman", "badr benoun", "bailey wright", "bamba dieng", "bartosz bereszyński", "bassam al-rawi", "bechir ben saïd", "ben cabango", "ben davies", "ben white", "benjamin pavard", "bernardo silva", "bilal el khannous", "bilel ifa", "borna barišić", "borna sosa", "boualem khoukhi", "boulaye dia", "brandon aguilera", "breel embolo", "bremer", "brenden aaronson", "brennan johnson", "bruno fernandes", "bruno guimarães", "bruno petković", "bryan mbeumo", "bryan oviedo", "bryan ruiz ", "bukayo saka", "callum wilson", "cameron carter-vickers", "cameron devlin", "carlos gruezo", "carlos martínez", "carlos rodríguez", "carlos soler", "casemiro", "celso borges", "charles de ketelaere", "cheikhou kouyaté", "cho gue-sung", "cho yu-min", "chris gunter", "chris mepham", "christian bassogog", "christian eriksen", "christian fassnacht", "christian günter", "christian nørgaard", "christian pulisic", "christopher wooh", "cody gakpo", "collins fai", "connor roberts", "conor coady", "conor gallagher", "craig goodwin", "cristian roldan", "cristian romero", "cristiano ronaldo ", "cyle larin", "césar azpilicueta", "césar montes", "daichi kamada", "daizen maeda", "daley blind", "damian szymański", "dani alves", "dani carvajal", "dani olmo", "daniel afriyie", "daniel amartey", "daniel chacón", "daniel james", "daniel schmidt", "daniel wass", "daniel-kofi kyereh", "danilo", "danilo pereira", "danny vukovic", "danny ward", "darko lazović", "darwin núñez", "david raum", "david raya", "david wotherspoon", "davy klaassen", "dayne st. 
clair", "dayot upamecano", "deandre yedlin", "declan rice", "dejan lovren", "denis odoi", "denis zakaria", "denzel dumfries", "derek cornelius", "devis epassy", "diego godín ", "diego palacios", "diogo costa", "diogo dalot", "djibril sow", "djorkaeff reasco", "domagoj vida", "dominik livaković", "douglas lópez", "dries mertens", "dušan tadić ", "dušan vlahović", "dylan bronn", "dylan levitt", "eden hazard ", "ederson", "edimilson fernandes", "edinson cavani", "edson álvarez", "eduardo camavinga", "ehsan hajsafi ", "eiji kawashima", "elisha owusu", "ellyes skhiri", "emiliano martínez", "enner valencia ", "enzo ebosse", "enzo fernández", "eray cömert", "eric dier", "eric garcía", "eric maxim choupo-moting", "esteban alvarado", "ethan ampadu", "ethan horvath", "exequiel palacios", "fabian frei", "fabian rieder", "fabian schär", "fabinho", "facundo pellistri", "facundo torres", "famara diédhiou", "federico valverde", "ferjani sassi", "fernando muslera", "ferran torres", "filip kostić", "filip mladenović", "filip đuričić", "firas al-buraikan", "fodé ballo-touré", "formose mendy", "fran karačić", "francisco calvo", "franco armani", "fred", "frederik rønnow", "frenkie de jong", "félix torres", "gabriel jesus", "gabriel martinelli", "gaku shibasaki", "garang kuol", "gareth bale ", "gavi", "gaël ondoua", "georges-kévin nkoudou", "gerardo arteaga", "germán pezzella", "gerson torres", "gerónimo rulli", "ghailene chaalali", "gideon mensah", "giorgian de arrascaeta", "giovanni reyna", "gonzalo montiel", "gonzalo plata", "gonçalo ramos", "granit xhaka ", "gregor kobel", "grzegorz krychowiak", "guido rodríguez", "guillermo ochoa", "guillermo varela", "haitham asiri", "haji wright", "hakim ziyech", "hannibal mejbri", "hans vanaken", "haris seferovic", "harry kane ", "harry maguire", "harry souttar", "harry wilson", "hassan al-haydos ", "hassan al-tambakti", "hattan bahebri", "henry martín", "hernán galíndez", "hidemasa morita", "hiroki ito", "hiroki sakai", "hirving lozano", "homam ahmed", "hong chul", "hossein hosseini", "hossein kanaanizadegan", "hugo guillamón", "hugo lloris ", "hwang hee-chan", "hwang in-beom", "hwang ui-jo", "héctor herrera", "héctor moreno", "ibrahim danlad", "ibrahima konaté", "idrissa gueye", "iké ugbo", "ilias chair", "iliman ndiaye", "ismaeel mohammad", "ismail jakobs", "ismaël koné", "ismaïla sarr", "issam jebali", "ivan ilić", "ivan perišić", "ivica ivušić", "ivo grbić", "iñaki williams", "jack grealish", "jackson irvine", "jackson porozo", "jakub kamiński", "jakub kiwior", "jamal musiala", "james maddison", "james pantemis", "jamie maclaren", "jan bednarek", "jan vertonghen", "jason cummings", "jassem gaber", "jawad el yamiq", "jean-charles castelletto", "jean-pierre nsame", "jens stryger larsen", "jeong woo-yeong", "jeremie frimpong", "jeremy sarmiento", "jerome ngom mbekeli", "jesper lindstrøm", "jesús ferreira", "jesús gallardo", "jewison bennette", "jo hyeon-woo", "joachim andersen", "joakim mæhle", "joe allen", "joe morrell", "joe rodon", "joe scally", "joel campbell", "joel king", "joel waterman", "johan venegas", "johan vásquez", "john stones", "jonas hofmann", "jonas omlin", "jonas wind", "jonathan david", "jonathan osorio", "jonny williams", "jordan ayew", "jordan henderson", "jordan morris", "jordan pickford", "jordan veretout", "jordi alba", "jorge sánchez", "joseph aidoo", "josh sargent", "joshua kimmich", "josip juranović", "josip stanišić", "josip šutalo", "josé cifuentes", "josé giménez", "josé luis rodríguez", "josé sá", "joão cancelo", "joão félix", "joão 
mário", "joão palhinha", "joško gvardiol", "juan foyth", "juan pablo vargas", "jude bellingham", "jules koundé", "julian brandt", "julián álvarez", "jung woo-young", "junior hoilett", "junya ito", "jurriën timber", "justin bijlow", "jérémy doku", "kai havertz", "kalidou koulibaly ", "kalvin phillips", "kamal miller", "kamal sowah", "kamaldeen sulemana", "kamil glik", "kamil grabara", "kamil grosicki", "kaoru mitoma", "karim adeyemi", "karim ansarifard", "karim benzema", "karim boudiaf", "karl toko ekambi", "karol świderski", "kasper dolberg", "kasper schmeichel", "keanu baccus", "kellyn acosta", "kendall waston", "kenneth taylor", "kevin de bruyne", "kevin rodríguez", "kevin trapp", "kevin álvarez", "keylor navas", "keysher fuller", "khalid muneer", "kieffer moore", "kieran trippier", "kim jin-su", "kim min-jae", "kim moon-hwan", "kim seung-gyu", "kim tae-hwan", "kim young-gwon", "kingsley coman", "ko itakura", "koen casteels", "koke", "kristijan jakić", "krystian bielik", "krzysztof piątek", "krépin diatta", "kwon chang-hoon", "kwon kyung-won", "kye rowles", "kyle walker", "kylian mbappé", "lautaro martínez", "lawrence ati-zigi", "leander dendoncker", "leandro paredes", "leandro trossard", "lee jae-sung", "lee kang-in", "leon goretzka", "leroy sané", "liam fraser", "liam millar", "lionel messi ", "lisandro martínez", "lovro majer", "loïs openda", "luca de la torre", "lucas cavallini", "lucas hernandez", "lucas paquetá", "lucas torreira", "luis chávez", "luis romo", "luis suárez", "luka jović", "luka modrić ", "luka sučić", "lukas klostermann", "luke shaw", "luuk de jong", "majid hosseini", "mamadou loum", "manuel akanji", "manuel neuer ", "manuel ugarte", "marc-andré ter stegen", "marcelo brozović", "marco asensio", "marcos acuña", "marcos llorente", "marcus rashford", "marcus thuram", "mario götze", "mario pašalić", "mark harris", "mark-anthony kaye", "marko dmitrović", "marko grujić", "marko livaja", "marquinhos", "marten de roon", "martin boyle", "martin braithwaite", "martin erlić", "martin hongla", "martín cáceres", "mason mount", "mateo kovačić", "mateusz wieteska", "matheus nunes", "mathew leckie", "mathew ryan ", "mathias jensen", "mathías olivera", "matt turner", "matteo guendouzi", "matthew smith", "matthias ginter", "matthijs de ligt", "matty cash", "matías vecino", "matías viña", "maxi gómez", "maya yoshida ", "mehdi taremi", "mehdi torabi", "memphis depay", "meshaal barsham", "michael estrada", "michał skóraś", "michel aebischer", "michy batshuayi", "miki yamane", "mikkel damsgaard", "milad mohammadi", "milan borjan", "miloš degenek", "miloš veljković", "mislav oršić", "mitchell duke", "mohamed ali ben romdhane", "mohamed dräger", "mohamed kanno", "mohammed al-breik", "mohammed al-owais", "mohammed al-rubaie", "mohammed kudus", "mohammed muntari", "mohammed salisu", "mohammed waad", "moisés caicedo", "moisés ramírez", "montassar talbi", "morteza pouraliganji", "mostafa meshaal", "mouez hassen", "moumi ngamaleu", "moustapha name", "munir mohamedi", "musab kheder", "na sang-ho", "nader ghandri", "nahuel molina", "naif al-hadhrami", "nampalys mendy", "nasser al-dawsari", "nathan aké", "nathaniel atkinson", "nawaf al-abed", "nawaf al-aqidi", "nayef aguerd", "naïm sliti", "neco williams", "nemanja gudelj", "nemanja maksimović", "nemanja radonjić", "neymar", "nick pope", "niclas füllkrug", "nico elvedi", "nico schlotterbeck", "nico williams", "nicola zalewski", "nicolas jackson", "nicolas nkoulou", "nicolás otamendi", "nicolás tagliafico", "nicolás de la cruz", "niklas süle", 
"nikola milenković", "nikola vlašić", "noa lang", "noah okafor", "nouhou tolo", "noussair mazraoui", "nuno mendes", "néstor araujo", "oliver christensen", "olivier giroud", "olivier mbaizo", "olivier ntcham", "orbelín pineda", "osman bukari", "otávio", "ousmane dembélé", "pablo sarabia", "paik seung-ho", "pape abou cissé", "pape gueye", "pape matar sarr", "papu gómez", "pathé ciss", "patrick sequeira", "pau torres", "paulo dybala", "payam niazmand", "pedri", "pedro", "pepe", "pervis estupiñán", "phil foden", "philipp köhn", "piero hincapié", "pierre kunde", "pierre-emile højbjerg", "piotr zieliński", "predrag rajković", "przemysław frankowski", "rafael leão", "raheem sterling", "ramin rezaeian", "randal kolo muani", "raphaël guerreiro", "raphaël varane", "raphinha", "rasmus kristensen", "raúl jiménez", "remko pasveer", "remo freuler", "renato steffen", "ricardo horta", "ricardo rodriguez", "richarlison", "richie laryea", "riley mcgree", "ritsu dōan", "riyadh sharahili", "roan wilson", "robert arboleda", "robert gumny", "robert lewandowski ", "robert skov", "robert sánchez", "roberto alvarado", "rodolfo cota", "rodri", "rodrigo bentancur", "rodrigo de paul", "rodrygo", "rogelio funes mori", "romain saïss ", "romario ibarra", "romelu lukaku", "ronald araújo", "rouzbeh cheshmi", "ruben vargas", "rubin colwill", "rui patrício", "ró-ró", "rónald matarrita", "rúben dias", "rúben neves", "saad al-sheeb", "sadegh moharrami", "sadio mané", "saeid ezatolahi", "saleh al-shehri", "salem al-dawsari", "salem al-hajri", "salis abdul samed", "salman al-faraj ", "sam adekugbe", "saman ghoddos", "sami al-najei", "samuel gouet", "samuel piette", "sardar azmoun", "saud abdulhamid", "saša lukić", "sean johnson", "sebas méndez", "sebastian szymański", "sebastián coates", "sebastián sosa", "seifeddine jaziri", "selim amallah", "seny dieng", "serge gnabry", "sergej milinković-savić", "sergio busquets ", "sergio rochet", "sergiño dest", "shaq moore", "shogo taniguchi", "shojae khalilzadeh", "shuto machino", "shūichi gonda", "silvan widmer", "simon kjær ", "simon mignolet", "simon ngapandouetnbu", "sofiane boufal", "sofyan amrabat", "son heung-min ", "son jun-ho", "song bum-keun", "song min-kyu", "sorba thomas", "souaibou marou", "srđan babić", "stefan mitrović", "stefan de vrij", "stephen eustáquio", "steve mandanda", "steven berghuis", "steven bergwijn", "steven vitória", "strahinja eraković", "strahinja pavlović", "sultan al-ghannam", "szymon żurkowski", "taha yassine khenissi", "tajon buchanan", "takefusa kubo", "takehiro tomiyasu", "takuma asano", "takumi minamino", "tarek salman", "tariq lamptey", "teun koopmeiners", "theo hernandez", "thiago almada", "thiago silva ", "thibaut courtois", "thilo kehrer", "thomas delaney", "thomas deng", "thomas meunier", "thomas müller", "thomas partey", "thorgan hazard", "tim ream", "timothy castagne", "timothy weah", "toby alderweireld", "tom lockyer", "trent alexander-arnold", "tyler adams", "tyrell malacia", "unai simón", "uriel antuna", "uroš račić", "vahid amiri", "vanja milinković-savić", "victor nelsson", "vincent aboubakar ", "vincent janssen", "vinícius júnior", "virgil van dijk ", "vitinha", "wahbi khazri", "wajdi kechrida", "walid cheddira", "walker zimmerman", "wataru endo", "wayne hennessey", "weston mckennie", "weverton", "william carvalho", "william pacho", "william saliba", "wojciech szczęsny", "wout faes", "wout weghorst", "xavi simons", "xavier arreaga", "xherdan shaqiri", "yahia attiyat allah", "yahya jabrane", "yann sommer", "yannick carrasco", "yasser 
al-shahrani", "yassine bounou", "yassine meriah", "yeltsin tejeda", "yeremy pino", "yoon jong-gyu", "youri tielemans", "yousef hassan", "youssef en-nesyri", "youssef msakni ", "youssouf fofana", "youssouf sabaly", "youssoufa moukoko", "youstin salas", "yuki soma", "yunus musah", "yussuf poulsen", "yuto nagatomo", "zakaria aboukhlal", "zeno debast", "álvaro morata", "álvaro zamora", "ángel correa", "ángel di maría", "ángel mena", "ángelo preciado", "éder militão", "édouard mendy", "érick gutiérrez", "éverton ribeiro", "óscar duarte", "i̇lkay gündoğan", "łukasz skorupski" ]
IsaacMwesigwa/footballer-recognition-3
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 3.78243088722229 f1_macro: 0.2252701799090606 f1_micro: 0.24680799807275355 f1_weighted: 0.22536680258302744 precision_macro: 0.23393521704585818 precision_micro: 0.24680799807275355 precision_weighted: 0.2340401895508409 recall_macro: 0.246690734055355 recall_micro: 0.24680799807275355 recall_weighted: 0.24680799807275355 accuracy: 0.24680799807275355
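The macro/micro/weighted variants reported above differ only in how per-class scores are averaged over the footballer labels listed below. As an illustration (not part of the original card), here is a minimal sketch of how such numbers are typically computed with scikit-learn; `y_true` and `y_pred` are hypothetical integer class ids, not values from this model:

```py
# Illustrative sketch only: the same metric family computed with scikit-learn.
from sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score

y_true = [0, 1, 2, 2, 1]  # hypothetical ground-truth class ids
y_pred = [0, 2, 2, 2, 0]  # hypothetical model predictions

for avg in ("macro", "micro", "weighted"):
    print(f"f1_{avg}:", f1_score(y_true, y_pred, average=avg, zero_division=0))
    print(f"precision_{avg}:", precision_score(y_true, y_pred, average=avg, zero_division=0))
    print(f"recall_{avg}:", recall_score(y_true, y_pred, average=avg, zero_division=0))

print("accuracy:", accuracy_score(y_true, y_pred))  # for single-label tasks this equals the micro-averaged recall
```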
[ "aaron long", "aaron mooy", "aaron ramsdale", "aaron ramsey", "abde ezzalzouli", "abdelhamid sabiri", "abdelkarim hassan", "abderrazak hamdallah", "abdou diallo", "abdul fatawu issahaku", "abdul manaf nurudeen", "abdulaziz hatem", "abdulelah al-amri", "abdulellah al-malki", "abdullah madu", "abdullah otayf", "abdulrahman al-aboud", "abolfazl jalali", "achraf dari", "achraf hakimi", "adam davies", "adrien rabiot", "agustín canobbio", "ahmad nourollahi", "ahmed alaaeldin", "ahmed reda tagnaouti", "ajdin hrustic", "akram afif", "alan franco", "alejandro balde", "aleksandar mitrović", "alex sandro", "alex telles", "alexander bah", "alexander djiku", "alexander domínguez", "alexis mac allister", "alexis vega", "alfred gomis", "alfredo talavera", "ali abdi", "ali al-bulaihi", "ali al-hassan", "ali assadalla", "ali gholizadeh", "ali karimi", "ali maâloul", "alidu seidu", "alireza beiranvand", "alireza jahanbakhsh", "alisson", "alistair johnston", "almoez ali", "alphonse areola", "alphonso davies", "amadou onana", "amir abedzadeh", "anass zaroury", "andreas christensen", "andreas cornelius", "andreas skov olsen", "andrej kramarić", "andrew redmayne", "andries noppert", "andrija živković", "andré ayew ", "andré onana", "andré silva", "andré-frank zambo anguissa", "andrés guardado ", "anis ben slimane", "ansu fati", "ante budimir", "anthony contreras", "anthony hernández", "antoine griezmann", "antoine semenyo", "antonee robinson", "antonio rüdiger", "antony", "antónio silva", "ao tanaka", "ardon jashari", "arkadiusz milik", "armel bella-kotchap", "arthur theate", "artur jędrzejczyk", "assim madibo", "atiba hutchinson ", "aurélien tchouaméni", "awer mabil", "axel disasi", "axel witsel", "ayase ueda", "aymen dahmen", "aymen mathlouthi", "aymeric laporte", "ayrton preciado", "aziz behich", "azzedine ounahi", "aïssa laïdouni", "baba rahman", "badr benoun", "bailey wright", "bamba dieng", "bartosz bereszyński", "bassam al-rawi", "bechir ben saïd", "ben cabango", "ben davies", "ben white", "benjamin pavard", "bernardo silva", "bilal el khannous", "bilel ifa", "borna barišić", "borna sosa", "boualem khoukhi", "boulaye dia", "brandon aguilera", "breel embolo", "bremer", "brenden aaronson", "brennan johnson", "bruno fernandes", "bruno guimarães", "bruno petković", "bryan mbeumo", "bryan oviedo", "bryan ruiz ", "bukayo saka", "callum wilson", "cameron carter-vickers", "cameron devlin", "carlos gruezo", "carlos martínez", "carlos rodríguez", "carlos soler", "casemiro", "celso borges", "charles de ketelaere", "cheikhou kouyaté", "cho gue-sung", "cho yu-min", "chris gunter", "chris mepham", "christian bassogog", "christian eriksen", "christian fassnacht", "christian günter", "christian nørgaard", "christian pulisic", "christopher wooh", "cody gakpo", "collins fai", "connor roberts", "conor coady", "conor gallagher", "craig goodwin", "cristian roldan", "cristian romero", "cristiano ronaldo ", "cyle larin", "césar azpilicueta", "césar montes", "daichi kamada", "daizen maeda", "daley blind", "damian szymański", "dani alves", "dani carvajal", "dani olmo", "daniel afriyie", "daniel amartey", "daniel chacón", "daniel james", "daniel schmidt", "daniel wass", "daniel-kofi kyereh", "danilo", "danilo pereira", "danny vukovic", "danny ward", "darko lazović", "darwin núñez", "david raum", "david raya", "david wotherspoon", "davy klaassen", "dayne st. 
clair", "dayot upamecano", "deandre yedlin", "declan rice", "dejan lovren", "denis odoi", "denis zakaria", "denzel dumfries", "derek cornelius", "devis epassy", "diego godín ", "diego palacios", "diogo costa", "diogo dalot", "djibril sow", "djorkaeff reasco", "domagoj vida", "dominik livaković", "douglas lópez", "dries mertens", "dušan tadić ", "dušan vlahović", "dylan bronn", "dylan levitt", "eden hazard ", "ederson", "edimilson fernandes", "edinson cavani", "edson álvarez", "eduardo camavinga", "ehsan hajsafi ", "eiji kawashima", "elisha owusu", "ellyes skhiri", "emiliano martínez", "enner valencia ", "enzo ebosse", "enzo fernández", "eray cömert", "eric dier", "eric garcía", "eric maxim choupo-moting", "esteban alvarado", "ethan ampadu", "ethan horvath", "exequiel palacios", "fabian frei", "fabian rieder", "fabian schär", "fabinho", "facundo pellistri", "facundo torres", "famara diédhiou", "federico valverde", "ferjani sassi", "fernando muslera", "ferran torres", "filip kostić", "filip mladenović", "filip đuričić", "firas al-buraikan", "fodé ballo-touré", "formose mendy", "fran karačić", "francisco calvo", "franco armani", "fred", "frederik rønnow", "frenkie de jong", "félix torres", "gabriel jesus", "gabriel martinelli", "gaku shibasaki", "garang kuol", "gareth bale ", "gavi", "gaël ondoua", "georges-kévin nkoudou", "gerardo arteaga", "germán pezzella", "gerson torres", "gerónimo rulli", "ghailene chaalali", "gideon mensah", "giorgian de arrascaeta", "giovanni reyna", "gonzalo montiel", "gonzalo plata", "gonçalo ramos", "granit xhaka ", "gregor kobel", "grzegorz krychowiak", "guido rodríguez", "guillermo ochoa", "guillermo varela", "haitham asiri", "haji wright", "hakim ziyech", "hannibal mejbri", "hans vanaken", "haris seferovic", "harry kane ", "harry maguire", "harry souttar", "harry wilson", "hassan al-haydos ", "hassan al-tambakti", "hattan bahebri", "henry martín", "hernán galíndez", "hidemasa morita", "hiroki ito", "hiroki sakai", "hirving lozano", "homam ahmed", "hong chul", "hossein hosseini", "hossein kanaanizadegan", "hugo guillamón", "hugo lloris ", "hwang hee-chan", "hwang in-beom", "hwang ui-jo", "héctor herrera", "héctor moreno", "ibrahim danlad", "ibrahima konaté", "idrissa gueye", "iké ugbo", "ilias chair", "iliman ndiaye", "ismaeel mohammad", "ismail jakobs", "ismaël koné", "ismaïla sarr", "issam jebali", "ivan ilić", "ivan perišić", "ivica ivušić", "ivo grbić", "iñaki williams", "jack grealish", "jackson irvine", "jackson porozo", "jakub kamiński", "jakub kiwior", "jamal musiala", "james maddison", "james pantemis", "jamie maclaren", "jan bednarek", "jan vertonghen", "jason cummings", "jassem gaber", "jawad el yamiq", "jean-charles castelletto", "jean-pierre nsame", "jens stryger larsen", "jeong woo-yeong", "jeremie frimpong", "jeremy sarmiento", "jerome ngom mbekeli", "jesper lindstrøm", "jesús ferreira", "jesús gallardo", "jewison bennette", "jo hyeon-woo", "joachim andersen", "joakim mæhle", "joe allen", "joe morrell", "joe rodon", "joe scally", "joel campbell", "joel king", "joel waterman", "johan venegas", "johan vásquez", "john stones", "jonas hofmann", "jonas omlin", "jonas wind", "jonathan david", "jonathan osorio", "jonny williams", "jordan ayew", "jordan henderson", "jordan morris", "jordan pickford", "jordan veretout", "jordi alba", "jorge sánchez", "joseph aidoo", "josh sargent", "joshua kimmich", "josip juranović", "josip stanišić", "josip šutalo", "josé cifuentes", "josé giménez", "josé luis rodríguez", "josé sá", "joão cancelo", "joão félix", "joão 
mário", "joão palhinha", "joško gvardiol", "juan foyth", "juan pablo vargas", "jude bellingham", "jules koundé", "julian brandt", "julián álvarez", "jung woo-young", "junior hoilett", "junya ito", "jurriën timber", "justin bijlow", "jérémy doku", "kai havertz", "kalidou koulibaly ", "kalvin phillips", "kamal miller", "kamal sowah", "kamaldeen sulemana", "kamil glik", "kamil grabara", "kamil grosicki", "kaoru mitoma", "karim adeyemi", "karim ansarifard", "karim benzema", "karim boudiaf", "karl toko ekambi", "karol świderski", "kasper dolberg", "kasper schmeichel", "keanu baccus", "kellyn acosta", "kendall waston", "kenneth taylor", "kevin de bruyne", "kevin rodríguez", "kevin trapp", "kevin álvarez", "keylor navas", "keysher fuller", "khalid muneer", "kieffer moore", "kieran trippier", "kim jin-su", "kim min-jae", "kim moon-hwan", "kim seung-gyu", "kim tae-hwan", "kim young-gwon", "kingsley coman", "ko itakura", "koen casteels", "koke", "kristijan jakić", "krystian bielik", "krzysztof piątek", "krépin diatta", "kwon chang-hoon", "kwon kyung-won", "kye rowles", "kyle walker", "kylian mbappé", "lautaro martínez", "lawrence ati-zigi", "leander dendoncker", "leandro paredes", "leandro trossard", "lee jae-sung", "lee kang-in", "leon goretzka", "leroy sané", "liam fraser", "liam millar", "lionel messi ", "lisandro martínez", "lovro majer", "loïs openda", "luca de la torre", "lucas cavallini", "lucas hernandez", "lucas paquetá", "lucas torreira", "luis chávez", "luis romo", "luis suárez", "luka jović", "luka modrić ", "luka sučić", "lukas klostermann", "luke shaw", "luuk de jong", "majid hosseini", "mamadou loum", "manuel akanji", "manuel neuer ", "manuel ugarte", "marc-andré ter stegen", "marcelo brozović", "marco asensio", "marcos acuña", "marcos llorente", "marcus rashford", "marcus thuram", "mario götze", "mario pašalić", "mark harris", "mark-anthony kaye", "marko dmitrović", "marko grujić", "marko livaja", "marquinhos", "marten de roon", "martin boyle", "martin braithwaite", "martin erlić", "martin hongla", "martín cáceres", "mason mount", "mateo kovačić", "mateusz wieteska", "matheus nunes", "mathew leckie", "mathew ryan ", "mathias jensen", "mathías olivera", "matt turner", "matteo guendouzi", "matthew smith", "matthias ginter", "matthijs de ligt", "matty cash", "matías vecino", "matías viña", "maxi gómez", "maya yoshida ", "mehdi taremi", "mehdi torabi", "memphis depay", "meshaal barsham", "michael estrada", "michał skóraś", "michel aebischer", "michy batshuayi", "miki yamane", "mikkel damsgaard", "milad mohammadi", "milan borjan", "miloš degenek", "miloš veljković", "mislav oršić", "mitchell duke", "mohamed ali ben romdhane", "mohamed dräger", "mohamed kanno", "mohammed al-breik", "mohammed al-owais", "mohammed al-rubaie", "mohammed kudus", "mohammed muntari", "mohammed salisu", "mohammed waad", "moisés caicedo", "moisés ramírez", "montassar talbi", "morteza pouraliganji", "mostafa meshaal", "mouez hassen", "moumi ngamaleu", "moustapha name", "munir mohamedi", "musab kheder", "na sang-ho", "nader ghandri", "nahuel molina", "naif al-hadhrami", "nampalys mendy", "nasser al-dawsari", "nathan aké", "nathaniel atkinson", "nawaf al-abed", "nawaf al-aqidi", "nayef aguerd", "naïm sliti", "neco williams", "nemanja gudelj", "nemanja maksimović", "nemanja radonjić", "neymar", "nick pope", "niclas füllkrug", "nico elvedi", "nico schlotterbeck", "nico williams", "nicola zalewski", "nicolas jackson", "nicolas nkoulou", "nicolás otamendi", "nicolás tagliafico", "nicolás de la cruz", "niklas süle", 
"nikola milenković", "nikola vlašić", "noa lang", "noah okafor", "nouhou tolo", "noussair mazraoui", "nuno mendes", "néstor araujo", "oliver christensen", "olivier giroud", "olivier mbaizo", "olivier ntcham", "orbelín pineda", "osman bukari", "otávio", "ousmane dembélé", "pablo sarabia", "paik seung-ho", "pape abou cissé", "pape gueye", "pape matar sarr", "papu gómez", "pathé ciss", "patrick sequeira", "pau torres", "paulo dybala", "payam niazmand", "pedri", "pedro", "pepe", "pervis estupiñán", "phil foden", "philipp köhn", "piero hincapié", "pierre kunde", "pierre-emile højbjerg", "piotr zieliński", "predrag rajković", "przemysław frankowski", "rafael leão", "raheem sterling", "ramin rezaeian", "randal kolo muani", "raphaël guerreiro", "raphaël varane", "raphinha", "rasmus kristensen", "raúl jiménez", "remko pasveer", "remo freuler", "renato steffen", "ricardo horta", "ricardo rodriguez", "richarlison", "richie laryea", "riley mcgree", "ritsu dōan", "riyadh sharahili", "roan wilson", "robert arboleda", "robert gumny", "robert lewandowski ", "robert skov", "robert sánchez", "roberto alvarado", "rodolfo cota", "rodri", "rodrigo bentancur", "rodrigo de paul", "rodrygo", "rogelio funes mori", "romain saïss ", "romario ibarra", "romelu lukaku", "ronald araújo", "rouzbeh cheshmi", "ruben vargas", "rubin colwill", "rui patrício", "ró-ró", "rónald matarrita", "rúben dias", "rúben neves", "saad al-sheeb", "sadegh moharrami", "sadio mané", "saeid ezatolahi", "saleh al-shehri", "salem al-dawsari", "salem al-hajri", "salis abdul samed", "salman al-faraj ", "sam adekugbe", "saman ghoddos", "sami al-najei", "samuel gouet", "samuel piette", "sardar azmoun", "saud abdulhamid", "saša lukić", "sean johnson", "sebas méndez", "sebastian szymański", "sebastián coates", "sebastián sosa", "seifeddine jaziri", "selim amallah", "seny dieng", "serge gnabry", "sergej milinković-savić", "sergio busquets ", "sergio rochet", "sergiño dest", "shaq moore", "shogo taniguchi", "shojae khalilzadeh", "shuto machino", "shūichi gonda", "silvan widmer", "simon kjær ", "simon mignolet", "simon ngapandouetnbu", "sofiane boufal", "sofyan amrabat", "son heung-min ", "son jun-ho", "song bum-keun", "song min-kyu", "sorba thomas", "souaibou marou", "srđan babić", "stefan mitrović", "stefan de vrij", "stephen eustáquio", "steve mandanda", "steven berghuis", "steven bergwijn", "steven vitória", "strahinja eraković", "strahinja pavlović", "sultan al-ghannam", "szymon żurkowski", "taha yassine khenissi", "tajon buchanan", "takefusa kubo", "takehiro tomiyasu", "takuma asano", "takumi minamino", "tarek salman", "tariq lamptey", "teun koopmeiners", "theo hernandez", "thiago almada", "thiago silva ", "thibaut courtois", "thilo kehrer", "thomas delaney", "thomas deng", "thomas meunier", "thomas müller", "thomas partey", "thorgan hazard", "tim ream", "timothy castagne", "timothy weah", "toby alderweireld", "tom lockyer", "trent alexander-arnold", "tyler adams", "tyrell malacia", "unai simón", "uriel antuna", "uroš račić", "vahid amiri", "vanja milinković-savić", "victor nelsson", "vincent aboubakar ", "vincent janssen", "vinícius júnior", "virgil van dijk ", "vitinha", "wahbi khazri", "wajdi kechrida", "walid cheddira", "walker zimmerman", "wataru endo", "wayne hennessey", "weston mckennie", "weverton", "william carvalho", "william pacho", "william saliba", "wojciech szczęsny", "wout faes", "wout weghorst", "xavi simons", "xavier arreaga", "xherdan shaqiri", "yahia attiyat allah", "yahya jabrane", "yann sommer", "yannick carrasco", "yasser 
al-shahrani", "yassine bounou", "yassine meriah", "yeltsin tejeda", "yeremy pino", "yoon jong-gyu", "youri tielemans", "yousef hassan", "youssef en-nesyri", "youssef msakni ", "youssouf fofana", "youssouf sabaly", "youssoufa moukoko", "youstin salas", "yuki soma", "yunus musah", "yussuf poulsen", "yuto nagatomo", "zakaria aboukhlal", "zeno debast", "álvaro morata", "álvaro zamora", "ángel correa", "ángel di maría", "ángel mena", "ángelo preciado", "éder militão", "édouard mendy", "érick gutiérrez", "éverton ribeiro", "óscar duarte", "i̇lkay gündoğan", "łukasz skorupski" ]
jvbjkbjkbfjis/swin-tiny-patch4-window7-224-finetuned-fraud-detection
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-fraud-detection This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.1294 - Accuracy: 0.9592 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.1859 | 1.0 | 57 | 0.2052 | 0.9394 | | 0.14 | 2.0 | 114 | 0.1544 | 0.9505 | | 0.1296 | 3.0 | 171 | 0.1620 | 0.9530 | | 0.1208 | 4.0 | 228 | 0.1573 | 0.9493 | | 0.0889 | 5.0 | 285 | 0.1294 | 0.9592 | | 0.0846 | 6.0 | 342 | 0.1400 | 0.9517 | | 0.0775 | 7.0 | 399 | 0.1222 | 0.9567 | | 0.0774 | 8.0 | 456 | 0.1564 | 0.9418 | | 0.0577 | 9.0 | 513 | 0.1274 | 0.9579 | | 0.0722 | 10.0 | 570 | 0.1332 | 0.9579 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
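The hyperparameter list in the card above maps fairly directly onto `transformers.TrainingArguments`. The following is a minimal sketch under that assumption; dataset loading and the `Trainer` call are omitted, `output_dir` is a placeholder, and the per-epoch evaluation strategy is inferred from the results table rather than stated in the card:

```py
# Sketch only: TrainingArguments roughly matching the hyperparameters listed above.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="swin-tiny-finetuned-fraud-detection",  # placeholder
    learning_rate=5e-5,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    gradient_accumulation_steps=4,  # 32 x 4 = total train batch size of 128
    seed=42,
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=10,
    evaluation_strategy="epoch",  # assumption: per-epoch eval, as in the results table
)
# The default optimizer is AdamW with betas=(0.9, 0.999) and epsilon=1e-8, matching the card.
```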
[ "non-fraudulent", "fraudulent" ]
basavaakash002/autotrain-wxiks-epi11
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 1.7068815231323242 f1_macro: 0.178021978021978 f1_micro: 0.3157894736842105 f1_weighted: 0.18565644881434354 precision_macro: 0.12666666666666665 precision_micro: 0.3157894736842105 precision_weighted: 0.1350877192982456 recall_macro: 0.32 recall_micro: 0.3157894736842105 recall_weighted: 0.3157894736842105 accuracy: 0.3157894736842105
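As a usage illustration (not part of the card), a minimal inference sketch with the `transformers` image-classification pipeline; the image path is a placeholder:

```py
# Sketch only: running inference on an AutoTrain image classifier such as this one.
from transformers import pipeline

classifier = pipeline("image-classification", model="basavaakash002/autotrain-wxiks-epi11")
predictions = classifier("cry_sample.jpg")  # placeholder image path

for p in predictions:
    # labels come from the class list below: belly, burping, discomfort, hungry, tired
    print(p["label"], round(p["score"], 3))
```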
[ "belly", "burping", "discomfort", "hungry", "tired" ]
basavaakash002/autotrain-b1slj-0mb6t
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 1.5408477783203125 f1_macro: 0.13725490196078433 f1_micro: 0.2631578947368421 f1_weighted: 0.18059855521155832 precision_macro: 0.10714285714285714 precision_micro: 0.2631578947368421 precision_weighted: 0.14097744360902253 recall_macro: 0.2 recall_micro: 0.2631578947368421 recall_weighted: 0.2631578947368421 accuracy: 0.2631578947368421
[ "belly", "burping", "discomfort", "hungry", "tired" ]
basavaakash002/autotrain-377sg-8o19r
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 1.6438376903533936 f1_macro: 0.08333333333333334 f1_micro: 0.2631578947368421 f1_weighted: 0.10964912280701755 precision_macro: 0.05263157894736842 precision_micro: 0.2631578947368421 precision_weighted: 0.06925207756232686 recall_macro: 0.2 recall_micro: 0.2631578947368421 recall_weighted: 0.2631578947368421 accuracy: 0.2631578947368421
[ "belly", "burping", "discomfort", "hungry", "tired" ]
dvs/autotrain-0kxqs-2lz9p
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 0.378662109375 f1: 1.0 precision: 1.0 recall: 1.0 auc: 1.0 accuracy: 1.0
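Unlike the other metrics above, the reported `auc` is computed from class scores rather than hard predictions. A minimal sketch with scikit-learn (not from the card), assuming `y_true` holds binary labels and `y_score` the model's positive-class probabilities; all values are hypothetical:

```py
# Sketch only: binary metrics of the kind reported above, computed with scikit-learn.
from sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score, roc_auc_score

y_true = [0, 0, 1, 1]            # hypothetical binary labels (e.g. 0 = mulder, 1 = scully)
y_score = [0.1, 0.3, 0.8, 0.9]   # hypothetical positive-class probabilities
y_pred = [int(s >= 0.5) for s in y_score]

print("f1:", f1_score(y_true, y_pred))
print("precision:", precision_score(y_true, y_pred))
print("recall:", recall_score(y_true, y_pred))
print("auc:", roc_auc_score(y_true, y_score))  # needs scores, not thresholded labels
print("accuracy:", accuracy_score(y_true, y_pred))
```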
[ "mulder", "scully" ]
dvs/autotrain-5ozhd-efa4o
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 0.04287773743271828 f1: 1.0 precision: 1.0 recall: 1.0 auc: 1.0 accuracy: 1.0
[ "mulder", "scully" ]
Nick6154-dev/vit-base-patch16-224-finetuned-crochets-clothes
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-finetuned-crochets-clothes This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0186 - Accuracy: 0.9953 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.0292 | 1.0 | 89 | 0.0236 | 0.9937 | | 0.0191 | 2.0 | 178 | 0.0186 | 0.9953 | | 0.0093 | 3.0 | 267 | 0.0207 | 0.9937 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "crochets", "no-crochets" ]
IoriU/emotion_classifier
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # emotion_classifier This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2783 - Accuracy: 0.5521 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: constant_with_warmup - num_epochs: 50 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 35 | 2.0697 | 0.2014 | | No log | 2.0 | 70 | 2.0539 | 0.1875 | | No log | 3.0 | 105 | 2.0278 | 0.2014 | | No log | 4.0 | 140 | 1.9869 | 0.2639 | | No log | 5.0 | 175 | 1.9248 | 0.2986 | | No log | 6.0 | 210 | 1.8172 | 0.3403 | | No log | 7.0 | 245 | 1.7661 | 0.375 | | No log | 8.0 | 280 | 1.6933 | 0.4306 | | No log | 9.0 | 315 | 1.6493 | 0.4514 | | No log | 10.0 | 350 | 1.6028 | 0.4514 | | No log | 11.0 | 385 | 1.5580 | 0.4444 | | No log | 12.0 | 420 | 1.5267 | 0.5 | | No log | 13.0 | 455 | 1.4934 | 0.4861 | | No log | 14.0 | 490 | 1.4605 | 0.5208 | | 1.6139 | 15.0 | 525 | 1.4499 | 0.5278 | | 1.6139 | 16.0 | 560 | 1.4228 | 0.5347 | | 1.6139 | 17.0 | 595 | 1.4109 | 0.5208 | | 1.6139 | 18.0 | 630 | 1.3872 | 0.5139 | | 1.6139 | 19.0 | 665 | 1.3640 | 0.5556 | | 1.6139 | 20.0 | 700 | 1.3787 | 0.5208 | | 1.6139 | 21.0 | 735 | 1.3820 | 0.5278 | | 1.6139 | 22.0 | 770 | 1.3649 | 0.5069 | | 1.6139 | 23.0 | 805 | 1.3508 | 0.5347 | | 1.6139 | 24.0 | 840 | 1.3322 | 0.5417 | | 1.6139 | 25.0 | 875 | 1.3577 | 0.5347 | | 1.6139 | 26.0 | 910 | 1.3337 | 0.5625 | | 1.6139 | 27.0 | 945 | 1.3578 | 0.5139 | | 1.6139 | 28.0 | 980 | 1.3256 | 0.5556 | | 0.8303 | 29.0 | 1015 | 1.3139 | 0.5833 | | 0.8303 | 30.0 | 1050 | 1.3575 | 0.5139 | | 0.8303 | 31.0 | 1085 | 1.3214 | 0.5625 | | 0.8303 | 32.0 | 1120 | 1.3185 | 0.5486 | | 0.8303 | 33.0 | 1155 | 1.3285 | 0.5417 | | 0.8303 | 34.0 | 1190 | 1.3259 | 0.5903 | | 0.8303 | 35.0 | 1225 | 1.3492 | 0.5556 | | 0.8303 | 36.0 | 1260 | 1.3164 | 0.5764 | | 0.8303 | 37.0 | 1295 | 1.3645 | 0.5417 | | 0.8303 | 38.0 | 1330 | 1.3431 | 0.5347 | | 0.8303 | 39.0 | 1365 | 1.3272 | 0.5278 | | 0.8303 | 40.0 | 1400 | 1.3326 | 0.5972 | | 0.8303 | 41.0 | 1435 | 1.3375 | 0.5486 | | 0.8303 | 42.0 | 1470 | 1.3641 | 0.5556 | | 0.3516 | 43.0 | 1505 | 1.3633 | 0.5278 | | 0.3516 | 44.0 | 1540 | 1.3532 | 0.5278 | | 0.3516 | 45.0 | 1575 | 1.3473 | 0.5903 | | 0.3516 | 46.0 | 1610 | 1.3413 | 0.5833 | | 0.3516 | 47.0 | 1645 | 1.4158 | 0.5556 | | 0.3516 | 48.0 | 1680 | 1.3747 | 0.5903 | | 0.3516 | 49.0 | 1715 | 1.4364 | 0.5347 | | 0.3516 | 50.0 | 1750 | 1.4659 | 0.5417 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.2.0+cu121 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
Ghiffari4869/emotional_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # emotional_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.1401 - Accuracy: 0.5938 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.7535 | 0.3375 | | No log | 2.0 | 80 | 1.5659 | 0.3875 | | No log | 3.0 | 120 | 1.4480 | 0.4562 | | No log | 4.0 | 160 | 1.3839 | 0.4938 | | No log | 5.0 | 200 | 1.3246 | 0.5125 | | No log | 6.0 | 240 | 1.3297 | 0.4875 | | No log | 7.0 | 280 | 1.2665 | 0.5312 | | No log | 8.0 | 320 | 1.2979 | 0.5625 | | No log | 9.0 | 360 | 1.1959 | 0.5687 | | No log | 10.0 | 400 | 1.2104 | 0.55 | | No log | 11.0 | 440 | 1.2401 | 0.5687 | | No log | 12.0 | 480 | 1.1960 | 0.5437 | | 1.0885 | 13.0 | 520 | 1.1571 | 0.6188 | | 1.0885 | 14.0 | 560 | 1.2241 | 0.575 | | 1.0885 | 15.0 | 600 | 1.2355 | 0.5625 | | 1.0885 | 16.0 | 640 | 1.1830 | 0.6 | | 1.0885 | 17.0 | 680 | 1.2906 | 0.5625 | | 1.0885 | 18.0 | 720 | 1.2705 | 0.525 | | 1.0885 | 19.0 | 760 | 1.2643 | 0.5563 | | 1.0885 | 20.0 | 800 | 1.1538 | 0.5813 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
chetinator/autotrain-wa6y6-kdrt6
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 0.11094751209020615 f1_macro: 0.969943620778024 f1_micro: 0.97 f1_weighted: 0.9699436207780239 precision_macro: 0.9722943722943723 precision_micro: 0.97 precision_weighted: 0.9722943722943722 recall_macro: 0.97 recall_micro: 0.97 recall_weighted: 0.97 accuracy: 0.97
[ "daisy", "dandelion", "rose", "sunflower", "tulip" ]
akashmaggon/classification-vit
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # classification-vit This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the None dataset. It achieves the following results on the evaluation set: - Loss: 0.1143 - F1: 0.8957 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 32 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | F1 | |:-------------:|:-----:|:----:|:---------------:|:------:| | 0.3758 | 1.0 | 238 | 0.2390 | 0.7505 | | 0.2044 | 2.0 | 476 | 0.1605 | 0.8798 | | 0.133 | 3.0 | 714 | 0.1221 | 0.8957 | | 0.0942 | 4.0 | 952 | 0.1143 | 0.8957 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "fraudulent", "non-fraudulent" ]
Dricz/emotion_recognition2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # emotion_recognition2 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.4483 - Accuracy: 0.575 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.4497 | 0.5188 | | No log | 2.0 | 80 | 1.5957 | 0.5062 | | No log | 3.0 | 120 | 1.4758 | 0.5625 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
fitrahar/vit-emotions-fp16
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-emotions-fp16 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.1640 - Accuracy: 0.955 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 50 | 0.4043 | 0.9 | | No log | 2.0 | 100 | 0.3688 | 0.9 | | No log | 3.0 | 150 | 0.4178 | 0.8825 | | No log | 4.0 | 200 | 0.2808 | 0.9213 | | No log | 5.0 | 250 | 0.2260 | 0.9387 | | No log | 6.0 | 300 | 0.2191 | 0.9375 | | No log | 7.0 | 350 | 0.2247 | 0.9363 | | No log | 8.0 | 400 | 0.1965 | 0.9413 | | No log | 9.0 | 450 | 0.1976 | 0.9463 | | 0.216 | 10.0 | 500 | 0.1736 | 0.9587 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
jvbjkbjkbfjis/swin-tiny-patch4-window7-224-finetuned-fraud-detection_upd
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-fraud-detection_upd This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the None dataset. It achieves the following results on the evaluation set: - Loss: 0.0007 - Accuracy: 1.0 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 256 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.1076 | 1.0 | 61 | 0.0389 | 0.9890 | | 0.0192 | 2.0 | 122 | 0.0132 | 0.9965 | | 0.0168 | 3.0 | 183 | 0.0101 | 0.9959 | | 0.0025 | 4.0 | 244 | 0.0020 | 0.9994 | | 0.0014 | 5.0 | 305 | 0.0007 | 1.0 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "non-fraudulent", "fraudulent" ]
reenalad/vit-base-patch16-224-finetuned-flower
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-base-patch16-224-finetuned-flower This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 5 ### Training results ### Framework versions - Transformers 4.24.0 - Pytorch 2.1.0+cu121 - Datasets 2.7.1 - Tokenizers 0.13.3
[ "daisy", "dandelion", "roses", "sunflowers", "tulips" ]
firdhokk/visual-emotion-recognition
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # visual-emotion-recognition This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.1334 - Accuracy: 0.6375 - Precision: 0.6498 - Recall: 0.6375 - F1: 0.6341 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 1e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 3 - total_train_batch_size: 48 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 100 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | Recall | F1 | |:-------------:|:-----:|:----:|:---------------:|:--------:|:---------:|:------:|:------:| | 2.0671 | 0.97 | 13 | 2.0660 | 0.125 | 0.2709 | 0.125 | 0.1135 | | 2.0576 | 1.95 | 26 | 2.0563 | 0.1562 | 0.2932 | 0.1562 | 0.1402 | | 2.044 | 3.0 | 40 | 2.0439 | 0.1875 | 0.2554 | 0.1875 | 0.1827 | | 2.0209 | 3.98 | 53 | 2.0309 | 0.2062 | 0.2405 | 0.2062 | 0.1961 | | 1.9938 | 4.95 | 66 | 2.0176 | 0.2188 | 0.2410 | 0.2188 | 0.2062 | | 1.9894 | 6.0 | 80 | 1.9960 | 0.2625 | 0.2700 | 0.2625 | 0.2438 | | 1.9667 | 6.97 | 93 | 1.9743 | 0.3125 | 0.3089 | 0.3125 | 0.2901 | | 1.9158 | 7.95 | 106 | 1.9421 | 0.3063 | 0.2557 | 0.3063 | 0.2687 | | 1.8834 | 9.0 | 120 | 1.9042 | 0.3375 | 0.4019 | 0.3375 | 0.2888 | | 1.8461 | 9.97 | 133 | 1.8521 | 0.3625 | 0.4132 | 0.3625 | 0.3021 | | 1.7917 | 10.95 | 146 | 1.8023 | 0.3688 | 0.4144 | 0.3688 | 0.3056 | | 1.7685 | 12.0 | 160 | 1.7552 | 0.375 | 0.4062 | 0.375 | 0.2978 | | 1.7072 | 12.97 | 173 | 1.7071 | 0.3875 | 0.4266 | 0.3875 | 0.3164 | | 1.6926 | 13.95 | 186 | 1.6742 | 0.375 | 0.4056 | 0.375 | 0.2996 | | 1.6084 | 15.0 | 200 | 1.6476 | 0.3937 | 0.4411 | 0.3937 | 0.3358 | | 1.6264 | 15.97 | 213 | 1.6231 | 0.3812 | 0.4357 | 0.3812 | 0.3311 | | 1.5531 | 16.95 | 226 | 1.6019 | 0.4125 | 0.4676 | 0.4125 | 0.3626 | | 1.5804 | 18.0 | 240 | 1.5773 | 0.3937 | 0.4442 | 0.3937 | 0.3428 | | 1.54 | 18.98 | 253 | 1.5606 | 0.4 | 0.4565 | 0.4 | 0.3527 | | 1.5461 | 19.95 | 266 | 1.5464 | 0.4437 | 0.5084 | 0.4437 | 0.4028 | | 1.4841 | 21.0 | 280 | 1.5323 | 0.4313 | 0.4950 | 0.4313 | 0.3881 | | 1.4765 | 21.98 | 293 | 1.5121 | 0.4313 | 0.4884 | 0.4313 | 0.3822 | | 1.4838 | 22.95 | 306 | 1.4978 | 0.4375 | 0.5138 | 0.4375 | 0.4012 | | 1.4487 | 24.0 | 320 | 1.4791 | 0.4437 | 0.5059 | 0.4437 | 0.4001 | | 1.4272 | 24.98 | 333 | 1.4617 | 0.4562 | 0.5304 | 0.4562 | 0.4180 | | 1.3886 | 25.95 | 346 | 1.4488 | 0.4625 | 0.5418 | 0.4625 | 0.4303 | | 1.4529 | 27.0 | 360 | 1.4436 | 0.45 | 0.5147 | 0.45 | 0.4035 | | 1.3894 | 27.98 | 373 | 1.4267 | 0.4688 | 0.5488 | 0.4688 | 0.4355 | | 1.3848 | 28.95 | 386 | 1.4153 | 0.4625 | 0.5337 | 0.4625 | 0.4264 | | 1.3561 | 30.0 | 400 | 1.3993 | 0.4875 | 0.5521 | 0.4875 | 0.4554 | | 1.3184 | 30.98 | 413 | 1.3852 | 0.4813 | 0.5526 | 0.4813 | 0.4470 | | 1.282 | 31.95 | 426 | 1.3703 | 0.4813 | 0.5480 | 0.4813 | 0.4449 | | 1.2988 | 33.0 | 440 | 1.3674 | 0.4688 | 0.5541 | 0.4688 | 
0.4395 | | 1.2507 | 33.98 | 453 | 1.3594 | 0.4688 | 0.5347 | 0.4688 | 0.4307 | | 1.2446 | 34.95 | 466 | 1.3519 | 0.4813 | 0.5616 | 0.4813 | 0.4514 | | 1.2877 | 36.0 | 480 | 1.3547 | 0.4875 | 0.5599 | 0.4875 | 0.4605 | | 1.2237 | 36.98 | 493 | 1.3342 | 0.5 | 0.5744 | 0.5 | 0.4654 | | 1.2416 | 37.95 | 506 | 1.3214 | 0.4813 | 0.5693 | 0.4813 | 0.4551 | | 1.1786 | 39.0 | 520 | 1.3122 | 0.4875 | 0.5674 | 0.4875 | 0.4586 | | 1.193 | 39.98 | 533 | 1.2989 | 0.5 | 0.5755 | 0.5 | 0.4774 | | 1.148 | 40.95 | 546 | 1.2962 | 0.5125 | 0.5811 | 0.5125 | 0.4755 | | 1.1904 | 42.0 | 560 | 1.2860 | 0.5188 | 0.5863 | 0.5188 | 0.4928 | | 1.1311 | 42.98 | 573 | 1.2893 | 0.5312 | 0.5936 | 0.5312 | 0.5117 | | 1.1396 | 43.95 | 586 | 1.2860 | 0.4938 | 0.5633 | 0.4938 | 0.4698 | | 1.1235 | 45.0 | 600 | 1.2802 | 0.5 | 0.5725 | 0.5 | 0.4758 | | 1.1638 | 45.98 | 613 | 1.2596 | 0.525 | 0.5909 | 0.525 | 0.5058 | | 1.0777 | 46.95 | 626 | 1.2668 | 0.5188 | 0.5796 | 0.5188 | 0.4861 | | 1.1136 | 48.0 | 640 | 1.2520 | 0.55 | 0.6100 | 0.55 | 0.5291 | | 1.047 | 48.98 | 653 | 1.2437 | 0.5375 | 0.5963 | 0.5375 | 0.5279 | | 1.1101 | 49.95 | 666 | 1.2527 | 0.55 | 0.6195 | 0.55 | 0.5279 | | 1.0412 | 51.0 | 680 | 1.2455 | 0.525 | 0.5927 | 0.525 | 0.5156 | | 1.041 | 51.98 | 693 | 1.2245 | 0.55 | 0.6073 | 0.55 | 0.5353 | | 0.9906 | 52.95 | 706 | 1.2307 | 0.575 | 0.6420 | 0.575 | 0.5600 | | 0.9863 | 54.0 | 720 | 1.2307 | 0.5563 | 0.6150 | 0.5563 | 0.5362 | | 0.943 | 54.98 | 733 | 1.2270 | 0.55 | 0.6152 | 0.55 | 0.5302 | | 0.9557 | 55.95 | 746 | 1.2063 | 0.5312 | 0.5964 | 0.5312 | 0.5239 | | 0.9518 | 57.0 | 760 | 1.2122 | 0.55 | 0.6232 | 0.55 | 0.5433 | | 0.9545 | 57.98 | 773 | 1.1955 | 0.575 | 0.6144 | 0.575 | 0.5563 | | 0.9195 | 58.95 | 786 | 1.2139 | 0.5563 | 0.6052 | 0.5563 | 0.5459 | | 0.9267 | 60.0 | 800 | 1.1907 | 0.5687 | 0.6052 | 0.5687 | 0.5595 | | 0.9384 | 60.98 | 813 | 1.1899 | 0.575 | 0.6449 | 0.575 | 0.5650 | | 0.8727 | 61.95 | 826 | 1.1854 | 0.5813 | 0.6312 | 0.5813 | 0.5651 | | 0.8541 | 63.0 | 840 | 1.1957 | 0.575 | 0.6407 | 0.575 | 0.5632 | | 0.8899 | 63.98 | 853 | 1.1604 | 0.575 | 0.6196 | 0.575 | 0.5694 | | 0.9036 | 64.95 | 866 | 1.1859 | 0.5563 | 0.6310 | 0.5563 | 0.5306 | | 0.8177 | 66.0 | 880 | 1.1498 | 0.6125 | 0.6316 | 0.6125 | 0.6116 | | 0.7854 | 66.97 | 893 | 1.1842 | 0.5687 | 0.6142 | 0.5687 | 0.5582 | | 0.8054 | 67.95 | 906 | 1.1695 | 0.5938 | 0.6275 | 0.5938 | 0.5830 | | 0.8582 | 69.0 | 920 | 1.1882 | 0.5687 | 0.6057 | 0.5687 | 0.5495 | | 0.7603 | 69.97 | 933 | 1.2067 | 0.55 | 0.6025 | 0.55 | 0.5348 | | 0.763 | 70.95 | 946 | 1.1690 | 0.5625 | 0.6036 | 0.5625 | 0.5439 | | 0.8261 | 72.0 | 960 | 1.1616 | 0.6062 | 0.6306 | 0.6062 | 0.6016 | | 0.884 | 72.97 | 973 | 1.1952 | 0.5625 | 0.6082 | 0.5625 | 0.5436 | | 0.7843 | 73.95 | 986 | 1.1583 | 0.5687 | 0.5953 | 0.5687 | 0.5633 | | 0.801 | 75.0 | 1000 | 1.1547 | 0.575 | 0.6013 | 0.575 | 0.5745 | | 0.7454 | 75.97 | 1013 | 1.1372 | 0.5875 | 0.6193 | 0.5875 | 0.5761 | | 0.7325 | 76.95 | 1026 | 1.1696 | 0.5938 | 0.6351 | 0.5938 | 0.5919 | | 0.7931 | 78.0 | 1040 | 1.1511 | 0.6062 | 0.6342 | 0.6062 | 0.6053 | | 0.7487 | 78.97 | 1053 | 1.1655 | 0.5625 | 0.5898 | 0.5625 | 0.5496 | | 0.7262 | 79.95 | 1066 | 1.1394 | 0.6125 | 0.6295 | 0.6125 | 0.6048 | | 0.7669 | 81.0 | 1080 | 1.1748 | 0.575 | 0.5966 | 0.575 | 0.5697 | | 0.7028 | 81.97 | 1093 | 1.1418 | 0.5875 | 0.6178 | 0.5875 | 0.5885 | | 0.7749 | 82.95 | 1106 | 1.1736 | 0.55 | 0.5446 | 0.55 | 0.5255 | | 0.7233 | 84.0 | 1120 | 1.1645 | 0.5813 | 0.5973 | 0.5813 | 0.5699 | | 0.5915 | 84.97 | 1133 | 1.1376 | 0.5875 | 0.6167 | 
0.5875 | 0.5867 | | 0.6985 | 85.95 | 1146 | 1.1665 | 0.5687 | 0.5868 | 0.5687 | 0.5533 | | 0.6572 | 87.0 | 1160 | 1.1341 | 0.6 | 0.6245 | 0.6 | 0.5963 | | 0.6317 | 87.97 | 1173 | 1.1327 | 0.6125 | 0.6288 | 0.6125 | 0.6026 | | 0.6546 | 88.95 | 1186 | 1.1668 | 0.5687 | 0.5797 | 0.5687 | 0.5528 | | 0.5801 | 90.0 | 1200 | 1.1521 | 0.5875 | 0.6161 | 0.5875 | 0.5818 | | 0.6958 | 90.97 | 1213 | 1.1401 | 0.5875 | 0.6083 | 0.5875 | 0.5774 | | 0.5856 | 91.95 | 1226 | 1.1379 | 0.5875 | 0.5888 | 0.5875 | 0.5760 | | 0.6281 | 93.0 | 1240 | 1.1379 | 0.6125 | 0.6429 | 0.6125 | 0.6123 | | 0.6518 | 93.97 | 1253 | 1.1619 | 0.6312 | 0.6547 | 0.6312 | 0.6247 | | 0.6055 | 94.95 | 1266 | 1.1700 | 0.575 | 0.5962 | 0.575 | 0.5673 | | 0.6181 | 96.0 | 1280 | 1.1550 | 0.5938 | 0.6281 | 0.5938 | 0.5970 | | 0.6601 | 96.97 | 1293 | 1.1334 | 0.6375 | 0.6498 | 0.6375 | 0.6341 | | 0.6112 | 97.5 | 1300 | 1.1007 | 0.6188 | 0.6341 | 0.6188 | 0.6207 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
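Per-epoch accuracy/precision/recall/F1 columns like the ones in this card are typically produced by a `compute_metrics` callback passed to the `Trainer`. A minimal sketch under that assumption; the weighted averaging shown here is a guess (consistent with recall equalling accuracy in the reported numbers), not confirmed by the card:

```py
# Sketch only: a compute_metrics callback producing accuracy/precision/recall/F1 per epoch.
import numpy as np
from sklearn.metrics import accuracy_score, precision_recall_fscore_support

def compute_metrics(eval_pred):
    logits, labels = eval_pred
    preds = np.argmax(logits, axis=-1)
    precision, recall, f1, _ = precision_recall_fscore_support(
        labels, preds, average="weighted", zero_division=0  # averaging mode is an assumption
    )
    return {
        "accuracy": accuracy_score(labels, preds),
        "precision": precision,
        "recall": recall,
        "f1": f1,
    }

# Passed to the Trainer as: Trainer(..., compute_metrics=compute_metrics)
```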
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
dvs/mulder-scully-2024
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: 0.01787894032895565 f1: 1.0 precision: 1.0 recall: 1.0 auc: 1.0 accuracy: 1.0
[ "mulder", "scully" ]
mo37373/finetuned-air-quality
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # finetuned-air-quality This model is a fine-tuned version of [microsoft/resnet-50](https://huggingface.co/microsoft/resnet-50) on an unknown dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 16 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results ### Framework versions - Transformers 4.44.2 - Pytorch 2.4.1+cu121 - Datasets 3.0.1 - Tokenizers 0.19.1
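The label set below is the generic `label_0` … `label_5` mapping, so the card does not say which air-quality category each index denotes. A minimal sketch (not from the card) for inspecting that mapping via the model config; any human-readable names one might substitute would be assumptions, since they are not documented here:

```py
# Sketch only: inspecting the id-to-label mapping of the fine-tuned checkpoint.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("mo37373/finetuned-air-quality")
print(config.id2label)  # {0: "label_0", ..., 5: "label_5"} -- generic names, actual classes not documented

# If the true class names were known, they could be attached when (re)loading, e.g.:
# AutoModelForImageClassification.from_pretrained(repo, id2label=..., label2id=...)
```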
[ "label_0", "label_1", "label_2", "label_3", "label_4", "label_5" ]
0xhzx/swin-tiny-patch4-window7-224-finetuned-eurosat
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.2469 - Accuracy: 0.9383 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.9843 | 0.99 | 43 | 0.8500 | 0.6948 | | 0.5335 | 2.0 | 87 | 0.5584 | 0.7825 | | 0.4263 | 2.99 | 130 | 0.4791 | 0.8117 | | 0.3308 | 4.0 | 174 | 0.4269 | 0.8344 | | 0.2882 | 4.99 | 217 | 0.3567 | 0.8636 | | 0.2517 | 6.0 | 261 | 0.3317 | 0.8701 | | 0.1908 | 6.99 | 304 | 0.3082 | 0.8815 | | 0.187 | 8.0 | 348 | 0.3230 | 0.8799 | | 0.1434 | 8.99 | 391 | 0.3323 | 0.9010 | | 0.1277 | 10.0 | 435 | 0.2489 | 0.9075 | | 0.156 | 10.99 | 478 | 0.3246 | 0.8880 | | 0.0781 | 12.0 | 522 | 0.3121 | 0.9010 | | 0.1001 | 12.99 | 565 | 0.2708 | 0.9058 | | 0.0892 | 14.0 | 609 | 0.2582 | 0.9140 | | 0.0644 | 14.99 | 652 | 0.2486 | 0.9221 | | 0.0689 | 16.0 | 696 | 0.2465 | 0.9237 | | 0.0547 | 16.99 | 739 | 0.2402 | 0.9334 | | 0.0597 | 18.0 | 783 | 0.2534 | 0.9237 | | 0.0512 | 18.99 | 826 | 0.2400 | 0.9318 | | 0.041 | 20.0 | 870 | 0.2397 | 0.9286 | | 0.0376 | 20.99 | 913 | 0.2663 | 0.9269 | | 0.0412 | 22.0 | 957 | 0.3026 | 0.9221 | | 0.0423 | 22.99 | 1000 | 0.2678 | 0.9302 | | 0.0266 | 24.0 | 1044 | 0.2510 | 0.9318 | | 0.0313 | 24.99 | 1087 | 0.2542 | 0.9334 | | 0.0207 | 26.0 | 1131 | 0.2743 | 0.9334 | | 0.0292 | 26.99 | 1174 | 0.2614 | 0.9318 | | 0.0242 | 28.0 | 1218 | 0.2469 | 0.9383 | | 0.0201 | 28.99 | 1261 | 0.2534 | 0.9367 | | 0.0354 | 29.66 | 1290 | 0.2525 | 0.9367 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "0", "1", "2", "3", "4", "5", "6" ]
dewifaj/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2412 - Accuracy: 0.55 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 20 | 1.2182 | 0.5625 | | No log | 2.0 | 40 | 1.2392 | 0.5312 | | No log | 3.0 | 60 | 1.1474 | 0.6 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
p1atdev/hiera_mae_in1k_ft_in1k_very_experimental_do_not_use
# Hiera mae_in1k_ft_in1k

This model is the **Hiera** model `mae_in1k_ft_in1k` (https://github.com/facebookresearch/hiera) converted to the Hugging Face Transformers format.

[Hiera: A Hierarchical Vision Transformer without the Bells-and-Whistles](https://arxiv.org/abs/2306.00989)

```py
from PIL import Image
import torch
from transformers import AutoModelForImageClassification, AutoImageProcessor

REPO = "p1atdev/hiera_mae_in1k_ft_in1k"

processor = AutoImageProcessor.from_pretrained(REPO)
model = AutoModelForImageClassification.from_pretrained(REPO, trust_remote_code=True)

image = Image.open("image.png")

with torch.no_grad():
    outputs = model(**processor(image, return_tensors="pt"))

print(outputs.logits.argmax().item())  # 207 (golden retriever (imagenet-1k))
```
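As a follow-up to the snippet above (not part of the original card), the raw logits can be turned into top-5 probabilities with a softmax; note that this repository's config maps indices to generic `label_N` names rather than ImageNet class strings.

```py
# Continues from the `model` and `outputs` variables in the snippet above.
probs = outputs.logits.softmax(dim=-1)[0]
top5 = torch.topk(probs, k=5)
for score, idx in zip(top5.values.tolist(), top5.indices.tolist()):
    print(f"{model.config.id2label[idx]}: {score:.3f}")  # e.g. "label_207: ..."
```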
[ "label_0", "label_1", "label_2", "label_3", "label_4", "label_5", "label_6", "label_7", "label_8", "label_9", "label_10", "label_11", "label_12", "label_13", "label_14", "label_15", "label_16", "label_17", "label_18", "label_19", "label_20", "label_21", "label_22", "label_23", "label_24", "label_25", "label_26", "label_27", "label_28", "label_29", "label_30", "label_31", "label_32", "label_33", "label_34", "label_35", "label_36", "label_37", "label_38", "label_39", "label_40", "label_41", "label_42", "label_43", "label_44", "label_45", "label_46", "label_47", "label_48", "label_49", "label_50", "label_51", "label_52", "label_53", "label_54", "label_55", "label_56", "label_57", "label_58", "label_59", "label_60", "label_61", "label_62", "label_63", "label_64", "label_65", "label_66", "label_67", "label_68", "label_69", "label_70", "label_71", "label_72", "label_73", "label_74", "label_75", "label_76", "label_77", "label_78", "label_79", "label_80", "label_81", "label_82", "label_83", "label_84", "label_85", "label_86", "label_87", "label_88", "label_89", "label_90", "label_91", "label_92", "label_93", "label_94", "label_95", "label_96", "label_97", "label_98", "label_99", "label_100", "label_101", "label_102", "label_103", "label_104", "label_105", "label_106", "label_107", "label_108", "label_109", "label_110", "label_111", "label_112", "label_113", "label_114", "label_115", "label_116", "label_117", "label_118", "label_119", "label_120", "label_121", "label_122", "label_123", "label_124", "label_125", "label_126", "label_127", "label_128", "label_129", "label_130", "label_131", "label_132", "label_133", "label_134", "label_135", "label_136", "label_137", "label_138", "label_139", "label_140", "label_141", "label_142", "label_143", "label_144", "label_145", "label_146", "label_147", "label_148", "label_149", "label_150", "label_151", "label_152", "label_153", "label_154", "label_155", "label_156", "label_157", "label_158", "label_159", "label_160", "label_161", "label_162", "label_163", "label_164", "label_165", "label_166", "label_167", "label_168", "label_169", "label_170", "label_171", "label_172", "label_173", "label_174", "label_175", "label_176", "label_177", "label_178", "label_179", "label_180", "label_181", "label_182", "label_183", "label_184", "label_185", "label_186", "label_187", "label_188", "label_189", "label_190", "label_191", "label_192", "label_193", "label_194", "label_195", "label_196", "label_197", "label_198", "label_199", "label_200", "label_201", "label_202", "label_203", "label_204", "label_205", "label_206", "label_207", "label_208", "label_209", "label_210", "label_211", "label_212", "label_213", "label_214", "label_215", "label_216", "label_217", "label_218", "label_219", "label_220", "label_221", "label_222", "label_223", "label_224", "label_225", "label_226", "label_227", "label_228", "label_229", "label_230", "label_231", "label_232", "label_233", "label_234", "label_235", "label_236", "label_237", "label_238", "label_239", "label_240", "label_241", "label_242", "label_243", "label_244", "label_245", "label_246", "label_247", "label_248", "label_249", "label_250", "label_251", "label_252", "label_253", "label_254", "label_255", "label_256", "label_257", "label_258", "label_259", "label_260", "label_261", "label_262", "label_263", "label_264", "label_265", "label_266", "label_267", "label_268", "label_269", "label_270", "label_271", "label_272", "label_273", "label_274", "label_275", "label_276", "label_277", "label_278", "label_279", "label_280", 
"label_281", "label_282", "label_283", "label_284", "label_285", "label_286", "label_287", "label_288", "label_289", "label_290", "label_291", "label_292", "label_293", "label_294", "label_295", "label_296", "label_297", "label_298", "label_299", "label_300", "label_301", "label_302", "label_303", "label_304", "label_305", "label_306", "label_307", "label_308", "label_309", "label_310", "label_311", "label_312", "label_313", "label_314", "label_315", "label_316", "label_317", "label_318", "label_319", "label_320", "label_321", "label_322", "label_323", "label_324", "label_325", "label_326", "label_327", "label_328", "label_329", "label_330", "label_331", "label_332", "label_333", "label_334", "label_335", "label_336", "label_337", "label_338", "label_339", "label_340", "label_341", "label_342", "label_343", "label_344", "label_345", "label_346", "label_347", "label_348", "label_349", "label_350", "label_351", "label_352", "label_353", "label_354", "label_355", "label_356", "label_357", "label_358", "label_359", "label_360", "label_361", "label_362", "label_363", "label_364", "label_365", "label_366", "label_367", "label_368", "label_369", "label_370", "label_371", "label_372", "label_373", "label_374", "label_375", "label_376", "label_377", "label_378", "label_379", "label_380", "label_381", "label_382", "label_383", "label_384", "label_385", "label_386", "label_387", "label_388", "label_389", "label_390", "label_391", "label_392", "label_393", "label_394", "label_395", "label_396", "label_397", "label_398", "label_399", "label_400", "label_401", "label_402", "label_403", "label_404", "label_405", "label_406", "label_407", "label_408", "label_409", "label_410", "label_411", "label_412", "label_413", "label_414", "label_415", "label_416", "label_417", "label_418", "label_419", "label_420", "label_421", "label_422", "label_423", "label_424", "label_425", "label_426", "label_427", "label_428", "label_429", "label_430", "label_431", "label_432", "label_433", "label_434", "label_435", "label_436", "label_437", "label_438", "label_439", "label_440", "label_441", "label_442", "label_443", "label_444", "label_445", "label_446", "label_447", "label_448", "label_449", "label_450", "label_451", "label_452", "label_453", "label_454", "label_455", "label_456", "label_457", "label_458", "label_459", "label_460", "label_461", "label_462", "label_463", "label_464", "label_465", "label_466", "label_467", "label_468", "label_469", "label_470", "label_471", "label_472", "label_473", "label_474", "label_475", "label_476", "label_477", "label_478", "label_479", "label_480", "label_481", "label_482", "label_483", "label_484", "label_485", "label_486", "label_487", "label_488", "label_489", "label_490", "label_491", "label_492", "label_493", "label_494", "label_495", "label_496", "label_497", "label_498", "label_499", "label_500", "label_501", "label_502", "label_503", "label_504", "label_505", "label_506", "label_507", "label_508", "label_509", "label_510", "label_511", "label_512", "label_513", "label_514", "label_515", "label_516", "label_517", "label_518", "label_519", "label_520", "label_521", "label_522", "label_523", "label_524", "label_525", "label_526", "label_527", "label_528", "label_529", "label_530", "label_531", "label_532", "label_533", "label_534", "label_535", "label_536", "label_537", "label_538", "label_539", "label_540", "label_541", "label_542", "label_543", "label_544", "label_545", "label_546", "label_547", "label_548", "label_549", "label_550", "label_551", "label_552", "label_553", 
"label_554", "label_555", "label_556", "label_557", "label_558", "label_559", "label_560", "label_561", "label_562", "label_563", "label_564", "label_565", "label_566", "label_567", "label_568", "label_569", "label_570", "label_571", "label_572", "label_573", "label_574", "label_575", "label_576", "label_577", "label_578", "label_579", "label_580", "label_581", "label_582", "label_583", "label_584", "label_585", "label_586", "label_587", "label_588", "label_589", "label_590", "label_591", "label_592", "label_593", "label_594", "label_595", "label_596", "label_597", "label_598", "label_599", "label_600", "label_601", "label_602", "label_603", "label_604", "label_605", "label_606", "label_607", "label_608", "label_609", "label_610", "label_611", "label_612", "label_613", "label_614", "label_615", "label_616", "label_617", "label_618", "label_619", "label_620", "label_621", "label_622", "label_623", "label_624", "label_625", "label_626", "label_627", "label_628", "label_629", "label_630", "label_631", "label_632", "label_633", "label_634", "label_635", "label_636", "label_637", "label_638", "label_639", "label_640", "label_641", "label_642", "label_643", "label_644", "label_645", "label_646", "label_647", "label_648", "label_649", "label_650", "label_651", "label_652", "label_653", "label_654", "label_655", "label_656", "label_657", "label_658", "label_659", "label_660", "label_661", "label_662", "label_663", "label_664", "label_665", "label_666", "label_667", "label_668", "label_669", "label_670", "label_671", "label_672", "label_673", "label_674", "label_675", "label_676", "label_677", "label_678", "label_679", "label_680", "label_681", "label_682", "label_683", "label_684", "label_685", "label_686", "label_687", "label_688", "label_689", "label_690", "label_691", "label_692", "label_693", "label_694", "label_695", "label_696", "label_697", "label_698", "label_699", "label_700", "label_701", "label_702", "label_703", "label_704", "label_705", "label_706", "label_707", "label_708", "label_709", "label_710", "label_711", "label_712", "label_713", "label_714", "label_715", "label_716", "label_717", "label_718", "label_719", "label_720", "label_721", "label_722", "label_723", "label_724", "label_725", "label_726", "label_727", "label_728", "label_729", "label_730", "label_731", "label_732", "label_733", "label_734", "label_735", "label_736", "label_737", "label_738", "label_739", "label_740", "label_741", "label_742", "label_743", "label_744", "label_745", "label_746", "label_747", "label_748", "label_749", "label_750", "label_751", "label_752", "label_753", "label_754", "label_755", "label_756", "label_757", "label_758", "label_759", "label_760", "label_761", "label_762", "label_763", "label_764", "label_765", "label_766", "label_767", "label_768", "label_769", "label_770", "label_771", "label_772", "label_773", "label_774", "label_775", "label_776", "label_777", "label_778", "label_779", "label_780", "label_781", "label_782", "label_783", "label_784", "label_785", "label_786", "label_787", "label_788", "label_789", "label_790", "label_791", "label_792", "label_793", "label_794", "label_795", "label_796", "label_797", "label_798", "label_799", "label_800", "label_801", "label_802", "label_803", "label_804", "label_805", "label_806", "label_807", "label_808", "label_809", "label_810", "label_811", "label_812", "label_813", "label_814", "label_815", "label_816", "label_817", "label_818", "label_819", "label_820", "label_821", "label_822", "label_823", "label_824", "label_825", "label_826", 
"label_827", "label_828", "label_829", "label_830", "label_831", "label_832", "label_833", "label_834", "label_835", "label_836", "label_837", "label_838", "label_839", "label_840", "label_841", "label_842", "label_843", "label_844", "label_845", "label_846", "label_847", "label_848", "label_849", "label_850", "label_851", "label_852", "label_853", "label_854", "label_855", "label_856", "label_857", "label_858", "label_859", "label_860", "label_861", "label_862", "label_863", "label_864", "label_865", "label_866", "label_867", "label_868", "label_869", "label_870", "label_871", "label_872", "label_873", "label_874", "label_875", "label_876", "label_877", "label_878", "label_879", "label_880", "label_881", "label_882", "label_883", "label_884", "label_885", "label_886", "label_887", "label_888", "label_889", "label_890", "label_891", "label_892", "label_893", "label_894", "label_895", "label_896", "label_897", "label_898", "label_899", "label_900", "label_901", "label_902", "label_903", "label_904", "label_905", "label_906", "label_907", "label_908", "label_909", "label_910", "label_911", "label_912", "label_913", "label_914", "label_915", "label_916", "label_917", "label_918", "label_919", "label_920", "label_921", "label_922", "label_923", "label_924", "label_925", "label_926", "label_927", "label_928", "label_929", "label_930", "label_931", "label_932", "label_933", "label_934", "label_935", "label_936", "label_937", "label_938", "label_939", "label_940", "label_941", "label_942", "label_943", "label_944", "label_945", "label_946", "label_947", "label_948", "label_949", "label_950", "label_951", "label_952", "label_953", "label_954", "label_955", "label_956", "label_957", "label_958", "label_959", "label_960", "label_961", "label_962", "label_963", "label_964", "label_965", "label_966", "label_967", "label_968", "label_969", "label_970", "label_971", "label_972", "label_973", "label_974", "label_975", "label_976", "label_977", "label_978", "label_979", "label_980", "label_981", "label_982", "label_983", "label_984", "label_985", "label_986", "label_987", "label_988", "label_989", "label_990", "label_991", "label_992", "label_993", "label_994", "label_995", "label_996", "label_997", "label_998", "label_999" ]
andikamandalaa/image_classification
# image_classification

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.5686
- Accuracy: 0.927

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 250 | 1.0221 | 0.904 |
| 1.4226 | 2.0 | 500 | 0.5814 | 0.929 |
| 1.4226 | 3.0 | 750 | 0.4850 | 0.927 |

### Framework versions

- Transformers 4.37.2
- Pytorch 2.1.2+cu121
- Datasets 2.16.1
- Tokenizers 0.15.1
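The Accuracy column reported in these Trainer-generated cards comes from a `compute_metrics` callback passed to the `Trainer`. A minimal sketch of such a callback, assuming the `evaluate` library (illustrative only, not the author's code):

```py
import numpy as np
import evaluate

accuracy = evaluate.load("accuracy")

def compute_metrics(eval_pred):
    # eval_pred is a (logits, labels) pair supplied by the Trainer at evaluation time.
    logits, labels = eval_pred
    predictions = np.argmax(logits, axis=-1)
    return accuracy.compute(predictions=predictions, references=labels)
```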
[ "apple_pie", "baby_back_ribs", "bruschetta", "waffles", "caesar_salad", "cannoli", "caprese_salad", "carrot_cake", "ceviche", "cheesecake", "cheese_plate", "chicken_curry", "chicken_quesadilla", "baklava", "chicken_wings", "chocolate_cake", "chocolate_mousse", "churros", "clam_chowder", "club_sandwich", "crab_cakes", "creme_brulee", "croque_madame", "cup_cakes", "beef_carpaccio", "deviled_eggs", "donuts", "dumplings", "edamame", "eggs_benedict", "escargots", "falafel", "filet_mignon", "fish_and_chips", "foie_gras", "beef_tartare", "french_fries", "french_onion_soup", "french_toast", "fried_calamari", "fried_rice", "frozen_yogurt", "garlic_bread", "gnocchi", "greek_salad", "grilled_cheese_sandwich", "beet_salad", "grilled_salmon", "guacamole", "gyoza", "hamburger", "hot_and_sour_soup", "hot_dog", "huevos_rancheros", "hummus", "ice_cream", "lasagna", "beignets", "lobster_bisque", "lobster_roll_sandwich", "macaroni_and_cheese", "macarons", "miso_soup", "mussels", "nachos", "omelette", "onion_rings", "oysters", "bibimbap", "pad_thai", "paella", "pancakes", "panna_cotta", "peking_duck", "pho", "pizza", "pork_chop", "poutine", "prime_rib", "bread_pudding", "pulled_pork_sandwich", "ramen", "ravioli", "red_velvet_cake", "risotto", "samosa", "sashimi", "scallops", "seaweed_salad", "shrimp_and_grits", "breakfast_burrito", "spaghetti_bolognese", "spaghetti_carbonara", "spring_rolls", "steak", "strawberry_shortcake", "sushi", "tacos", "takoyaki", "tiramisu", "tuna_tartare" ]
SeyedAli/Image-Arousal
# Image-Arousal

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on a custom dataset.
It achieves the following results on the evaluation set:
- Loss: 0.8522
- Accuracy: 0.6294

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 4

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.9023 | 0.78 | 100 | 0.8522 | 0.6294 |
| 0.5376 | 1.56 | 200 | 0.8592 | 0.6686 |
| 0.2473 | 2.34 | 300 | 0.9559 | 0.6510 |
| 0.0691 | 3.12 | 400 | 1.1399 | 0.6275 |
| 0.0821 | 3.91 | 500 | 1.2060 | 0.6392 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.1
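A minimal inference sketch for this checkpoint (not taken from the original card): the five classes are the ordinal arousal levels "0" through "4" listed for this model, and the image path below is a placeholder.

```py
from PIL import Image
import torch
from transformers import AutoImageProcessor, AutoModelForImageClassification

repo = "SeyedAli/Image-Arousal"
processor = AutoImageProcessor.from_pretrained(repo)
model = AutoModelForImageClassification.from_pretrained(repo)

image = Image.open("photo.jpg")  # placeholder path to a local image
with torch.no_grad():
    logits = model(**processor(image, return_tensors="pt")).logits
print(model.config.id2label[logits.argmax(-1).item()])  # predicted arousal level, "0".."4"
```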
[ "0", "1", "2", "3", "4" ]
SeyedAli/Image-Valence
# Image-Valence

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on a custom dataset.
It achieves the following results on the evaluation set:
- Loss: 1.4464
- Accuracy: 0.5863

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 4

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 1.2256 | 0.78 | 100 | 1.0936 | 0.5451 |
| 0.7315 | 1.56 | 200 | 0.9981 | 0.5882 |
| 0.2118 | 2.34 | 300 | 1.1650 | 0.5902 |
| 0.1119 | 3.12 | 400 | 1.2864 | 0.5863 |
| 0.1116 | 3.91 | 500 | 1.4464 | 0.5863 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.1
[ "0", "1", "2", "3", "4" ]
IsaacMwesigwa/footballer-recognition-gray-nobg
# Model Trained Using AutoTrain

- Problem type: Image Classification

## Validation Metrics

loss: 6.152068138122559

f1_macro: 0.002214415118096559

f1_micro: 0.012527101903155865

f1_weighted: 0.0022165489799304268

precision_macro: 0.0015895320987927826

precision_micro: 0.012527101903155867

precision_weighted: 0.0015910638088373914

recall_macro: 0.012515042117930204

recall_micro: 0.012527101903155867

recall_weighted: 0.012527101903155867

accuracy: 0.012527101903155867
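The macro / micro / weighted variants above are the standard scikit-learn averaging modes. A small illustrative sketch of how such numbers are computed; the `y_true` / `y_pred` arrays are placeholders, not this model's actual predictions.

```py
from sklearn.metrics import accuracy_score, precision_recall_fscore_support

# Placeholder class indices, purely to illustrate the averaging modes.
y_true = [0, 1, 2, 2, 1]
y_pred = [0, 2, 2, 2, 0]

print("accuracy:", accuracy_score(y_true, y_pred))
for average in ("macro", "micro", "weighted"):
    precision, recall, f1, _ = precision_recall_fscore_support(
        y_true, y_pred, average=average, zero_division=0
    )
    print(f"{average}: precision={precision:.3f} recall={recall:.3f} f1={f1:.3f}")
```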
[ "aaron long", "aaron mooy", "aaron ramsdale", "aaron ramsey", "abde ezzalzouli", "abdelhamid sabiri", "abdelkarim hassan", "abderrazak hamdallah", "abdou diallo", "abdul fatawu issahaku", "abdul manaf nurudeen", "abdulaziz hatem", "abdulelah al-amri", "abdulellah al-malki", "abdullah madu", "abdullah otayf", "abdulrahman al-aboud", "abolfazl jalali", "achraf dari", "achraf hakimi", "adam davies", "adrien rabiot", "agustín canobbio", "ahmad nourollahi", "ahmed alaaeldin", "ahmed reda tagnaouti", "ajdin hrustic", "akram afif", "alan franco", "alejandro balde", "aleksandar mitrović", "alex sandro", "alex telles", "alexander bah", "alexander djiku", "alexander domínguez", "alexis mac allister", "alexis vega", "alfred gomis", "alfredo talavera", "ali abdi", "ali al-bulaihi", "ali al-hassan", "ali assadalla", "ali gholizadeh", "ali karimi", "ali maâloul", "alidu seidu", "alireza beiranvand", "alireza jahanbakhsh", "alisson", "alistair johnston", "almoez ali", "alphonse areola", "alphonso davies", "amadou onana", "amir abedzadeh", "anass zaroury", "andreas christensen", "andreas cornelius", "andreas skov olsen", "andrej kramarić", "andrew redmayne", "andries noppert", "andrija živković", "andré ayew ", "andré onana", "andré silva", "andré-frank zambo anguissa", "andrés guardado ", "anis ben slimane", "ansu fati", "ante budimir", "anthony contreras", "anthony hernández", "antoine griezmann", "antoine semenyo", "antonee robinson", "antonio rüdiger", "antony", "antónio silva", "ao tanaka", "ardon jashari", "arkadiusz milik", "armel bella-kotchap", "arthur theate", "artur jędrzejczyk", "assim madibo", "atiba hutchinson ", "aurélien tchouaméni", "awer mabil", "axel disasi", "axel witsel", "ayase ueda", "aymen dahmen", "aymen mathlouthi", "aymeric laporte", "ayrton preciado", "aziz behich", "azzedine ounahi", "aïssa laïdouni", "baba rahman", "badr benoun", "bailey wright", "bamba dieng", "bartosz bereszyński", "bassam al-rawi", "bechir ben saïd", "ben cabango", "ben davies", "ben white", "benjamin pavard", "bernardo silva", "bilal el khannous", "bilel ifa", "borna barišić", "borna sosa", "boualem khoukhi", "boulaye dia", "brandon aguilera", "breel embolo", "bremer", "brenden aaronson", "brennan johnson", "bruno fernandes", "bruno guimarães", "bruno petković", "bryan mbeumo", "bryan oviedo", "bryan ruiz ", "bukayo saka", "callum wilson", "cameron carter-vickers", "cameron devlin", "carlos gruezo", "carlos martínez", "carlos rodríguez", "carlos soler", "casemiro", "celso borges", "charles de ketelaere", "cheikhou kouyaté", "cho gue-sung", "cho yu-min", "chris gunter", "chris mepham", "christian bassogog", "christian eriksen", "christian fassnacht", "christian günter", "christian nørgaard", "christian pulisic", "christopher wooh", "cody gakpo", "collins fai", "connor roberts", "conor coady", "conor gallagher", "craig goodwin", "cristian roldan", "cristian romero", "cristiano ronaldo ", "cyle larin", "césar azpilicueta", "césar montes", "daichi kamada", "daizen maeda", "daley blind", "damian szymański", "dani alves", "dani carvajal", "dani olmo", "daniel afriyie", "daniel amartey", "daniel chacón", "daniel james", "daniel schmidt", "daniel wass", "daniel-kofi kyereh", "danilo", "danilo pereira", "danny vukovic", "danny ward", "darko lazović", "darwin núñez", "david raum", "david raya", "david wotherspoon", "davy klaassen", "dayne st. 
clair", "dayot upamecano", "deandre yedlin", "declan rice", "dejan lovren", "denis odoi", "denis zakaria", "denzel dumfries", "derek cornelius", "devis epassy", "diego godín ", "diego palacios", "diogo costa", "diogo dalot", "djibril sow", "djorkaeff reasco", "domagoj vida", "dominik livaković", "douglas lópez", "dries mertens", "dušan tadić ", "dušan vlahović", "dylan bronn", "dylan levitt", "eden hazard ", "ederson", "edimilson fernandes", "edinson cavani", "edson álvarez", "eduardo camavinga", "ehsan hajsafi ", "eiji kawashima", "elisha owusu", "ellyes skhiri", "emiliano martínez", "enner valencia ", "enzo ebosse", "enzo fernández", "eray cömert", "eric dier", "eric garcía", "eric maxim choupo-moting", "esteban alvarado", "ethan ampadu", "ethan horvath", "exequiel palacios", "fabian frei", "fabian rieder", "fabian schär", "fabinho", "facundo pellistri", "facundo torres", "famara diédhiou", "federico valverde", "ferjani sassi", "fernando muslera", "ferran torres", "filip kostić", "filip mladenović", "filip đuričić", "firas al-buraikan", "fodé ballo-touré", "formose mendy", "fran karačić", "francisco calvo", "franco armani", "fred", "frederik rønnow", "frenkie de jong", "félix torres", "gabriel jesus", "gabriel martinelli", "gaku shibasaki", "garang kuol", "gareth bale ", "gavi", "gaël ondoua", "georges-kévin nkoudou", "gerardo arteaga", "germán pezzella", "gerson torres", "gerónimo rulli", "ghailene chaalali", "gideon mensah", "giorgian de arrascaeta", "giovanni reyna", "gonzalo montiel", "gonzalo plata", "gonçalo ramos", "granit xhaka ", "gregor kobel", "grzegorz krychowiak", "guido rodríguez", "guillermo ochoa", "guillermo varela", "haitham asiri", "haji wright", "hakim ziyech", "hannibal mejbri", "hans vanaken", "haris seferovic", "harry kane ", "harry maguire", "harry souttar", "harry wilson", "hassan al-haydos ", "hassan al-tambakti", "hattan bahebri", "henry martín", "hernán galíndez", "hidemasa morita", "hiroki ito", "hiroki sakai", "hirving lozano", "homam ahmed", "hong chul", "hossein hosseini", "hossein kanaanizadegan", "hugo guillamón", "hugo lloris ", "hwang hee-chan", "hwang in-beom", "hwang ui-jo", "héctor herrera", "héctor moreno", "ibrahim danlad", "ibrahima konaté", "idrissa gueye", "iké ugbo", "ilias chair", "iliman ndiaye", "ismaeel mohammad", "ismail jakobs", "ismaël koné", "ismaïla sarr", "issam jebali", "ivan ilić", "ivan perišić", "ivica ivušić", "ivo grbić", "iñaki williams", "jack grealish", "jackson irvine", "jackson porozo", "jakub kamiński", "jakub kiwior", "jamal musiala", "james maddison", "james pantemis", "jamie maclaren", "jan bednarek", "jan vertonghen", "jason cummings", "jassem gaber", "jawad el yamiq", "jean-charles castelletto", "jean-pierre nsame", "jens stryger larsen", "jeong woo-yeong", "jeremie frimpong", "jeremy sarmiento", "jerome ngom mbekeli", "jesper lindstrøm", "jesús ferreira", "jesús gallardo", "jewison bennette", "jo hyeon-woo", "joachim andersen", "joakim mæhle", "joe allen", "joe morrell", "joe rodon", "joe scally", "joel campbell", "joel king", "joel waterman", "johan venegas", "johan vásquez", "john stones", "jonas hofmann", "jonas omlin", "jonas wind", "jonathan david", "jonathan osorio", "jonny williams", "jordan ayew", "jordan henderson", "jordan morris", "jordan pickford", "jordan veretout", "jordi alba", "jorge sánchez", "joseph aidoo", "josh sargent", "joshua kimmich", "josip juranović", "josip stanišić", "josip šutalo", "josé cifuentes", "josé giménez", "josé luis rodríguez", "josé sá", "joão cancelo", "joão félix", "joão 
mário", "joão palhinha", "joško gvardiol", "juan foyth", "juan pablo vargas", "jude bellingham", "jules koundé", "julian brandt", "julián álvarez", "jung woo-young", "junior hoilett", "junya ito", "jurriën timber", "justin bijlow", "jérémy doku", "kai havertz", "kalidou koulibaly ", "kalvin phillips", "kamal miller", "kamal sowah", "kamaldeen sulemana", "kamil glik", "kamil grabara", "kamil grosicki", "kaoru mitoma", "karim adeyemi", "karim ansarifard", "karim benzema", "karim boudiaf", "karl toko ekambi", "karol świderski", "kasper dolberg", "kasper schmeichel", "keanu baccus", "kellyn acosta", "kendall waston", "kenneth taylor", "kevin de bruyne", "kevin rodríguez", "kevin trapp", "kevin álvarez", "keylor navas", "keysher fuller", "khalid muneer", "kieffer moore", "kieran trippier", "kim jin-su", "kim min-jae", "kim moon-hwan", "kim seung-gyu", "kim tae-hwan", "kim young-gwon", "kingsley coman", "ko itakura", "koen casteels", "koke", "kristijan jakić", "krystian bielik", "krzysztof piątek", "krépin diatta", "kwon chang-hoon", "kwon kyung-won", "kye rowles", "kyle walker", "kylian mbappé", "lautaro martínez", "lawrence ati-zigi", "leander dendoncker", "leandro paredes", "leandro trossard", "lee jae-sung", "lee kang-in", "leon goretzka", "leroy sané", "liam fraser", "liam millar", "lionel messi ", "lisandro martínez", "lovro majer", "loïs openda", "luca de la torre", "lucas cavallini", "lucas hernandez", "lucas paquetá", "lucas torreira", "luis chávez", "luis romo", "luis suárez", "luka jović", "luka modrić ", "luka sučić", "lukas klostermann", "luke shaw", "luuk de jong", "majid hosseini", "mamadou loum", "manuel akanji", "manuel neuer ", "manuel ugarte", "marc-andré ter stegen", "marcelo brozović", "marco asensio", "marcos acuña", "marcos llorente", "marcus rashford", "marcus thuram", "mario götze", "mario pašalić", "mark harris", "mark-anthony kaye", "marko dmitrović", "marko grujić", "marko livaja", "marquinhos", "marten de roon", "martin boyle", "martin braithwaite", "martin erlić", "martin hongla", "martín cáceres", "mason mount", "mateo kovačić", "mateusz wieteska", "matheus nunes", "mathew leckie", "mathew ryan ", "mathias jensen", "mathías olivera", "matt turner", "matteo guendouzi", "matthew smith", "matthias ginter", "matthijs de ligt", "matty cash", "matías vecino", "matías viña", "maxi gómez", "maya yoshida ", "mehdi taremi", "mehdi torabi", "memphis depay", "meshaal barsham", "michael estrada", "michał skóraś", "michel aebischer", "michy batshuayi", "miki yamane", "mikkel damsgaard", "milad mohammadi", "milan borjan", "miloš degenek", "miloš veljković", "mislav oršić", "mitchell duke", "mohamed ali ben romdhane", "mohamed dräger", "mohamed kanno", "mohammed al-breik", "mohammed al-owais", "mohammed al-rubaie", "mohammed kudus", "mohammed muntari", "mohammed salisu", "mohammed waad", "moisés caicedo", "moisés ramírez", "montassar talbi", "morteza pouraliganji", "mostafa meshaal", "mouez hassen", "moumi ngamaleu", "moustapha name", "munir mohamedi", "musab kheder", "na sang-ho", "nader ghandri", "nahuel molina", "naif al-hadhrami", "nampalys mendy", "nasser al-dawsari", "nathan aké", "nathaniel atkinson", "nawaf al-abed", "nawaf al-aqidi", "nayef aguerd", "naïm sliti", "neco williams", "nemanja gudelj", "nemanja maksimović", "nemanja radonjić", "neymar", "nick pope", "niclas füllkrug", "nico elvedi", "nico schlotterbeck", "nico williams", "nicola zalewski", "nicolas jackson", "nicolas nkoulou", "nicolás otamendi", "nicolás tagliafico", "nicolás de la cruz", "niklas süle", 
"nikola milenković", "nikola vlašić", "noa lang", "noah okafor", "nouhou tolo", "noussair mazraoui", "nuno mendes", "néstor araujo", "oliver christensen", "olivier giroud", "olivier mbaizo", "olivier ntcham", "orbelín pineda", "osman bukari", "otávio", "ousmane dembélé", "pablo sarabia", "paik seung-ho", "pape abou cissé", "pape gueye", "pape matar sarr", "papu gómez", "pathé ciss", "patrick sequeira", "pau torres", "paulo dybala", "payam niazmand", "pedri", "pedro", "pepe", "pervis estupiñán", "phil foden", "philipp köhn", "piero hincapié", "pierre kunde", "pierre-emile højbjerg", "piotr zieliński", "predrag rajković", "przemysław frankowski", "rafael leão", "raheem sterling", "ramin rezaeian", "randal kolo muani", "raphaël guerreiro", "raphaël varane", "raphinha", "rasmus kristensen", "raúl jiménez", "remko pasveer", "remo freuler", "renato steffen", "ricardo horta", "ricardo rodriguez", "richarlison", "richie laryea", "riley mcgree", "ritsu dōan", "riyadh sharahili", "roan wilson", "robert arboleda", "robert gumny", "robert lewandowski ", "robert skov", "robert sánchez", "roberto alvarado", "rodolfo cota", "rodri", "rodrigo bentancur", "rodrigo de paul", "rodrygo", "rogelio funes mori", "romain saïss ", "romario ibarra", "romelu lukaku", "ronald araújo", "rouzbeh cheshmi", "ruben vargas", "rubin colwill", "rui patrício", "ró-ró", "rónald matarrita", "rúben dias", "rúben neves", "saad al-sheeb", "sadegh moharrami", "sadio mané", "saeid ezatolahi", "saleh al-shehri", "salem al-dawsari", "salem al-hajri", "salis abdul samed", "salman al-faraj ", "sam adekugbe", "saman ghoddos", "sami al-najei", "samuel gouet", "samuel piette", "sardar azmoun", "saud abdulhamid", "saša lukić", "sean johnson", "sebas méndez", "sebastian szymański", "sebastián coates", "sebastián sosa", "seifeddine jaziri", "selim amallah", "seny dieng", "serge gnabry", "sergej milinković-savić", "sergio busquets ", "sergio rochet", "sergiño dest", "shaq moore", "shogo taniguchi", "shojae khalilzadeh", "shuto machino", "shūichi gonda", "silvan widmer", "simon kjær ", "simon mignolet", "simon ngapandouetnbu", "sofiane boufal", "sofyan amrabat", "son heung-min ", "son jun-ho", "song bum-keun", "song min-kyu", "sorba thomas", "souaibou marou", "srđan babić", "stefan mitrović", "stefan de vrij", "stephen eustáquio", "steve mandanda", "steven berghuis", "steven bergwijn", "steven vitória", "strahinja eraković", "strahinja pavlović", "sultan al-ghannam", "szymon żurkowski", "taha yassine khenissi", "tajon buchanan", "takefusa kubo", "takehiro tomiyasu", "takuma asano", "takumi minamino", "tarek salman", "tariq lamptey", "teun koopmeiners", "theo hernandez", "thiago almada", "thiago silva ", "thibaut courtois", "thilo kehrer", "thomas delaney", "thomas deng", "thomas meunier", "thomas müller", "thomas partey", "thorgan hazard", "tim ream", "timothy castagne", "timothy weah", "toby alderweireld", "tom lockyer", "trent alexander-arnold", "tyler adams", "tyrell malacia", "unai simón", "uriel antuna", "uroš račić", "vahid amiri", "vanja milinković-savić", "victor nelsson", "vincent aboubakar ", "vincent janssen", "vinícius júnior", "virgil van dijk ", "vitinha", "wahbi khazri", "wajdi kechrida", "walid cheddira", "walker zimmerman", "wataru endo", "wayne hennessey", "weston mckennie", "weverton", "william carvalho", "william pacho", "william saliba", "wojciech szczęsny", "wout faes", "wout weghorst", "xavi simons", "xavier arreaga", "xherdan shaqiri", "yahia attiyat allah", "yahya jabrane", "yann sommer", "yannick carrasco", "yasser 
al-shahrani", "yassine bounou", "yassine meriah", "yeltsin tejeda", "yeremy pino", "yoon jong-gyu", "youri tielemans", "yousef hassan", "youssef en-nesyri", "youssef msakni ", "youssouf fofana", "youssouf sabaly", "youssoufa moukoko", "youstin salas", "yuki soma", "yunus musah", "yussuf poulsen", "yuto nagatomo", "zakaria aboukhlal", "zeno debast", "álvaro morata", "álvaro zamora", "ángel correa", "ángel di maría", "ángel mena", "ángelo preciado", "éder militão", "édouard mendy", "érick gutiérrez", "éverton ribeiro", "óscar duarte", "i̇lkay gündoğan", "łukasz skorupski" ]
nashirab/vit-base-beans
# vit-base-beans

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 1.2658
- Accuracy: 0.4938

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 4

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 4.7295 | 0.25 | 10 | 2.7467 | 0.1875 |
| 2.3133 | 0.5 | 20 | 2.1258 | 0.2437 |
| 2.031 | 0.75 | 30 | 1.9442 | 0.3187 |
| 1.8773 | 1.0 | 40 | 1.6159 | 0.375 |
| 1.4132 | 1.25 | 50 | 1.5585 | 0.4188 |
| 1.4581 | 1.5 | 60 | 1.5269 | 0.35 |
| 1.4697 | 1.75 | 70 | 1.5535 | 0.3625 |
| 1.3575 | 2.0 | 80 | 1.3056 | 0.4375 |
| 1.0615 | 2.25 | 90 | 1.4774 | 0.4 |
| 1.1895 | 2.5 | 100 | 1.2384 | 0.45 |
| 1.0659 | 2.75 | 110 | 1.3315 | 0.4938 |
| 1.1517 | 3.0 | 120 | 1.1040 | 0.575 |
| 0.7957 | 3.25 | 130 | 1.3480 | 0.4375 |
| 0.8037 | 3.5 | 140 | 1.2879 | 0.525 |
| 1.0157 | 3.75 | 150 | 1.1900 | 0.5 |
| 0.7665 | 4.0 | 160 | 1.2039 | 0.4938 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.1
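Several of these cards say the model was trained "on the imagefolder dataset", which refers to the generic `imagefolder` loader in the `datasets` library, where class labels are inferred from sub-directory names. A minimal sketch, with a placeholder `data_dir` and split ratio:

```py
from datasets import load_dataset

# "path/to/images" is a placeholder; each class lives in its own sub-directory.
dataset = load_dataset("imagefolder", data_dir="path/to/images")
dataset = dataset["train"].train_test_split(test_size=0.2, seed=42)
print(dataset["train"].features["label"].names)  # class names inferred from folder names
```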
[ "tench, tinca tinca", "goldfish, carassius auratus", "great white shark, white shark, man-eater, man-eating shark, carcharodon carcharias", "tiger shark, galeocerdo cuvieri", "hammerhead, hammerhead shark", "electric ray, crampfish, numbfish, torpedo", "stingray", "cock", "hen", "ostrich, struthio camelus", "brambling, fringilla montifringilla", "goldfinch, carduelis carduelis", "house finch, linnet, carpodacus mexicanus", "junco, snowbird", "indigo bunting, indigo finch, indigo bird, passerina cyanea", "robin, american robin, turdus migratorius", "bulbul", "jay", "magpie", "chickadee", "water ouzel, dipper", "kite", "bald eagle, american eagle, haliaeetus leucocephalus", "vulture", "great grey owl, great gray owl, strix nebulosa", "european fire salamander, salamandra salamandra", "common newt, triturus vulgaris", "eft", "spotted salamander, ambystoma maculatum", "axolotl, mud puppy, ambystoma mexicanum", "bullfrog, rana catesbeiana", "tree frog, tree-frog", "tailed frog, bell toad, ribbed toad, tailed toad, ascaphus trui", "loggerhead, loggerhead turtle, caretta caretta", "leatherback turtle, leatherback, leathery turtle, dermochelys coriacea", "mud turtle", "terrapin", "box turtle, box tortoise", "banded gecko", "common iguana, iguana, iguana iguana", "american chameleon, anole, anolis carolinensis", "whiptail, whiptail lizard", "agama", "frilled lizard, chlamydosaurus kingi", "alligator lizard", "gila monster, heloderma suspectum", "green lizard, lacerta viridis", "african chameleon, chamaeleo chamaeleon", "komodo dragon, komodo lizard, dragon lizard, giant lizard, varanus komodoensis", "african crocodile, nile crocodile, crocodylus niloticus", "american alligator, alligator mississipiensis", "triceratops", "thunder snake, worm snake, carphophis amoenus", "ringneck snake, ring-necked snake, ring snake", "hognose snake, puff adder, sand viper", "green snake, grass snake", "king snake, kingsnake", "garter snake, grass snake", "water snake", "vine snake", "night snake, hypsiglena torquata", "boa constrictor, constrictor constrictor", "rock python, rock snake, python sebae", "indian cobra, naja naja", "green mamba", "sea snake", "horned viper, cerastes, sand viper, horned asp, cerastes cornutus", "diamondback, diamondback rattlesnake, crotalus adamanteus", "sidewinder, horned rattlesnake, crotalus cerastes", "trilobite", "harvestman, daddy longlegs, phalangium opilio", "scorpion", "black and gold garden spider, argiope aurantia", "barn spider, araneus cavaticus", "garden spider, aranea diademata", "black widow, latrodectus mactans", "tarantula", "wolf spider, hunting spider", "tick", "centipede", "black grouse", "ptarmigan", "ruffed grouse, partridge, bonasa umbellus", "prairie chicken, prairie grouse, prairie fowl", "peacock", "quail", "partridge", "african grey, african gray, psittacus erithacus", "macaw", "sulphur-crested cockatoo, kakatoe galerita, cacatua galerita", "lorikeet", "coucal", "bee eater", "hornbill", "hummingbird", "jacamar", "toucan", "drake", "red-breasted merganser, mergus serrator", "goose", "black swan, cygnus atratus", "tusker", "echidna, spiny anteater, anteater", "platypus, duckbill, duckbilled platypus, duck-billed platypus, ornithorhynchus anatinus", "wallaby, brush kangaroo", "koala, koala bear, kangaroo bear, native bear, phascolarctos cinereus", "wombat", "jellyfish", "sea anemone, anemone", "brain coral", "flatworm, platyhelminth", "nematode, nematode worm, roundworm", "conch", "snail", "slug", "sea slug, nudibranch", "chiton, coat-of-mail shell, sea 
cradle, polyplacophore", "chambered nautilus, pearly nautilus, nautilus", "dungeness crab, cancer magister", "rock crab, cancer irroratus", "fiddler crab", "king crab, alaska crab, alaskan king crab, alaska king crab, paralithodes camtschatica", "american lobster, northern lobster, maine lobster, homarus americanus", "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish", "crayfish, crawfish, crawdad, crawdaddy", "hermit crab", "isopod", "white stork, ciconia ciconia", "black stork, ciconia nigra", "spoonbill", "flamingo", "little blue heron, egretta caerulea", "american egret, great white heron, egretta albus", "bittern", "crane", "limpkin, aramus pictus", "european gallinule, porphyrio porphyrio", "american coot, marsh hen, mud hen, water hen, fulica americana", "bustard", "ruddy turnstone, arenaria interpres", "red-backed sandpiper, dunlin, erolia alpina", "redshank, tringa totanus", "dowitcher", "oystercatcher, oyster catcher", "pelican", "king penguin, aptenodytes patagonica", "albatross, mollymawk", "grey whale, gray whale, devilfish, eschrichtius gibbosus, eschrichtius robustus", "killer whale, killer, orca, grampus, sea wolf, orcinus orca", "dugong, dugong dugon", "sea lion", "chihuahua", "japanese spaniel", "maltese dog, maltese terrier, maltese", "pekinese, pekingese, peke", "shih-tzu", "blenheim spaniel", "papillon", "toy terrier", "rhodesian ridgeback", "afghan hound, afghan", "basset, basset hound", "beagle", "bloodhound, sleuthhound", "bluetick", "black-and-tan coonhound", "walker hound, walker foxhound", "english foxhound", "redbone", "borzoi, russian wolfhound", "irish wolfhound", "italian greyhound", "whippet", "ibizan hound, ibizan podenco", "norwegian elkhound, elkhound", "otterhound, otter hound", "saluki, gazelle hound", "scottish deerhound, deerhound", "weimaraner", "staffordshire bullterrier, staffordshire bull terrier", "american staffordshire terrier, staffordshire terrier, american pit bull terrier, pit bull terrier", "bedlington terrier", "border terrier", "kerry blue terrier", "irish terrier", "norfolk terrier", "norwich terrier", "yorkshire terrier", "wire-haired fox terrier", "lakeland terrier", "sealyham terrier, sealyham", "airedale, airedale terrier", "cairn, cairn terrier", "australian terrier", "dandie dinmont, dandie dinmont terrier", "boston bull, boston terrier", "miniature schnauzer", "giant schnauzer", "standard schnauzer", "scotch terrier, scottish terrier, scottie", "tibetan terrier, chrysanthemum dog", "silky terrier, sydney silky", "soft-coated wheaten terrier", "west highland white terrier", "lhasa, lhasa apso", "flat-coated retriever", "curly-coated retriever", "golden retriever", "labrador retriever", "chesapeake bay retriever", "german short-haired pointer", "vizsla, hungarian pointer", "english setter", "irish setter, red setter", "gordon setter", "brittany spaniel", "clumber, clumber spaniel", "english springer, english springer spaniel", "welsh springer spaniel", "cocker spaniel, english cocker spaniel, cocker", "sussex spaniel", "irish water spaniel", "kuvasz", "schipperke", "groenendael", "malinois", "briard", "kelpie", "komondor", "old english sheepdog, bobtail", "shetland sheepdog, shetland sheep dog, shetland", "collie", "border collie", "bouvier des flandres, bouviers des flandres", "rottweiler", "german shepherd, german shepherd dog, german police dog, alsatian", "doberman, doberman pinscher", "miniature pinscher", "greater swiss mountain dog", "bernese mountain dog", "appenzeller", "entlebucher", "boxer", "bull 
mastiff", "tibetan mastiff", "french bulldog", "great dane", "saint bernard, st bernard", "eskimo dog, husky", "malamute, malemute, alaskan malamute", "siberian husky", "dalmatian, coach dog, carriage dog", "affenpinscher, monkey pinscher, monkey dog", "basenji", "pug, pug-dog", "leonberg", "newfoundland, newfoundland dog", "great pyrenees", "samoyed, samoyede", "pomeranian", "chow, chow chow", "keeshond", "brabancon griffon", "pembroke, pembroke welsh corgi", "cardigan, cardigan welsh corgi", "toy poodle", "miniature poodle", "standard poodle", "mexican hairless", "timber wolf, grey wolf, gray wolf, canis lupus", "white wolf, arctic wolf, canis lupus tundrarum", "red wolf, maned wolf, canis rufus, canis niger", "coyote, prairie wolf, brush wolf, canis latrans", "dingo, warrigal, warragal, canis dingo", "dhole, cuon alpinus", "african hunting dog, hyena dog, cape hunting dog, lycaon pictus", "hyena, hyaena", "red fox, vulpes vulpes", "kit fox, vulpes macrotis", "arctic fox, white fox, alopex lagopus", "grey fox, gray fox, urocyon cinereoargenteus", "tabby, tabby cat", "tiger cat", "persian cat", "siamese cat, siamese", "egyptian cat", "cougar, puma, catamount, mountain lion, painter, panther, felis concolor", "lynx, catamount", "leopard, panthera pardus", "snow leopard, ounce, panthera uncia", "jaguar, panther, panthera onca, felis onca", "lion, king of beasts, panthera leo", "tiger, panthera tigris", "cheetah, chetah, acinonyx jubatus", "brown bear, bruin, ursus arctos", "american black bear, black bear, ursus americanus, euarctos americanus", "ice bear, polar bear, ursus maritimus, thalarctos maritimus", "sloth bear, melursus ursinus, ursus ursinus", "mongoose", "meerkat, mierkat", "tiger beetle", "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle", "ground beetle, carabid beetle", "long-horned beetle, longicorn, longicorn beetle", "leaf beetle, chrysomelid", "dung beetle", "rhinoceros beetle", "weevil", "fly", "bee", "ant, emmet, pismire", "grasshopper, hopper", "cricket", "walking stick, walkingstick, stick insect", "cockroach, roach", "mantis, mantid", "cicada, cicala", "leafhopper", "lacewing, lacewing fly", "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", "damselfly", "admiral", "ringlet, ringlet butterfly", "monarch, monarch butterfly, milkweed butterfly, danaus plexippus", "cabbage butterfly", "sulphur butterfly, sulfur butterfly", "lycaenid, lycaenid butterfly", "starfish, sea star", "sea urchin", "sea cucumber, holothurian", "wood rabbit, cottontail, cottontail rabbit", "hare", "angora, angora rabbit", "hamster", "porcupine, hedgehog", "fox squirrel, eastern fox squirrel, sciurus niger", "marmot", "beaver", "guinea pig, cavia cobaya", "sorrel", "zebra", "hog, pig, grunter, squealer, sus scrofa", "wild boar, boar, sus scrofa", "warthog", "hippopotamus, hippo, river horse, hippopotamus amphibius", "ox", "water buffalo, water ox, asiatic buffalo, bubalus bubalis", "bison", "ram, tup", "bighorn, bighorn sheep, cimarron, rocky mountain bighorn, rocky mountain sheep, ovis canadensis", "ibex, capra ibex", "hartebeest", "impala, aepyceros melampus", "gazelle", "arabian camel, dromedary, camelus dromedarius", "llama", "weasel", "mink", "polecat, fitch, foulmart, foumart, mustela putorius", "black-footed ferret, ferret, mustela nigripes", "otter", "skunk, polecat, wood pussy", "badger", "armadillo", "three-toed sloth, ai, bradypus tridactylus", "orangutan, orang, orangutang, pongo pygmaeus", "gorilla, 
gorilla gorilla", "chimpanzee, chimp, pan troglodytes", "gibbon, hylobates lar", "siamang, hylobates syndactylus, symphalangus syndactylus", "guenon, guenon monkey", "patas, hussar monkey, erythrocebus patas", "baboon", "macaque", "langur", "colobus, colobus monkey", "proboscis monkey, nasalis larvatus", "marmoset", "capuchin, ringtail, cebus capucinus", "howler monkey, howler", "titi, titi monkey", "spider monkey, ateles geoffroyi", "squirrel monkey, saimiri sciureus", "madagascar cat, ring-tailed lemur, lemur catta", "indri, indris, indri indri, indri brevicaudatus", "indian elephant, elephas maximus", "african elephant, loxodonta africana", "lesser panda, red panda, panda, bear cat, cat bear, ailurus fulgens", "giant panda, panda, panda bear, coon bear, ailuropoda melanoleuca", "barracouta, snoek", "eel", "coho, cohoe, coho salmon, blue jack, silver salmon, oncorhynchus kisutch", "rock beauty, holocanthus tricolor", "anemone fish", "sturgeon", "gar, garfish, garpike, billfish, lepisosteus osseus", "lionfish", "puffer, pufferfish, blowfish, globefish", "abacus", "abaya", "academic gown, academic robe, judge's robe", "accordion, piano accordion, squeeze box", "acoustic guitar", "aircraft carrier, carrier, flattop, attack aircraft carrier", "airliner", "airship, dirigible", "altar", "ambulance", "amphibian, amphibious vehicle", "analog clock", "apiary, bee house", "apron", "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin", "assault rifle, assault gun", "backpack, back pack, knapsack, packsack, rucksack, haversack", "bakery, bakeshop, bakehouse", "balance beam, beam", "balloon", "ballpoint, ballpoint pen, ballpen, biro", "band aid", "banjo", "bannister, banister, balustrade, balusters, handrail", "barbell", "barber chair", "barbershop", "barn", "barometer", "barrel, cask", "barrow, garden cart, lawn cart, wheelbarrow", "baseball", "basketball", "bassinet", "bassoon", "bathing cap, swimming cap", "bath towel", "bathtub, bathing tub, bath, tub", "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon", "beacon, lighthouse, beacon light, pharos", "beaker", "bearskin, busby, shako", "beer bottle", "beer glass", "bell cote, bell cot", "bib", "bicycle-built-for-two, tandem bicycle, tandem", "bikini, two-piece", "binder, ring-binder", "binoculars, field glasses, opera glasses", "birdhouse", "boathouse", "bobsled, bobsleigh, bob", "bolo tie, bolo, bola tie, bola", "bonnet, poke bonnet", "bookcase", "bookshop, bookstore, bookstall", "bottlecap", "bow", "bow tie, bow-tie, bowtie", "brass, memorial tablet, plaque", "brassiere, bra, bandeau", "breakwater, groin, groyne, mole, bulwark, seawall, jetty", "breastplate, aegis, egis", "broom", "bucket, pail", "buckle", "bulletproof vest", "bullet train, bullet", "butcher shop, meat market", "cab, hack, taxi, taxicab", "caldron, cauldron", "candle, taper, wax light", "cannon", "canoe", "can opener, tin opener", "cardigan", "car mirror", "carousel, carrousel, merry-go-round, roundabout, whirligig", "carpenter's kit, tool kit", "carton", "car wheel", "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, atm", "cassette", "cassette player", "castle", "catamaran", "cd player", "cello, violoncello", "cellular telephone, cellular phone, cellphone, cell, mobile phone", "chain", "chainlink fence", "chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour", "chain saw, chainsaw", "chest", "chiffonier, 
commode", "chime, bell, gong", "china cabinet, china closet", "christmas stocking", "church, church building", "cinema, movie theater, movie theatre, movie house, picture palace", "cleaver, meat cleaver, chopper", "cliff dwelling", "cloak", "clog, geta, patten, sabot", "cocktail shaker", "coffee mug", "coffeepot", "coil, spiral, volute, whorl, helix", "combination lock", "computer keyboard, keypad", "confectionery, confectionary, candy store", "container ship, containership, container vessel", "convertible", "corkscrew, bottle screw", "cornet, horn, trumpet, trump", "cowboy boot", "cowboy hat, ten-gallon hat", "cradle", "crane", "crash helmet", "crate", "crib, cot", "crock pot", "croquet ball", "crutch", "cuirass", "dam, dike, dyke", "desk", "desktop computer", "dial telephone, dial phone", "diaper, nappy, napkin", "digital clock", "digital watch", "dining table, board", "dishrag, dishcloth", "dishwasher, dish washer, dishwashing machine", "disk brake, disc brake", "dock, dockage, docking facility", "dogsled, dog sled, dog sleigh", "dome", "doormat, welcome mat", "drilling platform, offshore rig", "drum, membranophone, tympan", "drumstick", "dumbbell", "dutch oven", "electric fan, blower", "electric guitar", "electric locomotive", "entertainment center", "envelope", "espresso maker", "face powder", "feather boa, boa", "file, file cabinet, filing cabinet", "fireboat", "fire engine, fire truck", "fire screen, fireguard", "flagpole, flagstaff", "flute, transverse flute", "folding chair", "football helmet", "forklift", "fountain", "fountain pen", "four-poster", "freight car", "french horn, horn", "frying pan, frypan, skillet", "fur coat", "garbage truck, dustcart", "gasmask, respirator, gas helmet", "gas pump, gasoline pump, petrol pump, island dispenser", "goblet", "go-kart", "golf ball", "golfcart, golf cart", "gondola", "gong, tam-tam", "gown", "grand piano, grand", "greenhouse, nursery, glasshouse", "grille, radiator grille", "grocery store, grocery, food market, market", "guillotine", "hair slide", "hair spray", "half track", "hammer", "hamper", "hand blower, blow dryer, blow drier, hair dryer, hair drier", "hand-held computer, hand-held microcomputer", "handkerchief, hankie, hanky, hankey", "hard disc, hard disk, fixed disk", "harmonica, mouth organ, harp, mouth harp", "harp", "harvester, reaper", "hatchet", "holster", "home theater, home theatre", "honeycomb", "hook, claw", "hoopskirt, crinoline", "horizontal bar, high bar", "horse cart, horse-cart", "hourglass", "ipod", "iron, smoothing iron", "jack-o'-lantern", "jean, blue jean, denim", "jeep, landrover", "jersey, t-shirt, tee shirt", "jigsaw puzzle", "jinrikisha, ricksha, rickshaw", "joystick", "kimono", "knee pad", "knot", "lab coat, laboratory coat", "ladle", "lampshade, lamp shade", "laptop, laptop computer", "lawn mower, mower", "lens cap, lens cover", "letter opener, paper knife, paperknife", "library", "lifeboat", "lighter, light, igniter, ignitor", "limousine, limo", "liner, ocean liner", "lipstick, lip rouge", "loafer", "lotion", "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system", "loupe, jeweler's loupe", "lumbermill, sawmill", "magnetic compass", "mailbag, postbag", "mailbox, letter box", "maillot", "maillot, tank suit", "manhole cover", "maraca", "marimba, xylophone", "mask", "matchstick", "maypole", "maze, labyrinth", "measuring cup", "medicine chest, medicine cabinet", "megalith, megalithic structure", "microphone, mike", "microwave, microwave oven", "military uniform", "milk can", "minibus", 
"miniskirt, mini", "minivan", "missile", "mitten", "mixing bowl", "mobile home, manufactured home", "model t", "modem", "monastery", "monitor", "moped", "mortar", "mortarboard", "mosque", "mosquito net", "motor scooter, scooter", "mountain bike, all-terrain bike, off-roader", "mountain tent", "mouse, computer mouse", "mousetrap", "moving van", "muzzle", "nail", "neck brace", "necklace", "nipple", "notebook, notebook computer", "obelisk", "oboe, hautboy, hautbois", "ocarina, sweet potato", "odometer, hodometer, mileometer, milometer", "oil filter", "organ, pipe organ", "oscilloscope, scope, cathode-ray oscilloscope, cro", "overskirt", "oxcart", "oxygen mask", "packet", "paddle, boat paddle", "paddlewheel, paddle wheel", "padlock", "paintbrush", "pajama, pyjama, pj's, jammies", "palace", "panpipe, pandean pipe, syrinx", "paper towel", "parachute, chute", "parallel bars, bars", "park bench", "parking meter", "passenger car, coach, carriage", "patio, terrace", "pay-phone, pay-station", "pedestal, plinth, footstall", "pencil box, pencil case", "pencil sharpener", "perfume, essence", "petri dish", "photocopier", "pick, plectrum, plectron", "pickelhaube", "picket fence, paling", "pickup, pickup truck", "pier", "piggy bank, penny bank", "pill bottle", "pillow", "ping-pong ball", "pinwheel", "pirate, pirate ship", "pitcher, ewer", "plane, carpenter's plane, woodworking plane", "planetarium", "plastic bag", "plate rack", "plow, plough", "plunger, plumber's helper", "polaroid camera, polaroid land camera", "pole", "police van, police wagon, paddy wagon, patrol wagon, wagon, black maria", "poncho", "pool table, billiard table, snooker table", "pop bottle, soda bottle", "pot, flowerpot", "potter's wheel", "power drill", "prayer rug, prayer mat", "printer", "prison, prison house", "projectile, missile", "projector", "puck, hockey puck", "punching bag, punch bag, punching ball, punchball", "purse", "quill, quill pen", "quilt, comforter, comfort, puff", "racer, race car, racing car", "racket, racquet", "radiator", "radio, wireless", "radio telescope, radio reflector", "rain barrel", "recreational vehicle, rv, r.v.", "reel", "reflex camera", "refrigerator, icebox", "remote control, remote", "restaurant, eating house, eating place, eatery", "revolver, six-gun, six-shooter", "rifle", "rocking chair, rocker", "rotisserie", "rubber eraser, rubber, pencil eraser", "rugby ball", "rule, ruler", "running shoe", "safe", "safety pin", "saltshaker, salt shaker", "sandal", "sarong", "sax, saxophone", "scabbard", "scale, weighing machine", "school bus", "schooner", "scoreboard", "screen, crt screen", "screw", "screwdriver", "seat belt, seatbelt", "sewing machine", "shield, buckler", "shoe shop, shoe-shop, shoe store", "shoji", "shopping basket", "shopping cart", "shovel", "shower cap", "shower curtain", "ski", "ski mask", "sleeping bag", "slide rule, slipstick", "sliding door", "slot, one-armed bandit", "snorkel", "snowmobile", "snowplow, snowplough", "soap dispenser", "soccer ball", "sock", "solar dish, solar collector, solar furnace", "sombrero", "soup bowl", "space bar", "space heater", "space shuttle", "spatula", "speedboat", "spider web, spider's web", "spindle", "sports car, sport car", "spotlight, spot", "stage", "steam locomotive", "steel arch bridge", "steel drum", "stethoscope", "stole", "stone wall", "stopwatch, stop watch", "stove", "strainer", "streetcar, tram, tramcar, trolley, trolley car", "stretcher", "studio couch, day bed", "stupa, tope", "submarine, pigboat, sub, u-boat", "suit, suit of clothes", 
"sundial", "sunglass", "sunglasses, dark glasses, shades", "sunscreen, sunblock, sun blocker", "suspension bridge", "swab, swob, mop", "sweatshirt", "swimming trunks, bathing trunks", "swing", "switch, electric switch, electrical switch", "syringe", "table lamp", "tank, army tank, armored combat vehicle, armoured combat vehicle", "tape player", "teapot", "teddy, teddy bear", "television, television system", "tennis ball", "thatch, thatched roof", "theater curtain, theatre curtain", "thimble", "thresher, thrasher, threshing machine", "throne", "tile roof", "toaster", "tobacco shop, tobacconist shop, tobacconist", "toilet seat", "torch", "totem pole", "tow truck, tow car, wrecker", "toyshop", "tractor", "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi", "tray", "trench coat", "tricycle, trike, velocipede", "trimaran", "tripod", "triumphal arch", "trolleybus, trolley coach, trackless trolley", "trombone", "tub, vat", "turnstile", "typewriter keyboard", "umbrella", "unicycle, monocycle", "upright, upright piano", "vacuum, vacuum cleaner", "vase", "vault", "velvet", "vending machine", "vestment", "viaduct", "violin, fiddle", "volleyball", "waffle iron", "wall clock", "wallet, billfold, notecase, pocketbook", "wardrobe, closet, press", "warplane, military plane", "washbasin, handbasin, washbowl, lavabo, wash-hand basin", "washer, automatic washer, washing machine", "water bottle", "water jug", "water tower", "whiskey jug", "whistle", "wig", "window screen", "window shade", "windsor tie", "wine bottle", "wing", "wok", "wooden spoon", "wool, woolen, woollen", "worm fence, snake fence, snake-rail fence, virginia fence", "wreck", "yawl", "yurt", "web site, website, internet site, site", "comic book", "crossword puzzle, crossword", "street sign", "traffic light, traffic signal, stoplight", "book jacket, dust cover, dust jacket, dust wrapper", "menu", "plate", "guacamole", "consomme", "hot pot, hotpot", "trifle", "ice cream, icecream", "ice lolly, lolly, lollipop, popsicle", "french loaf", "bagel, beigel", "pretzel", "cheeseburger", "hotdog, hot dog, red hot", "mashed potato", "head cabbage", "broccoli", "cauliflower", "zucchini, courgette", "spaghetti squash", "acorn squash", "butternut squash", "cucumber, cuke", "artichoke, globe artichoke", "bell pepper", "cardoon", "mushroom", "granny smith", "strawberry", "orange", "lemon", "fig", "pineapple, ananas", "banana", "jackfruit, jak, jack", "custard apple", "pomegranate", "hay", "carbonara", "chocolate sauce, chocolate syrup", "dough", "meat loaf, meatloaf", "pizza, pizza pie", "potpie", "burrito", "red wine", "espresso", "cup", "eggnog", "alp", "bubble", "cliff, drop, drop-off", "coral reef", "geyser", "lakeside, lakeshore", "promontory, headland, head, foreland", "sandbar, sand bar", "seashore, coast, seacoast, sea-coast", "valley, vale", "volcano", "ballplayer, baseball player", "groom, bridegroom", "scuba diver", "rapeseed", "daisy", "yellow lady's slipper, yellow lady-slipper, cypripedium calceolus, cypripedium parviflorum", "corn", "acorn", "hip, rose hip, rosehip", "buckeye, horse chestnut, conker", "coral fungus", "agaric", "gyromitra", "stinkhorn, carrion fungus", "earthstar", "hen-of-the-woods, hen of the woods, polyporus frondosus, grifola frondosa", "bolete", "ear, spike, capitulum", "toilet tissue, toilet paper, bathroom tissue" ]
alhafizfadhil/results
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # results This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2952 - Accuracy: 0.4875 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 80 | 1.6148 | 0.3375 | | 1.6678 | 2.0 | 160 | 1.3553 | 0.4625 | | 0.6788 | 3.0 | 240 | 1.2952 | 0.4875 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "label_0", "label_1", "label_2", "label_3", "label_4", "label_5", "label_6", "label_7" ]
hfayuwardana/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2333 - Accuracy: 0.55 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-06 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 15 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.3397 | 0.4938 | | No log | 2.0 | 80 | 1.3036 | 0.5312 | | No log | 3.0 | 120 | 1.3684 | 0.5125 | | No log | 4.0 | 160 | 1.3877 | 0.5 | | No log | 5.0 | 200 | 1.2441 | 0.5625 | | No log | 6.0 | 240 | 1.3767 | 0.5 | | No log | 7.0 | 280 | 1.2784 | 0.5437 | | No log | 8.0 | 320 | 1.3191 | 0.5188 | | No log | 9.0 | 360 | 1.3417 | 0.5062 | | No log | 10.0 | 400 | 1.3411 | 0.5125 | | No log | 11.0 | 440 | 1.3460 | 0.5062 | | No log | 12.0 | 480 | 1.4155 | 0.5 | | 0.483 | 13.0 | 520 | 1.2887 | 0.5375 | | 0.483 | 14.0 | 560 | 1.3648 | 0.5 | | 0.483 | 15.0 | 600 | 1.3337 | 0.5 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
platzi/platzi-vit-model-ivan-vargas
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # platzi-vit-model-ivan-vargas This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unknown dataset. It achieves the following results on the evaluation set: - Loss: 0.0356 - Accuracy: 0.9925 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0002 - train_batch_size: 8 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.1464 | 3.85 | 500 | 0.0356 | 0.9925 | ### Framework versions - Transformers 4.30.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.13.3
[ "angular_leaf_spot", "bean_rust", "healthy" ]
ansilmbabl/cards-blt-swin-tiny-patch4-window7-224-finetuned-v1
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # cards-blt-swin-tiny-patch4-window7-224-finetuned-v1 This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.3476 - Accuracy: 0.4217 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.7046 | 1.0 | 56 | 1.4651 | 0.3422 | | 1.6543 | 1.99 | 112 | 1.4050 | 0.3917 | | 1.6565 | 2.99 | 168 | 1.3476 | 0.4217 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.0.1+cu117 - Datasets 2.17.0 - Tokenizers 0.15.2
[ "grade_1", "grade_2", "grade_3", "grade_4", "grade_5", "grade_6", "grade_7", "grade_8", "grade_9" ]
ansilmbabl/cards-blt-swin-tiny-patch4-window7-224-finetuned-v2
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # cards-blt-swin-tiny-patch4-window7-224-finetuned-v2 This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2162 - Accuracy: 0.5022 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 100 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.4297 | 1.0 | 56 | 1.1976 | 0.4933 | | 1.4078 | 1.99 | 112 | 1.1964 | 0.5011 | | 1.417 | 2.99 | 168 | 1.2025 | 0.4961 | | 1.4163 | 4.0 | 225 | 1.2295 | 0.4883 | | 1.4318 | 5.0 | 281 | 1.2330 | 0.495 | | 1.4383 | 5.99 | 337 | 1.2162 | 0.5022 | | 1.4212 | 6.99 | 393 | 1.2634 | 0.4717 | | 1.4346 | 8.0 | 450 | 1.3083 | 0.4689 | | 1.419 | 9.0 | 506 | 1.2719 | 0.4806 | | 1.4252 | 9.99 | 562 | 1.3048 | 0.4911 | | 1.4522 | 10.99 | 618 | 1.2708 | 0.4794 | | 1.3748 | 12.0 | 675 | 1.3720 | 0.4383 | | 1.3966 | 13.0 | 731 | 1.3095 | 0.4594 | | 1.4507 | 13.99 | 787 | 1.2430 | 0.485 | | 1.4033 | 14.99 | 843 | 1.2728 | 0.4794 | | 1.3972 | 16.0 | 900 | 1.2611 | 0.4883 | | 1.4136 | 17.0 | 956 | 1.3166 | 0.45 | | 1.3992 | 17.99 | 1012 | 1.3103 | 0.4856 | | 1.3614 | 18.99 | 1068 | 1.3302 | 0.4422 | | 1.3747 | 20.0 | 1125 | 1.2919 | 0.4856 | | 1.3868 | 21.0 | 1181 | 1.3166 | 0.4728 | | 1.3399 | 21.99 | 1237 | 1.3200 | 0.4672 | | 1.3943 | 22.99 | 1293 | 1.2920 | 0.4811 | | 1.3635 | 24.0 | 1350 | 1.3109 | 0.4833 | | 1.3724 | 25.0 | 1406 | 1.3100 | 0.4644 | | 1.3141 | 25.99 | 1462 | 1.3263 | 0.4978 | | 1.3576 | 26.99 | 1518 | 1.3307 | 0.4772 | | 1.3022 | 28.0 | 1575 | 1.3409 | 0.4978 | | 1.2982 | 29.0 | 1631 | 1.3962 | 0.4583 | | 1.2657 | 29.99 | 1687 | 1.3329 | 0.4817 | | 1.3152 | 30.99 | 1743 | 1.2973 | 0.49 | | 1.2924 | 32.0 | 1800 | 1.3159 | 0.4833 | | 1.214 | 33.0 | 1856 | 1.3955 | 0.4833 | | 1.2717 | 33.99 | 1912 | 1.4583 | 0.46 | | 1.2692 | 34.99 | 1968 | 1.3504 | 0.4939 | | 1.2127 | 36.0 | 2025 | 1.3784 | 0.4833 | | 1.1956 | 37.0 | 2081 | 1.4184 | 0.4817 | | 1.2408 | 37.99 | 2137 | 1.3849 | 0.4944 | | 1.1699 | 38.99 | 2193 | 1.4298 | 0.4844 | | 1.1727 | 40.0 | 2250 | 1.4331 | 0.4772 | | 1.1485 | 41.0 | 2306 | 1.4597 | 0.4672 | | 1.1668 | 41.99 | 2362 | 1.4429 | 0.4783 | | 1.1881 | 42.99 | 2418 | 1.4555 | 0.4839 | | 1.1204 | 44.0 | 2475 | 1.4648 | 0.4783 | | 1.1523 | 45.0 | 2531 | 1.4744 | 0.4733 | | 1.1206 | 45.99 | 2587 | 1.4792 | 0.4906 | | 1.1135 | 46.99 | 2643 | 1.5009 | 0.4678 | | 1.1227 | 48.0 | 2700 | 1.5480 | 0.4733 | | 1.1017 | 49.0 | 2756 | 1.5907 | 0.4644 | | 1.1601 | 49.99 | 2812 | 1.5136 | 0.47 | | 1.1239 | 50.99 | 2868 | 1.5384 | 0.4789 | | 1.09 | 52.0 | 2925 | 1.5716 | 0.4711 | | 1.1023 | 53.0 | 2981 | 1.5736 | 0.4728 | | 1.1038 | 53.99 | 3037 | 1.5919 | 0.4556 | | 1.058 | 54.99 | 3093 | 1.5534 | 0.4772 | | 1.0405 | 
56.0 | 3150 | 1.5788 | 0.4717 | | 1.0172 | 57.0 | 3206 | 1.5855 | 0.4767 | | 1.0036 | 57.99 | 3262 | 1.6425 | 0.455 | | 1.0124 | 58.99 | 3318 | 1.6039 | 0.4678 | | 1.0647 | 60.0 | 3375 | 1.5891 | 0.4572 | | 1.0143 | 61.0 | 3431 | 1.6265 | 0.4483 | | 1.0051 | 61.99 | 3487 | 1.6208 | 0.4633 | | 0.9571 | 62.99 | 3543 | 1.6874 | 0.4483 | | 0.9838 | 64.0 | 3600 | 1.6778 | 0.4517 | | 0.9995 | 65.0 | 3656 | 1.6248 | 0.4722 | | 1.0374 | 65.99 | 3712 | 1.6645 | 0.4667 | | 0.9483 | 66.99 | 3768 | 1.6307 | 0.4611 | | 0.9825 | 68.0 | 3825 | 1.6662 | 0.4661 | | 1.0023 | 69.0 | 3881 | 1.6650 | 0.46 | | 0.9642 | 69.99 | 3937 | 1.6953 | 0.4494 | | 0.9687 | 70.99 | 3993 | 1.7076 | 0.4661 | | 0.9542 | 72.0 | 4050 | 1.7012 | 0.4656 | | 0.9378 | 73.0 | 4106 | 1.7056 | 0.4533 | | 0.9542 | 73.99 | 4162 | 1.7331 | 0.4572 | | 0.9035 | 74.99 | 4218 | 1.7459 | 0.4417 | | 0.9631 | 76.0 | 4275 | 1.7236 | 0.465 | | 0.8759 | 77.0 | 4331 | 1.7294 | 0.455 | | 0.9218 | 77.99 | 4387 | 1.7654 | 0.4578 | | 0.9077 | 78.99 | 4443 | 1.7234 | 0.4594 | | 0.8924 | 80.0 | 4500 | 1.7256 | 0.4683 | | 0.9156 | 81.0 | 4556 | 1.7320 | 0.4678 | | 0.806 | 81.99 | 4612 | 1.7348 | 0.4661 | | 0.8863 | 82.99 | 4668 | 1.7514 | 0.4606 | | 0.8698 | 84.0 | 4725 | 1.7484 | 0.4661 | | 0.8623 | 85.0 | 4781 | 1.7420 | 0.4778 | | 0.8643 | 85.99 | 4837 | 1.7636 | 0.4617 | | 0.8914 | 86.99 | 4893 | 1.7552 | 0.465 | | 0.837 | 88.0 | 4950 | 1.7552 | 0.4644 | | 0.8217 | 89.0 | 5006 | 1.7532 | 0.4639 | | 0.8601 | 89.99 | 5062 | 1.7447 | 0.4683 | | 0.8293 | 90.99 | 5118 | 1.7622 | 0.4611 | | 0.8301 | 92.0 | 5175 | 1.7616 | 0.4633 | | 0.7752 | 93.0 | 5231 | 1.7585 | 0.4722 | | 0.8533 | 93.99 | 5287 | 1.7842 | 0.4617 | | 0.8156 | 94.99 | 5343 | 1.7837 | 0.4622 | | 0.8094 | 96.0 | 5400 | 1.7896 | 0.4583 | | 0.839 | 97.0 | 5456 | 1.7835 | 0.465 | | 0.839 | 97.99 | 5512 | 1.7883 | 0.46 | | 0.7763 | 98.99 | 5568 | 1.7838 | 0.4594 | | 0.8186 | 99.56 | 5600 | 1.7837 | 0.4606 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.0.1+cu117 - Datasets 2.17.0 - Tokenizers 0.15.2
[ "grade_1", "grade_2", "grade_3", "grade_4", "grade_5", "grade_6", "grade_7", "grade_8", "grade_9" ]
felitrisnanto/ViT-emotion-classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # ViT-emotion-classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2807 - Accuracy: 0.525 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 20 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.8038 | 0.3875 | | No log | 2.0 | 80 | 1.5444 | 0.4125 | | No log | 3.0 | 120 | 1.4651 | 0.4188 | | No log | 4.0 | 160 | 1.3985 | 0.4562 | | No log | 5.0 | 200 | 1.2891 | 0.525 | | No log | 6.0 | 240 | 1.2928 | 0.5 | | No log | 7.0 | 280 | 1.3412 | 0.5 | | No log | 8.0 | 320 | 1.3548 | 0.475 | | No log | 9.0 | 360 | 1.2867 | 0.5312 | | No log | 10.0 | 400 | 1.3636 | 0.475 | | No log | 11.0 | 440 | 1.3431 | 0.5188 | | No log | 12.0 | 480 | 1.2872 | 0.5312 | | 1.0092 | 13.0 | 520 | 1.3491 | 0.525 | | 1.0092 | 14.0 | 560 | 1.2864 | 0.5437 | | 1.0092 | 15.0 | 600 | 1.3278 | 0.5312 | | 1.0092 | 16.0 | 640 | 1.3772 | 0.5062 | | 1.0092 | 17.0 | 680 | 1.4458 | 0.5 | | 1.0092 | 18.0 | 720 | 1.3208 | 0.525 | | 1.0092 | 19.0 | 760 | 1.4037 | 0.5 | | 1.0092 | 20.0 | 800 | 1.2810 | 0.5375 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
joshjrreynaldo/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2685 - Accuracy: 0.5563 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.2944 | 0.5312 | | No log | 2.0 | 80 | 1.2047 | 0.5625 | | No log | 3.0 | 120 | 1.2956 | 0.5125 | | No log | 4.0 | 160 | 1.2328 | 0.5312 | | No log | 5.0 | 200 | 1.1533 | 0.575 | | No log | 6.0 | 240 | 1.2436 | 0.5375 | | No log | 7.0 | 280 | 1.2940 | 0.5437 | | No log | 8.0 | 320 | 1.2115 | 0.5875 | | No log | 9.0 | 360 | 1.2147 | 0.5625 | | No log | 10.0 | 400 | 1.1741 | 0.5625 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
aitomation/mobilenet_v2_1.4_224-northern-pikes
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # mobilenet_v2_1.4_224-northern-pikes This model is a fine-tuned version of [google/mobilenet_v2_1.4_224](https://huggingface.co/google/mobilenet_v2_1.4_224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0281 - Accuracy: 0.9937 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.3107 | 0.98 | 22 | 0.1320 | 0.9708 | | 0.0701 | 2.0 | 45 | 0.0362 | 0.9854 | | 0.0451 | 2.93 | 66 | 0.0281 | 0.9937 | ### Framework versions - Transformers 4.30.2 - Pytorch 1.13.1+cu117 - Datasets 2.13.1 - Tokenizers 0.13.3
[ "np", "ot" ]
ivandrian11/vit-emotions
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # vit-emotions This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2520 - Accuracy: 0.5625 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 8 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 4 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.4894 | 2.5 | 100 | 1.2520 | 0.5625 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
judith0/clasificacion_Ine_Pass
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.1666 - Accuracy: 0.9765 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 32 - eval_batch_size: 32 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 1.2376 | 1.0 | 12 | 0.5182 | 0.7471 | | 0.4743 | 2.0 | 24 | 0.1666 | 0.9765 | | 0.2261 | 3.0 | 36 | 0.1135 | 0.9765 | ### Framework versions - Transformers 4.38.2 - Pytorch 2.2.1+cu121 - Datasets 2.18.0 - Tokenizers 0.15.2
[ "other", "ine anverso", "ine reverso", "other", "pasaporte" ]
Marxulia/emotion_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # emotion_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.3694 - Accuracy: 0.55 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 4e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.9385 | 0.35 | | No log | 2.0 | 80 | 1.6433 | 0.3875 | | No log | 3.0 | 120 | 1.4689 | 0.5375 | | No log | 4.0 | 160 | 1.3533 | 0.55 | | No log | 5.0 | 200 | 1.3162 | 0.5813 | | No log | 6.0 | 240 | 1.3131 | 0.5437 | | No log | 7.0 | 280 | 1.2160 | 0.6 | | No log | 8.0 | 320 | 1.2660 | 0.5437 | | No log | 9.0 | 360 | 1.2594 | 0.55 | | No log | 10.0 | 400 | 1.1873 | 0.5687 | | No log | 11.0 | 440 | 1.1169 | 0.5875 | | No log | 12.0 | 480 | 1.2015 | 0.5687 | | 1.125 | 13.0 | 520 | 1.2653 | 0.5375 | | 1.125 | 14.0 | 560 | 1.2801 | 0.5563 | | 1.125 | 15.0 | 600 | 1.2304 | 0.5563 | | 1.125 | 16.0 | 640 | 1.2341 | 0.5437 | | 1.125 | 17.0 | 680 | 1.2981 | 0.5312 | | 1.125 | 18.0 | 720 | 1.3277 | 0.5687 | | 1.125 | 19.0 | 760 | 1.2174 | 0.5875 | | 1.125 | 20.0 | 800 | 1.1810 | 0.6 | | 1.125 | 21.0 | 840 | 1.2280 | 0.5687 | | 1.125 | 22.0 | 880 | 1.3576 | 0.525 | | 1.125 | 23.0 | 920 | 1.3897 | 0.5375 | | 1.125 | 24.0 | 960 | 1.3216 | 0.5625 | | 0.3612 | 25.0 | 1000 | 1.3033 | 0.6062 | | 0.3612 | 26.0 | 1040 | 1.3501 | 0.5625 | | 0.3612 | 27.0 | 1080 | 1.2310 | 0.575 | | 0.3612 | 28.0 | 1120 | 1.2495 | 0.6062 | | 0.3612 | 29.0 | 1160 | 1.2974 | 0.5875 | | 0.3612 | 30.0 | 1200 | 1.2985 | 0.5813 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
ShimaGh/Brain-Tumor-Detection
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # Brain-Tumor-Detection This model is a fine-tuned version of [microsoft/swin-base-patch4-window7-224](https://huggingface.co/microsoft/swin-base-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.0946 - Accuracy: 0.9804 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 5 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 0.92 | 3 | 0.4395 | 0.6667 | | No log | 1.85 | 6 | 0.2817 | 0.9020 | | No log | 2.77 | 9 | 0.1354 | 0.9608 | | 0.3994 | 4.0 | 13 | 0.0956 | 0.9804 | | 0.3994 | 4.62 | 15 | 0.0946 | 0.9804 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "0", "1" ]
friedrice231/MemeDetector_SG
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # swin-tiny-patch4-window7-224-finetuned-eurosat This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 0.1349 - Accuracy: 0.9476 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 64 - eval_batch_size: 64 - seed: 42 - gradient_accumulation_steps: 4 - total_train_batch_size: 256 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_ratio: 0.1 - num_epochs: 3 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | 0.2131 | 1.0 | 51 | 0.1843 | 0.9320 | | 0.1521 | 2.0 | 102 | 0.1215 | 0.9552 | | 0.1318 | 3.0 | 153 | 0.1349 | 0.9476 | ### Framework versions - Transformers 4.37.2 - Pytorch 1.13.1 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "meme", "not_meme" ]
IsaacMwesigwa/footballer-retrain-1
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: nan f1_macro: 2.895499120347367e-06 f1_micro: 0.0012045290291496024 f1_weighted: 2.8982892905428352e-06 precision_macro: 1.4494934165458512e-06 precision_micro: 0.0012045290291496024 precision_weighted: 1.4508901820640839e-06 recall_macro: 0.0012033694344163659 recall_micro: 0.0012045290291496024 recall_weighted: 0.0012045290291496024 accuracy: 0.0012045290291496024
[ "aaron long", "aaron mooy", "aaron ramsdale", "aaron ramsey", "abde ezzalzouli", "abdelhamid sabiri", "abdelkarim hassan", "abderrazak hamdallah", "abdou diallo", "abdul fatawu issahaku", "abdul manaf nurudeen", "abdulaziz hatem", "abdulelah al-amri", "abdulellah al-malki", "abdullah madu", "abdullah otayf", "abdulrahman al-aboud", "abolfazl jalali", "achraf dari", "achraf hakimi", "adam davies", "adrien rabiot", "agustín canobbio", "ahmad nourollahi", "ahmed alaaeldin", "ahmed reda tagnaouti", "ajdin hrustic", "akram afif", "alan franco", "alejandro balde", "aleksandar mitrović", "alex sandro", "alex telles", "alexander bah", "alexander djiku", "alexander domínguez", "alexis mac allister", "alexis vega", "alfred gomis", "alfredo talavera", "ali abdi", "ali al-bulaihi", "ali al-hassan", "ali assadalla", "ali gholizadeh", "ali karimi", "ali maâloul", "alidu seidu", "alireza beiranvand", "alireza jahanbakhsh", "alisson", "alistair johnston", "almoez ali", "alphonse areola", "alphonso davies", "amadou onana", "amir abedzadeh", "anass zaroury", "andreas christensen", "andreas cornelius", "andreas skov olsen", "andrej kramarić", "andrew redmayne", "andries noppert", "andrija živković", "andré ayew ", "andré onana", "andré silva", "andré-frank zambo anguissa", "andrés guardado ", "anis ben slimane", "ansu fati", "ante budimir", "anthony contreras", "anthony hernández", "antoine griezmann", "antoine semenyo", "antonee robinson", "antonio rüdiger", "antony", "antónio silva", "ao tanaka", "ardon jashari", "arkadiusz milik", "armel bella-kotchap", "arthur theate", "artur jędrzejczyk", "assim madibo", "atiba hutchinson ", "aurélien tchouaméni", "awer mabil", "axel disasi", "axel witsel", "ayase ueda", "aymen dahmen", "aymen mathlouthi", "aymeric laporte", "ayrton preciado", "aziz behich", "azzedine ounahi", "aïssa laïdouni", "baba rahman", "badr benoun", "bailey wright", "bamba dieng", "bartosz bereszyński", "bassam al-rawi", "bechir ben saïd", "ben cabango", "ben davies", "ben white", "benjamin pavard", "bernardo silva", "bilal el khannous", "bilel ifa", "borna barišić", "borna sosa", "boualem khoukhi", "boulaye dia", "brandon aguilera", "breel embolo", "bremer", "brenden aaronson", "brennan johnson", "bruno fernandes", "bruno guimarães", "bruno petković", "bryan mbeumo", "bryan oviedo", "bryan ruiz ", "bukayo saka", "callum wilson", "cameron carter-vickers", "cameron devlin", "carlos gruezo", "carlos martínez", "carlos rodríguez", "carlos soler", "casemiro", "celso borges", "charles de ketelaere", "cheikhou kouyaté", "cho gue-sung", "cho yu-min", "chris gunter", "chris mepham", "christian bassogog", "christian eriksen", "christian fassnacht", "christian günter", "christian nørgaard", "christian pulisic", "christopher wooh", "cody gakpo", "collins fai", "connor roberts", "conor coady", "conor gallagher", "craig goodwin", "cristian roldan", "cristian romero", "cristiano ronaldo ", "cyle larin", "césar azpilicueta", "césar montes", "daichi kamada", "daizen maeda", "daley blind", "damian szymański", "dani alves", "dani carvajal", "dani olmo", "daniel afriyie", "daniel amartey", "daniel chacón", "daniel james", "daniel schmidt", "daniel wass", "daniel-kofi kyereh", "danilo", "danilo pereira", "danny vukovic", "danny ward", "darko lazović", "darwin núñez", "david raum", "david raya", "david wotherspoon", "davy klaassen", "dayne st. 
clair", "dayot upamecano", "deandre yedlin", "declan rice", "dejan lovren", "denis odoi", "denis zakaria", "denzel dumfries", "derek cornelius", "devis epassy", "diego godín ", "diego palacios", "diogo costa", "diogo dalot", "djibril sow", "djorkaeff reasco", "domagoj vida", "dominik livaković", "douglas lópez", "dries mertens", "dušan tadić ", "dušan vlahović", "dylan bronn", "dylan levitt", "eden hazard ", "ederson", "edimilson fernandes", "edinson cavani", "edson álvarez", "eduardo camavinga", "ehsan hajsafi ", "eiji kawashima", "elisha owusu", "ellyes skhiri", "emiliano martínez", "enner valencia ", "enzo ebosse", "enzo fernández", "eray cömert", "eric dier", "eric garcía", "eric maxim choupo-moting", "esteban alvarado", "ethan ampadu", "ethan horvath", "exequiel palacios", "fabian frei", "fabian rieder", "fabian schär", "fabinho", "facundo pellistri", "facundo torres", "famara diédhiou", "federico valverde", "ferjani sassi", "fernando muslera", "ferran torres", "filip kostić", "filip mladenović", "filip đuričić", "firas al-buraikan", "fodé ballo-touré", "formose mendy", "fran karačić", "francisco calvo", "franco armani", "fred", "frederik rønnow", "frenkie de jong", "félix torres", "gabriel jesus", "gabriel martinelli", "gaku shibasaki", "garang kuol", "gareth bale ", "gavi", "gaël ondoua", "georges-kévin nkoudou", "gerardo arteaga", "germán pezzella", "gerson torres", "gerónimo rulli", "ghailene chaalali", "gideon mensah", "giorgian de arrascaeta", "giovanni reyna", "gonzalo montiel", "gonzalo plata", "gonçalo ramos", "granit xhaka ", "gregor kobel", "grzegorz krychowiak", "guido rodríguez", "guillermo ochoa", "guillermo varela", "haitham asiri", "haji wright", "hakim ziyech", "hannibal mejbri", "hans vanaken", "haris seferovic", "harry kane ", "harry maguire", "harry souttar", "harry wilson", "hassan al-haydos ", "hassan al-tambakti", "hattan bahebri", "henry martín", "hernán galíndez", "hidemasa morita", "hiroki ito", "hiroki sakai", "hirving lozano", "homam ahmed", "hong chul", "hossein hosseini", "hossein kanaanizadegan", "hugo guillamón", "hugo lloris ", "hwang hee-chan", "hwang in-beom", "hwang ui-jo", "héctor herrera", "héctor moreno", "ibrahim danlad", "ibrahima konaté", "idrissa gueye", "iké ugbo", "ilias chair", "iliman ndiaye", "ismaeel mohammad", "ismail jakobs", "ismaël koné", "ismaïla sarr", "issam jebali", "ivan ilić", "ivan perišić", "ivica ivušić", "ivo grbić", "iñaki williams", "jack grealish", "jackson irvine", "jackson porozo", "jakub kamiński", "jakub kiwior", "jamal musiala", "james maddison", "james pantemis", "jamie maclaren", "jan bednarek", "jan vertonghen", "jason cummings", "jassem gaber", "jawad el yamiq", "jean-charles castelletto", "jean-pierre nsame", "jens stryger larsen", "jeong woo-yeong", "jeremie frimpong", "jeremy sarmiento", "jerome ngom mbekeli", "jesper lindstrøm", "jesús ferreira", "jesús gallardo", "jewison bennette", "jo hyeon-woo", "joachim andersen", "joakim mæhle", "joe allen", "joe morrell", "joe rodon", "joe scally", "joel campbell", "joel king", "joel waterman", "johan venegas", "johan vásquez", "john stones", "jonas hofmann", "jonas omlin", "jonas wind", "jonathan david", "jonathan osorio", "jonny williams", "jordan ayew", "jordan henderson", "jordan morris", "jordan pickford", "jordan veretout", "jordi alba", "jorge sánchez", "joseph aidoo", "josh sargent", "joshua kimmich", "josip juranović", "josip stanišić", "josip šutalo", "josé cifuentes", "josé giménez", "josé luis rodríguez", "josé sá", "joão cancelo", "joão félix", "joão 
mário", "joão palhinha", "joško gvardiol", "juan foyth", "juan pablo vargas", "jude bellingham", "jules koundé", "julian brandt", "julián álvarez", "jung woo-young", "junior hoilett", "junya ito", "jurriën timber", "justin bijlow", "jérémy doku", "kai havertz", "kalidou koulibaly ", "kalvin phillips", "kamal miller", "kamal sowah", "kamaldeen sulemana", "kamil glik", "kamil grabara", "kamil grosicki", "kaoru mitoma", "karim adeyemi", "karim ansarifard", "karim benzema", "karim boudiaf", "karl toko ekambi", "karol świderski", "kasper dolberg", "kasper schmeichel", "keanu baccus", "kellyn acosta", "kendall waston", "kenneth taylor", "kevin de bruyne", "kevin rodríguez", "kevin trapp", "kevin álvarez", "keylor navas", "keysher fuller", "khalid muneer", "kieffer moore", "kieran trippier", "kim jin-su", "kim min-jae", "kim moon-hwan", "kim seung-gyu", "kim tae-hwan", "kim young-gwon", "kingsley coman", "ko itakura", "koen casteels", "koke", "kristijan jakić", "krystian bielik", "krzysztof piątek", "krépin diatta", "kwon chang-hoon", "kwon kyung-won", "kye rowles", "kyle walker", "kylian mbappé", "lautaro martínez", "lawrence ati-zigi", "leander dendoncker", "leandro paredes", "leandro trossard", "lee jae-sung", "lee kang-in", "leon goretzka", "leroy sané", "liam fraser", "liam millar", "lionel messi ", "lisandro martínez", "lovro majer", "loïs openda", "luca de la torre", "lucas cavallini", "lucas hernandez", "lucas paquetá", "lucas torreira", "luis chávez", "luis romo", "luis suárez", "luka jović", "luka modrić ", "luka sučić", "lukas klostermann", "luke shaw", "luuk de jong", "majid hosseini", "mamadou loum", "manuel akanji", "manuel neuer ", "manuel ugarte", "marc-andré ter stegen", "marcelo brozović", "marco asensio", "marcos acuña", "marcos llorente", "marcus rashford", "marcus thuram", "mario götze", "mario pašalić", "mark harris", "mark-anthony kaye", "marko dmitrović", "marko grujić", "marko livaja", "marquinhos", "marten de roon", "martin boyle", "martin braithwaite", "martin erlić", "martin hongla", "martín cáceres", "mason mount", "mateo kovačić", "mateusz wieteska", "matheus nunes", "mathew leckie", "mathew ryan ", "mathias jensen", "mathías olivera", "matt turner", "matteo guendouzi", "matthew smith", "matthias ginter", "matthijs de ligt", "matty cash", "matías vecino", "matías viña", "maxi gómez", "maya yoshida ", "mehdi taremi", "mehdi torabi", "memphis depay", "meshaal barsham", "michael estrada", "michał skóraś", "michel aebischer", "michy batshuayi", "miki yamane", "mikkel damsgaard", "milad mohammadi", "milan borjan", "miloš degenek", "miloš veljković", "mislav oršić", "mitchell duke", "mohamed ali ben romdhane", "mohamed dräger", "mohamed kanno", "mohammed al-breik", "mohammed al-owais", "mohammed al-rubaie", "mohammed kudus", "mohammed muntari", "mohammed salisu", "mohammed waad", "moisés caicedo", "moisés ramírez", "montassar talbi", "morteza pouraliganji", "mostafa meshaal", "mouez hassen", "moumi ngamaleu", "moustapha name", "munir mohamedi", "musab kheder", "na sang-ho", "nader ghandri", "nahuel molina", "naif al-hadhrami", "nampalys mendy", "nasser al-dawsari", "nathan aké", "nathaniel atkinson", "nawaf al-abed", "nawaf al-aqidi", "nayef aguerd", "naïm sliti", "neco williams", "nemanja gudelj", "nemanja maksimović", "nemanja radonjić", "neymar", "nick pope", "niclas füllkrug", "nico elvedi", "nico schlotterbeck", "nico williams", "nicola zalewski", "nicolas jackson", "nicolas nkoulou", "nicolás otamendi", "nicolás tagliafico", "nicolás de la cruz", "niklas süle", 
"nikola milenković", "nikola vlašić", "noa lang", "noah okafor", "nouhou tolo", "noussair mazraoui", "nuno mendes", "néstor araujo", "oliver christensen", "olivier giroud", "olivier mbaizo", "olivier ntcham", "orbelín pineda", "osman bukari", "otávio", "ousmane dembélé", "pablo sarabia", "paik seung-ho", "pape abou cissé", "pape gueye", "pape matar sarr", "papu gómez", "pathé ciss", "patrick sequeira", "pau torres", "paulo dybala", "payam niazmand", "pedri", "pedro", "pepe", "pervis estupiñán", "phil foden", "philipp köhn", "piero hincapié", "pierre kunde", "pierre-emile højbjerg", "piotr zieliński", "predrag rajković", "przemysław frankowski", "rafael leão", "raheem sterling", "ramin rezaeian", "randal kolo muani", "raphaël guerreiro", "raphaël varane", "raphinha", "rasmus kristensen", "raúl jiménez", "remko pasveer", "remo freuler", "renato steffen", "ricardo horta", "ricardo rodriguez", "richarlison", "richie laryea", "riley mcgree", "ritsu dōan", "riyadh sharahili", "roan wilson", "robert arboleda", "robert gumny", "robert lewandowski ", "robert skov", "robert sánchez", "roberto alvarado", "rodolfo cota", "rodri", "rodrigo bentancur", "rodrigo de paul", "rodrygo", "rogelio funes mori", "romain saïss ", "romario ibarra", "romelu lukaku", "ronald araújo", "rouzbeh cheshmi", "ruben vargas", "rubin colwill", "rui patrício", "ró-ró", "rónald matarrita", "rúben dias", "rúben neves", "saad al-sheeb", "sadegh moharrami", "sadio mané", "saeid ezatolahi", "saleh al-shehri", "salem al-dawsari", "salem al-hajri", "salis abdul samed", "salman al-faraj ", "sam adekugbe", "saman ghoddos", "sami al-najei", "samuel gouet", "samuel piette", "sardar azmoun", "saud abdulhamid", "saša lukić", "sean johnson", "sebas méndez", "sebastian szymański", "sebastián coates", "sebastián sosa", "seifeddine jaziri", "selim amallah", "seny dieng", "serge gnabry", "sergej milinković-savić", "sergio busquets ", "sergio rochet", "sergiño dest", "shaq moore", "shogo taniguchi", "shojae khalilzadeh", "shuto machino", "shūichi gonda", "silvan widmer", "simon kjær ", "simon mignolet", "simon ngapandouetnbu", "sofiane boufal", "sofyan amrabat", "son heung-min ", "son jun-ho", "song bum-keun", "song min-kyu", "sorba thomas", "souaibou marou", "srđan babić", "stefan mitrović", "stefan de vrij", "stephen eustáquio", "steve mandanda", "steven berghuis", "steven bergwijn", "steven vitória", "strahinja eraković", "strahinja pavlović", "sultan al-ghannam", "szymon żurkowski", "taha yassine khenissi", "tajon buchanan", "takefusa kubo", "takehiro tomiyasu", "takuma asano", "takumi minamino", "tarek salman", "tariq lamptey", "teun koopmeiners", "theo hernandez", "thiago almada", "thiago silva ", "thibaut courtois", "thilo kehrer", "thomas delaney", "thomas deng", "thomas meunier", "thomas müller", "thomas partey", "thorgan hazard", "tim ream", "timothy castagne", "timothy weah", "toby alderweireld", "tom lockyer", "trent alexander-arnold", "tyler adams", "tyrell malacia", "unai simón", "uriel antuna", "uroš račić", "vahid amiri", "vanja milinković-savić", "victor nelsson", "vincent aboubakar ", "vincent janssen", "vinícius júnior", "virgil van dijk ", "vitinha", "wahbi khazri", "wajdi kechrida", "walid cheddira", "walker zimmerman", "wataru endo", "wayne hennessey", "weston mckennie", "weverton", "william carvalho", "william pacho", "william saliba", "wojciech szczęsny", "wout faes", "wout weghorst", "xavi simons", "xavier arreaga", "xherdan shaqiri", "yahia attiyat allah", "yahya jabrane", "yann sommer", "yannick carrasco", "yasser 
al-shahrani", "yassine bounou", "yassine meriah", "yeltsin tejeda", "yeremy pino", "yoon jong-gyu", "youri tielemans", "yousef hassan", "youssef en-nesyri", "youssef msakni ", "youssouf fofana", "youssouf sabaly", "youssoufa moukoko", "youstin salas", "yuki soma", "yunus musah", "yussuf poulsen", "yuto nagatomo", "zakaria aboukhlal", "zeno debast", "álvaro morata", "álvaro zamora", "ángel correa", "ángel di maría", "ángel mena", "ángelo preciado", "éder militão", "édouard mendy", "érick gutiérrez", "éverton ribeiro", "óscar duarte", "i̇lkay gündoğan", "łukasz skorupski" ]
Danung/image_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # image_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2141 - Accuracy: 0.5938 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 7 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.2602 | 0.5312 | | No log | 2.0 | 80 | 1.2212 | 0.55 | | No log | 3.0 | 120 | 1.2422 | 0.5375 | | No log | 4.0 | 160 | 1.1822 | 0.6 | | No log | 5.0 | 200 | 1.2218 | 0.55 | | No log | 6.0 | 240 | 1.1602 | 0.6125 | | No log | 7.0 | 280 | 1.2598 | 0.5687 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.2.0+cu118 - Datasets 2.17.0 - Tokenizers 0.15.2
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
jeemsterri/emotion_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # emotion_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.1136 - Accuracy: 0.65 - F1: 0.6231 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 16 - eval_batch_size: 16 - seed: 45 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: cosine_with_restarts - num_epochs: 30 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | |:-------------:|:-----:|:----:|:---------------:|:--------:|:------:| | 1.9172 | 1.0 | 43 | 1.5751 | 0.4333 | 0.3263 | | 1.4505 | 2.0 | 86 | 1.3041 | 0.5333 | 0.4651 | | 1.1121 | 3.0 | 129 | 1.2902 | 0.4833 | 0.4684 | | 0.8491 | 4.0 | 172 | 1.2309 | 0.5167 | 0.4916 | | 0.6168 | 5.0 | 215 | 1.2573 | 0.5583 | 0.5310 | | 0.3953 | 6.0 | 258 | 1.1502 | 0.575 | 0.5401 | | 0.3048 | 7.0 | 301 | 1.1136 | 0.65 | 0.6231 | | 0.1875 | 8.0 | 344 | 1.4224 | 0.5667 | 0.5598 | | 0.1277 | 9.0 | 387 | 1.3467 | 0.6167 | 0.6011 | | 0.1123 | 10.0 | 430 | 1.5838 | 0.5833 | 0.5657 | | 0.1123 | 11.0 | 473 | 1.5063 | 0.5833 | 0.5550 | | 0.0694 | 12.0 | 516 | 1.7733 | 0.55 | 0.5320 | | 0.0499 | 13.0 | 559 | 1.6329 | 0.5833 | 0.5536 | | 0.0367 | 14.0 | 602 | 1.6878 | 0.5833 | 0.5685 | | 0.0291 | 15.0 | 645 | 1.6855 | 0.575 | 0.5392 | | 0.0284 | 16.0 | 688 | 1.7869 | 0.6083 | 0.5880 | | 0.0316 | 17.0 | 731 | 1.5831 | 0.5917 | 0.5670 | | 0.0273 | 18.0 | 774 | 1.5933 | 0.625 | 0.5984 | | 0.0234 | 19.0 | 817 | 1.7830 | 0.5833 | 0.5652 | | 0.0194 | 20.0 | 860 | 1.6804 | 0.6083 | 0.5878 | | 0.0214 | 21.0 | 903 | 1.5962 | 0.6 | 0.5701 | | 0.0204 | 22.0 | 946 | 1.5684 | 0.625 | 0.5992 | | 0.0178 | 23.0 | 989 | 1.5924 | 0.625 | 0.5992 | | 0.0173 | 24.0 | 1032 | 1.6228 | 0.6167 | 0.5933 | | 0.016 | 25.0 | 1075 | 1.6177 | 0.6333 | 0.6073 | | 0.016 | 26.0 | 1118 | 1.6268 | 0.625 | 0.6009 | | 0.016 | 27.0 | 1161 | 1.6387 | 0.625 | 0.6009 | | 0.0159 | 28.0 | 1204 | 1.6403 | 0.625 | 0.6009 | | 0.0162 | 29.0 | 1247 | 1.6409 | 0.625 | 0.6009 | | 0.018 | 30.0 | 1290 | 1.6412 | 0.625 | 0.6009 | ### Framework versions - Transformers 4.37.2 - Pytorch 2.1.2 - Datasets 2.16.1 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
nabilayumnan/emotion_classification
<!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # emotion_classification This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set: - Loss: 1.2936 - Accuracy: 0.5 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 5e-05 - train_batch_size: 16 - eval_batch_size: 16 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 10 ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:----:|:---------------:|:--------:| | No log | 1.0 | 40 | 1.5449 | 0.4562 | | No log | 2.0 | 80 | 1.5041 | 0.4188 | | No log | 3.0 | 120 | 1.3526 | 0.5375 | | No log | 4.0 | 160 | 1.3390 | 0.5125 | | No log | 5.0 | 200 | 1.2977 | 0.4875 | | No log | 6.0 | 240 | 1.2655 | 0.525 | | No log | 7.0 | 280 | 1.2572 | 0.5437 | | No log | 8.0 | 320 | 1.2862 | 0.4875 | | No log | 9.0 | 360 | 1.2907 | 0.5375 | | No log | 10.0 | 400 | 1.2621 | 0.5125 | ### Framework versions - Transformers 4.35.2 - Pytorch 2.1.0+cu121 - Datasets 2.17.0 - Tokenizers 0.15.1
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
IsaacMwesigwa/footballer-retrain-3
# Model Trained Using AutoTrain - Problem type: Image Classification ## Validation Metrics loss: nan f1_macro: 2.895499120347367e-06 f1_micro: 0.0012045290291496024 f1_weighted: 2.8982892905428352e-06 precision_macro: 1.4494934165458512e-06 precision_micro: 0.0012045290291496024 precision_weighted: 1.4508901820640839e-06 recall_macro: 0.0012033694344163659 recall_micro: 0.0012045290291496024 recall_weighted: 0.0012045290291496024 accuracy: 0.0012045290291496024
[ "aaron long", "aaron mooy", "aaron ramsdale", "aaron ramsey", "abde ezzalzouli", "abdelhamid sabiri", "abdelkarim hassan", "abderrazak hamdallah", "abdou diallo", "abdul fatawu issahaku", "abdul manaf nurudeen", "abdulaziz hatem", "abdulelah al-amri", "abdulellah al-malki", "abdullah madu", "abdullah otayf", "abdulrahman al-aboud", "abolfazl jalali", "achraf dari", "achraf hakimi", "adam davies", "adrien rabiot", "agustín canobbio", "ahmad nourollahi", "ahmed alaaeldin", "ahmed reda tagnaouti", "ajdin hrustic", "akram afif", "alan franco", "alejandro balde", "aleksandar mitrović", "alex sandro", "alex telles", "alexander bah", "alexander djiku", "alexander domínguez", "alexis mac allister", "alexis vega", "alfred gomis", "alfredo talavera", "ali abdi", "ali al-bulaihi", "ali al-hassan", "ali assadalla", "ali gholizadeh", "ali karimi", "ali maâloul", "alidu seidu", "alireza beiranvand", "alireza jahanbakhsh", "alisson", "alistair johnston", "almoez ali", "alphonse areola", "alphonso davies", "amadou onana", "amir abedzadeh", "anass zaroury", "andreas christensen", "andreas cornelius", "andreas skov olsen", "andrej kramarić", "andrew redmayne", "andries noppert", "andrija živković", "andré ayew ", "andré onana", "andré silva", "andré-frank zambo anguissa", "andrés guardado ", "anis ben slimane", "ansu fati", "ante budimir", "anthony contreras", "anthony hernández", "antoine griezmann", "antoine semenyo", "antonee robinson", "antonio rüdiger", "antony", "antónio silva", "ao tanaka", "ardon jashari", "arkadiusz milik", "armel bella-kotchap", "arthur theate", "artur jędrzejczyk", "assim madibo", "atiba hutchinson ", "aurélien tchouaméni", "awer mabil", "axel disasi", "axel witsel", "ayase ueda", "aymen dahmen", "aymen mathlouthi", "aymeric laporte", "ayrton preciado", "aziz behich", "azzedine ounahi", "aïssa laïdouni", "baba rahman", "badr benoun", "bailey wright", "bamba dieng", "bartosz bereszyński", "bassam al-rawi", "bechir ben saïd", "ben cabango", "ben davies", "ben white", "benjamin pavard", "bernardo silva", "bilal el khannous", "bilel ifa", "borna barišić", "borna sosa", "boualem khoukhi", "boulaye dia", "brandon aguilera", "breel embolo", "bremer", "brenden aaronson", "brennan johnson", "bruno fernandes", "bruno guimarães", "bruno petković", "bryan mbeumo", "bryan oviedo", "bryan ruiz ", "bukayo saka", "callum wilson", "cameron carter-vickers", "cameron devlin", "carlos gruezo", "carlos martínez", "carlos rodríguez", "carlos soler", "casemiro", "celso borges", "charles de ketelaere", "cheikhou kouyaté", "cho gue-sung", "cho yu-min", "chris gunter", "chris mepham", "christian bassogog", "christian eriksen", "christian fassnacht", "christian günter", "christian nørgaard", "christian pulisic", "christopher wooh", "cody gakpo", "collins fai", "connor roberts", "conor coady", "conor gallagher", "craig goodwin", "cristian roldan", "cristian romero", "cristiano ronaldo ", "cyle larin", "césar azpilicueta", "césar montes", "daichi kamada", "daizen maeda", "daley blind", "damian szymański", "dani alves", "dani carvajal", "dani olmo", "daniel afriyie", "daniel amartey", "daniel chacón", "daniel james", "daniel schmidt", "daniel wass", "daniel-kofi kyereh", "danilo", "danilo pereira", "danny vukovic", "danny ward", "darko lazović", "darwin núñez", "david raum", "david raya", "david wotherspoon", "davy klaassen", "dayne st. 
clair", "dayot upamecano", "deandre yedlin", "declan rice", "dejan lovren", "denis odoi", "denis zakaria", "denzel dumfries", "derek cornelius", "devis epassy", "diego godín ", "diego palacios", "diogo costa", "diogo dalot", "djibril sow", "djorkaeff reasco", "domagoj vida", "dominik livaković", "douglas lópez", "dries mertens", "dušan tadić ", "dušan vlahović", "dylan bronn", "dylan levitt", "eden hazard ", "ederson", "edimilson fernandes", "edinson cavani", "edson álvarez", "eduardo camavinga", "ehsan hajsafi ", "eiji kawashima", "elisha owusu", "ellyes skhiri", "emiliano martínez", "enner valencia ", "enzo ebosse", "enzo fernández", "eray cömert", "eric dier", "eric garcía", "eric maxim choupo-moting", "esteban alvarado", "ethan ampadu", "ethan horvath", "exequiel palacios", "fabian frei", "fabian rieder", "fabian schär", "fabinho", "facundo pellistri", "facundo torres", "famara diédhiou", "federico valverde", "ferjani sassi", "fernando muslera", "ferran torres", "filip kostić", "filip mladenović", "filip đuričić", "firas al-buraikan", "fodé ballo-touré", "formose mendy", "fran karačić", "francisco calvo", "franco armani", "fred", "frederik rønnow", "frenkie de jong", "félix torres", "gabriel jesus", "gabriel martinelli", "gaku shibasaki", "garang kuol", "gareth bale ", "gavi", "gaël ondoua", "georges-kévin nkoudou", "gerardo arteaga", "germán pezzella", "gerson torres", "gerónimo rulli", "ghailene chaalali", "gideon mensah", "giorgian de arrascaeta", "giovanni reyna", "gonzalo montiel", "gonzalo plata", "gonçalo ramos", "granit xhaka ", "gregor kobel", "grzegorz krychowiak", "guido rodríguez", "guillermo ochoa", "guillermo varela", "haitham asiri", "haji wright", "hakim ziyech", "hannibal mejbri", "hans vanaken", "haris seferovic", "harry kane ", "harry maguire", "harry souttar", "harry wilson", "hassan al-haydos ", "hassan al-tambakti", "hattan bahebri", "henry martín", "hernán galíndez", "hidemasa morita", "hiroki ito", "hiroki sakai", "hirving lozano", "homam ahmed", "hong chul", "hossein hosseini", "hossein kanaanizadegan", "hugo guillamón", "hugo lloris ", "hwang hee-chan", "hwang in-beom", "hwang ui-jo", "héctor herrera", "héctor moreno", "ibrahim danlad", "ibrahima konaté", "idrissa gueye", "iké ugbo", "ilias chair", "iliman ndiaye", "ismaeel mohammad", "ismail jakobs", "ismaël koné", "ismaïla sarr", "issam jebali", "ivan ilić", "ivan perišić", "ivica ivušić", "ivo grbić", "iñaki williams", "jack grealish", "jackson irvine", "jackson porozo", "jakub kamiński", "jakub kiwior", "jamal musiala", "james maddison", "james pantemis", "jamie maclaren", "jan bednarek", "jan vertonghen", "jason cummings", "jassem gaber", "jawad el yamiq", "jean-charles castelletto", "jean-pierre nsame", "jens stryger larsen", "jeong woo-yeong", "jeremie frimpong", "jeremy sarmiento", "jerome ngom mbekeli", "jesper lindstrøm", "jesús ferreira", "jesús gallardo", "jewison bennette", "jo hyeon-woo", "joachim andersen", "joakim mæhle", "joe allen", "joe morrell", "joe rodon", "joe scally", "joel campbell", "joel king", "joel waterman", "johan venegas", "johan vásquez", "john stones", "jonas hofmann", "jonas omlin", "jonas wind", "jonathan david", "jonathan osorio", "jonny williams", "jordan ayew", "jordan henderson", "jordan morris", "jordan pickford", "jordan veretout", "jordi alba", "jorge sánchez", "joseph aidoo", "josh sargent", "joshua kimmich", "josip juranović", "josip stanišić", "josip šutalo", "josé cifuentes", "josé giménez", "josé luis rodríguez", "josé sá", "joão cancelo", "joão félix", "joão 
mário", "joão palhinha", "joško gvardiol", "juan foyth", "juan pablo vargas", "jude bellingham", "jules koundé", "julian brandt", "julián álvarez", "jung woo-young", "junior hoilett", "junya ito", "jurriën timber", "justin bijlow", "jérémy doku", "kai havertz", "kalidou koulibaly ", "kalvin phillips", "kamal miller", "kamal sowah", "kamaldeen sulemana", "kamil glik", "kamil grabara", "kamil grosicki", "kaoru mitoma", "karim adeyemi", "karim ansarifard", "karim benzema", "karim boudiaf", "karl toko ekambi", "karol świderski", "kasper dolberg", "kasper schmeichel", "keanu baccus", "kellyn acosta", "kendall waston", "kenneth taylor", "kevin de bruyne", "kevin rodríguez", "kevin trapp", "kevin álvarez", "keylor navas", "keysher fuller", "khalid muneer", "kieffer moore", "kieran trippier", "kim jin-su", "kim min-jae", "kim moon-hwan", "kim seung-gyu", "kim tae-hwan", "kim young-gwon", "kingsley coman", "ko itakura", "koen casteels", "koke", "kristijan jakić", "krystian bielik", "krzysztof piątek", "krépin diatta", "kwon chang-hoon", "kwon kyung-won", "kye rowles", "kyle walker", "kylian mbappé", "lautaro martínez", "lawrence ati-zigi", "leander dendoncker", "leandro paredes", "leandro trossard", "lee jae-sung", "lee kang-in", "leon goretzka", "leroy sané", "liam fraser", "liam millar", "lionel messi ", "lisandro martínez", "lovro majer", "loïs openda", "luca de la torre", "lucas cavallini", "lucas hernandez", "lucas paquetá", "lucas torreira", "luis chávez", "luis romo", "luis suárez", "luka jović", "luka modrić ", "luka sučić", "lukas klostermann", "luke shaw", "luuk de jong", "majid hosseini", "mamadou loum", "manuel akanji", "manuel neuer ", "manuel ugarte", "marc-andré ter stegen", "marcelo brozović", "marco asensio", "marcos acuña", "marcos llorente", "marcus rashford", "marcus thuram", "mario götze", "mario pašalić", "mark harris", "mark-anthony kaye", "marko dmitrović", "marko grujić", "marko livaja", "marquinhos", "marten de roon", "martin boyle", "martin braithwaite", "martin erlić", "martin hongla", "martín cáceres", "mason mount", "mateo kovačić", "mateusz wieteska", "matheus nunes", "mathew leckie", "mathew ryan ", "mathias jensen", "mathías olivera", "matt turner", "matteo guendouzi", "matthew smith", "matthias ginter", "matthijs de ligt", "matty cash", "matías vecino", "matías viña", "maxi gómez", "maya yoshida ", "mehdi taremi", "mehdi torabi", "memphis depay", "meshaal barsham", "michael estrada", "michał skóraś", "michel aebischer", "michy batshuayi", "miki yamane", "mikkel damsgaard", "milad mohammadi", "milan borjan", "miloš degenek", "miloš veljković", "mislav oršić", "mitchell duke", "mohamed ali ben romdhane", "mohamed dräger", "mohamed kanno", "mohammed al-breik", "mohammed al-owais", "mohammed al-rubaie", "mohammed kudus", "mohammed muntari", "mohammed salisu", "mohammed waad", "moisés caicedo", "moisés ramírez", "montassar talbi", "morteza pouraliganji", "mostafa meshaal", "mouez hassen", "moumi ngamaleu", "moustapha name", "munir mohamedi", "musab kheder", "na sang-ho", "nader ghandri", "nahuel molina", "naif al-hadhrami", "nampalys mendy", "nasser al-dawsari", "nathan aké", "nathaniel atkinson", "nawaf al-abed", "nawaf al-aqidi", "nayef aguerd", "naïm sliti", "neco williams", "nemanja gudelj", "nemanja maksimović", "nemanja radonjić", "neymar", "nick pope", "niclas füllkrug", "nico elvedi", "nico schlotterbeck", "nico williams", "nicola zalewski", "nicolas jackson", "nicolas nkoulou", "nicolás otamendi", "nicolás tagliafico", "nicolás de la cruz", "niklas süle", 
"nikola milenković", "nikola vlašić", "noa lang", "noah okafor", "nouhou tolo", "noussair mazraoui", "nuno mendes", "néstor araujo", "oliver christensen", "olivier giroud", "olivier mbaizo", "olivier ntcham", "orbelín pineda", "osman bukari", "otávio", "ousmane dembélé", "pablo sarabia", "paik seung-ho", "pape abou cissé", "pape gueye", "pape matar sarr", "papu gómez", "pathé ciss", "patrick sequeira", "pau torres", "paulo dybala", "payam niazmand", "pedri", "pedro", "pepe", "pervis estupiñán", "phil foden", "philipp köhn", "piero hincapié", "pierre kunde", "pierre-emile højbjerg", "piotr zieliński", "predrag rajković", "przemysław frankowski", "rafael leão", "raheem sterling", "ramin rezaeian", "randal kolo muani", "raphaël guerreiro", "raphaël varane", "raphinha", "rasmus kristensen", "raúl jiménez", "remko pasveer", "remo freuler", "renato steffen", "ricardo horta", "ricardo rodriguez", "richarlison", "richie laryea", "riley mcgree", "ritsu dōan", "riyadh sharahili", "roan wilson", "robert arboleda", "robert gumny", "robert lewandowski ", "robert skov", "robert sánchez", "roberto alvarado", "rodolfo cota", "rodri", "rodrigo bentancur", "rodrigo de paul", "rodrygo", "rogelio funes mori", "romain saïss ", "romario ibarra", "romelu lukaku", "ronald araújo", "rouzbeh cheshmi", "ruben vargas", "rubin colwill", "rui patrício", "ró-ró", "rónald matarrita", "rúben dias", "rúben neves", "saad al-sheeb", "sadegh moharrami", "sadio mané", "saeid ezatolahi", "saleh al-shehri", "salem al-dawsari", "salem al-hajri", "salis abdul samed", "salman al-faraj ", "sam adekugbe", "saman ghoddos", "sami al-najei", "samuel gouet", "samuel piette", "sardar azmoun", "saud abdulhamid", "saša lukić", "sean johnson", "sebas méndez", "sebastian szymański", "sebastián coates", "sebastián sosa", "seifeddine jaziri", "selim amallah", "seny dieng", "serge gnabry", "sergej milinković-savić", "sergio busquets ", "sergio rochet", "sergiño dest", "shaq moore", "shogo taniguchi", "shojae khalilzadeh", "shuto machino", "shūichi gonda", "silvan widmer", "simon kjær ", "simon mignolet", "simon ngapandouetnbu", "sofiane boufal", "sofyan amrabat", "son heung-min ", "son jun-ho", "song bum-keun", "song min-kyu", "sorba thomas", "souaibou marou", "srđan babić", "stefan mitrović", "stefan de vrij", "stephen eustáquio", "steve mandanda", "steven berghuis", "steven bergwijn", "steven vitória", "strahinja eraković", "strahinja pavlović", "sultan al-ghannam", "szymon żurkowski", "taha yassine khenissi", "tajon buchanan", "takefusa kubo", "takehiro tomiyasu", "takuma asano", "takumi minamino", "tarek salman", "tariq lamptey", "teun koopmeiners", "theo hernandez", "thiago almada", "thiago silva ", "thibaut courtois", "thilo kehrer", "thomas delaney", "thomas deng", "thomas meunier", "thomas müller", "thomas partey", "thorgan hazard", "tim ream", "timothy castagne", "timothy weah", "toby alderweireld", "tom lockyer", "trent alexander-arnold", "tyler adams", "tyrell malacia", "unai simón", "uriel antuna", "uroš račić", "vahid amiri", "vanja milinković-savić", "victor nelsson", "vincent aboubakar ", "vincent janssen", "vinícius júnior", "virgil van dijk ", "vitinha", "wahbi khazri", "wajdi kechrida", "walid cheddira", "walker zimmerman", "wataru endo", "wayne hennessey", "weston mckennie", "weverton", "william carvalho", "william pacho", "william saliba", "wojciech szczęsny", "wout faes", "wout weghorst", "xavi simons", "xavier arreaga", "xherdan shaqiri", "yahia attiyat allah", "yahya jabrane", "yann sommer", "yannick carrasco", "yasser 
al-shahrani", "yassine bounou", "yassine meriah", "yeltsin tejeda", "yeremy pino", "yoon jong-gyu", "youri tielemans", "yousef hassan", "youssef en-nesyri", "youssef msakni ", "youssouf fofana", "youssouf sabaly", "youssoufa moukoko", "youstin salas", "yuki soma", "yunus musah", "yussuf poulsen", "yuto nagatomo", "zakaria aboukhlal", "zeno debast", "álvaro morata", "álvaro zamora", "ángel correa", "ángel di maría", "ángel mena", "ángelo preciado", "éder militão", "édouard mendy", "érick gutiérrez", "éverton ribeiro", "óscar duarte", "i̇lkay gündoğan", "łukasz skorupski" ]
louisebld/pizza-or-not-pizza-model
# pizza-or-not-pizza-model

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on an unspecified dataset.
It achieves the following results on the evaluation set:
- Loss: 0.4637
- Accuracy: 0.8753

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0005
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 64
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 4

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 1.2343 | 1.0 | 25 | 0.9184 | 0.7157 |
| 0.5527 | 2.0 | 50 | 0.6300 | 0.8130 |
| 0.377 | 3.0 | 75 | 0.5489 | 0.8404 |
| 0.2202 | 4.0 | 100 | 0.4637 | 0.8753 |

### Framework versions

- Transformers 4.38.1
- Pytorch 2.2.1+cu121
- Datasets 2.17.1
- Tokenizers 0.15.2
[ "not pizza", "4 fromages", "ananas", "champignons", "chevre", "flammenkueche", "pepperoni", "margherita", "savoie" ]
reyhanwiira/image_classification
# image_classification

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 1.2542
- Accuracy: 0.5375

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 15

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 40 | 1.7657 | 0.3187 |
| No log | 2.0 | 80 | 1.6332 | 0.3063 |
| No log | 3.0 | 120 | 1.4587 | 0.4625 |
| No log | 4.0 | 160 | 1.4618 | 0.3812 |
| No log | 5.0 | 200 | 1.2944 | 0.5312 |
| No log | 6.0 | 240 | 1.3633 | 0.4562 |
| No log | 7.0 | 280 | 1.4372 | 0.3937 |
| No log | 8.0 | 320 | 1.2895 | 0.5563 |
| No log | 9.0 | 360 | 1.2892 | 0.525 |
| No log | 10.0 | 400 | 1.2596 | 0.5375 |
| No log | 11.0 | 440 | 1.3227 | 0.5188 |
| No log | 12.0 | 480 | 1.3231 | 0.5125 |
| 1.0624 | 13.0 | 520 | 1.2873 | 0.5312 |
| 1.0624 | 14.0 | 560 | 1.3093 | 0.5125 |
| 1.0624 | 15.0 | 600 | 1.2294 | 0.5563 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "label_0", "label_1", "label_2", "label_3", "label_4", "label_5", "label_6", "label_7" ]
sai17/cards_bottom_right_swin-tiny-patch4-window7-224-finetuned-v2
# cards_bottom_right_swin-tiny-patch4-window7-224-finetuned-v2

This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.9317
- Accuracy: 0.6079

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 30

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:-----:|:---------------:|:--------:|
| 1.4965 | 1.0 | 1338 | 1.3516 | 0.4156 |
| 1.4486 | 2.0 | 2677 | 1.1784 | 0.4938 |
| 1.4384 | 3.0 | 4015 | 1.1050 | 0.5223 |
| 1.4538 | 4.0 | 5354 | 1.0751 | 0.5433 |
| 1.3928 | 5.0 | 6692 | 1.0604 | 0.5440 |
| 1.4148 | 6.0 | 8031 | 1.0459 | 0.5523 |
| 1.3921 | 7.0 | 9369 | 1.0464 | 0.5501 |
| 1.3812 | 8.0 | 10708 | 1.0461 | 0.5491 |
| 1.3494 | 9.0 | 12046 | 1.0445 | 0.5486 |
| 1.3555 | 10.0 | 13385 | 0.9973 | 0.5693 |
| 1.3303 | 11.0 | 14723 | 0.9952 | 0.5719 |
| 1.3575 | 12.0 | 16062 | 1.0317 | 0.5574 |
| 1.3129 | 13.0 | 17400 | 0.9851 | 0.5813 |
| 1.3439 | 14.0 | 18739 | 1.0510 | 0.5523 |
| 1.3371 | 15.0 | 20077 | 0.9820 | 0.5795 |
| 1.2835 | 16.0 | 21416 | 0.9886 | 0.5738 |
| 1.3002 | 17.0 | 22754 | 0.9685 | 0.5869 |
| 1.289 | 18.0 | 24093 | 0.9519 | 0.5941 |
| 1.3007 | 19.0 | 25431 | 0.9855 | 0.5800 |
| 1.2927 | 20.0 | 26770 | 0.9499 | 0.5925 |
| 1.2985 | 21.0 | 28108 | 0.9669 | 0.5854 |
| 1.2957 | 22.0 | 29447 | 0.9551 | 0.5903 |
| 1.2579 | 23.0 | 30785 | 0.9300 | 0.6053 |
| 1.2475 | 24.0 | 32124 | 0.9296 | 0.6049 |
| 1.2227 | 25.0 | 33462 | 0.9317 | 0.6079 |
| 1.2069 | 26.0 | 34801 | 0.9609 | 0.5887 |
| 1.2156 | 27.0 | 36139 | 0.9297 | 0.6052 |
| 1.25 | 28.0 | 37478 | 0.9300 | 0.6062 |
| 1.2394 | 29.0 | 38816 | 0.9238 | 0.6071 |
| 1.209 | 29.99 | 40140 | 0.9284 | 0.6064 |

### Framework versions

- Transformers 4.37.2
- Pytorch 2.0.1+cu117
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "grade_1", "grade_2", "grade_3", "grade_4", "grade_5", "grade_6", "grade_7", "grade_8", "grade_9" ]
IsaacMwesigwa/footballer-retriain-5
# Model Trained Using AutoTrain

- Problem type: Image Classification

## Validation Metrics

- loss: 6.718141555786133
- f1_macro: 0.0007751185489463366
- f1_micro: 0.0031317754757889662
- f1_weighted: 0.0007758654711809271
- precision_macro: 0.0005378715654317669
- precision_micro: 0.0031317754757889667
- precision_weighted: 0.0005383898709633804
- recall_macro: 0.003128760529482552
- recall_micro: 0.0031317754757889667
- recall_weighted: 0.0031317754757889667
- accuracy: 0.0031317754757889667
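In single-label multiclass classification, micro-averaged precision, recall and F1 all collapse to plain accuracy, which is why the `*_micro` values and `accuracy` above are identical, while the macro averages weight every footballer class equally and therefore sit lower. A small scikit-learn sketch of the relationship on toy labels (the arrays are illustrative, not this model's predictions):

```python
from sklearn.metrics import accuracy_score, f1_score

# Toy single-label predictions over four classes (illustrative only).
y_true = [0, 1, 2, 3, 3, 2, 1, 0]
y_pred = [0, 1, 2, 0, 3, 2, 0, 1]

print("accuracy :", accuracy_score(y_true, y_pred))
print("micro F1 :", f1_score(y_true, y_pred, average="micro"))     # equals accuracy
print("macro F1 :", f1_score(y_true, y_pred, average="macro"))     # unweighted mean over classes
print("weighted :", f1_score(y_true, y_pred, average="weighted"))  # weighted by class support
```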
[ "aaron long", "aaron mooy", "aaron ramsdale", "aaron ramsey", "abde ezzalzouli", "abdelhamid sabiri", "abdelkarim hassan", "abderrazak hamdallah", "abdou diallo", "abdul fatawu issahaku", "abdul manaf nurudeen", "abdulaziz hatem", "abdulelah al-amri", "abdulellah al-malki", "abdullah madu", "abdullah otayf", "abdulrahman al-aboud", "abolfazl jalali", "achraf dari", "achraf hakimi", "adam davies", "adrien rabiot", "agustín canobbio", "ahmad nourollahi", "ahmed alaaeldin", "ahmed reda tagnaouti", "ajdin hrustic", "akram afif", "alan franco", "alejandro balde", "aleksandar mitrović", "alex sandro", "alex telles", "alexander bah", "alexander djiku", "alexander domínguez", "alexis mac allister", "alexis vega", "alfred gomis", "alfredo talavera", "ali abdi", "ali al-bulaihi", "ali al-hassan", "ali assadalla", "ali gholizadeh", "ali karimi", "ali maâloul", "alidu seidu", "alireza beiranvand", "alireza jahanbakhsh", "alisson", "alistair johnston", "almoez ali", "alphonse areola", "alphonso davies", "amadou onana", "amir abedzadeh", "anass zaroury", "andreas christensen", "andreas cornelius", "andreas skov olsen", "andrej kramarić", "andrew redmayne", "andries noppert", "andrija živković", "andré ayew ", "andré onana", "andré silva", "andré-frank zambo anguissa", "andrés guardado ", "anis ben slimane", "ansu fati", "ante budimir", "anthony contreras", "anthony hernández", "antoine griezmann", "antoine semenyo", "antonee robinson", "antonio rüdiger", "antony", "antónio silva", "ao tanaka", "ardon jashari", "arkadiusz milik", "armel bella-kotchap", "arthur theate", "artur jędrzejczyk", "assim madibo", "atiba hutchinson ", "aurélien tchouaméni", "awer mabil", "axel disasi", "axel witsel", "ayase ueda", "aymen dahmen", "aymen mathlouthi", "aymeric laporte", "ayrton preciado", "aziz behich", "azzedine ounahi", "aïssa laïdouni", "baba rahman", "badr benoun", "bailey wright", "bamba dieng", "bartosz bereszyński", "bassam al-rawi", "bechir ben saïd", "ben cabango", "ben davies", "ben white", "benjamin pavard", "bernardo silva", "bilal el khannous", "bilel ifa", "borna barišić", "borna sosa", "boualem khoukhi", "boulaye dia", "brandon aguilera", "breel embolo", "bremer", "brenden aaronson", "brennan johnson", "bruno fernandes", "bruno guimarães", "bruno petković", "bryan mbeumo", "bryan oviedo", "bryan ruiz ", "bukayo saka", "callum wilson", "cameron carter-vickers", "cameron devlin", "carlos gruezo", "carlos martínez", "carlos rodríguez", "carlos soler", "casemiro", "celso borges", "charles de ketelaere", "cheikhou kouyaté", "cho gue-sung", "cho yu-min", "chris gunter", "chris mepham", "christian bassogog", "christian eriksen", "christian fassnacht", "christian günter", "christian nørgaard", "christian pulisic", "christopher wooh", "cody gakpo", "collins fai", "connor roberts", "conor coady", "conor gallagher", "craig goodwin", "cristian roldan", "cristian romero", "cristiano ronaldo ", "cyle larin", "césar azpilicueta", "césar montes", "daichi kamada", "daizen maeda", "daley blind", "damian szymański", "dani alves", "dani carvajal", "dani olmo", "daniel afriyie", "daniel amartey", "daniel chacón", "daniel james", "daniel schmidt", "daniel wass", "daniel-kofi kyereh", "danilo", "danilo pereira", "danny vukovic", "danny ward", "darko lazović", "darwin núñez", "david raum", "david raya", "david wotherspoon", "davy klaassen", "dayne st. 
clair", "dayot upamecano", "deandre yedlin", "declan rice", "dejan lovren", "denis odoi", "denis zakaria", "denzel dumfries", "derek cornelius", "devis epassy", "diego godín ", "diego palacios", "diogo costa", "diogo dalot", "djibril sow", "djorkaeff reasco", "domagoj vida", "dominik livaković", "douglas lópez", "dries mertens", "dušan tadić ", "dušan vlahović", "dylan bronn", "dylan levitt", "eden hazard ", "ederson", "edimilson fernandes", "edinson cavani", "edson álvarez", "eduardo camavinga", "ehsan hajsafi ", "eiji kawashima", "elisha owusu", "ellyes skhiri", "emiliano martínez", "enner valencia ", "enzo ebosse", "enzo fernández", "eray cömert", "eric dier", "eric garcía", "eric maxim choupo-moting", "esteban alvarado", "ethan ampadu", "ethan horvath", "exequiel palacios", "fabian frei", "fabian rieder", "fabian schär", "fabinho", "facundo pellistri", "facundo torres", "famara diédhiou", "federico valverde", "ferjani sassi", "fernando muslera", "ferran torres", "filip kostić", "filip mladenović", "filip đuričić", "firas al-buraikan", "fodé ballo-touré", "formose mendy", "fran karačić", "francisco calvo", "franco armani", "fred", "frederik rønnow", "frenkie de jong", "félix torres", "gabriel jesus", "gabriel martinelli", "gaku shibasaki", "garang kuol", "gareth bale ", "gavi", "gaël ondoua", "georges-kévin nkoudou", "gerardo arteaga", "germán pezzella", "gerson torres", "gerónimo rulli", "ghailene chaalali", "gideon mensah", "giorgian de arrascaeta", "giovanni reyna", "gonzalo montiel", "gonzalo plata", "gonçalo ramos", "granit xhaka ", "gregor kobel", "grzegorz krychowiak", "guido rodríguez", "guillermo ochoa", "guillermo varela", "haitham asiri", "haji wright", "hakim ziyech", "hannibal mejbri", "hans vanaken", "haris seferovic", "harry kane ", "harry maguire", "harry souttar", "harry wilson", "hassan al-haydos ", "hassan al-tambakti", "hattan bahebri", "henry martín", "hernán galíndez", "hidemasa morita", "hiroki ito", "hiroki sakai", "hirving lozano", "homam ahmed", "hong chul", "hossein hosseini", "hossein kanaanizadegan", "hugo guillamón", "hugo lloris ", "hwang hee-chan", "hwang in-beom", "hwang ui-jo", "héctor herrera", "héctor moreno", "ibrahim danlad", "ibrahima konaté", "idrissa gueye", "iké ugbo", "ilias chair", "iliman ndiaye", "ismaeel mohammad", "ismail jakobs", "ismaël koné", "ismaïla sarr", "issam jebali", "ivan ilić", "ivan perišić", "ivica ivušić", "ivo grbić", "iñaki williams", "jack grealish", "jackson irvine", "jackson porozo", "jakub kamiński", "jakub kiwior", "jamal musiala", "james maddison", "james pantemis", "jamie maclaren", "jan bednarek", "jan vertonghen", "jason cummings", "jassem gaber", "jawad el yamiq", "jean-charles castelletto", "jean-pierre nsame", "jens stryger larsen", "jeong woo-yeong", "jeremie frimpong", "jeremy sarmiento", "jerome ngom mbekeli", "jesper lindstrøm", "jesús ferreira", "jesús gallardo", "jewison bennette", "jo hyeon-woo", "joachim andersen", "joakim mæhle", "joe allen", "joe morrell", "joe rodon", "joe scally", "joel campbell", "joel king", "joel waterman", "johan venegas", "johan vásquez", "john stones", "jonas hofmann", "jonas omlin", "jonas wind", "jonathan david", "jonathan osorio", "jonny williams", "jordan ayew", "jordan henderson", "jordan morris", "jordan pickford", "jordan veretout", "jordi alba", "jorge sánchez", "joseph aidoo", "josh sargent", "joshua kimmich", "josip juranović", "josip stanišić", "josip šutalo", "josé cifuentes", "josé giménez", "josé luis rodríguez", "josé sá", "joão cancelo", "joão félix", "joão 
mário", "joão palhinha", "joško gvardiol", "juan foyth", "juan pablo vargas", "jude bellingham", "jules koundé", "julian brandt", "julián álvarez", "jung woo-young", "junior hoilett", "junya ito", "jurriën timber", "justin bijlow", "jérémy doku", "kai havertz", "kalidou koulibaly ", "kalvin phillips", "kamal miller", "kamal sowah", "kamaldeen sulemana", "kamil glik", "kamil grabara", "kamil grosicki", "kaoru mitoma", "karim adeyemi", "karim ansarifard", "karim benzema", "karim boudiaf", "karl toko ekambi", "karol świderski", "kasper dolberg", "kasper schmeichel", "keanu baccus", "kellyn acosta", "kendall waston", "kenneth taylor", "kevin de bruyne", "kevin rodríguez", "kevin trapp", "kevin álvarez", "keylor navas", "keysher fuller", "khalid muneer", "kieffer moore", "kieran trippier", "kim jin-su", "kim min-jae", "kim moon-hwan", "kim seung-gyu", "kim tae-hwan", "kim young-gwon", "kingsley coman", "ko itakura", "koen casteels", "koke", "kristijan jakić", "krystian bielik", "krzysztof piątek", "krépin diatta", "kwon chang-hoon", "kwon kyung-won", "kye rowles", "kyle walker", "kylian mbappé", "lautaro martínez", "lawrence ati-zigi", "leander dendoncker", "leandro paredes", "leandro trossard", "lee jae-sung", "lee kang-in", "leon goretzka", "leroy sané", "liam fraser", "liam millar", "lionel messi ", "lisandro martínez", "lovro majer", "loïs openda", "luca de la torre", "lucas cavallini", "lucas hernandez", "lucas paquetá", "lucas torreira", "luis chávez", "luis romo", "luis suárez", "luka jović", "luka modrić ", "luka sučić", "lukas klostermann", "luke shaw", "luuk de jong", "majid hosseini", "mamadou loum", "manuel akanji", "manuel neuer ", "manuel ugarte", "marc-andré ter stegen", "marcelo brozović", "marco asensio", "marcos acuña", "marcos llorente", "marcus rashford", "marcus thuram", "mario götze", "mario pašalić", "mark harris", "mark-anthony kaye", "marko dmitrović", "marko grujić", "marko livaja", "marquinhos", "marten de roon", "martin boyle", "martin braithwaite", "martin erlić", "martin hongla", "martín cáceres", "mason mount", "mateo kovačić", "mateusz wieteska", "matheus nunes", "mathew leckie", "mathew ryan ", "mathias jensen", "mathías olivera", "matt turner", "matteo guendouzi", "matthew smith", "matthias ginter", "matthijs de ligt", "matty cash", "matías vecino", "matías viña", "maxi gómez", "maya yoshida ", "mehdi taremi", "mehdi torabi", "memphis depay", "meshaal barsham", "michael estrada", "michał skóraś", "michel aebischer", "michy batshuayi", "miki yamane", "mikkel damsgaard", "milad mohammadi", "milan borjan", "miloš degenek", "miloš veljković", "mislav oršić", "mitchell duke", "mohamed ali ben romdhane", "mohamed dräger", "mohamed kanno", "mohammed al-breik", "mohammed al-owais", "mohammed al-rubaie", "mohammed kudus", "mohammed muntari", "mohammed salisu", "mohammed waad", "moisés caicedo", "moisés ramírez", "montassar talbi", "morteza pouraliganji", "mostafa meshaal", "mouez hassen", "moumi ngamaleu", "moustapha name", "munir mohamedi", "musab kheder", "na sang-ho", "nader ghandri", "nahuel molina", "naif al-hadhrami", "nampalys mendy", "nasser al-dawsari", "nathan aké", "nathaniel atkinson", "nawaf al-abed", "nawaf al-aqidi", "nayef aguerd", "naïm sliti", "neco williams", "nemanja gudelj", "nemanja maksimović", "nemanja radonjić", "neymar", "nick pope", "niclas füllkrug", "nico elvedi", "nico schlotterbeck", "nico williams", "nicola zalewski", "nicolas jackson", "nicolas nkoulou", "nicolás otamendi", "nicolás tagliafico", "nicolás de la cruz", "niklas süle", 
"nikola milenković", "nikola vlašić", "noa lang", "noah okafor", "nouhou tolo", "noussair mazraoui", "nuno mendes", "néstor araujo", "oliver christensen", "olivier giroud", "olivier mbaizo", "olivier ntcham", "orbelín pineda", "osman bukari", "otávio", "ousmane dembélé", "pablo sarabia", "paik seung-ho", "pape abou cissé", "pape gueye", "pape matar sarr", "papu gómez", "pathé ciss", "patrick sequeira", "pau torres", "paulo dybala", "payam niazmand", "pedri", "pedro", "pepe", "pervis estupiñán", "phil foden", "philipp köhn", "piero hincapié", "pierre kunde", "pierre-emile højbjerg", "piotr zieliński", "predrag rajković", "przemysław frankowski", "rafael leão", "raheem sterling", "ramin rezaeian", "randal kolo muani", "raphaël guerreiro", "raphaël varane", "raphinha", "rasmus kristensen", "raúl jiménez", "remko pasveer", "remo freuler", "renato steffen", "ricardo horta", "ricardo rodriguez", "richarlison", "richie laryea", "riley mcgree", "ritsu dōan", "riyadh sharahili", "roan wilson", "robert arboleda", "robert gumny", "robert lewandowski ", "robert skov", "robert sánchez", "roberto alvarado", "rodolfo cota", "rodri", "rodrigo bentancur", "rodrigo de paul", "rodrygo", "rogelio funes mori", "romain saïss ", "romario ibarra", "romelu lukaku", "ronald araújo", "rouzbeh cheshmi", "ruben vargas", "rubin colwill", "rui patrício", "ró-ró", "rónald matarrita", "rúben dias", "rúben neves", "saad al-sheeb", "sadegh moharrami", "sadio mané", "saeid ezatolahi", "saleh al-shehri", "salem al-dawsari", "salem al-hajri", "salis abdul samed", "salman al-faraj ", "sam adekugbe", "saman ghoddos", "sami al-najei", "samuel gouet", "samuel piette", "sardar azmoun", "saud abdulhamid", "saša lukić", "sean johnson", "sebas méndez", "sebastian szymański", "sebastián coates", "sebastián sosa", "seifeddine jaziri", "selim amallah", "seny dieng", "serge gnabry", "sergej milinković-savić", "sergio busquets ", "sergio rochet", "sergiño dest", "shaq moore", "shogo taniguchi", "shojae khalilzadeh", "shuto machino", "shūichi gonda", "silvan widmer", "simon kjær ", "simon mignolet", "simon ngapandouetnbu", "sofiane boufal", "sofyan amrabat", "son heung-min ", "son jun-ho", "song bum-keun", "song min-kyu", "sorba thomas", "souaibou marou", "srđan babić", "stefan mitrović", "stefan de vrij", "stephen eustáquio", "steve mandanda", "steven berghuis", "steven bergwijn", "steven vitória", "strahinja eraković", "strahinja pavlović", "sultan al-ghannam", "szymon żurkowski", "taha yassine khenissi", "tajon buchanan", "takefusa kubo", "takehiro tomiyasu", "takuma asano", "takumi minamino", "tarek salman", "tariq lamptey", "teun koopmeiners", "theo hernandez", "thiago almada", "thiago silva ", "thibaut courtois", "thilo kehrer", "thomas delaney", "thomas deng", "thomas meunier", "thomas müller", "thomas partey", "thorgan hazard", "tim ream", "timothy castagne", "timothy weah", "toby alderweireld", "tom lockyer", "trent alexander-arnold", "tyler adams", "tyrell malacia", "unai simón", "uriel antuna", "uroš račić", "vahid amiri", "vanja milinković-savić", "victor nelsson", "vincent aboubakar ", "vincent janssen", "vinícius júnior", "virgil van dijk ", "vitinha", "wahbi khazri", "wajdi kechrida", "walid cheddira", "walker zimmerman", "wataru endo", "wayne hennessey", "weston mckennie", "weverton", "william carvalho", "william pacho", "william saliba", "wojciech szczęsny", "wout faes", "wout weghorst", "xavi simons", "xavier arreaga", "xherdan shaqiri", "yahia attiyat allah", "yahya jabrane", "yann sommer", "yannick carrasco", "yasser 
al-shahrani", "yassine bounou", "yassine meriah", "yeltsin tejeda", "yeremy pino", "yoon jong-gyu", "youri tielemans", "yousef hassan", "youssef en-nesyri", "youssef msakni ", "youssouf fofana", "youssouf sabaly", "youssoufa moukoko", "youstin salas", "yuki soma", "yunus musah", "yussuf poulsen", "yuto nagatomo", "zakaria aboukhlal", "zeno debast", "álvaro morata", "álvaro zamora", "ángel correa", "ángel di maría", "ángel mena", "ángelo preciado", "éder militão", "édouard mendy", "érick gutiérrez", "éverton ribeiro", "óscar duarte", "i̇lkay gündoğan", "łukasz skorupski" ]
gabrielganan/image_classification
# image_classification

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 1.3925
- Accuracy: 0.4813

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 5

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 40 | 1.8673 | 0.3937 |
| No log | 2.0 | 80 | 1.5846 | 0.3875 |
| No log | 3.0 | 120 | 1.4794 | 0.4875 |
| No log | 4.0 | 160 | 1.4010 | 0.5375 |
| No log | 5.0 | 200 | 1.3734 | 0.5625 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
IsaacMwesigwa/footballer-recognition-6
# Model Trained Using AutoTrain

- Problem type: Image Classification

## Validation Metrics

- loss: 4.167623996734619
- f1_macro: 0.18343584441809782
- f1_micro: 0.2057335581787521
- f1_weighted: 0.18361260745776836
- precision_macro: 0.19376895027886565
- precision_micro: 0.2057335581787521
- precision_weighted: 0.19395567053931265
- recall_macro: 0.2055354993983153
- recall_micro: 0.2057335581787521
- recall_weighted: 0.2057335581787521
- accuracy: 0.2057335581787521
[ "aaron long", "aaron mooy", "aaron ramsdale", "aaron ramsey", "abde ezzalzouli", "abdelhamid sabiri", "abdelkarim hassan", "abderrazak hamdallah", "abdou diallo", "abdul fatawu issahaku", "abdul manaf nurudeen", "abdulaziz hatem", "abdulelah al-amri", "abdulellah al-malki", "abdullah madu", "abdullah otayf", "abdulrahman al-aboud", "abolfazl jalali", "achraf dari", "achraf hakimi", "adam davies", "adrien rabiot", "agustín canobbio", "ahmad nourollahi", "ahmed alaaeldin", "ahmed reda tagnaouti", "ajdin hrustic", "akram afif", "alan franco", "alejandro balde", "aleksandar mitrović", "alex sandro", "alex telles", "alexander bah", "alexander djiku", "alexander domínguez", "alexis mac allister", "alexis vega", "alfred gomis", "alfredo talavera", "ali abdi", "ali al-bulaihi", "ali al-hassan", "ali assadalla", "ali gholizadeh", "ali karimi", "ali maâloul", "alidu seidu", "alireza beiranvand", "alireza jahanbakhsh", "alisson", "alistair johnston", "almoez ali", "alphonse areola", "alphonso davies", "amadou onana", "amir abedzadeh", "anass zaroury", "andreas christensen", "andreas cornelius", "andreas skov olsen", "andrej kramarić", "andrew redmayne", "andries noppert", "andrija živković", "andré ayew ", "andré onana", "andré silva", "andré-frank zambo anguissa", "andrés guardado ", "anis ben slimane", "ansu fati", "ante budimir", "anthony contreras", "anthony hernández", "antoine griezmann", "antoine semenyo", "antonee robinson", "antonio rüdiger", "antony", "antónio silva", "ao tanaka", "ardon jashari", "arkadiusz milik", "armel bella-kotchap", "arthur theate", "artur jędrzejczyk", "assim madibo", "atiba hutchinson ", "aurélien tchouaméni", "awer mabil", "axel disasi", "axel witsel", "ayase ueda", "aymen dahmen", "aymen mathlouthi", "aymeric laporte", "ayrton preciado", "aziz behich", "azzedine ounahi", "aïssa laïdouni", "baba rahman", "badr benoun", "bailey wright", "bamba dieng", "bartosz bereszyński", "bassam al-rawi", "bechir ben saïd", "ben cabango", "ben davies", "ben white", "benjamin pavard", "bernardo silva", "bilal el khannous", "bilel ifa", "borna barišić", "borna sosa", "boualem khoukhi", "boulaye dia", "brandon aguilera", "breel embolo", "bremer", "brenden aaronson", "brennan johnson", "bruno fernandes", "bruno guimarães", "bruno petković", "bryan mbeumo", "bryan oviedo", "bryan ruiz ", "bukayo saka", "callum wilson", "cameron carter-vickers", "cameron devlin", "carlos gruezo", "carlos martínez", "carlos rodríguez", "carlos soler", "casemiro", "celso borges", "charles de ketelaere", "cheikhou kouyaté", "cho gue-sung", "cho yu-min", "chris gunter", "chris mepham", "christian bassogog", "christian eriksen", "christian fassnacht", "christian günter", "christian nørgaard", "christian pulisic", "christopher wooh", "cody gakpo", "collins fai", "connor roberts", "conor coady", "conor gallagher", "craig goodwin", "cristian roldan", "cristian romero", "cristiano ronaldo ", "cyle larin", "césar azpilicueta", "césar montes", "daichi kamada", "daizen maeda", "daley blind", "damian szymański", "dani alves", "dani carvajal", "dani olmo", "daniel afriyie", "daniel amartey", "daniel chacón", "daniel james", "daniel schmidt", "daniel wass", "daniel-kofi kyereh", "danilo", "danilo pereira", "danny vukovic", "danny ward", "darko lazović", "darwin núñez", "david raum", "david raya", "david wotherspoon", "davy klaassen", "dayne st. 
clair", "dayot upamecano", "deandre yedlin", "declan rice", "dejan lovren", "denis odoi", "denis zakaria", "denzel dumfries", "derek cornelius", "devis epassy", "diego godín ", "diego palacios", "diogo costa", "diogo dalot", "djibril sow", "djorkaeff reasco", "domagoj vida", "dominik livaković", "douglas lópez", "dries mertens", "dušan tadić ", "dušan vlahović", "dylan bronn", "dylan levitt", "eden hazard ", "ederson", "edimilson fernandes", "edinson cavani", "edson álvarez", "eduardo camavinga", "ehsan hajsafi ", "eiji kawashima", "elisha owusu", "ellyes skhiri", "emiliano martínez", "enner valencia ", "enzo ebosse", "enzo fernández", "eray cömert", "eric dier", "eric garcía", "eric maxim choupo-moting", "esteban alvarado", "ethan ampadu", "ethan horvath", "exequiel palacios", "fabian frei", "fabian rieder", "fabian schär", "fabinho", "facundo pellistri", "facundo torres", "famara diédhiou", "federico valverde", "ferjani sassi", "fernando muslera", "ferran torres", "filip kostić", "filip mladenović", "filip đuričić", "firas al-buraikan", "fodé ballo-touré", "formose mendy", "fran karačić", "francisco calvo", "franco armani", "fred", "frederik rønnow", "frenkie de jong", "félix torres", "gabriel jesus", "gabriel martinelli", "gaku shibasaki", "garang kuol", "gareth bale ", "gavi", "gaël ondoua", "georges-kévin nkoudou", "gerardo arteaga", "germán pezzella", "gerson torres", "gerónimo rulli", "ghailene chaalali", "gideon mensah", "giorgian de arrascaeta", "giovanni reyna", "gonzalo montiel", "gonzalo plata", "gonçalo ramos", "granit xhaka ", "gregor kobel", "grzegorz krychowiak", "guido rodríguez", "guillermo ochoa", "guillermo varela", "haitham asiri", "haji wright", "hakim ziyech", "hannibal mejbri", "hans vanaken", "haris seferovic", "harry kane ", "harry maguire", "harry souttar", "harry wilson", "hassan al-haydos ", "hassan al-tambakti", "hattan bahebri", "henry martín", "hernán galíndez", "hidemasa morita", "hiroki ito", "hiroki sakai", "hirving lozano", "homam ahmed", "hong chul", "hossein hosseini", "hossein kanaanizadegan", "hugo guillamón", "hugo lloris ", "hwang hee-chan", "hwang in-beom", "hwang ui-jo", "héctor herrera", "héctor moreno", "ibrahim danlad", "ibrahima konaté", "idrissa gueye", "iké ugbo", "ilias chair", "iliman ndiaye", "ismaeel mohammad", "ismail jakobs", "ismaël koné", "ismaïla sarr", "issam jebali", "ivan ilić", "ivan perišić", "ivica ivušić", "ivo grbić", "iñaki williams", "jack grealish", "jackson irvine", "jackson porozo", "jakub kamiński", "jakub kiwior", "jamal musiala", "james maddison", "james pantemis", "jamie maclaren", "jan bednarek", "jan vertonghen", "jason cummings", "jassem gaber", "jawad el yamiq", "jean-charles castelletto", "jean-pierre nsame", "jens stryger larsen", "jeong woo-yeong", "jeremie frimpong", "jeremy sarmiento", "jerome ngom mbekeli", "jesper lindstrøm", "jesús ferreira", "jesús gallardo", "jewison bennette", "jo hyeon-woo", "joachim andersen", "joakim mæhle", "joe allen", "joe morrell", "joe rodon", "joe scally", "joel campbell", "joel king", "joel waterman", "johan venegas", "johan vásquez", "john stones", "jonas hofmann", "jonas omlin", "jonas wind", "jonathan david", "jonathan osorio", "jonny williams", "jordan ayew", "jordan henderson", "jordan morris", "jordan pickford", "jordan veretout", "jordi alba", "jorge sánchez", "joseph aidoo", "josh sargent", "joshua kimmich", "josip juranović", "josip stanišić", "josip šutalo", "josé cifuentes", "josé giménez", "josé luis rodríguez", "josé sá", "joão cancelo", "joão félix", "joão 
mário", "joão palhinha", "joško gvardiol", "juan foyth", "juan pablo vargas", "jude bellingham", "jules koundé", "julian brandt", "julián álvarez", "jung woo-young", "junior hoilett", "junya ito", "jurriën timber", "justin bijlow", "jérémy doku", "kai havertz", "kalidou koulibaly ", "kalvin phillips", "kamal miller", "kamal sowah", "kamaldeen sulemana", "kamil glik", "kamil grabara", "kamil grosicki", "kaoru mitoma", "karim adeyemi", "karim ansarifard", "karim benzema", "karim boudiaf", "karl toko ekambi", "karol świderski", "kasper dolberg", "kasper schmeichel", "keanu baccus", "kellyn acosta", "kendall waston", "kenneth taylor", "kevin de bruyne", "kevin rodríguez", "kevin trapp", "kevin álvarez", "keylor navas", "keysher fuller", "khalid muneer", "kieffer moore", "kieran trippier", "kim jin-su", "kim min-jae", "kim moon-hwan", "kim seung-gyu", "kim tae-hwan", "kim young-gwon", "kingsley coman", "ko itakura", "koen casteels", "koke", "kristijan jakić", "krystian bielik", "krzysztof piątek", "krépin diatta", "kwon chang-hoon", "kwon kyung-won", "kye rowles", "kyle walker", "kylian mbappé", "lautaro martínez", "lawrence ati-zigi", "leander dendoncker", "leandro paredes", "leandro trossard", "lee jae-sung", "lee kang-in", "leon goretzka", "leroy sané", "liam fraser", "liam millar", "lionel messi ", "lisandro martínez", "lovro majer", "loïs openda", "luca de la torre", "lucas cavallini", "lucas hernandez", "lucas paquetá", "lucas torreira", "luis chávez", "luis romo", "luis suárez", "luka jović", "luka modrić ", "luka sučić", "lukas klostermann", "luke shaw", "luuk de jong", "majid hosseini", "mamadou loum", "manuel akanji", "manuel neuer ", "manuel ugarte", "marc-andré ter stegen", "marcelo brozović", "marco asensio", "marcos acuña", "marcos llorente", "marcus rashford", "marcus thuram", "mario götze", "mario pašalić", "mark harris", "mark-anthony kaye", "marko dmitrović", "marko grujić", "marko livaja", "marquinhos", "marten de roon", "martin boyle", "martin braithwaite", "martin erlić", "martin hongla", "martín cáceres", "mason mount", "mateo kovačić", "mateusz wieteska", "matheus nunes", "mathew leckie", "mathew ryan ", "mathias jensen", "mathías olivera", "matt turner", "matteo guendouzi", "matthew smith", "matthias ginter", "matthijs de ligt", "matty cash", "matías vecino", "matías viña", "maxi gómez", "maya yoshida ", "mehdi taremi", "mehdi torabi", "memphis depay", "meshaal barsham", "michael estrada", "michał skóraś", "michel aebischer", "michy batshuayi", "miki yamane", "mikkel damsgaard", "milad mohammadi", "milan borjan", "miloš degenek", "miloš veljković", "mislav oršić", "mitchell duke", "mohamed ali ben romdhane", "mohamed dräger", "mohamed kanno", "mohammed al-breik", "mohammed al-owais", "mohammed al-rubaie", "mohammed kudus", "mohammed muntari", "mohammed salisu", "mohammed waad", "moisés caicedo", "moisés ramírez", "montassar talbi", "morteza pouraliganji", "mostafa meshaal", "mouez hassen", "moumi ngamaleu", "moustapha name", "munir mohamedi", "musab kheder", "na sang-ho", "nader ghandri", "nahuel molina", "naif al-hadhrami", "nampalys mendy", "nasser al-dawsari", "nathan aké", "nathaniel atkinson", "nawaf al-abed", "nawaf al-aqidi", "nayef aguerd", "naïm sliti", "neco williams", "nemanja gudelj", "nemanja maksimović", "nemanja radonjić", "neymar", "nick pope", "niclas füllkrug", "nico elvedi", "nico schlotterbeck", "nico williams", "nicola zalewski", "nicolas jackson", "nicolas nkoulou", "nicolás otamendi", "nicolás tagliafico", "nicolás de la cruz", "niklas süle", 
"nikola milenković", "nikola vlašić", "noa lang", "noah okafor", "nouhou tolo", "noussair mazraoui", "nuno mendes", "néstor araujo", "oliver christensen", "olivier giroud", "olivier mbaizo", "olivier ntcham", "orbelín pineda", "osman bukari", "otávio", "ousmane dembélé", "pablo sarabia", "paik seung-ho", "pape abou cissé", "pape gueye", "pape matar sarr", "papu gómez", "pathé ciss", "patrick sequeira", "pau torres", "paulo dybala", "payam niazmand", "pedri", "pedro", "pepe", "pervis estupiñán", "phil foden", "philipp köhn", "piero hincapié", "pierre kunde", "pierre-emile højbjerg", "piotr zieliński", "predrag rajković", "przemysław frankowski", "rafael leão", "raheem sterling", "ramin rezaeian", "randal kolo muani", "raphaël guerreiro", "raphaël varane", "raphinha", "rasmus kristensen", "raúl jiménez", "remko pasveer", "remo freuler", "renato steffen", "ricardo horta", "ricardo rodriguez", "richarlison", "richie laryea", "riley mcgree", "ritsu dōan", "riyadh sharahili", "roan wilson", "robert arboleda", "robert gumny", "robert lewandowski ", "robert skov", "robert sánchez", "roberto alvarado", "rodolfo cota", "rodri", "rodrigo bentancur", "rodrigo de paul", "rodrygo", "rogelio funes mori", "romain saïss ", "romario ibarra", "romelu lukaku", "ronald araújo", "rouzbeh cheshmi", "ruben vargas", "rubin colwill", "rui patrício", "ró-ró", "rónald matarrita", "rúben dias", "rúben neves", "saad al-sheeb", "sadegh moharrami", "sadio mané", "saeid ezatolahi", "saleh al-shehri", "salem al-dawsari", "salem al-hajri", "salis abdul samed", "salman al-faraj ", "sam adekugbe", "saman ghoddos", "sami al-najei", "samuel gouet", "samuel piette", "sardar azmoun", "saud abdulhamid", "saša lukić", "sean johnson", "sebas méndez", "sebastian szymański", "sebastián coates", "sebastián sosa", "seifeddine jaziri", "selim amallah", "seny dieng", "serge gnabry", "sergej milinković-savić", "sergio busquets ", "sergio rochet", "sergiño dest", "shaq moore", "shogo taniguchi", "shojae khalilzadeh", "shuto machino", "shūichi gonda", "silvan widmer", "simon kjær ", "simon mignolet", "simon ngapandouetnbu", "sofiane boufal", "sofyan amrabat", "son heung-min ", "son jun-ho", "song bum-keun", "song min-kyu", "sorba thomas", "souaibou marou", "srđan babić", "stefan mitrović", "stefan de vrij", "stephen eustáquio", "steve mandanda", "steven berghuis", "steven bergwijn", "steven vitória", "strahinja eraković", "strahinja pavlović", "sultan al-ghannam", "szymon żurkowski", "taha yassine khenissi", "tajon buchanan", "takefusa kubo", "takehiro tomiyasu", "takuma asano", "takumi minamino", "tarek salman", "tariq lamptey", "teun koopmeiners", "theo hernandez", "thiago almada", "thiago silva ", "thibaut courtois", "thilo kehrer", "thomas delaney", "thomas deng", "thomas meunier", "thomas müller", "thomas partey", "thorgan hazard", "tim ream", "timothy castagne", "timothy weah", "toby alderweireld", "tom lockyer", "trent alexander-arnold", "tyler adams", "tyrell malacia", "unai simón", "uriel antuna", "uroš račić", "vahid amiri", "vanja milinković-savić", "victor nelsson", "vincent aboubakar ", "vincent janssen", "vinícius júnior", "virgil van dijk ", "vitinha", "wahbi khazri", "wajdi kechrida", "walid cheddira", "walker zimmerman", "wataru endo", "wayne hennessey", "weston mckennie", "weverton", "william carvalho", "william pacho", "william saliba", "wojciech szczęsny", "wout faes", "wout weghorst", "xavi simons", "xavier arreaga", "xherdan shaqiri", "yahia attiyat allah", "yahya jabrane", "yann sommer", "yannick carrasco", "yasser 
al-shahrani", "yassine bounou", "yassine meriah", "yeltsin tejeda", "yeremy pino", "yoon jong-gyu", "youri tielemans", "yousef hassan", "youssef en-nesyri", "youssef msakni ", "youssouf fofana", "youssouf sabaly", "youssoufa moukoko", "youstin salas", "yuki soma", "yunus musah", "yussuf poulsen", "yuto nagatomo", "zakaria aboukhlal", "zeno debast", "álvaro morata", "álvaro zamora", "ángel correa", "ángel di maría", "ángel mena", "ángelo preciado", "éder militão", "édouard mendy", "érick gutiérrez", "éverton ribeiro", "óscar duarte", "i̇lkay gündoğan", "łukasz skorupski" ]
ebotwick/results
# results

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the cats_vs_dogs dataset.
It achieves the following results on the evaluation set:
- eval_loss: 0.0418
- eval_runtime: 1879.1131
- eval_samples_per_second: 6.229
- eval_steps_per_second: 0.39
- epoch: 1.0
- step: 366

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 32
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 1

### Framework versions

- Transformers 4.35.0
- Pytorch 2.2.0
- Datasets 2.14.6
- Tokenizers 0.14.1
[ "cat", "dog" ]
punchnami/ViT-Base-Pothole-Classification
# output

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.1172
- Accuracy: 0.9669

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3.0

### Training results

### Framework versions

- Transformers 4.38.0.dev0
- Pytorch 2.2.0+cpu
- Datasets 2.17.0
- Tokenizers 0.15.1
[ "no_pothole", "pothole" ]
hiendang7613/test-cifar-10
# test-cifar-10

This model is a fine-tuned version of [google/vit-base-patch16-224](https://huggingface.co/google/vit-base-patch16-224) on an unknown dataset.

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 128
- eval_batch_size: 128
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 100

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "n01443537", "n01629819", "n01641577", "n01644900", "n01698640", "n01742172", "n01768244", "n01770393", "n01774384", "n01774750", "n01784675", "n01882714", "n01910747", "n01917289", "n01944390", "n01950731", "n01983481", "n01984695", "n02002724", "n02056570", "n02058221", "n02074367", "n02094433", "n02099601", "n02099712", "n02106662", "n02113799", "n02123045", "n02123394", "n02124075", "n02125311", "n02129165", "n02132136", "n02165456", "n02226429", "n02231487", "n02233338", "n02236044", "n02268443", "n02279972", "n02281406", "n02321529", "n02364673", "n02395406", "n02403003", "n02410509", "n02415577", "n02423022", "n02437312", "n02480495", "n02481823", "n02486410", "n02504458", "n02509815", "n02666347", "n02669723", "n02699494", "n02769748", "n02788148", "n02791270", "n02793495", "n02795169", "n02802426", "n02808440", "n02814533", "n02814860", "n02815834", "n02823428", "n02837789", "n02841315", "n02843684", "n02883205", "n02892201", "n02909870", "n02917067", "n02927161", "n02948072", "n02950826", "n02963159", "n02977058", "n02988304", "n03014705", "n03026506", "n03042490", "n03085013", "n03089624", "n03100240", "n03126707", "n03160309", "n03179701", "n03201208", "n03255030", "n03355925", "n03373237", "n03388043", "n03393912", "n03400231", "n03404251", "n03424325", "n03444034", "n03447447", "n03544143", "n03584254", "n03599486", "n03617480", "n03637318", "n03649909", "n03662601", "n03670208", "n03706229", "n03733131", "n03763968", "n03770439", "n03796401", "n03814639", "n03837869", "n03838899", "n03854065", "n03891332", "n03902125", "n03930313", "n03937543", "n03970156", "n03977966", "n03980874", "n03983396", "n03992509", "n04008634", "n04023962", "n04070727", "n04074963", "n04099969", "n04118538", "n04133789", "n04146614", "n04149813", "n04179913", "n04251144", "n04254777", "n04259630", "n04265275", "n04275548", "n04285008", "n04311004", "n04328186", "n04356056", "n04366367", "n04371430", "n04376876", "n04398044", "n04399382", "n04417672", "n04456115", "n04465666", "n04486054", "n04487081", "n04501370", "n04507155", "n04532106", "n04532670", "n04540053", "n04560804", "n04562935", "n04596742", "n04598010", "n06596364", "n07056680", "n07583066", "n07614500", "n07615774", "n07646821", "n07647870", "n07657664", "n07695742", "n07711569", "n07715103", "n07720875", "n07749582", "n07753592", "n07768694", "n07871810", "n07873807", "n07875152", "n07920052", "n07975909", "n08496334", "n08620881", "n08742578", "n09193705", "n09246464", "n09256479", "n09332890", "n09428293", "n12267677", "n12520864", "n13001041", "n13652335", "n13652994", "n13719102", "n14991210" ]
Hamzaharman/imageclassification
# imageclassification

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 1.1467
- Accuracy: 0.5938

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 20

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 40 | 1.8113 | 0.35 |
| No log | 2.0 | 80 | 1.5533 | 0.3937 |
| No log | 3.0 | 120 | 1.4193 | 0.4688 |
| No log | 4.0 | 160 | 1.3237 | 0.5687 |
| No log | 5.0 | 200 | 1.2989 | 0.4938 |
| No log | 6.0 | 240 | 1.2901 | 0.5 |
| No log | 7.0 | 280 | 1.2380 | 0.5625 |
| No log | 8.0 | 320 | 1.1773 | 0.6125 |
| No log | 9.0 | 360 | 1.2149 | 0.5625 |
| No log | 10.0 | 400 | 1.2280 | 0.5312 |
| No log | 11.0 | 440 | 1.2326 | 0.5625 |
| No log | 12.0 | 480 | 1.1488 | 0.5875 |
| 1.0601 | 13.0 | 520 | 1.1597 | 0.6062 |
| 1.0601 | 14.0 | 560 | 1.1953 | 0.5563 |
| 1.0601 | 15.0 | 600 | 1.2011 | 0.55 |
| 1.0601 | 16.0 | 640 | 1.2294 | 0.55 |
| 1.0601 | 17.0 | 680 | 1.1972 | 0.5687 |
| 1.0601 | 18.0 | 720 | 1.3043 | 0.525 |
| 1.0601 | 19.0 | 760 | 1.2796 | 0.525 |
| 1.0601 | 20.0 | 800 | 1.1781 | 0.5813 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
evanrsl/facial_emotion_model
# facial_emotion_model

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 1.2427
- Accuracy: 0.5563

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 10

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 40 | 1.8904 | 0.3125 |
| No log | 2.0 | 80 | 1.6093 | 0.4437 |
| No log | 3.0 | 120 | 1.4846 | 0.4813 |
| No log | 4.0 | 160 | 1.4352 | 0.5437 |
| No log | 5.0 | 200 | 1.3533 | 0.5 |
| No log | 6.0 | 240 | 1.3076 | 0.5188 |
| No log | 7.0 | 280 | 1.2484 | 0.55 |
| No log | 8.0 | 320 | 1.2073 | 0.5875 |
| No log | 9.0 | 360 | 1.2465 | 0.5687 |
| No log | 10.0 | 400 | 1.2770 | 0.5188 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
friedrice231/swin-tiny-patch4-window7-224-finetuned-eurosat
# swin-tiny-patch4-window7-224-finetuned-eurosat

This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.1631
- Accuracy: 0.9546

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 64
- eval_batch_size: 64
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 256
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 30

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.4463 | 1.0 | 51 | 0.3232 | 0.9166 |
| 0.2061 | 2.0 | 102 | 0.1659 | 0.9398 |
| 0.1795 | 3.0 | 153 | 0.2043 | 0.9192 |
| 0.1356 | 4.0 | 204 | 0.1204 | 0.9542 |
| 0.1193 | 5.0 | 255 | 0.1357 | 0.9454 |
| 0.1054 | 6.0 | 306 | 0.1197 | 0.9516 |
| 0.0974 | 7.0 | 357 | 0.1081 | 0.9586 |
| 0.092 | 8.0 | 408 | 0.1220 | 0.9532 |
| 0.0601 | 9.0 | 459 | 0.1587 | 0.9466 |
| 0.0639 | 10.0 | 510 | 0.1676 | 0.9440 |
| 0.072 | 11.0 | 561 | 0.1058 | 0.9618 |
| 0.0606 | 12.0 | 612 | 0.1061 | 0.9634 |
| 0.0572 | 13.0 | 663 | 0.1375 | 0.9552 |
| 0.0563 | 14.0 | 714 | 0.1377 | 0.9548 |
| 0.0413 | 15.0 | 765 | 0.1823 | 0.9470 |
| 0.0361 | 16.0 | 816 | 0.0992 | 0.9674 |
| 0.0471 | 17.0 | 867 | 0.1508 | 0.9550 |
| 0.04 | 18.0 | 918 | 0.1700 | 0.9506 |
| 0.0417 | 19.0 | 969 | 0.1760 | 0.9454 |
| 0.0238 | 20.0 | 1020 | 0.1311 | 0.9600 |
| 0.0319 | 21.0 | 1071 | 0.1502 | 0.9562 |
| 0.0328 | 22.0 | 1122 | 0.1843 | 0.9484 |
| 0.0363 | 23.0 | 1173 | 0.1473 | 0.9558 |
| 0.0385 | 24.0 | 1224 | 0.1625 | 0.9516 |
| 0.0198 | 25.0 | 1275 | 0.1749 | 0.9490 |
| 0.0349 | 26.0 | 1326 | 0.1586 | 0.9528 |
| 0.0337 | 27.0 | 1377 | 0.1343 | 0.9614 |
| 0.0261 | 28.0 | 1428 | 0.1624 | 0.9542 |
| 0.0253 | 29.0 | 1479 | 0.1727 | 0.9532 |
| 0.0271 | 30.0 | 1530 | 0.1631 | 0.9546 |

### Framework versions

- Transformers 4.37.2
- Pytorch 1.13.1
- Datasets 2.16.1
- Tokenizers 0.15.1
[ "meme", "not_meme" ]
supung/swin-tiny-patch4-window7-224-finetuned-eurosat
# swin-tiny-patch4-window7-224-finetuned-eurosat

This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on an unknown dataset. It achieves the following results on the evaluation set:
- Loss: 0.0830
- Accuracy: 0.9698

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.3484 | 1.0 | 114 | 0.1715 | 0.9457 |
| 0.2188 | 2.0 | 228 | 0.0976 | 0.9710 |
| 0.2193 | 3.0 | 342 | 0.0830 | 0.9698 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "annualcrop", "forest", "herbaceousvegetation", "highway", "industrial", "pasture", "permanentcrop", "residential", "river", "sealake" ]
Sniken/content
# google/vit-base-patch16-224-in21k

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set:
- Loss: 52251.0508
- Accuracy: 0.0938

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 47720.7531 | 1.0 | 20 | 52251.0508 | 0.0938 |
| 63036.45 | 2.0 | 40 | 57330.5195 | 0.0938 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
sai17/cards-top_left_swin-tiny-patch4-window7-224-finetuned-v3_more_data
# cards-top_left_swin-tiny-patch4-window7-224-finetuned-v3_more_data

This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset. It achieves the following results on the evaluation set:
- Loss: 0.9722
- Accuracy: 0.5941

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 30

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:-----:|:---------------:|:--------:|
| 1.5501 | 1.0 | 1346 | 1.2633 | 0.4645 |
| 1.4882 | 2.0 | 2692 | 1.1866 | 0.4951 |
| 1.5148 | 3.0 | 4038 | 1.1644 | 0.5066 |
| 1.4605 | 4.0 | 5384 | 1.1546 | 0.5105 |
| 1.425 | 5.0 | 6730 | 1.0940 | 0.5361 |
| 1.4452 | 6.0 | 8076 | 1.0750 | 0.5530 |
| 1.4507 | 7.0 | 9422 | 1.0997 | 0.5301 |
| 1.4435 | 8.0 | 10768 | 1.0835 | 0.5445 |
| 1.3904 | 9.0 | 12114 | 1.0587 | 0.5493 |
| 1.3826 | 10.0 | 13460 | 1.0434 | 0.5581 |
| 1.4186 | 11.0 | 14806 | 1.0515 | 0.5536 |
| 1.3938 | 12.0 | 16152 | 1.0283 | 0.5635 |
| 1.3763 | 13.0 | 17498 | 1.0140 | 0.5740 |
| 1.3873 | 14.0 | 18844 | 1.0557 | 0.5470 |
| 1.3833 | 15.0 | 20190 | 1.0244 | 0.5638 |
| 1.385 | 16.0 | 21536 | 1.0345 | 0.5584 |
| 1.3492 | 17.0 | 22882 | 0.9997 | 0.5757 |
| 1.3332 | 18.0 | 24228 | 1.0106 | 0.5697 |
| 1.399 | 19.0 | 25574 | 0.9867 | 0.5846 |
| 1.3117 | 20.0 | 26920 | 0.9929 | 0.5833 |
| 1.362 | 21.0 | 28266 | 0.9895 | 0.5861 |
| 1.3279 | 22.0 | 29612 | 0.9853 | 0.5858 |
| 1.3057 | 23.0 | 30958 | 0.9872 | 0.5865 |
| 1.3217 | 24.0 | 32304 | 0.9761 | 0.5909 |
| 1.2854 | 25.0 | 33650 | 0.9800 | 0.5910 |
| 1.3194 | 26.0 | 34996 | 0.9867 | 0.5901 |
| 1.2733 | 27.0 | 36342 | 0.9927 | 0.5871 |
| 1.2949 | 28.0 | 37688 | 0.9755 | 0.5939 |
| 1.2836 | 29.0 | 39034 | 0.9738 | 0.5940 |
| 1.2974 | 30.0 | 40380 | 0.9722 | 0.5941 |

### Framework versions

- Transformers 4.37.2
- Pytorch 2.0.1+cu117
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "grade_1", "grade_2", "grade_3", "grade_4", "grade_5", "grade_6", "grade_7", "grade_8", "grade_9" ]
UNAVS/image_classification
# image_classification

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set:
- Loss: 1.7071
- Accuracy: 0.4

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 64
- eval_batch_size: 64
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 10 | 1.8097 | 0.3438 |
| No log | 2.0 | 20 | 1.7289 | 0.3875 |
| No log | 3.0 | 30 | 1.7099 | 0.4 |

### Framework versions

- Transformers 4.36.2
- Pytorch 2.1.2+cpu
- Datasets 2.17.0
- Tokenizers 0.15.0
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
xwvzr/image_classification
# image_classification

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set:
- Loss: 1.7477
- Accuracy: 0.3875

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 40 | 1.9458 | 0.3625 |
| No log | 2.0 | 80 | 1.7437 | 0.4188 |
| No log | 3.0 | 120 | 1.6751 | 0.4 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]
jetaimejeteveux/vit-emotions-fp16
# vit-emotions-fp16

This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset. It achieves the following results on the evaluation set:
- Loss: 0.0725
- Accuracy: 0.9859

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 50

### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 40 | 1.3965 | 0.4938 |
| No log | 2.0 | 80 | 1.4154 | 0.425 |
| No log | 3.0 | 120 | 1.3729 | 0.4562 |
| No log | 4.0 | 160 | 1.3532 | 0.4562 |
| No log | 5.0 | 200 | 1.2993 | 0.5062 |
| No log | 6.0 | 240 | 1.3438 | 0.4938 |
| No log | 7.0 | 280 | 1.3741 | 0.5 |
| No log | 8.0 | 320 | 1.5267 | 0.4313 |
| No log | 9.0 | 360 | 1.2778 | 0.5375 |
| No log | 10.0 | 400 | 1.3864 | 0.5062 |
| No log | 11.0 | 440 | 1.4221 | 0.4875 |
| No log | 12.0 | 480 | 1.5059 | 0.5062 |
| 0.7596 | 13.0 | 520 | 1.5004 | 0.5188 |
| 0.7596 | 14.0 | 560 | 1.4539 | 0.5125 |
| 0.7596 | 15.0 | 600 | 1.5219 | 0.5375 |
| 0.7596 | 16.0 | 640 | 1.6179 | 0.4813 |
| 0.7596 | 17.0 | 680 | 1.4562 | 0.55 |
| 0.7596 | 18.0 | 720 | 1.5473 | 0.4875 |
| 0.7596 | 19.0 | 760 | 1.5820 | 0.5188 |
| 0.7596 | 20.0 | 800 | 1.5877 | 0.5125 |
| 0.7596 | 21.0 | 840 | 1.4965 | 0.55 |
| 0.7596 | 22.0 | 880 | 1.5947 | 0.5375 |
| 0.7596 | 23.0 | 920 | 1.4672 | 0.5437 |
| 0.7596 | 24.0 | 960 | 1.7930 | 0.5 |
| 0.2328 | 25.0 | 1000 | 1.8033 | 0.4875 |
| 0.2328 | 26.0 | 1040 | 1.7193 | 0.5312 |
| 0.2328 | 27.0 | 1080 | 1.8072 | 0.4813 |
| 0.2328 | 28.0 | 1120 | 1.6767 | 0.5437 |
| 0.2328 | 29.0 | 1160 | 1.6138 | 0.5625 |
| 0.2328 | 30.0 | 1200 | 1.8484 | 0.4938 |
| 0.2328 | 31.0 | 1240 | 1.7691 | 0.5062 |
| 0.2328 | 32.0 | 1280 | 1.7797 | 0.5062 |
| 0.2328 | 33.0 | 1320 | 1.7575 | 0.5375 |
| 0.2328 | 34.0 | 1360 | 1.7550 | 0.5062 |
| 0.2328 | 35.0 | 1400 | 1.7933 | 0.5 |
| 0.2328 | 36.0 | 1440 | 1.7056 | 0.5563 |
| 0.2328 | 37.0 | 1480 | 1.8739 | 0.4938 |
| 0.1517 | 38.0 | 1520 | 1.7637 | 0.5188 |
| 0.1517 | 39.0 | 1560 | 1.7178 | 0.5563 |
| 0.1517 | 40.0 | 1600 | 1.9114 | 0.5 |
| 0.1517 | 41.0 | 1640 | 1.8453 | 0.5188 |
| 0.1517 | 42.0 | 1680 | 1.7571 | 0.5625 |
| 0.1517 | 43.0 | 1720 | 1.7757 | 0.5437 |
| 0.1517 | 44.0 | 1760 | 1.8389 | 0.5125 |
| 0.1517 | 45.0 | 1800 | 1.8109 | 0.5375 |
| 0.1517 | 46.0 | 1840 | 1.8537 | 0.4688 |
| 0.1517 | 47.0 | 1880 | 1.7422 | 0.5563 |
| 0.1517 | 48.0 | 1920 | 1.7807 | 0.5687 |
| 0.1517 | 49.0 | 1960 | 1.8111 | 0.525 |
| 0.1045 | 50.0 | 2000 | 1.9057 | 0.5125 |

### Framework versions

- Transformers 4.35.2
- Pytorch 2.1.0+cu121
- Datasets 2.17.0
- Tokenizers 0.15.2
[ "anger", "contempt", "disgust", "fear", "happy", "neutral", "sad", "surprise" ]