{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'PDF TO Markdown' && linkText !== 'PDF TO Markdown' ) { link.textContent = 'PDF TO Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== 'Voice Cloning' ) { link.textContent = 'Voice Cloning'; link.href = 'https://vibevoice.info/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || 
linkText.match(/^s*Enterprises*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'PDF TO Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, 
code').forEach(element => { const text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { 
console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \n\n\nTip: The fread() and the fclose() functions will be explained below.\n\nThe file may be opened in one of the following modes:\nModes \tDescription\nr \tOpen a file for read only. File pointer starts at the beginning of the file\nw \tOpen a file for write only. Erases the contents of the file or creates a new file if it doesn't exist. File pointer starts at the beginning of the file\na \tOpen a file for write only. The existing data in file is preserved. File pointer starts at the end of the file. Creates a new file if the file doesn't exist\nx \tCreates a new file for write only. 
Returns FALSE and an error if file already exists\nr+ \tOpen a file for read/write. File pointer starts at the beginning of the file\nw+ \tOpen a file for read/write. Erases the contents of the file or creates a new file if it doesn't exist. File pointer starts at the beginning of the file\na+ \tOpen a file for read/write. The existing data in file is preserved. File pointer starts at the end of the file. Creates a new file if the file doesn't exist\nx+ \tCreates a new file for read/write. Returns FALSE and an error if file already exists\nPHP Read File - fread()\n\nThe fread() function reads from an open file.\n\nThe first parameter of fread() contains the name of the file to read from and the second parameter specifies the maximum number of bytes to read.\n\nThe following PHP code reads the \"webdictionary.txt\" file to the end:\nfread($myfile,filesize(\"webdictionary.txt\"));\nPHP Close File - fclose()\n\nThe fclose() function is used to close an open file.\n\nIt's a good programming practice to close all files after you have finished with them. 
You don't want an open file running around on your server taking up resources!\n\nThe fclose() requires the name of the file (or a variable that holds the filename) we want to close:\n\n"},"avg_line_length":{"kind":"number","value":45.5869565217,"string":"45.586957"},"max_line_length":{"kind":"number","value":165,"string":"165"},"alphanum_fraction":{"kind":"number","value":0.7515498331,"string":"0.75155"},"score":{"kind":"number","value":3.484375,"string":"3.484375"}}},{"rowIdx":735,"cells":{"hexsha":{"kind":"string","value":"7fd874a01ca9a73524521fafb5c03ba365046987"},"size":{"kind":"number","value":6530,"string":"6,530"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"server/game/runner.go"},"max_stars_repo_name":{"kind":"string","value":"jacobpatterson1549/selene-bananas"},"max_stars_repo_head_hexsha":{"kind":"string","value":"6f8213ce8786c796f9272f403204c5869f9aa68b"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-06-22T12:40:21.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-06-22T12:40:21.000Z"},"max_issues_repo_path":{"kind":"string","value":"server/game/runner.go"},"max_issues_repo_name":{"kind":"string","value":"jacobpatterson1549/selene-bananas"},"max_issues_repo_head_hexsha":{"kind":"string","value":"6f8213ce8786c796f9272f403204c5869f9aa68b"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-03-04T00:48:54.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-03-17T20:08:55.000Z"},"max_forks_repo_path":{"kind":"string","value":"server/game/runner.go"},"max_forks_repo_name":{"kind":"string","value":"jacobpatterson1549/selene-bananas"},"max_forks_repo_head_hexsha":{"kind":"string","value":"6f8213ce8786c796f9272f403204c5869f9aa68b"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package game\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"sync\"\n\n\t\"github.com/jacobpatterson1549/selene-bananas/game\"\n\t\"github.com/jacobpatterson1549/selene-bananas/game/message\"\n\t\"github.com/jacobpatterson1549/selene-bananas/game/player\"\n\t\"github.com/jacobpatterson1549/selene-bananas/server/log\"\n)\n\ntype (\n\t// Runner runs games.\n\tRunner struct {\n\t\t// log is used to log errors and other information\n\t\tlog log.Logger\n\t\t// games maps game ids to the channel each games listens to for incoming messages\n\t\t// OutChannels are stored here because the Runner writes to the game, which in turn reads from the Runner's channel as an InChannel\n\t\tgames map[game.ID]chan<- message.Message\n\t\t// lastID is the ID of themost recently created game. 
The next new game should get a larger ID.\n\t\tlastID game.ID\n\t\t// WordValidator is used to validate players' words when they try to finish the game.\n\t\tWordValidator WordValidator\n\t\t// UserDao increments user points when a game is finished.\n\t\tuserDao UserDao\n\t\t// RunnerConfig contains configuration properties of the Runner.\n\t\tRunnerConfig\n\t}\n\n\t// RunnerConfig is used to create a game Runner.\n\tRunnerConfig struct {\n\t\t// Debug is a flag that causes the game to log the types messages that are read.\n\t\tDebug bool\n\t\t// The maximum number of games.\n\t\tMaxGames int\n\t\t// The config for creating new games.\n\t\tGameConfig Config\n\t}\n\n\t// WordValidator checks if words are valid.\n\tWordValidator interface {\n\t\tValidate(word string) bool\n\t}\n\n\t// UserDao makes changes to the stored state of users in the game\n\tUserDao interface {\n\t\t// UpdatePointsIncrement increments points for the specified usernames based on the userPointsIncrementFunc\n\t\tUpdatePointsIncrement(ctx context.Context, userPoints map[string]int) error\n\t}\n)\n\n// NewRunner creates a new game runner from the config.\nfunc (cfg RunnerConfig) NewRunner(log log.Logger, WordValidator WordValidator, userDao UserDao) (*Runner, error) {\n\tif err := cfg.validate(log, WordValidator, userDao); err != nil {\n\t\treturn nil, fmt.Errorf(\"creating game runner: validation: %w\", err)\n\t}\n\tm := Runner{\n\t\tlog: log,\n\t\tgames: make(map[game.ID]chan<- message.Message, cfg.MaxGames),\n\t\tRunnerConfig: cfg,\n\t\tWordValidator: WordValidator,\n\t\tuserDao: userDao,\n\t}\n\treturn &m, nil\n}\n\n// Run consumes messages from the \"in\" channel, processing them on a new goroutine until the \"in\" channel closes.\n// The results of messages are sent on the \"out\" channel to be read by the subscriber.\nfunc (r *Runner) Run(ctx context.Context, wg *sync.WaitGroup, in <-chan message.Message) <-chan message.Message {\n\tctx, cancelFunc := context.WithCancel(ctx)\n\tout := 
make(chan message.Message)\n\twg.Add(1)\n\trun := func() {\n\t\tdefer wg.Done()\n\t\tdefer r.log.Printf(\"game runner stopped\")\n\t\tdefer close(out)\n\t\tdefer cancelFunc()\n\t\tfor { // BLOCKING\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn\n\t\t\tcase m, ok := <-in:\n\t\t\t\tif !ok {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tr.handleMessage(ctx, wg, m, out)\n\t\t\t}\n\t\t}\n\t}\n\tgo run()\n\treturn out\n}\n\n// validate ensures the configuration has no errors.\nfunc (cfg RunnerConfig) validate(log log.Logger, WordValidator WordValidator, userDao UserDao) error {\n\tswitch {\n\tcase log == nil:\n\t\treturn fmt.Errorf(\"log required\")\n\tcase WordValidator == nil:\n\t\treturn fmt.Errorf(\"word validator required\")\n\tcase userDao == nil:\n\t\treturn fmt.Errorf(\"user dao required\")\n\tcase cfg.MaxGames < 1:\n\t\treturn fmt.Errorf(\"must be able to create at least one game\")\n\t}\n\treturn nil\n}\n\n// handleMessage takes appropriate actions for different message types.\nfunc (r *Runner) handleMessage(ctx context.Context, wg *sync.WaitGroup, m message.Message, out chan<- message.Message) {\n\tswitch m.Type {\n\tcase message.CreateGame:\n\t\tr.createGame(ctx, wg, m, out)\n\tcase message.DeleteGame:\n\t\tr.deleteGame(ctx, m, out)\n\tdefault:\n\t\tr.handleGameMessage(ctx, m, out)\n\t}\n}\n\n// createGame allocates a new game, adding it to the open games.\nfunc (r *Runner) createGame(ctx context.Context, wg *sync.WaitGroup, m message.Message, out chan<- message.Message) {\n\tif err := r.validateCreateGame(m); err != nil {\n\t\tr.sendError(err, m.PlayerName, out)\n\t\treturn\n\t}\n\tid := r.lastID + 1\n\tgameCfg := r.GameConfig\n\tgameCfg.Config = *m.Game.Config\n\tg, err := gameCfg.NewGame(r.log, id, r.WordValidator, r.userDao)\n\tif err != nil {\n\t\tr.sendError(err, m.PlayerName, out)\n\t\treturn\n\t}\n\tr.lastID = id\n\tgIn := make(chan message.Message)\n\tg.Run(ctx, wg, gIn, out) // all games publish to the same \"out\" channel\n\tr.games[id] = 
gIn\n\tm.Type = message.JoinGame\n\tmessage.Send(m, gIn, r.Debug, r.log)\n}\n\n// validateCreateGame returns an err if the runner cannot create a new game or the message to create one is invalid.\nfunc (r *Runner) validateCreateGame(m message.Message) error {\n\tswitch {\n\tcase len(r.games) >= r.MaxGames:\n\t\treturn fmt.Errorf(\"the maximum number of games have already been created (%v)\", r.MaxGames)\n\tcase m.Game == nil, m.Game.Board == nil:\n\t\treturn fmt.Errorf(\"board config required when creating game\")\n\tcase m.Game.Config == nil:\n\t\treturn fmt.Errorf(\"missing config for game properties\")\n\t}\n\treturn nil\n}\n\n// deleteGame removes a game from the runner, notifying the game that it is being deleted so it can notify users.\nfunc (r *Runner) deleteGame(ctx context.Context, m message.Message, out chan<- message.Message) {\n\tgIn, err := r.getGame(m)\n\tif err != nil {\n\t\tr.sendError(err, m.PlayerName, out)\n\t\treturn\n\t}\n\tdelete(r.games, m.Game.ID)\n\tmessage.Send(m, gIn, r.Debug, r.log)\n}\n\n// handleGameMessage passes an error to the game the message is for.\nfunc (r *Runner) handleGameMessage(ctx context.Context, m message.Message, out chan<- message.Message) {\n\tgIn, err := r.getGame(m)\n\tif err != nil {\n\t\tr.sendError(err, m.PlayerName, out)\n\t\treturn\n\t}\n\tmessage.Send(m, gIn, r.Debug, r.log)\n}\n\n// getGame retrieves the game from the runner for the message, if the runner has a game for the message's game ID.\nfunc (r *Runner) getGame(m message.Message) (chan<- message.Message, error) {\n\tif m.Game == nil {\n\t\treturn nil, fmt.Errorf(\"no game for runner to handle in message: %v\", m)\n\t}\n\tgIn, ok := r.games[m.Game.ID]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"no game ID for runner to handle in message: %v\", m)\n\t}\n\treturn gIn, nil\n}\n\n// sendError adds a message for the player on the channel\nfunc (r *Runner) sendError(err error, pn player.Name, out chan<- message.Message) {\n\terr = fmt.Errorf(\"player %v: %w\", 
pn, err)\n\tr.log.Printf(\"game runner error: %v\", err)\n\tm := message.Message{\n\t\tType: message.SocketError,\n\t\tInfo: err.Error(),\n\t\tPlayerName: pn,\n\t}\n\tmessage.Send(m, out, r.Debug, r.log)\n}\n"},"avg_line_length":{"kind":"number","value":32.3267326733,"string":"32.326733"},"max_line_length":{"kind":"number","value":133,"string":"133"},"alphanum_fraction":{"kind":"number","value":0.7110260337,"string":"0.711026"},"score":{"kind":"number","value":3.40625,"string":"3.40625"}}},{"rowIdx":736,"cells":{"hexsha":{"kind":"string","value":"fa82de916adac298052379e4e7e81b6c55191986"},"size":{"kind":"number","value":6814,"string":"6,814"},"ext":{"kind":"string","value":"sql"},"lang":{"kind":"string","value":"SQL"},"max_stars_repo_path":{"kind":"string","value":"public/question4/Bookazon.sql"},"max_stars_repo_name":{"kind":"string","value":"bryan-gilbert/assignments"},"max_stars_repo_head_hexsha":{"kind":"string","value":"ff5db009a8c324d73aedf07b6b3938a656f919be"},"max_stars_repo_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n \"Unlicense\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-07-28T21:48:49.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-07-28T21:48:49.000Z"},"max_issues_repo_path":{"kind":"string","value":"public/question4/Bookazon.sql"},"max_issues_repo_name":{"kind":"string","value":"bryan-gilbert/assignments"},"max_issues_repo_head_hexsha":{"kind":"string","value":"ff5db009a8c324d73aedf07b6b3938a656f919be"},"max_issues_repo_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n 
\"Unlicense\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"public/question4/Bookazon.sql"},"max_forks_repo_name":{"kind":"string","value":"bryan-gilbert/assignments"},"max_forks_repo_head_hexsha":{"kind":"string","value":"ff5db009a8c324d73aedf07b6b3938a656f919be"},"max_forks_repo_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n \"Unlicense\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"-- MySQL Script generated by MySQL Workbench\n-- Wed Aug 22 01:55:12 2018\n-- Model: New Model Version: 1.0\n-- MySQL Workbench Forward Engineering\n\nSET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;\nSET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;\nSET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION';\n\n-- -----------------------------------------------------\n-- Schema bookazon\n-- -----------------------------------------------------\n\n-- source Bookazon.sql;\n\n-- -----------------------------------------------------\n-- Schema bookazon\n-- -----------------------------------------------------\nCREATE SCHEMA IF NOT EXISTS `bookazon` DEFAULT CHARACTER SET utf8 ;\nUSE `bookazon` ;\n\n-- -----------------------------------------------------\n-- Table `bookazon`.`Authors`\n-- -----------------------------------------------------\nDROP TABLE IF EXISTS `bookazon`.`Authors` ;\n\nCREATE TABLE IF NOT EXISTS `bookazon`.`Authors` (\n `id` INT NOT NULL AUTO_INCREMENT,\n `Name` VARCHAR(100) NULL,\n PRIMARY KEY (`id`),\n UNIQUE INDEX `id_UNIQUE` (`id` ASC) )\nENGINE = InnoDB;\nLOAD DATA LOCAL INFILE 'Authors.csv' INTO TABLE Authors 
FIELDS TERMINATED BY ',' ENCLOSED BY '\"' LINES TERMINATED BY '\\n' IGNORE 1 LINES;\n\n-- -----------------------------------------------------\n-- Table `bookazon`.`Books`\n-- -----------------------------------------------------\nDROP TABLE IF EXISTS `bookazon`.`Books` ;\n\nCREATE TABLE IF NOT EXISTS `bookazon`.`Books` (\n `id` INT NOT NULL AUTO_INCREMENT,\n `title` VARCHAR(200) NOT NULL,\n `format` VARCHAR(45) NOT NULL,\n `price` DOUBLE NULL default 0.0,\n `authorId` INT NULL,\n `category` INT NULL,\n PRIMARY KEY (`id`),\n UNIQUE INDEX `id_UNIQUE` (`id` ASC)\n)\nENGINE = InnoDB;\nLOAD DATA LOCAL INFILE 'Books.csv' INTO TABLE Books FIELDS TERMINATED BY ',' ENCLOSED BY '\"' IGNORE 1 LINES;\n\n-- -----------------------------------------------------\n-- Table `bookazon`.`Categories`\n-- -----------------------------------------------------\nDROP TABLE IF EXISTS `bookazon`.`Categories` ;\n\nCREATE TABLE IF NOT EXISTS `bookazon`.`Categories` (\n `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,\n `category` VARCHAR(200) NULL,\n PRIMARY KEY (`id`),\n UNIQUE INDEX `id_UNIQUE` (`id` ASC) )\nENGINE = InnoDB;\nLOAD DATA LOCAL INFILE 'Categories.csv' INTO TABLE Categories FIELDS TERMINATED BY ',' ENCLOSED BY '\"' IGNORE 1 LINES;\n\n\n-- -----------------------------------------------------\n-- Table `bookazon`.`Customers`\n-- -----------------------------------------------------\nDROP TABLE IF EXISTS `bookazon`.`Customers` ;\n\nCREATE TABLE IF NOT EXISTS `bookazon`.`Customers` (\n `id` INT NOT NULL AUTO_INCREMENT,\n `Name` VARCHAR(100) NULL,\n `City` VARCHAR(45) NULL,\n `State` VARCHAR(45) NULL,\n PRIMARY KEY (`id`),\n UNIQUE INDEX `id_UNIQUE` (`id` ASC) )\nENGINE = InnoDB;\nLOAD DATA LOCAL INFILE 'Customers.csv' INTO TABLE Customers FIELDS TERMINATED BY ',' ENCLOSED BY '\"' IGNORE 1 LINES;\n\n\n-- -----------------------------------------------------\n-- Table `bookazon`.`LineItem`\n-- -----------------------------------------------------\nDROP TABLE IF EXISTS 
`bookazon`.`LineItems` ;\n\nCREATE TABLE IF NOT EXISTS `bookazon`.`LineItems` (\n `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,\n `bookId` INT NOT NULL,\n `quantity` INT NOT NULL,\n `cost` DOUBLE NULL DEFAULT 0.0,\n `orderId` INT NOT NULL DEFAULT 0.0,\n PRIMARY KEY (`id`),\n UNIQUE INDEX `id_UNIQUE` (`id` ASC)\n)\nENGINE = InnoDB;\nLOAD DATA LOCAL INFILE 'LineItems.csv' INTO TABLE LineItems FIELDS TERMINATED BY ',' ENCLOSED BY '\"' IGNORE 1 LINES;\n\n\n-- -----------------------------------------------------\n-- Table `bookazon`.`Orders`\n-- -----------------------------------------------------\nDROP TABLE IF EXISTS `bookazon`.`Orders` ;\nCREATE TABLE IF NOT EXISTS `bookazon`.`Orders` (\n `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,\n `date` DATETIME NOT NULL,\n `custId` INT NULL,\n PRIMARY KEY (`id`), UNIQUE INDEX `id_UNIQUE` (`id` ASC) ) ENGINE = InnoDB;\nLOAD DATA LOCAL INFILE 'Orders.csv' INTO TABLE Orders FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\\n' IGNORE 1 LINES;\n\n-- select * from salesByAuthor;\nDROP VIEW IF EXISTS `bookazon`.`salesByAuthor` ;\nCREATE VIEW `salesByAuthor` AS\nSELECT\n authors.Name, SUM(bookSales.totalBookUnits) AS totalAuthorUnits, SUM(bookSales.totalBookSales) AS totalAuthorSales, bookSales.yr\nFROM Books bookAuthor\nINNER JOIN Authors authors\nON authors.id = bookAuthor.authorId\nINNER JOIN (\n SELECT\n book.id bookId, FORMAT(SUM(cmb.totalUnits), 2) AS totalBookUnits, FORMAT(SUM(cmb.totalSales), 2) AS totalBookSales, yr\n FROM Books book\n INNER JOIN (\n SELECT bookId, yr, SUM(quantity * cost) AS totalSales, SUM(quantity ) AS totalUnits FROM LineItems item\n INNER JOIN (\n SELECT id, custId, YEAR(STR_TO_DATE(date, '%Y-%m-%d')) AS yr FROM Orders\n ) AS orders\n ON orders.id = item.orderId\n GROUP BY bookId, yr\n ORDER BY SUM(quantity * cost), SUM(quantity)\n ) AS cmb\n ON cmb.bookId = book.id\n GROUP BY book.id, yr\n ORDER BY SUM(book.price * cmb.totalUnits) DESC\n) AS bookSales\nON bookSales.bookId = 
bookAuthor.id\nGROUP BY bookSales.yr, bookAuthor.authorId\nORDER BY SUM(bookSales.totalBookUnits) DESC\n;\n\n-- select * from booksSoldByYear;\nDROP VIEW IF EXISTS `bookazon`.`booksSoldByYear` ;\nCREATE VIEW `booksSoldByYear` AS\nSELECT bookId, yr, SUM(quantity * cost) AS totalSales, SUM(quantity ) AS totalUnits\nFROM LineItems items\nINNER JOIN (SELECT id, custId, YEAR(STR_TO_DATE(date, '%Y-%m-%d')) AS yr FROM Orders) orders ON orders.id = items.orderId\nGROUP BY bookId, yr\nORDER BY SUM(quantity * cost), SUM(quantity)\n;\n\n\n-- select * from booksSold;\nDROP VIEW IF EXISTS `bookazon`.`booksSold` ;\nCREATE VIEW `booksSold` AS\nselect title as bookTitle, cat.category, auth.Name as Author, format, b2.yr, b2.totalSales, b2.totalUnits from books AS b1\nINNER JOIN ( SELECT c.id, c.category from Categories c) as cat ON b1.category = cat.id\nINNER JOIN ( SELECT a.id, a.Name from Authors a ) as auth ON auth.id = b1.authorId\nINNER JOIN ( SELECT bookId, yr, totalSales, totalUnits from booksSoldByYear b2 ) as b2 ON b1.id = b2.bookId\n;\n\n\nDROP VIEW IF EXISTS `bookazon`.`customersOrders` ;\nCREATE VIEW `customersOrders` AS\nselect Name, City, State, o.yr, li.bookId, li.quantity, li.cost, li.total FROM Customers AS c\nINNER JOIN (SELECT o.id, custId, YEAR(STR_TO_DATE(date, '%Y-%m-%d')) AS yr from Orders as o) as o ON o.custId = c.id\nINNER JOIN (SELECT bookId, quantity, cost, (quantity*cost ) total, orderId FROM LineItems li) as li ON li.orderId = o.id\n;\n\nSET SQL_MODE=@OLD_SQL_MODE;\nSET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;\nSET 
UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;\n"},"avg_line_length":{"kind":"number","value":39.387283237,"string":"39.387283"},"max_line_length":{"kind":"number","value":159,"string":"159"},"alphanum_fraction":{"kind":"number","value":0.623275609,"string":"0.623276"},"score":{"kind":"number","value":3.0625,"string":"3.0625"}}},{"rowIdx":737,"cells":{"hexsha":{"kind":"string","value":"aa0f2b4b610691e2b60c7d028cff936bf3f46255"},"size":{"kind":"number","value":884,"string":"884"},"ext":{"kind":"string","value":"asm"},"lang":{"kind":"string","value":"Assembly"},"max_stars_repo_path":{"kind":"string","value":"boot/disk.asm"},"max_stars_repo_name":{"kind":"string","value":"a-quelle/OSproject"},"max_stars_repo_head_hexsha":{"kind":"string","value":"8afaaf87eef9a13fbca7819b751f8d4a7c13099f"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"boot/disk.asm"},"max_issues_repo_name":{"kind":"string","value":"a-quelle/OSproject"},"max_issues_repo_head_hexsha":{"kind":"string","value":"8afaaf87eef9a13fbca7819b751f8d4a7c13099f"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"boot/disk.asm"},"max_forks_repo_name":{"kind":"string","value":"a-quelle/OSproject"},"max_forks_repo_head_hexsha":{"kind":"string","value":"8afaaf87eef9a13fbca7819b751f8d4a7c13099f"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":";params: bx is read buffer address\n;\t\t dl is drive number to read from\n;\t\t dh is number of sectors to read\n;reads dh sectors, starting at sector 2 of cylinder 0 of drive dl\n\ndisk_load:\n\tpush ax\n\tpush bx\n\tpush cx\n\tpush dx\n\tmov ah, 0x02 \t;disk read with int 0x13\n\tmov al, dh \t\t;al is number of sectors to read\n\tmov cl, 0x02 \t;cl is start sector\n\tmov ch, 0x00 \t;ch is cylinder to read\n\tmov dh, 0x00\t;dh is head number to use\n\tint 0x13\n\tjc disk_error\n\n\tpop dx\n\tcmp al, dh \t\t;al contains number of sector read\n\tjne sectors_error\n\tpop cx\n\tpop bx\n\tpop ax\n\tret\n\ndisk_error:\n\tmov bx, DISK_ERROR\n\tcall print\n\tcall print_nl\n\tmov dh, ah \t\t;ah contains error code\n\tcall print_hex\t;will print error code and drive number\n\tjmp disk_loop\n\nsectors_error:\n\tmov bx, SECTORS_ERROR\n\tcall print\n\ndisk_loop:\n\tjmp $\n\nDISK_ERROR: db \"Disk read error\", 0\nSECTORS_ERROR: db \"Incorrect number of sectors read\", 
0"},"avg_line_length":{"kind":"number","value":20.5581395349,"string":"20.55814"},"max_line_length":{"kind":"number","value":65,"string":"65"},"alphanum_fraction":{"kind":"number","value":0.7285067873,"string":"0.728507"},"score":{"kind":"number","value":3.25,"string":"3.25"}}},{"rowIdx":738,"cells":{"hexsha":{"kind":"string","value":"0ba7c2583722cded680577d9143e475dac69688c"},"size":{"kind":"number","value":2258,"string":"2,258"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"src/controllers/userController.js"},"max_stars_repo_name":{"kind":"string","value":"AntonioCopete/node-develop-your-mvc-project"},"max_stars_repo_head_hexsha":{"kind":"string","value":"680a52b22062f4e5c93e1549604a234dc19a1682"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":2,"string":"2"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-12-22T16:43:08.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-12-26T11:31:21.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/controllers/userController.js"},"max_issues_repo_name":{"kind":"string","value":"AntonioCopete/mern-back"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e0f83eeba752095aad05c379ff68454e10283c5a"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/controllers/userController.js"},"max_forks_repo_name":{"kind":"string","value":"AntonioCopete/mern-back"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e0f83eeba752095aad05c379ff68454e10283c5a"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"const { errorMiddleware } = require('../middleware');\nconst db = require('../models');\n\nasync function login(req, res, next) {\n const { uid, email } = req.user;\n\n try {\n const User = await db.User.findOne({ email: email }).select().lean().exec();\n console.log(User);\n\n if (User) {\n res.status(200).send({\n message: 'Logged successfully',\n user: User,\n });\n }\n } catch (err) {\n next(err);\n }\n}\n\nasync function createUser(req, res, next) {\n console.log(req.body);\n const { email, password, fullName, userLink } = req.body;\n\n try {\n const user = await db.User.create({\n fullName: fullName,\n email: email,\n password: password,\n userLink: userLink,\n });\n\n res.status(201).send({\n message: 'User created succeessfully',\n data: user,\n });\n } catch (err) {\n console.log('hello kitty');\n next(err);\n }\n}\n\nasync function getUsers(req, res, next) {\n try {\n const users = await db.User.find().lean().exec();\n res.status(200).send({\n data: users,\n });\n } catch (err) {\n next(err);\n }\n}\n\nasync function updateUser(req, res, next) {\n const { userId } = req.params;\n\n try {\n const updateUser = await db.User.findByIdAndUpdate(userId, req.body, {\n new: true,\n });\n res.status(200).send({\n message: 'User updated successfully',\n data: updateUser,\n });\n } catch (err) {\n next(err);\n }\n}\n\nasync function deleteUser(req, res, next) {\n const { userId } = req.params;\n\n try {\n const deleteUser = await db.User.deleteOne({ _id: userId });\n\n if (deleteUser.deletedCount === 1) {\n res.status(200).send({\n message: 'User successfully deleted',\n });\n } else {\n res.status(500).send({\n message: 'User not removed',\n });\n }\n } catch (err) {\n next(err);\n }\n}\n\nasync function getSingleUser(req, res, next) {\n try {\n const { userId } = 
req.params;\n\n const user = await db.User.findById({ _id: userId }).lean().exec();\n res.status(200).send({\n data: user,\n });\n } catch (err) {\n next(err);\n }\n}\n\nmodule.exports = {\n createUser: createUser,\n login: login,\n getUsers: getUsers,\n updateUser: updateUser,\n deleteUser: deleteUser,\n getSingleUser: getSingleUser,\n};\n"},"avg_line_length":{"kind":"number","value":20.1607142857,"string":"20.160714"},"max_line_length":{"kind":"number","value":80,"string":"80"},"alphanum_fraction":{"kind":"number","value":0.5788308237,"string":"0.578831"},"score":{"kind":"number","value":3.03125,"string":"3.03125"}}},{"rowIdx":739,"cells":{"hexsha":{"kind":"string","value":"0ad2a13bdbcf121ef1a736fdac3c09a1f15a1613"},"size":{"kind":"number","value":1715,"string":"1,715"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"src/commonTest/kotlin/examples/qep/samples.kt"},"max_stars_repo_name":{"kind":"string","value":"GameModsBR/quick-expression-parser"},"max_stars_repo_head_hexsha":{"kind":"string","value":"91c286ad40e9ca345846a0f575edda5fcd3af40e"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/commonTest/kotlin/examples/qep/samples.kt"},"max_issues_repo_name":{"kind":"string","value":"GameModsBR/quick-expression-parser"},"max_issues_repo_head_hexsha":{"kind":"string","value":"91c286ad40e9ca345846a0f575edda5fcd3af40e"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/commonTest/kotlin/examples/qep/samples.kt"},"max_forks_repo_name":{"kind":"string","value":"GameModsBR/quick-expression-parser"},"max_forks_repo_head_hexsha":{"kind":"string","value":"91c286ad40e9ca345846a0f575edda5fcd3af40e"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"@file:Suppress(\"UndocumentedPublicClass\", \"unused\")\n\npackage examples.qep\n\nimport br.com.gamemods.qep.ParameterProvider\nimport br.com.gamemods.qep.parseExpression\n\nprivate fun parseExpressionMapSample()\n = \"Hello #{user.name}, today is #dayOfWeek. Will you go #{user.isAdult? 'work' : 'to school'} today?\"\n .parseExpression(mapOf(\n \"dayOfWeek\" to \"monday\",\n \"user\" to mapOf(\n \"name\" to \"Michael\",\n \"isAdult\" to true\n )\n ))\n\nprivate fun parseExpressionVarargSample()\n = \"Hello #{user.name}, today is #dayOfWeek. Will you go #{user.isAdult? 'work' : 'to school'} today?\"\n .parseExpression(\n \"dayOfWeek\" to \"monday\",\n \"user\" to mapOf(\n \"name\" to \"Michael\",\n \"isAdult\" to true\n )\n )\nprivate fun parseExpressionParamProviderSample(): String {\n data class UserBean(val name: String, val age: Int): ParameterProvider {\n override fun getParameter(identifier: String): Any? = when (identifier) {\n \"name\" -> name\n \"isAdult\" -> age >= 18\n else -> null\n }\n }\n\n data class ExampleBean(val dayOfWeek: String, val user: UserBean): ParameterProvider {\n override fun getParameter(identifier: String): Any? 
= when (identifier) {\n \"dayOfWeek\" -> dayOfWeek\n \"user\" -> user\n else -> null\n }\n }\n\n val bean = ExampleBean(\"monday\", UserBean(\"Michael\", 12))\n\n return \"Hello #{user.name}, today is #dayOfWeek. Will you go #{user.isAdult? 'work' : 'to school'} today?\"\n .parseExpression(bean)\n}\n"},"avg_line_length":{"kind":"number","value":35,"string":"35"},"max_line_length":{"kind":"number","value":110,"string":"110"},"alphanum_fraction":{"kind":"number","value":0.561516035,"string":"0.561516"},"score":{"kind":"number","value":3.15625,"string":"3.15625"}}},{"rowIdx":740,"cells":{"hexsha":{"kind":"string","value":"2a02c560298b28e3615935fc514fccffd58fa8f5"},"size":{"kind":"number","value":4198,"string":"4,198"},"ext":{"kind":"string","value":"java"},"lang":{"kind":"string","value":"Java"},"max_stars_repo_path":{"kind":"string","value":"dap4/d4core/src/main/java/dap4/core/util/DapDump.java"},"max_stars_repo_name":{"kind":"string","value":"joansmith3/thredds"},"max_stars_repo_head_hexsha":{"kind":"string","value":"ac321ce2a15f020f0cdef1ff9a2cf82261d8297c"},"max_stars_repo_licenses":{"kind":"list like","value":["NetCDF"],"string":"[\n \"NetCDF\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2018-04-24T13:53:46.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2018-04-24T13:53:46.000Z"},"max_issues_repo_path":{"kind":"string","value":"dap4/d4core/src/main/java/dap4/core/util/DapDump.java"},"max_issues_repo_name":{"kind":"string","value":"joansmith3/thredds"},"max_issues_repo_head_hexsha":{"kind":"string","value":"ac321ce2a15f020f0cdef1ff9a2cf82261d8297c"},"max_issues_repo_licenses":{"kind":"list like","value":["NetCDF"],"string":"[\n 
\"NetCDF\"\n]"},"max_issues_count":{"kind":"number","value":16,"string":"16"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-04-11T06:42:41.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2019-05-03T04:04:50.000Z"},"max_forks_repo_path":{"kind":"string","value":"dap4/d4core/src/main/java/dap4/core/util/DapDump.java"},"max_forks_repo_name":{"kind":"string","value":"joansmith3/thredds"},"max_forks_repo_head_hexsha":{"kind":"string","value":"ac321ce2a15f020f0cdef1ff9a2cf82261d8297c"},"max_forks_repo_licenses":{"kind":"list like","value":["NetCDF"],"string":"[\n \"NetCDF\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-07-22T19:57:26.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2019-07-22T19:57:26.000Z"},"content":{"kind":"string","value":"/* Copyright 2012, UCAR/Unidata.\n See the LICENSE file for more information.\n*/\n\n\npackage dap4.core.util;\n\nimport java.io.*;\nimport java.nio.ByteBuffer;\nimport java.nio.ByteOrder;\nimport java.util.Arrays;\n\nabstract public class DapDump\n{\n //////////////////////////////////////////////////\n // Provide a simple dump of binary data\n // (Static method)\n\n //////////////////////////////////////////////////\n // Constants\n\n static int MAXLIMIT = 20000;\n //////////////////////////////////////////////////\n // Provide a simple dump of binary data\n\n static public void\n dumpbytes(ByteBuffer buf0, boolean skipdmr)\n {\n int savepos = buf0.position();\n int limit0 = buf0.limit();\n int skipcount = 0;\n if(limit0 > MAXLIMIT) limit0 = MAXLIMIT;\n if(limit0 >= buf0.limit()) limit0 = buf0.limit();\n if(skipdmr) {\n skipcount = buf0.getInt(); //dmr count\n skipcount &= 0xFFFFFF; // mask off the flags to get true count\n skipcount += 4; // skip the count also\n }\n byte[] bytes = new byte[(limit0 + 8) - skipcount];\n Arrays.fill(bytes, (byte) 0);\n 
buf0.position(savepos + skipcount);\n buf0.get(bytes, 0, limit0 - skipcount);\n buf0.position(savepos);\n\n ByteBuffer buf = ByteBuffer.wrap(bytes).order(buf0.order());\n dumpbytes(buf);\n }\n\n /**\n * Dump the contents of a buffer from 0 to position\n *\n * @param buf0 byte buffer to dump\n */\n static public void\n dumpbytes(ByteBuffer buf0)\n {\n int stop = buf0.limit();\n int size = stop + 8;\n ByteBuffer buf = ByteBuffer.allocate(size).order(buf0.order());\n Arrays.fill(buf.array(), (byte) 0);\n buf.put(buf0.array());\n buf.position(0);\n buf.limit(size);\n int i = 0;\n try {\n for(i = 0; buf.position() < stop; i++) {\n int savepos = buf.position();\n int iv = buf.getInt();\n buf.position(savepos);\n long lv = buf.getLong();\n buf.position(savepos);\n short sv = buf.getShort();\n buf.position(savepos);\n byte b = buf.get();\n long uiv = ((long) iv) & 0xFFFFFFFFL;\n int usv = ((int) sv) & 0xFFFF;\n int ib = (int) b;\n int ub = (iv & 0xFF);\n char c = (char) ub;\n String s = Character.toString(c);\n if(c == '\\r') s = \"\\\\r\";\n else if(c == '\\n') s = \"\\\\n\";\n else if(c < ' ') s = \"?\";\n System.err.printf(\"[%03d] %02x %03d %4d '%s'\", i, ub, ub, ib, s);\n System.err.printf(\"\\t%12d 0x%08x\", iv, uiv);\n System.err.printf(\"\\t%5d\\t0x%04x\", sv, usv);\n System.err.println();\n System.err.flush();\n }\n\n } catch (Exception e) {\n System.err.println(\"failure:\" + e);\n } finally {\n System.err.flush();\n //new Exception().printStackTrace(System.err);\n\t System.err.flush();\n }\n }\n\n static public void\n dumpbytestream(OutputStream stream, ByteOrder order, String tag)\n {\n if(stream instanceof ByteArrayOutputStream) {\n byte[] content = ((ByteArrayOutputStream) stream).toByteArray();\n dumpbytestream(content, order, tag);\n }\n }\n\n static public void\n dumpbytestream(ByteBuffer buf, ByteOrder order, String tag)\n {\n dumpbytestream(buf.array(),0,buf.position(),order,tag);\n }\n\n static public void\n dumpbytestream(byte[] content, ByteOrder 
order, String tag)\n {\n dumpbytestream(content,0,content.length,order,tag);\n }\n\n static public void\n dumpbytestream(byte[] content, int start, int len, ByteOrder order, String tag)\n {\n System.err.println(\"++++++++++ \" + tag + \" ++++++++++ \");\n ByteBuffer tmp = ByteBuffer.wrap(content).order(order);\n tmp.position(start);\n tmp.limit(len);\n DapDump.dumpbytes(tmp);\n System.err.println(\"++++++++++ \" + tag + \" ++++++++++ \");\n System.err.flush();\n }\n\n}\n"},"avg_line_length":{"kind":"number","value":31.328358209,"string":"31.328358"},"max_line_length":{"kind":"number","value":83,"string":"83"},"alphanum_fraction":{"kind":"number","value":0.5066698428,"string":"0.50667"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":741,"cells":{"hexsha":{"kind":"string","value":"f07e0ced31d9f3b5a75c59dd3ef793ba14212ab0"},"size":{"kind":"number","value":2831,"string":"2,831"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"tests/base.py"},"max_stars_repo_name":{"kind":"string","value":"octue/octue-sdk-python"},"max_stars_repo_head_hexsha":{"kind":"string","value":"31c6e9358d3401ca708f5b3da702bfe3be3e52ce"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":5,"string":"5"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-10-01T12:43:10.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-14T17:26:25.000Z"},"max_issues_repo_path":{"kind":"string","value":"tests/base.py"},"max_issues_repo_name":{"kind":"string","value":"octue/octue-sdk-python"},"max_issues_repo_head_hexsha":{"kind":"string","value":"31c6e9358d3401ca708f5b3da702bfe3be3e52ce"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":322,"string":"322"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-06-24T15:55:22.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-30T11:49:28.000Z"},"max_forks_repo_path":{"kind":"string","value":"tests/base.py"},"max_forks_repo_name":{"kind":"string","value":"octue/octue-sdk-python"},"max_forks_repo_head_hexsha":{"kind":"string","value":"31c6e9358d3401ca708f5b3da702bfe3be3e52ce"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import os\nimport subprocess\nimport unittest\nimport uuid\nimport warnings\nfrom tempfile import TemporaryDirectory, gettempdir\n\nfrom octue.cloud.emulators import GoogleCloudStorageEmulatorTestResultModifier\nfrom octue.mixins import MixinBase, Pathable\nfrom octue.resources import Datafile, Dataset, Manifest\nfrom tests import TEST_BUCKET_NAME\n\n\nclass MyPathable(Pathable, MixinBase):\n pass\n\n\nclass BaseTestCase(unittest.TestCase):\n \"\"\"Base test case for twined:\n - sets a path to the test data directory\n \"\"\"\n\n test_result_modifier = GoogleCloudStorageEmulatorTestResultModifier(default_bucket_name=TEST_BUCKET_NAME)\n setattr(unittest.TestResult, \"startTestRun\", test_result_modifier.startTestRun)\n setattr(unittest.TestResult, \"stopTestRun\", test_result_modifier.stopTestRun)\n\n def setUp(self):\n # Set up paths to the test data directory and to the app templates directory\n root_dir = os.path.dirname(os.path.abspath(__file__))\n self.data_path = os.path.join(root_dir, \"data\")\n self.templates_path = os.path.join(os.path.dirname(root_dir), \"octue\", \"templates\")\n\n # Make unittest ignore excess ResourceWarnings so tests' console outputs are clearer. 
This has to be done even\n # if these warnings are ignored elsewhere as unittest forces warnings to be displayed by default.\n warnings.simplefilter(\"ignore\", category=ResourceWarning)\n\n super().setUp()\n\n def callCli(self, args):\n \"\"\"Utility to call the octue CLI (eg for a templated example) in a separate subprocess\n Enables testing that multiple processes aren't using the same memory space, or for running multiple apps in\n parallel to ensure they don't conflict\n \"\"\"\n call_id = str(uuid.uuid4())\n tmp_dir_name = os.path.join(gettempdir(), \"octue-sdk-python\", f\"test-{call_id}\")\n\n with TemporaryDirectory(dir=tmp_dir_name):\n subprocess.call(args, cwd=tmp_dir_name)\n\n def create_valid_dataset(self):\n \"\"\" Create a valid dataset with two valid datafiles (they're the same file in this case). \"\"\"\n path_from = MyPathable(path=os.path.join(self.data_path, \"basic_files\", \"configuration\", \"test-dataset\"))\n path = os.path.join(\"path-within-dataset\", \"a_test_file.csv\")\n\n files = [\n Datafile(path_from=path_from, path=path, skip_checks=False),\n Datafile(path_from=path_from, path=path, skip_checks=False),\n ]\n\n return Dataset(files=files)\n\n def create_valid_manifest(self):\n \"\"\" Create a valid manifest with two valid datasets (they're the same dataset in this case). 
\"\"\"\n datasets = [self.create_valid_dataset(), self.create_valid_dataset()]\n manifest = Manifest(datasets=datasets, keys={\"my_dataset\": 0, \"another_dataset\": 1})\n return manifest\n"},"avg_line_length":{"kind":"number","value":42.2537313433,"string":"42.253731"},"max_line_length":{"kind":"number","value":118,"string":"118"},"alphanum_fraction":{"kind":"number","value":0.7160014129,"string":"0.716001"},"score":{"kind":"number","value":3.265625,"string":"3.265625"}}},{"rowIdx":742,"cells":{"hexsha":{"kind":"string","value":"39c20b5333b6e43bd4fb9b976104f021d0684e57"},"size":{"kind":"number","value":157167,"string":"157,167"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"example/main.js"},"max_stars_repo_name":{"kind":"string","value":"LeeeeeeM/mobx"},"max_stars_repo_head_hexsha":{"kind":"string","value":"67540a4e5a6cae3a65e12f74a5a0bf7258cb1f83"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"example/main.js"},"max_issues_repo_name":{"kind":"string","value":"LeeeeeeM/mobx"},"max_issues_repo_head_hexsha":{"kind":"string","value":"67540a4e5a6cae3a65e12f74a5a0bf7258cb1f83"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"example/main.js"},"max_forks_repo_name":{"kind":"string","value":"LeeeeeeM/mobx"},"max_forks_repo_head_hexsha":{"kind":"string","value":"67540a4e5a6cae3a65e12f74a5a0bf7258cb1f83"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"!(function(e) {\n var t = {}\n function n(r) {\n if (t[r]) return t[r].exports\n var i = (t[r] = { i: r, l: !1, exports: {} })\n return e[r].call(i.exports, i, i.exports, n), (i.l = !0), i.exports\n }\n ;(n.m = e),\n (n.c = t),\n (n.d = function(e, t, r) {\n n.o(e, t) || Object.defineProperty(e, t, { enumerable: !0, get: r })\n }),\n (n.r = function(e) {\n \"undefined\" != typeof Symbol &&\n Symbol.toStringTag &&\n Object.defineProperty(e, Symbol.toStringTag, { value: \"Module\" }),\n Object.defineProperty(e, \"__esModule\", { value: !0 })\n }),\n (n.t = function(e, t) {\n if ((1 & t && (e = n(e)), 8 & t)) return e\n if (4 & t && \"object\" == typeof e && e && e.__esModule) return e\n var r = Object.create(null)\n if (\n (n.r(r),\n Object.defineProperty(r, \"default\", { enumerable: !0, value: e }),\n 2 & t && \"string\" != typeof e)\n )\n for (var i in e)\n n.d(\n r,\n i,\n function(t) {\n return e[t]\n }.bind(null, i)\n )\n return r\n }),\n (n.n = function(e) {\n var t =\n e && e.__esModule\n ? 
function() {\n return e.default\n }\n : function() {\n return e\n }\n return n.d(t, \"a\", t), t\n }),\n (n.o = function(e, t) {\n return Object.prototype.hasOwnProperty.call(e, t)\n }),\n (n.p = \"\"),\n n((n.s = 6))\n})([\n function(e, t, n) {\n \"use strict\"\n function r(e, t) {\n return (\n (function(e) {\n if (Array.isArray(e)) return e\n })(e) ||\n (function(e, t) {\n var n = [],\n r = !0,\n i = !1,\n o = void 0\n try {\n for (\n var a, u = e[Symbol.iterator]();\n !(r = (a = u.next()).done) && (n.push(a.value), !t || n.length !== t);\n r = !0\n );\n } catch (e) {\n ;(i = !0), (o = e)\n } finally {\n try {\n r || null == u.return || u.return()\n } finally {\n if (i) throw o\n }\n }\n return n\n })(e, t) ||\n (function() {\n throw new TypeError(\"Invalid attempt to destructure non-iterable instance\")\n })()\n )\n }\n function i(e) {\n return (i =\n \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator\n ? function(e) {\n return typeof e\n }\n : function(e) {\n return e &&\n \"function\" == typeof Symbol &&\n e.constructor === Symbol &&\n e !== Symbol.prototype\n ? 
\"symbol\"\n : typeof e\n })(e)\n }\n var o =\n \"An invariant failed, however the error is obfuscated because this is an production build.\",\n a = []\n Object.freeze(a)\n var u = {}\n function c() {\n return ++Pe.a.mobxGuid\n }\n function s(e) {\n throw (l(!1, e), \"X\")\n }\n function l(e, t) {\n if (!e) throw new Error(\"[mobx] \" + (t || o))\n }\n Object.freeze(u)\n function f(e, t) {\n return !1\n }\n function h(e) {\n var t = !1\n return function() {\n if (!t) return (t = !0), e.apply(this, arguments)\n }\n }\n var d = function() {}\n function v(e) {\n var t = []\n return (\n e.forEach(function(e) {\n ;-1 === t.indexOf(e) && t.push(e)\n }),\n t\n )\n }\n function y(e) {\n return null !== e && \"object\" === i(e)\n }\n function p(e) {\n if (null === e || \"object\" !== i(e)) return !1\n var t = Object.getPrototypeOf(e)\n return t === Object.prototype || null === t\n }\n function b(e, t, n) {\n Object.defineProperty(e, t, {\n enumerable: !1,\n writable: !0,\n configurable: !0,\n value: n\n })\n }\n function m(e, t, n) {\n Object.defineProperty(e, t, {\n enumerable: !1,\n writable: !1,\n configurable: !0,\n value: n\n })\n }\n function g(e, t) {\n var n = Object.getOwnPropertyDescriptor(e, t)\n return !n || (!1 !== n.configurable && !1 !== n.writable)\n }\n function w(e, t) {\n 0\n }\n function O(e, t) {\n var n = \"isMobX\" + e\n return (\n (t.prototype[n] = !0),\n function(e) {\n return y(e) && !0 === e[n]\n }\n )\n }\n function S(e) {\n return e instanceof Map\n }\n function k(e) {\n return e instanceof Set\n }\n function _(e) {\n var t = new Set()\n for (var n in e) t.add(n)\n return (\n Object.getOwnPropertySymbols(e).forEach(function(n) {\n Object.getOwnPropertyDescriptor(e, n).enumerable && t.add(n)\n }),\n Array.from(t)\n )\n }\n function A(e) {\n return e && e.toString ? e.toString() : new String(e).toString()\n }\n function E(e) {\n return p(e)\n ? Object.keys(e)\n : Array.isArray(e)\n ? 
e.map(function(e) {\n return r(e, 1)[0]\n })\n : S(e) || $t(e)\n ? Array.from(e.keys())\n : s(\"Cannot get keys from '\".concat(e, \"'\"))\n }\n function j(e) {\n return null === e ? null : \"object\" === i(e) ? \"\" + e : e\n }\n function T(e, t) {\n for (var n = 0; n < t.length; n++) {\n var r = t[n]\n ;(r.enumerable = r.enumerable || !1),\n (r.configurable = !0),\n \"value\" in r && (r.writable = !0),\n Object.defineProperty(e, r.key, r)\n }\n }\n var x = Symbol(\"mobx administration\"),\n C = (function() {\n function e() {\n var t =\n arguments.length > 0 && void 0 !== arguments[0]\n ? arguments[0]\n : \"Atom@\" + c()\n !(function(e, t) {\n if (!(e instanceof t))\n throw new TypeError(\"Cannot call a class as a function\")\n })(this, e),\n (this.name = t),\n (this.isPendingUnobservation = !1),\n (this.isBeingObserved = !1),\n (this.observers = new Set()),\n (this.diffValue = 0),\n (this.lastAccessedBy = 0),\n (this.lowestObserverState = be.NOT_TRACKING)\n }\n var t, n, r\n return (\n (t = e),\n (n = [\n {\n key: \"onBecomeObserved\",\n value: function() {\n this.onBecomeObservedListeners &&\n this.onBecomeObservedListeners.forEach(function(e) {\n return e()\n })\n }\n },\n {\n key: \"onBecomeUnobserved\",\n value: function() {\n this.onBecomeUnobservedListeners &&\n this.onBecomeUnobservedListeners.forEach(function(e) {\n return e()\n })\n }\n },\n {\n key: \"reportObserved\",\n value: function() {\n return Me(this)\n }\n },\n {\n key: \"reportChanged\",\n value: function() {\n Re(), Ue(this), Ie()\n }\n },\n {\n key: \"toString\",\n value: function() {\n return this.name\n }\n }\n ]) && T(t.prototype, n),\n r && T(t, r),\n e\n )\n })(),\n P = O(\"Atom\", C)\n function V(e) {\n var t = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : d,\n n = arguments.length > 2 && void 0 !== arguments[2] ? 
arguments[2] : d,\n r = new C(e)\n return t !== d && ht(r, t), n !== d && dt(r, n), r\n }\n var D = {\n identity: function(e, t) {\n return e === t\n },\n structural: function(e, t) {\n return On(e, t)\n },\n default: function(e, t) {\n return Object.is(e, t)\n }\n },\n N = Symbol(\"mobx did run lazy initializers\"),\n L = Symbol(\"mobx pending decorators\"),\n B = {},\n R = {}\n function I(e, t) {\n var n = t ? B : R\n return (\n n[e] ||\n (n[e] = {\n configurable: !0,\n enumerable: t,\n get: function() {\n return M(this), this[e]\n },\n set: function(t) {\n M(this), (this[e] = t)\n }\n })\n )\n }\n function M(e) {\n if (!0 !== e[N]) {\n var t = e[L]\n if (t)\n for (var n in (b(e, N, !0), t)) {\n var r = t[n]\n r.propertyCreator(\n e,\n r.prop,\n r.descriptor,\n r.decoratorTarget,\n r.decoratorArguments\n )\n }\n }\n }\n function U(e, t) {\n return function() {\n var n,\n r = function(r, i, o, a) {\n if (!0 === a) return t(r, i, o, r, n), null\n if (!Object.prototype.hasOwnProperty.call(r, L)) {\n var u = r[L]\n b(r, L, Object.assign({}, u))\n }\n return (\n (r[L][i] = {\n prop: i,\n propertyCreator: t,\n descriptor: o,\n decoratorTarget: r,\n decoratorArguments: n\n }),\n I(i, e)\n )\n }\n return G(arguments)\n ? ((n = a), r.apply(null, arguments))\n : ((n = Array.prototype.slice.call(arguments)), r)\n }\n }\n function G(e) {\n return (\n ((2 === e.length || 3 === e.length) && \"string\" == typeof e[1]) ||\n (4 === e.length && !0 === e[3])\n )\n }\n function K(e, t, n) {\n return Ot(e)\n ? e\n : Array.isArray(e)\n ? te.array(e, { name: n })\n : p(e)\n ? te.object(e, void 0, { name: n })\n : S(e)\n ? te.map(e, { name: n })\n : k(e)\n ? te.set(e, { name: n })\n : e\n }\n function z(e, t, n) {\n return null == e\n ? e\n : vn(e) || qt(e) || $t(e) || rn(e)\n ? e\n : Array.isArray(e)\n ? te.array(e, { name: n, deep: !1 })\n : p(e)\n ? te.object(e, void 0, { name: n, deep: !1 })\n : S(e)\n ? te.map(e, { name: n, deep: !1 })\n : k(e)\n ? 
te.set(e, { name: n, deep: !1 })\n : s(!1)\n }\n function H(e) {\n return e\n }\n function J(e, t, n) {\n return On(e, t) ? t : e\n }\n var q = n(4)\n var W = { deep: !0, name: void 0, defaultDecorator: void 0, proxy: !0 }\n function X(e) {\n return null == e ? W : \"string\" == typeof e ? { name: e, deep: !0, proxy: !0 } : e\n }\n Object.freeze(W)\n var Y = Object(q.a)(K),\n F = Object(q.a)(z),\n Q = Object(q.a)(H),\n Z = Object(q.a)(J)\n function $(e) {\n return e.defaultDecorator ? e.defaultDecorator.enhancer : !1 === e.deep ? H : K\n }\n var ee = {\n box: function(e, t) {\n arguments.length > 2 && ne(\"box\")\n var n = X(t)\n return new ve(e, $(n), n.name, !0, n.equals)\n },\n array: function(e, t) {\n arguments.length > 2 && ne(\"array\")\n var n = X(t)\n return Gt(e, $(n), n.name)\n },\n map: function(e, t) {\n arguments.length > 2 && ne(\"map\")\n var n = X(t)\n return new Zt(e, $(n), n.name)\n },\n set: function(e, t) {\n arguments.length > 2 && ne(\"set\")\n var n = X(t)\n return new nn(e, $(n), n.name)\n },\n object: function(e, t, n) {\n \"string\" == typeof arguments[1] && ne(\"object\")\n var r = X(n)\n if (!1 === r.proxy) return yt({}, e, t, r)\n var i = pt(r),\n o = yt({}, void 0, void 0, r),\n a = Ct(o)\n return bt(a, e, t, i), a\n },\n ref: Q,\n shallow: F,\n deep: Y,\n struct: Z\n },\n te = function(e, t, n) {\n if (\"string\" == typeof arguments[1]) return Y.apply(null, arguments)\n if (Ot(e)) return e\n var r = p(e)\n ? te.object(e, t, n)\n : Array.isArray(e)\n ? te.array(e, t)\n : S(e)\n ? te.map(e, t)\n : k(e)\n ? te.set(e, t)\n : e\n if (r !== e) return r\n s(!1)\n }\n function ne(e) {\n s(\n \"Expected one or two arguments to observable.\"\n .concat(e, \". 
Did you accidentally try to use observable.\")\n .concat(e, \" as decorator?\")\n )\n }\n Object.keys(ee).forEach(function(e) {\n return (te[e] = ee[e])\n })\n var re = U(!1, function(e, t, n, r, i) {\n var o = n.get,\n a = n.set,\n u = i[0] || {}\n cn(e).addComputedProp(e, t, Object.assign({ get: o, set: a, context: e }, u))\n })\n re({ equals: D.structural })\n function ie(e, t, n) {\n var r = function() {\n return oe(e, t, n || this, arguments)\n }\n return (r.isMobxAction = !0), r\n }\n function oe(e, t, n, r) {\n var i = (function(e, t, n, r) {\n var i = 0\n var o = Te()\n Re()\n var a = ue(!0)\n return {\n prevDerivation: o,\n prevAllowStateChanges: a,\n notifySpy: !1,\n startTime: i\n }\n })(),\n o = !0\n try {\n var a = t.apply(n, r)\n return (o = !1), a\n } finally {\n o\n ? ((Pe.a.suppressReactionErrors = o), ae(i), (Pe.a.suppressReactionErrors = !1))\n : ae(i)\n }\n }\n function ae(e) {\n ce(e.prevAllowStateChanges), Ie(), xe(e.prevDerivation), e.notifySpy\n }\n function ue(e) {\n var t = Pe.a.allowStateChanges\n return (Pe.a.allowStateChanges = e), t\n }\n function ce(e) {\n Pe.a.allowStateChanges = e\n }\n function se(e) {\n return (se =\n \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator\n ? function(e) {\n return typeof e\n }\n : function(e) {\n return e &&\n \"function\" == typeof Symbol &&\n e.constructor === Symbol &&\n e !== Symbol.prototype\n ? \"symbol\"\n : typeof e\n })(e)\n }\n function le(e, t) {\n for (var n = 0; n < t.length; n++) {\n var r = t[n]\n ;(r.enumerable = r.enumerable || !1),\n (r.configurable = !0),\n \"value\" in r && (r.writable = !0),\n Object.defineProperty(e, r.key, r)\n }\n }\n function fe(e, t) {\n return !t || (\"object\" !== se(t) && \"function\" != typeof t)\n ? (function(e) {\n if (void 0 === e)\n throw new ReferenceError(\n \"this hasn't been initialised - super() hasn't been called\"\n )\n return e\n })(e)\n : t\n }\n function he(e) {\n return (he = Object.setPrototypeOf\n ? 
Object.getPrototypeOf\n : function(e) {\n return e.__proto__ || Object.getPrototypeOf(e)\n })(e)\n }\n function de(e, t) {\n return (de =\n Object.setPrototypeOf ||\n function(e, t) {\n return (e.__proto__ = t), e\n })(e, t)\n }\n var ve = (function(e) {\n function t(e, n) {\n var r,\n i =\n arguments.length > 2 && void 0 !== arguments[2]\n ? arguments[2]\n : \"ObservableValue@\" + c(),\n o =\n (!(arguments.length > 3 && void 0 !== arguments[3]) || arguments[3],\n arguments.length > 4 && void 0 !== arguments[4]\n ? arguments[4]\n : D.default)\n return (\n (function(e, t) {\n if (!(e instanceof t))\n throw new TypeError(\"Cannot call a class as a function\")\n })(this, t),\n ((r = fe(this, he(t).call(this, i))).enhancer = n),\n (r.name = i),\n (r.equals = o),\n (r.hasUnreportedChange = !1),\n (r.value = n(e, void 0, i)),\n r\n )\n }\n var n, r, i\n return (\n (function(e, t) {\n if (\"function\" != typeof t && null !== t)\n throw new TypeError(\n \"Super expression must either be null or a function\"\n )\n ;(e.prototype = Object.create(t && t.prototype, {\n constructor: { value: e, writable: !0, configurable: !0 }\n })),\n t && de(e, t)\n })(t, C),\n (n = t),\n (r = [\n {\n key: \"dehanceValue\",\n value: function(e) {\n return void 0 !== this.dehancer ? this.dehancer(e) : e\n }\n },\n {\n key: \"set\",\n value: function(e) {\n this.value,\n (e = this.prepareNewValue(e)) !== Pe.a.UNCHANGED &&\n this.setNewValue(e)\n }\n },\n {\n key: \"prepareNewValue\",\n value: function(e) {\n if ((_e(this), Pt(this))) {\n var t = Dt(this, { object: this, type: \"update\", newValue: e })\n if (!t) return Pe.a.UNCHANGED\n e = t.newValue\n }\n return (\n (e = this.enhancer(e, this.value, this.name)),\n this.equals(this.value, e) ? 
Pe.a.UNCHANGED : e\n )\n }\n },\n {\n key: \"setNewValue\",\n value: function(e) {\n var t = this.value\n ;(this.value = e),\n this.reportChanged(),\n Nt(this) &&\n Bt(this, {\n type: \"update\",\n object: this,\n newValue: e,\n oldValue: t\n })\n }\n },\n {\n key: \"get\",\n value: function() {\n return this.reportObserved(), this.dehanceValue(this.value)\n }\n },\n {\n key: \"intercept\",\n value: function(e) {\n return Vt(this, e)\n }\n },\n {\n key: \"observe\",\n value: function(e, t) {\n return (\n t &&\n e({\n object: this,\n type: \"update\",\n newValue: this.value,\n oldValue: void 0\n }),\n Lt(this, e)\n )\n }\n },\n {\n key: \"toJSON\",\n value: function() {\n return this.get()\n }\n },\n {\n key: \"toString\",\n value: function() {\n return \"\".concat(this.name, \"[\").concat(this.value, \"]\")\n }\n },\n {\n key: \"valueOf\",\n value: function() {\n return j(this.get())\n }\n },\n {\n key: Symbol.toPrimitive,\n value: function() {\n return this.valueOf()\n }\n }\n ]) && le(n.prototype, r),\n i && le(n, i),\n t\n )\n })(),\n ye = O(\"ObservableValue\", ve)\n function pe(e, t) {\n for (var n = 0; n < t.length; n++) {\n var r = t[n]\n ;(r.enumerable = r.enumerable || !1),\n (r.configurable = !0),\n \"value\" in r && (r.writable = !0),\n Object.defineProperty(e, r.key, r)\n }\n }\n var be,\n me,\n ge = (function() {\n function e(t) {\n !(function(e, t) {\n if (!(e instanceof t))\n throw new TypeError(\"Cannot call a class as a function\")\n })(this, e),\n (this.dependenciesState = be.NOT_TRACKING),\n (this.observing = []),\n (this.newObserving = null),\n (this.isBeingObserved = !1),\n (this.isPendingUnobservation = !1),\n (this.observers = new Set()),\n (this.diffValue = 0),\n (this.runId = 0),\n (this.lastAccessedBy = 0),\n (this.lowestObserverState = be.UP_TO_DATE),\n (this.unboundDepsCount = 0),\n (this.__mapid = \"#\" + c()),\n (this.value = new Oe(null)),\n (this.isComputing = !1),\n (this.isRunningSetter = !1),\n (this.isTracing = me.NONE),\n 
(this.derivation = t.get),\n (this.name = t.name || \"ComputedValue@\" + c()),\n t.set && (this.setter = ie(this.name + \"-setter\", t.set)),\n (this.equals =\n t.equals ||\n (t.compareStructural || t.struct ? D.structural : D.default)),\n (this.scope = t.context),\n (this.requiresReaction = !!t.requiresReaction),\n (this.keepAlive = !!t.keepAlive)\n }\n var t, n, r\n return (\n (t = e),\n (n = [\n {\n key: \"onBecomeStale\",\n value: function() {\n Ke(this)\n }\n },\n {\n key: \"onBecomeObserved\",\n value: function() {\n this.onBecomeObservedListeners &&\n this.onBecomeObservedListeners.forEach(function(e) {\n return e()\n })\n }\n },\n {\n key: \"onBecomeUnobserved\",\n value: function() {\n this.onBecomeUnobservedListeners &&\n this.onBecomeUnobservedListeners.forEach(function(e) {\n return e()\n })\n }\n },\n {\n key: \"get\",\n value: function() {\n this.isComputing &&\n s(\n \"Cycle detected in computation \"\n .concat(this.name, \": \")\n .concat(this.derivation)\n ),\n 0 !== Pe.a.inBatch ||\n 0 !== this.observers.size ||\n this.keepAlive\n ? (Me(this), ke(this) && this.trackAndCompute() && Ge(this))\n : ke(this) &&\n (this.warnAboutUntrackedRead(),\n Re(),\n (this.value = this.computeValue(!1)),\n Ie())\n var e = this.value\n if (Se(e)) throw e.cause\n return e\n }\n },\n {\n key: \"peek\",\n value: function() {\n var e = this.computeValue(!1)\n if (Se(e)) throw e.cause\n return e\n }\n },\n {\n key: \"set\",\n value: function(e) {\n if (this.setter) {\n l(\n !this.isRunningSetter,\n \"The setter of computed value '\".concat(\n this.name,\n \"' is trying to update itself. 
Did you intend to update an _observable_ value, instead of the computed property?\"\n )\n ),\n (this.isRunningSetter = !0)\n try {\n this.setter.call(this.scope, e)\n } finally {\n this.isRunningSetter = !1\n }\n } else l(!1, !1)\n }\n },\n {\n key: \"trackAndCompute\",\n value: function() {\n var e = this.value,\n t = this.dependenciesState === be.NOT_TRACKING,\n n = this.computeValue(!0),\n r = t || Se(e) || Se(n) || !this.equals(e, n)\n return r && (this.value = n), r\n }\n },\n {\n key: \"computeValue\",\n value: function(e) {\n var t\n if (((this.isComputing = !0), Pe.a.computationDepth++, e))\n t = Ae(this, this.derivation, this.scope)\n else if (!0 === Pe.a.disableErrorBoundaries)\n t = this.derivation.call(this.scope)\n else\n try {\n t = this.derivation.call(this.scope)\n } catch (e) {\n t = new Oe(e)\n }\n return Pe.a.computationDepth--, (this.isComputing = !1), t\n }\n },\n {\n key: \"suspend\",\n value: function() {\n this.keepAlive || (Ee(this), (this.value = void 0))\n }\n },\n {\n key: \"observe\",\n value: function(e, t) {\n var n = this,\n r = !0,\n i = void 0\n return st(function() {\n var o = n.get()\n if (!r || t) {\n var a = Te()\n e({ type: \"update\", object: n, newValue: o, oldValue: i }),\n xe(a)\n }\n ;(r = !1), (i = o)\n })\n }\n },\n { key: \"warnAboutUntrackedRead\", value: function() {} },\n {\n key: \"toJSON\",\n value: function() {\n return this.get()\n }\n },\n {\n key: \"toString\",\n value: function() {\n return \"\"\n .concat(this.name, \"[\")\n .concat(this.derivation.toString(), \"]\")\n }\n },\n {\n key: \"valueOf\",\n value: function() {\n return j(this.get())\n }\n },\n {\n key: Symbol.toPrimitive,\n value: function() {\n return this.valueOf()\n }\n }\n ]) && pe(t.prototype, n),\n r && pe(t, r),\n e\n )\n })(),\n we = O(\"ComputedValue\", ge)\n !(function(e) {\n ;(e[(e.NOT_TRACKING = -1)] = \"NOT_TRACKING\"),\n (e[(e.UP_TO_DATE = 0)] = \"UP_TO_DATE\"),\n (e[(e.POSSIBLY_STALE = 1)] = \"POSSIBLY_STALE\"),\n (e[(e.STALE = 2)] = 
\"STALE\")\n })(be || (be = {})),\n (function(e) {\n ;(e[(e.NONE = 0)] = \"NONE\"), (e[(e.LOG = 1)] = \"LOG\"), (e[(e.BREAK = 2)] = \"BREAK\")\n })(me || (me = {}))\n var Oe = function e(t) {\n !(function(e, t) {\n if (!(e instanceof t)) throw new TypeError(\"Cannot call a class as a function\")\n })(this, e),\n (this.cause = t)\n }\n function Se(e) {\n return e instanceof Oe\n }\n function ke(e) {\n switch (e.dependenciesState) {\n case be.UP_TO_DATE:\n return !1\n case be.NOT_TRACKING:\n case be.STALE:\n return !0\n case be.POSSIBLY_STALE:\n for (var t = Te(), n = e.observing, r = n.length, i = 0; i < r; i++) {\n var o = n[i]\n if (we(o)) {\n if (Pe.a.disableErrorBoundaries) o.get()\n else\n try {\n o.get()\n } catch (e) {\n return xe(t), !0\n }\n if (e.dependenciesState === be.STALE) return xe(t), !0\n }\n }\n return Ce(e), xe(t), !1\n }\n }\n function _e(e) {\n var t = e.observers.size > 0\n Pe.a.computationDepth > 0 && t && s(!1),\n Pe.a.allowStateChanges || (!t && \"strict\" !== Pe.a.enforceActions) || s(!1)\n }\n function Ae(e, t, n) {\n Ce(e),\n (e.newObserving = new Array(e.observing.length + 100)),\n (e.unboundDepsCount = 0),\n (e.runId = ++Pe.a.runId)\n var r,\n i = Pe.a.trackingDerivation\n if (((Pe.a.trackingDerivation = e), !0 === Pe.a.disableErrorBoundaries)) r = t.call(n)\n else\n try {\n r = t.call(n)\n } catch (e) {\n r = new Oe(e)\n }\n return (\n (Pe.a.trackingDerivation = i),\n (function(e) {\n for (\n var t = e.observing,\n n = (e.observing = e.newObserving),\n r = be.UP_TO_DATE,\n i = 0,\n o = e.unboundDepsCount,\n a = 0;\n a < o;\n a++\n ) {\n var u = n[a]\n 0 === u.diffValue && ((u.diffValue = 1), i !== a && (n[i] = u), i++),\n u.dependenciesState > r && (r = u.dependenciesState)\n }\n ;(n.length = i), (e.newObserving = null), (o = t.length)\n for (; o--; ) {\n var c = t[o]\n 0 === c.diffValue && Le(c, e), (c.diffValue = 0)\n }\n for (; i--; ) {\n var s = n[i]\n 1 === s.diffValue && ((s.diffValue = 0), Ne(s, e))\n }\n r !== be.UP_TO_DATE && 
((e.dependenciesState = r), e.onBecomeStale())\n })(e),\n r\n )\n }\n function Ee(e) {\n var t = e.observing\n e.observing = []\n for (var n = t.length; n--; ) Le(t[n], e)\n e.dependenciesState = be.NOT_TRACKING\n }\n function je(e) {\n var t = Te()\n try {\n return e()\n } finally {\n xe(t)\n }\n }\n function Te() {\n var e = Pe.a.trackingDerivation\n return (Pe.a.trackingDerivation = null), e\n }\n function xe(e) {\n Pe.a.trackingDerivation = e\n }\n function Ce(e) {\n if (e.dependenciesState !== be.UP_TO_DATE) {\n e.dependenciesState = be.UP_TO_DATE\n for (var t = e.observing, n = t.length; n--; )\n t[n].lowestObserverState = be.UP_TO_DATE\n }\n }\n var Pe = n(5)\n function Ve(e) {\n return e.observers && e.observers.size > 0\n }\n function De(e) {\n return e.observers\n }\n function Ne(e, t) {\n e.observers.add(t),\n e.lowestObserverState > t.dependenciesState &&\n (e.lowestObserverState = t.dependenciesState)\n }\n function Le(e, t) {\n e.observers.delete(t), 0 === e.observers.size && Be(e)\n }\n function Be(e) {\n !1 === e.isPendingUnobservation &&\n ((e.isPendingUnobservation = !0), Pe.a.pendingUnobservations.push(e))\n }\n function Re() {\n Pe.a.inBatch++\n }\n function Ie() {\n if (0 == --Pe.a.inBatch) {\n Xe()\n for (var e = Pe.a.pendingUnobservations, t = 0; t < e.length; t++) {\n var n = e[t]\n ;(n.isPendingUnobservation = !1),\n 0 === n.observers.size &&\n (n.isBeingObserved &&\n ((n.isBeingObserved = !1), n.onBecomeUnobserved()),\n n instanceof ge && n.suspend())\n }\n Pe.a.pendingUnobservations = []\n }\n }\n function Me(e) {\n var t = Pe.a.trackingDerivation\n return null !== t\n ? 
(t.runId !== e.lastAccessedBy &&\n ((e.lastAccessedBy = t.runId),\n (t.newObserving[t.unboundDepsCount++] = e),\n e.isBeingObserved || ((e.isBeingObserved = !0), e.onBecomeObserved())),\n !0)\n : (0 === e.observers.size && Pe.a.inBatch > 0 && Be(e), !1)\n }\n function Ue(e) {\n e.lowestObserverState !== be.STALE &&\n ((e.lowestObserverState = be.STALE),\n e.observers.forEach(function(t) {\n t.dependenciesState === be.UP_TO_DATE &&\n (t.isTracing !== me.NONE && ze(t, e), t.onBecomeStale()),\n (t.dependenciesState = be.STALE)\n }))\n }\n function Ge(e) {\n e.lowestObserverState !== be.STALE &&\n ((e.lowestObserverState = be.STALE),\n e.observers.forEach(function(t) {\n t.dependenciesState === be.POSSIBLY_STALE\n ? (t.dependenciesState = be.STALE)\n : t.dependenciesState === be.UP_TO_DATE &&\n (e.lowestObserverState = be.UP_TO_DATE)\n }))\n }\n function Ke(e) {\n e.lowestObserverState === be.UP_TO_DATE &&\n ((e.lowestObserverState = be.POSSIBLY_STALE),\n e.observers.forEach(function(t) {\n t.dependenciesState === be.UP_TO_DATE &&\n ((t.dependenciesState = be.POSSIBLY_STALE),\n t.isTracing !== me.NONE && ze(t, e),\n t.onBecomeStale())\n }))\n }\n function ze(e, t) {\n if (\n (console.log(\n \"[mobx.trace] '\"\n .concat(e.name, \"' is invalidated due to a change in: '\")\n .concat(t.name, \"'\")\n ),\n e.isTracing === me.BREAK)\n ) {\n var n = []\n !(function e(t, n, r) {\n if (n.length >= 1e3) return void n.push(\"(and many more)\")\n n.push(\"\".concat(new Array(r).join(\"\\t\")).concat(t.name))\n t.dependencies &&\n t.dependencies.forEach(function(t) {\n return e(t, n, r + 1)\n })\n })(mt(e), n, 1),\n new Function(\n \"debugger;\\n/*\\nTracing '\"\n .concat(\n e.name,\n \"'\\n\\nYou are entering this break point because derivation '\"\n )\n .concat(e.name, \"' is being traced and '\")\n .concat(\n t.name,\n \"' is now forcing it to update.\\nJust follow the stacktrace you should now see in the devtools to see precisely what piece of your code is causing this 
update\\nThe stackframe you are looking for is at least ~6-8 stack-frames up.\\n\\n\"\n )\n .concat(\n e instanceof ge\n ? e.derivation.toString().replace(/[*]\\//g, \"/\")\n : \"\",\n \"\\n\\nThe dependencies for this derivation are:\\n\\n\"\n )\n .concat(n.join(\"\\n\"), \"\\n*/\\n \")\n )()\n }\n }\n function He(e, t) {\n for (var n = 0; n < t.length; n++) {\n var r = t[n]\n ;(r.enumerable = r.enumerable || !1),\n (r.configurable = !0),\n \"value\" in r && (r.writable = !0),\n Object.defineProperty(e, r.key, r)\n }\n }\n var Je = (function() {\n function e() {\n var t =\n arguments.length > 0 && void 0 !== arguments[0]\n ? arguments[0]\n : \"Reaction@\" + c(),\n n = arguments.length > 1 ? arguments[1] : void 0,\n r = arguments.length > 2 ? arguments[2] : void 0\n !(function(e, t) {\n if (!(e instanceof t)) throw new TypeError(\"Cannot call a class as a function\")\n })(this, e),\n (this.name = t),\n (this.onInvalidate = n),\n (this.errorHandler = r),\n (this.observing = []),\n (this.newObserving = []),\n (this.dependenciesState = be.NOT_TRACKING),\n (this.diffValue = 0),\n (this.runId = 0),\n (this.unboundDepsCount = 0),\n (this.__mapid = \"#\" + c()),\n (this.isDisposed = !1),\n (this._isScheduled = !1),\n (this._isTrackPending = !1),\n (this._isRunning = !1),\n (this.isTracing = me.NONE)\n }\n var t, n, r\n return (\n (t = e),\n (n = [\n {\n key: \"onBecomeStale\",\n value: function() {\n this.schedule()\n }\n },\n {\n key: \"schedule\",\n value: function() {\n this._isScheduled ||\n ((this._isScheduled = !0), Pe.a.pendingReactions.push(this), Xe())\n }\n },\n {\n key: \"isScheduled\",\n value: function() {\n return this._isScheduled\n }\n },\n {\n key: \"runReaction\",\n value: function() {\n if (!this.isDisposed) {\n if ((Re(), (this._isScheduled = !1), ke(this))) {\n this._isTrackPending = !0\n try {\n this.onInvalidate(), this._isTrackPending\n } catch (e) {\n this.reportExceptionInDerivation(e)\n }\n }\n Ie()\n }\n }\n },\n {\n key: \"track\",\n value: 
function(e) {\n if (!this.isDisposed) {\n Re(), (this._isRunning = !0)\n var t = Ae(this, e, void 0)\n ;(this._isRunning = !1),\n (this._isTrackPending = !1),\n this.isDisposed && Ee(this),\n Se(t) && this.reportExceptionInDerivation(t.cause),\n Ie()\n }\n }\n },\n {\n key: \"reportExceptionInDerivation\",\n value: function(e) {\n var t = this\n if (this.errorHandler) this.errorHandler(e, this)\n else {\n if (Pe.a.disableErrorBoundaries) throw e\n var n = \"[mobx] Encountered an uncaught exception that was thrown by a reaction or observer component, in: '\".concat(\n this,\n \"'\"\n )\n Pe.a.suppressReactionErrors\n ? console.warn(\n \"[mobx] (error in reaction '\".concat(\n this.name,\n \"' suppressed, fix error of causing action below)\"\n )\n )\n : console.error(n, e),\n Pe.a.globalReactionErrorHandlers.forEach(function(n) {\n return n(e, t)\n })\n }\n }\n },\n {\n key: \"dispose\",\n value: function() {\n this.isDisposed ||\n ((this.isDisposed = !0), this._isRunning || (Re(), Ee(this), Ie()))\n }\n },\n {\n key: \"getDisposer\",\n value: function() {\n var e = this.dispose.bind(this)\n return (e[x] = this), e\n }\n },\n {\n key: \"toString\",\n value: function() {\n return \"Reaction[\".concat(this.name, \"]\")\n }\n },\n {\n key: \"trace\",\n value: function() {\n var e = arguments.length > 0 && void 0 !== arguments[0] && arguments[0]\n _t(this, e)\n }\n }\n ]) && He(t.prototype, n),\n r && He(t, r),\n e\n )\n })()\n var qe = 100,\n We = function(e) {\n return e()\n }\n function Xe() {\n Pe.a.inBatch > 0 || Pe.a.isRunningReactions || We(Ye)\n }\n function Ye() {\n Pe.a.isRunningReactions = !0\n for (var e = Pe.a.pendingReactions, t = 0; e.length > 0; ) {\n ++t === qe &&\n (console.error(\n \"Reaction doesn't converge to a stable state after \".concat(\n qe,\n \" iterations.\"\n ) + \" Probably there is a cycle in the reactive function: \".concat(e[0])\n ),\n e.splice(0))\n for (var n = e.splice(0), r = 0, i = n.length; r < i; r++) n[r].runReaction()\n }\n 
Pe.a.isRunningReactions = !1\n }\n var Fe = O(\"Reaction\", Je)\n function Qe(e) {\n var t = We\n We = function(n) {\n return e(function() {\n return t(n)\n })\n }\n }\n function Ze() {\n return !1\n }\n function $e(e) {}\n function et(e) {}\n function tt(e) {}\n function nt(e) {\n return console.warn(\"[mobx.spy] Is a no-op in production builds\"), function() {}\n }\n function rt() {\n s(!1)\n }\n function it(e) {\n return function(t, n, r) {\n if (r) {\n if (r.value)\n return {\n value: ie(e, r.value),\n enumerable: !1,\n configurable: !0,\n writable: !0\n }\n var i = r.initializer\n return {\n enumerable: !1,\n configurable: !0,\n writable: !0,\n initializer: function() {\n return ie(e, i.call(this))\n }\n }\n }\n return ot(e).apply(this, arguments)\n }\n }\n function ot(e) {\n return function(t, n, r) {\n Object.defineProperty(t, n, {\n configurable: !0,\n enumerable: !1,\n get: function() {},\n set: function(t) {\n b(this, n, ut(e, t))\n }\n })\n }\n }\n function at(e, t, n, r) {\n return !0 === r\n ? (ct(e, t, n.value), null)\n : n\n ? {\n configurable: !0,\n enumerable: !1,\n get: function() {\n return ct(this, t, n.value || n.initializer.call(this)), this[t]\n },\n set: rt\n }\n : {\n enumerable: !1,\n configurable: !0,\n set: function(e) {\n ct(this, t, e)\n },\n get: function() {}\n }\n }\n var ut = function(e, t, n, r) {\n return 1 === arguments.length && \"function\" == typeof e\n ? ie(e.name || \"\", e)\n : 2 === arguments.length && \"function\" == typeof t\n ? ie(e, t)\n : 1 === arguments.length && \"string\" == typeof e\n ? it(e)\n : !0 !== r\n ? it(t).apply(null, arguments)\n : void b(e, t, ie(e.name || t, n.value, this))\n }\n function ct(e, t, n) {\n b(e, t, ie(t, n.bind(e)))\n }\n function st(e) {\n var t = arguments.length > 1 && void 0 !== arguments[1] ? 
arguments[1] : u\n var n,\n r = (t && t.name) || e.name || \"Autorun@\" + c()\n if (!t.scheduler && !t.delay)\n n = new Je(\n r,\n function() {\n this.track(a)\n },\n t.onError\n )\n else {\n var i = ft(t),\n o = !1\n n = new Je(\n r,\n function() {\n o ||\n ((o = !0),\n i(function() {\n ;(o = !1), n.isDisposed || n.track(a)\n }))\n },\n t.onError\n )\n }\n function a() {\n e(n)\n }\n return n.schedule(), n.getDisposer()\n }\n ut.bound = at\n var lt = function(e) {\n return e()\n }\n function ft(e) {\n return e.scheduler\n ? e.scheduler\n : e.delay\n ? function(t) {\n return setTimeout(t, e.delay)\n }\n : lt\n }\n function ht(e, t, n) {\n return vt(\"onBecomeObserved\", e, t, n)\n }\n function dt(e, t, n) {\n return vt(\"onBecomeUnobserved\", e, t, n)\n }\n function vt(e, t, n, r) {\n var i = \"function\" == typeof r ? pn(t, n) : pn(t),\n o = \"function\" == typeof r ? r : n,\n a = \"\".concat(e, \"Listeners\")\n return (\n i[a] ? i[a].add(o) : (i[a] = new Set([o])),\n \"function\" != typeof i[e]\n ? s(!1)\n : function() {\n var e = i[a]\n e && (e.delete(o), 0 === e.size && delete i[a])\n }\n )\n }\n function yt(e, t, n, r) {\n var i = pt((r = X(r)))\n return M(e), cn(e, r.name, i.enhancer), t && bt(e, t, n, i), e\n }\n function pt(e) {\n return e.defaultDecorator || (!1 === e.deep ? Q : Y)\n }\n function bt(e, t, n, r) {\n Re()\n try {\n var i = _(t),\n o = !0,\n a = !1,\n u = void 0\n try {\n for (var c, s = i[Symbol.iterator](); !(o = (c = s.next()).done); o = !0) {\n var l = c.value,\n f = Object.getOwnPropertyDescriptor(t, l)\n 0\n var h = (n && l in n ? n[l] : f.get ? 
re : r)(e, l, f, !0)\n h && Object.defineProperty(e, l, h)\n }\n } catch (e) {\n ;(a = !0), (u = e)\n } finally {\n try {\n o || null == s.return || s.return()\n } finally {\n if (a) throw u\n }\n }\n } finally {\n Ie()\n }\n }\n function mt(e, t) {\n return gt(pn(e, t))\n }\n function gt(e) {\n var t = { name: e.name }\n return (\n e.observing && e.observing.length > 0 && (t.dependencies = v(e.observing).map(gt)),\n t\n )\n }\n function wt(e, t) {\n return (\n null != e &&\n (void 0 !== t\n ? !!vn(e) && e[x].values.has(t)\n : vn(e) || !!e[x] || P(e) || Fe(e) || we(e))\n )\n }\n function Ot(e) {\n return 1 !== arguments.length && s(!1), wt(e)\n }\n function St(e) {\n return vn(e)\n ? e[x].getKeys()\n : $t(e)\n ? Array.from(e.keys())\n : rn(e)\n ? Array.from(e.keys())\n : qt(e)\n ? e.map(function(e, t) {\n return t\n })\n : s(!1)\n }\n function kt(e, t, n) {\n if (2 !== arguments.length || rn(e))\n if (vn(e)) {\n var r = e[x]\n r.values.get(t) ? r.write(t, n) : r.addObservableProp(t, n, r.defaultEnhancer)\n } else if ($t(e)) e.set(t, n)\n else if (rn(e)) e.add(t)\n else {\n if (!qt(e)) return s(!1)\n \"number\" != typeof t && (t = parseInt(t, 10)),\n l(t >= 0, \"Not a valid index: '\".concat(t, \"'\")),\n Re(),\n t >= e.length && (e.length = t + 1),\n (e[t] = n),\n Ie()\n }\n else {\n Re()\n var i = t\n try {\n for (var o in i) kt(e, o, i[o])\n } finally {\n Ie()\n }\n }\n }\n function _t() {\n for (var e = !1, t = arguments.length, n = new Array(t), r = 0; r < t; r++)\n n[r] = arguments[r]\n \"boolean\" == typeof n[n.length - 1] && (e = n.pop())\n var i = (function(e) {\n switch (e.length) {\n case 0:\n return Pe.a.trackingDerivation\n case 1:\n return pn(e[0])\n case 2:\n return pn(e[0], e[1])\n }\n })(n)\n if (!i) return s(!1)\n i.isTracing === me.NONE &&\n console.log(\"[mobx.trace] '\".concat(i.name, \"' tracing enabled\")),\n (i.isTracing = e ? me.BREAK : me.LOG)\n }\n function At(e) {\n var t = arguments.length > 1 && void 0 !== arguments[1] ? 
arguments[1] : void 0\n Re()\n try {\n return e.apply(t)\n } finally {\n Ie()\n }\n }\n function Et(e) {\n return (Et =\n \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator\n ? function(e) {\n return typeof e\n }\n : function(e) {\n return e &&\n \"function\" == typeof Symbol &&\n e.constructor === Symbol &&\n e !== Symbol.prototype\n ? \"symbol\"\n : typeof e\n })(e)\n }\n function jt(e) {\n return e[x]\n }\n function Tt(e) {\n return \"string\" == typeof e || \"number\" == typeof e || \"symbol\" === Et(e)\n }\n var xt = {\n has: function(e, t) {\n if (t === x || \"constructor\" === t || t === N) return !0\n var n = jt(e)\n return Tt(t) ? n.has(t) : t in e\n },\n get: function(e, t) {\n if (t === x || \"constructor\" === t || t === N) return e[t]\n var n = jt(e),\n r = n.values.get(t)\n if (r instanceof C) {\n var i = r.get()\n return void 0 === i && n.has(t), i\n }\n return Tt(t) && n.has(t), e[t]\n },\n set: function(e, t, n) {\n return !!Tt(t) && (kt(e, t, n), !0)\n },\n deleteProperty: function(e, t) {\n return !!Tt(t) && (jt(e).remove(t), !0)\n },\n ownKeys: function(e) {\n return jt(e).keysAtom.reportObserved(), Reflect.ownKeys(e)\n },\n preventExtensions: function(e) {\n return s(\"Dynamic observable objects cannot be frozen\"), !1\n }\n }\n function Ct(e) {\n var t = new Proxy(e, xt)\n return (e[x].proxy = t), t\n }\n function Pt(e) {\n return void 0 !== e.interceptors && e.interceptors.length > 0\n }\n function Vt(e, t) {\n var n = e.interceptors || (e.interceptors = [])\n return (\n n.push(t),\n h(function() {\n var e = n.indexOf(t)\n ;-1 !== e && n.splice(e, 1)\n })\n )\n }\n function Dt(e, t) {\n var n = Te()\n try {\n var r = e.interceptors\n if (r)\n for (\n var i = 0, o = r.length;\n i < o &&\n (l(\n !(t = r[i](t)) || t.type,\n \"Intercept handlers should return nothing or a change object\"\n ),\n t);\n i++\n );\n return t\n } finally {\n xe(n)\n }\n }\n function Nt(e) {\n return void 0 !== e.changeListeners && 
e.changeListeners.length > 0\n }\n function Lt(e, t) {\n var n = e.changeListeners || (e.changeListeners = [])\n return (\n n.push(t),\n h(function() {\n var e = n.indexOf(t)\n ;-1 !== e && n.splice(e, 1)\n })\n )\n }\n function Bt(e, t) {\n var n = Te(),\n r = e.changeListeners\n if (r) {\n for (var i = 0, o = (r = r.slice()).length; i < o; i++) r[i](t)\n xe(n)\n }\n }\n function Rt(e) {\n return (\n (function(e) {\n if (Array.isArray(e)) {\n for (var t = 0, n = new Array(e.length); t < e.length; t++) n[t] = e[t]\n return n\n }\n })(e) ||\n (function(e) {\n if (\n Symbol.iterator in Object(e) ||\n \"[object Arguments]\" === Object.prototype.toString.call(e)\n )\n return Array.from(e)\n })(e) ||\n (function() {\n throw new TypeError(\"Invalid attempt to spread non-iterable instance\")\n })()\n )\n }\n function It(e, t) {\n for (var n = 0; n < t.length; n++) {\n var r = t[n]\n ;(r.enumerable = r.enumerable || !1),\n (r.configurable = !0),\n \"value\" in r && (r.writable = !0),\n Object.defineProperty(e, r.key, r)\n }\n }\n function Mt(e) {\n return (Mt =\n \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator\n ? function(e) {\n return typeof e\n }\n : function(e) {\n return e &&\n \"function\" == typeof Symbol &&\n e.constructor === Symbol &&\n e !== Symbol.prototype\n ? \"symbol\"\n : typeof e\n })(e)\n }\n var Ut = {\n get: function(e, t) {\n return t === x\n ? e[x]\n : \"length\" === t\n ? e[x].getArrayLength()\n : \"number\" == typeof t\n ? zt.get.call(e, t)\n : \"string\" != typeof t || isNaN(t)\n ? zt.hasOwnProperty(t)\n ? zt[t]\n : e[t]\n : zt.get.call(e, parseInt(t))\n },\n set: function(e, t, n) {\n return (\n \"length\" === t && e[x].setArrayLength(n),\n \"number\" == typeof t && zt.set.call(e, t, n),\n \"symbol\" === Mt(t) || isNaN(t) ? 
(e[t] = n) : zt.set.call(e, parseInt(t), n),\n !0\n )\n },\n preventExtensions: function(e) {\n return s(\"Observable arrays cannot be frozen\"), !1\n }\n }\n function Gt(e, t) {\n var n =\n arguments.length > 2 && void 0 !== arguments[2]\n ? arguments[2]\n : \"ObservableArray@\" + c(),\n r = arguments.length > 3 && void 0 !== arguments[3] && arguments[3],\n i = new Kt(n, t, r)\n m(i.values, x, i)\n var o = new Proxy(i.values, Ut)\n if (((i.proxy = o), e && e.length)) {\n var a = ue(!0)\n i.spliceWithArray(0, 0, e), ce(a)\n }\n return o\n }\n var Kt = (function() {\n function e(t, n, r) {\n !(function(e, t) {\n if (!(e instanceof t))\n throw new TypeError(\"Cannot call a class as a function\")\n })(this, e),\n (this.owned = r),\n (this.values = []),\n (this.proxy = void 0),\n (this.lastKnownLength = 0),\n (this.atom = new C(t || \"ObservableArray@\" + c())),\n (this.enhancer = function(e, r) {\n return n(e, r, t + \"[..]\")\n })\n }\n var t, n, r\n return (\n (t = e),\n (n = [\n {\n key: \"dehanceValue\",\n value: function(e) {\n return void 0 !== this.dehancer ? this.dehancer(e) : e\n }\n },\n {\n key: \"dehanceValues\",\n value: function(e) {\n return void 0 !== this.dehancer && e.length > 0\n ? 
e.map(this.dehancer)\n : e\n }\n },\n {\n key: \"intercept\",\n value: function(e) {\n return Vt(this, e)\n }\n },\n {\n key: \"observe\",\n value: function(e) {\n var t =\n arguments.length > 1 && void 0 !== arguments[1] && arguments[1]\n return (\n t &&\n e({\n object: this.proxy,\n type: \"splice\",\n index: 0,\n added: this.values.slice(),\n addedCount: this.values.length,\n removed: [],\n removedCount: 0\n }),\n Lt(this, e)\n )\n }\n },\n {\n key: \"getArrayLength\",\n value: function() {\n return this.atom.reportObserved(), this.values.length\n }\n },\n {\n key: \"setArrayLength\",\n value: function(e) {\n if (\"number\" != typeof e || e < 0)\n throw new Error(\"[mobx.array] Out of range: \" + e)\n var t = this.values.length\n if (e !== t)\n if (e > t) {\n for (var n = new Array(e - t), r = 0; r < e - t; r++)\n n[r] = void 0\n this.spliceWithArray(t, 0, n)\n } else this.spliceWithArray(e, t - e)\n }\n },\n {\n key: \"updateArrayLength\",\n value: function(e, t) {\n if (e !== this.lastKnownLength)\n throw new Error(\n \"[mobx] Modification exception: the internal structure of an observable array was changed.\"\n )\n this.lastKnownLength += t\n }\n },\n {\n key: \"spliceWithArray\",\n value: function(e, t, n) {\n var r = this\n _e(this.atom)\n var i = this.values.length\n if (\n (void 0 === e\n ? (e = 0)\n : e > i\n ? (e = i)\n : e < 0 && (e = Math.max(0, i + e)),\n (t =\n 1 === arguments.length\n ? i - e\n : null == t\n ? 0\n : Math.max(0, Math.min(t, i - e))),\n void 0 === n && (n = a),\n Pt(this))\n ) {\n var o = Dt(this, {\n object: this.proxy,\n type: \"splice\",\n index: e,\n removedCount: t,\n added: n\n })\n if (!o) return a\n ;(t = o.removedCount), (n = o.added)\n }\n n =\n 0 === n.length\n ? 
n\n : n.map(function(e) {\n return r.enhancer(e, void 0)\n })\n var u = this.spliceItemsIntoValues(e, t, n)\n return (\n (0 === t && 0 === n.length) || this.notifyArraySplice(e, n, u),\n this.dehanceValues(u)\n )\n }\n },\n {\n key: \"spliceItemsIntoValues\",\n value: function(e, t, n) {\n var r\n if (n.length < 1e4)\n return (r = this.values).splice.apply(r, [e, t].concat(Rt(n)))\n var i = this.values.slice(e, e + t)\n return (\n (this.values = this.values\n .slice(0, e)\n .concat(n, this.values.slice(e + t))),\n i\n )\n }\n },\n {\n key: \"notifyArrayChildUpdate\",\n value: function(e, t, n) {\n var r = !this.owned && !1,\n i = Nt(this),\n o =\n i || r\n ? {\n object: this.proxy,\n type: \"update\",\n index: e,\n newValue: t,\n oldValue: n\n }\n : null\n this.atom.reportChanged(), i && Bt(this, o)\n }\n },\n {\n key: \"notifyArraySplice\",\n value: function(e, t, n) {\n var r = !this.owned && !1,\n i = Nt(this),\n o =\n i || r\n ? {\n object: this.proxy,\n type: \"splice\",\n index: e,\n removed: n,\n added: t,\n removedCount: n.length,\n addedCount: t.length\n }\n : null\n this.atom.reportChanged(), i && Bt(this, o)\n }\n }\n ]) && It(t.prototype, n),\n r && It(t, r),\n e\n )\n })(),\n zt = {\n intercept: function(e) {\n return this[x].intercept(e)\n },\n observe: function(e) {\n var t = arguments.length > 1 && void 0 !== arguments[1] && arguments[1],\n n = this[x]\n return n.observe(e, t)\n },\n clear: function() {\n return this.splice(0)\n },\n replace: function(e) {\n var t = this[x]\n return t.spliceWithArray(0, t.values.length, e)\n },\n toJS: function() {\n return this.slice()\n },\n toJSON: function() {\n return this.toJS()\n },\n splice: function(e, t) {\n for (\n var n = arguments.length, r = new Array(n > 2 ? 
n - 2 : 0), i = 2;\n i < n;\n i++\n )\n r[i - 2] = arguments[i]\n var o = this[x]\n switch (arguments.length) {\n case 0:\n return []\n case 1:\n return o.spliceWithArray(e)\n case 2:\n return o.spliceWithArray(e, t)\n }\n return o.spliceWithArray(e, t, r)\n },\n spliceWithArray: function(e, t, n) {\n return this[x].spliceWithArray(e, t, n)\n },\n push: function() {\n for (var e = this[x], t = arguments.length, n = new Array(t), r = 0; r < t; r++)\n n[r] = arguments[r]\n return e.spliceWithArray(e.values.length, 0, n), e.values.length\n },\n pop: function() {\n return this.splice(Math.max(this[x].values.length - 1, 0), 1)[0]\n },\n shift: function() {\n return this.splice(0, 1)[0]\n },\n unshift: function() {\n for (var e = this[x], t = arguments.length, n = new Array(t), r = 0; r < t; r++)\n n[r] = arguments[r]\n return e.spliceWithArray(0, 0, n), e.values.length\n },\n reverse: function() {\n var e = this.slice()\n return e.reverse.apply(e, arguments)\n },\n sort: function(e) {\n var t = this.slice()\n return t.sort.apply(t, arguments)\n },\n remove: function(e) {\n var t = this[x],\n n = t.dehanceValues(t.values).indexOf(e)\n return n > -1 && (this.splice(n, 1), !0)\n },\n get: function(e) {\n var t = this[x]\n if (t) {\n if (e < t.values.length)\n return t.atom.reportObserved(), t.dehanceValue(t.values[e])\n console.warn(\n \"[mobx.array] Attempt to read an array index (\"\n .concat(e, \") that is out of bounds (\")\n .concat(\n t.values.length,\n \"). Please check length first. 
Out of bound indices will not be tracked by MobX\"\n )\n )\n }\n },\n set: function(e, t) {\n var n = this[x],\n r = n.values\n if (e < r.length) {\n _e(n.atom)\n var i = r[e]\n if (Pt(n)) {\n var o = Dt(n, {\n type: \"update\",\n object: n.proxy,\n index: e,\n newValue: t\n })\n if (!o) return\n t = o.newValue\n }\n ;(t = n.enhancer(t, i)) !== i &&\n ((r[e] = t), n.notifyArrayChildUpdate(e, t, i))\n } else {\n if (e !== r.length)\n throw new Error(\n \"[mobx.array] Index out of bounds, \"\n .concat(e, \" is larger than \")\n .concat(r.length)\n )\n n.spliceWithArray(e, 0, [t])\n }\n }\n }\n ;[\n \"concat\",\n \"every\",\n \"filter\",\n \"forEach\",\n \"indexOf\",\n \"join\",\n \"lastIndexOf\",\n \"map\",\n \"reduce\",\n \"reduceRight\",\n \"slice\",\n \"some\",\n \"toString\",\n \"toLocaleString\"\n ].forEach(function(e) {\n zt[e] = function() {\n var t = this[x]\n t.atom.reportObserved()\n var n = t.dehanceValues(t.values)\n return n[e].apply(n, arguments)\n }\n })\n var Ht,\n Jt = O(\"ObservableArrayAdministration\", Kt)\n function qt(e) {\n return y(e) && Jt(e[x])\n }\n function Wt(e) {\n return (Wt =\n \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator\n ? function(e) {\n return typeof e\n }\n : function(e) {\n return e &&\n \"function\" == typeof Symbol &&\n e.constructor === Symbol &&\n e !== Symbol.prototype\n ? 
\"symbol\"\n : typeof e\n })(e)\n }\n function Xt(e, t) {\n return (\n (function(e) {\n if (Array.isArray(e)) return e\n })(e) ||\n (function(e, t) {\n var n = [],\n r = !0,\n i = !1,\n o = void 0\n try {\n for (\n var a, u = e[Symbol.iterator]();\n !(r = (a = u.next()).done) && (n.push(a.value), !t || n.length !== t);\n r = !0\n );\n } catch (e) {\n ;(i = !0), (o = e)\n } finally {\n try {\n r || null == u.return || u.return()\n } finally {\n if (i) throw o\n }\n }\n return n\n })(e, t) ||\n (function() {\n throw new TypeError(\"Invalid attempt to destructure non-iterable instance\")\n })()\n )\n }\n function Yt(e, t) {\n for (var n = 0; n < t.length; n++) {\n var r = t[n]\n ;(r.enumerable = r.enumerable || !1),\n (r.configurable = !0),\n \"value\" in r && (r.writable = !0),\n Object.defineProperty(e, r.key, r)\n }\n }\n var Ft,\n Qt = {},\n Zt = (function() {\n function e(t) {\n var n = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : K,\n r =\n arguments.length > 2 && void 0 !== arguments[2]\n ? arguments[2]\n : \"ObservableMap@\" + c()\n if (\n ((function(e, t) {\n if (!(e instanceof t))\n throw new TypeError(\"Cannot call a class as a function\")\n })(this, e),\n (this.enhancer = n),\n (this.name = r),\n (this[Ht] = Qt),\n (this._keysAtom = V(\"\".concat(this.name, \".keys()\"))),\n (this[Symbol.toStringTag] = \"Map\"),\n \"function\" != typeof Map)\n )\n throw new Error(\n \"mobx.map requires Map polyfill for the current browser. 
Check babel-polyfill or core-js/es6/map.js\"\n )\n ;(this._data = new Map()), (this._hasMap = new Map()), this.merge(t)\n }\n var t, n, r\n return (\n (t = e),\n (n = [\n {\n key: \"_has\",\n value: function(e) {\n return this._data.has(e)\n }\n },\n {\n key: \"has\",\n value: function(e) {\n var t = this\n if (!Pe.a.trackingDerivation) return this._has(e)\n var n = this._hasMap.get(e)\n if (!n) {\n var r = (n = new ve(\n this._has(e),\n H,\n \"\".concat(this.name, \".\").concat(A(e), \"?\"),\n !1\n ))\n this._hasMap.set(e, r),\n dt(r, function() {\n return t._hasMap.delete(e)\n })\n }\n return n.get()\n }\n },\n {\n key: \"set\",\n value: function(e, t) {\n var n = this._has(e)\n if (Pt(this)) {\n var r = Dt(this, {\n type: n ? \"update\" : \"add\",\n object: this,\n newValue: t,\n name: e\n })\n if (!r) return this\n t = r.newValue\n }\n return n ? this._updateValue(e, t) : this._addValue(e, t), this\n }\n },\n {\n key: \"delete\",\n value: function(e) {\n var t = this\n if (\n Pt(this) &&\n !Dt(this, { type: \"delete\", object: this, name: e })\n )\n return !1\n if (this._has(e)) {\n var n = Nt(this),\n r = n\n ? {\n type: \"delete\",\n object: this,\n oldValue: this._data.get(e).value,\n name: e\n }\n : null\n return (\n At(function() {\n t._keysAtom.reportChanged(),\n t._updateHasMapEntry(e, !1),\n t._data.get(e).setNewValue(void 0),\n t._data.delete(e)\n }),\n n && Bt(this, r),\n !0\n )\n }\n return !1\n }\n },\n {\n key: \"_updateHasMapEntry\",\n value: function(e, t) {\n var n = this._hasMap.get(e)\n n && n.setNewValue(t)\n }\n },\n {\n key: \"_updateValue\",\n value: function(e, t) {\n var n = this._data.get(e)\n if ((t = n.prepareNewValue(t)) !== Pe.a.UNCHANGED) {\n var r = Nt(this),\n i = r\n ? 
{\n type: \"update\",\n object: this,\n oldValue: n.value,\n name: e,\n newValue: t\n }\n : null\n n.setNewValue(t), r && Bt(this, i)\n }\n }\n },\n {\n key: \"_addValue\",\n value: function(e, t) {\n var n = this\n _e(this._keysAtom),\n At(function() {\n var r = new ve(\n t,\n n.enhancer,\n \"\".concat(n.name, \".\").concat(A(e)),\n !1\n )\n n._data.set(e, r),\n (t = r.value),\n n._updateHasMapEntry(e, !0),\n n._keysAtom.reportChanged()\n })\n var r = Nt(this),\n i = r\n ? { type: \"add\", object: this, name: e, newValue: t }\n : null\n r && Bt(this, i)\n }\n },\n {\n key: \"get\",\n value: function(e) {\n return this.has(e)\n ? this.dehanceValue(this._data.get(e).get())\n : this.dehanceValue(void 0)\n }\n },\n {\n key: \"dehanceValue\",\n value: function(e) {\n return void 0 !== this.dehancer ? this.dehancer(e) : e\n }\n },\n {\n key: \"keys\",\n value: function() {\n return this._keysAtom.reportObserved(), this._data.keys()\n }\n },\n {\n key: \"values\",\n value: function() {\n var e = this,\n t = 0,\n n = Array.from(this.keys())\n return An({\n next: function() {\n return t < n.length\n ? 
{ value: e.get(n[t++]), done: !1 }\n : { done: !0 }\n }\n })\n }\n },\n {\n key: \"entries\",\n value: function() {\n var e = this,\n t = 0,\n n = Array.from(this.keys())\n return An({\n next: function() {\n if (t < n.length) {\n var r = n[t++]\n return { value: [r, e.get(r)], done: !1 }\n }\n return { done: !0 }\n }\n })\n }\n },\n {\n key: ((Ht = x), Symbol.iterator),\n value: function() {\n return this.entries()\n }\n },\n {\n key: \"forEach\",\n value: function(e, t) {\n var n = !0,\n r = !1,\n i = void 0\n try {\n for (\n var o, a = this[Symbol.iterator]();\n !(n = (o = a.next()).done);\n n = !0\n ) {\n var u = Xt(o.value, 2),\n c = u[0],\n s = u[1]\n e.call(t, s, c, this)\n }\n } catch (e) {\n ;(r = !0), (i = e)\n } finally {\n try {\n n || null == a.return || a.return()\n } finally {\n if (r) throw i\n }\n }\n }\n },\n {\n key: \"merge\",\n value: function(e) {\n var t = this\n return (\n $t(e) && (e = e.toJS()),\n At(function() {\n p(e)\n ? _(e).forEach(function(n) {\n return t.set(n, e[n])\n })\n : Array.isArray(e)\n ? e.forEach(function(e) {\n var n = Xt(e, 2),\n r = n[0],\n i = n[1]\n return t.set(r, i)\n })\n : S(e)\n ? 
(e.constructor !== Map &&\n s(\n \"Cannot initialize from classes that inherit from Map: \" +\n e.constructor.name\n ),\n e.forEach(function(e, n) {\n return t.set(n, e)\n }))\n : null != e && s(\"Cannot initialize map from \" + e)\n }),\n this\n )\n }\n },\n {\n key: \"clear\",\n value: function() {\n var e = this\n At(function() {\n je(function() {\n var t = !0,\n n = !1,\n r = void 0\n try {\n for (\n var i, o = e.keys()[Symbol.iterator]();\n !(t = (i = o.next()).done);\n t = !0\n ) {\n var a = i.value\n e.delete(a)\n }\n } catch (e) {\n ;(n = !0), (r = e)\n } finally {\n try {\n t || null == o.return || o.return()\n } finally {\n if (n) throw r\n }\n }\n })\n })\n }\n },\n {\n key: \"replace\",\n value: function(e) {\n var t = this\n return (\n At(function() {\n var n = E(e)\n Array.from(t.keys())\n .filter(function(e) {\n return -1 === n.indexOf(e)\n })\n .forEach(function(e) {\n return t.delete(e)\n }),\n t.merge(e)\n }),\n this\n )\n }\n },\n {\n key: \"toPOJO\",\n value: function() {\n var e = {},\n t = !0,\n n = !1,\n r = void 0\n try {\n for (\n var i, o = this[Symbol.iterator]();\n !(t = (i = o.next()).done);\n t = !0\n ) {\n var a = Xt(i.value, 2),\n u = a[0],\n c = a[1]\n e[\"symbol\" === Wt(u) ? 
u : A(u)] = c\n }\n } catch (e) {\n ;(n = !0), (r = e)\n } finally {\n try {\n t || null == o.return || o.return()\n } finally {\n if (n) throw r\n }\n }\n return e\n }\n },\n {\n key: \"toJS\",\n value: function() {\n return new Map(this)\n }\n },\n {\n key: \"toJSON\",\n value: function() {\n return this.toPOJO()\n }\n },\n {\n key: \"toString\",\n value: function() {\n var e = this\n return (\n this.name +\n \"[{ \" +\n Array.from(this.keys())\n .map(function(t) {\n return \"\".concat(A(t), \": \").concat(\"\" + e.get(t))\n })\n .join(\", \") +\n \" }]\"\n )\n }\n },\n {\n key: \"observe\",\n value: function(e, t) {\n return Lt(this, e)\n }\n },\n {\n key: \"intercept\",\n value: function(e) {\n return Vt(this, e)\n }\n },\n {\n key: \"size\",\n get: function() {\n return this._keysAtom.reportObserved(), this._data.size\n }\n }\n ]) && Yt(t.prototype, n),\n r && Yt(t, r),\n e\n )\n })(),\n $t = O(\"ObservableMap\", Zt)\n function en(e, t) {\n for (var n = 0; n < t.length; n++) {\n var r = t[n]\n ;(r.enumerable = r.enumerable || !1),\n (r.configurable = !0),\n \"value\" in r && (r.writable = !0),\n Object.defineProperty(e, r.key, r)\n }\n }\n var tn = {},\n nn = (function() {\n function e(t) {\n var n = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : K,\n r =\n arguments.length > 2 && void 0 !== arguments[2]\n ? arguments[2]\n : \"ObservableSet@\" + c()\n if (\n ((function(e, t) {\n if (!(e instanceof t))\n throw new TypeError(\"Cannot call a class as a function\")\n })(this, e),\n (this.name = r),\n (this[Ft] = tn),\n (this._data = new Set()),\n (this._atom = V(this.name)),\n (this[Symbol.toStringTag] = \"Set\"),\n \"function\" != typeof Set)\n )\n throw new Error(\n \"mobx.set requires Set polyfill for the current browser. 
Check babel-polyfill or core-js/es6/set.js\"\n )\n ;(this.enhancer = function(e, t) {\n return n(e, t, r)\n }),\n t && this.replace(t)\n }\n var t, n, r\n return (\n (t = e),\n (n = [\n {\n key: \"dehanceValue\",\n value: function(e) {\n return void 0 !== this.dehancer ? this.dehancer(e) : e\n }\n },\n {\n key: \"clear\",\n value: function() {\n var e = this\n At(function() {\n je(function() {\n var t = !0,\n n = !1,\n r = void 0\n try {\n for (\n var i, o = e._data.values()[Symbol.iterator]();\n !(t = (i = o.next()).done);\n t = !0\n ) {\n var a = i.value\n e.delete(a)\n }\n } catch (e) {\n ;(n = !0), (r = e)\n } finally {\n try {\n t || null == o.return || o.return()\n } finally {\n if (n) throw r\n }\n }\n })\n })\n }\n },\n {\n key: \"forEach\",\n value: function(e, t) {\n var n = !0,\n r = !1,\n i = void 0\n try {\n for (\n var o, a = this[Symbol.iterator]();\n !(n = (o = a.next()).done);\n n = !0\n ) {\n var u = o.value\n e.call(t, u, u, this)\n }\n } catch (e) {\n ;(r = !0), (i = e)\n } finally {\n try {\n n || null == a.return || a.return()\n } finally {\n if (r) throw i\n }\n }\n }\n },\n {\n key: \"add\",\n value: function(e) {\n var t = this\n if (\n (_e(this._atom),\n Pt(this) &&\n !Dt(this, { type: \"add\", object: this, newValue: e }))\n )\n return this\n if (!this.has(e)) {\n At(function() {\n t._data.add(t.enhancer(e, void 0)), t._atom.reportChanged()\n })\n var n = Nt(this),\n r = n ? { type: \"add\", object: this, newValue: e } : null\n n && Bt(this, r)\n }\n return this\n }\n },\n {\n key: \"delete\",\n value: function(e) {\n var t = this\n if (\n Pt(this) &&\n !Dt(this, { type: \"delete\", object: this, oldValue: e })\n )\n return !1\n if (this.has(e)) {\n var n = Nt(this),\n r = n ? 
{ type: \"delete\", object: this, oldValue: e } : null\n return (\n At(function() {\n t._atom.reportChanged(), t._data.delete(e)\n }),\n n && Bt(this, r),\n !0\n )\n }\n return !1\n }\n },\n {\n key: \"has\",\n value: function(e) {\n return (\n this._atom.reportObserved(),\n this._data.has(this.dehanceValue(e))\n )\n }\n },\n {\n key: \"entries\",\n value: function() {\n var e = 0,\n t = Array.from(this.keys()),\n n = Array.from(this.values())\n return An({\n next: function() {\n var r = e\n return (\n (e += 1),\n r < n.length\n ? { value: [t[r], n[r]], done: !1 }\n : { done: !0 }\n )\n }\n })\n }\n },\n {\n key: \"keys\",\n value: function() {\n return this.values()\n }\n },\n {\n key: \"values\",\n value: function() {\n this._atom.reportObserved()\n var e = this,\n t = 0,\n n = Array.from(this._data.values())\n return An({\n next: function() {\n return t < n.length\n ? { value: e.dehanceValue(n[t++]), done: !1 }\n : { done: !0 }\n }\n })\n }\n },\n {\n key: \"replace\",\n value: function(e) {\n var t = this\n return (\n rn(e) && (e = e.toJS()),\n At(function() {\n Array.isArray(e)\n ? (t.clear(),\n e.forEach(function(e) {\n return t.add(e)\n }))\n : k(e)\n ? 
(t.clear(),\n e.forEach(function(e) {\n return t.add(e)\n }))\n : null != e && s(\"Cannot initialize set from \" + e)\n }),\n this\n )\n }\n },\n {\n key: \"observe\",\n value: function(e, t) {\n return Lt(this, e)\n }\n },\n {\n key: \"intercept\",\n value: function(e) {\n return Vt(this, e)\n }\n },\n {\n key: \"toJS\",\n value: function() {\n return new Set(this)\n }\n },\n {\n key: \"toString\",\n value: function() {\n return this.name + \"[ \" + Array.from(this).join(\", \") + \" ]\"\n }\n },\n {\n key: ((Ft = x), Symbol.iterator),\n value: function() {\n return this.values()\n }\n },\n {\n key: \"size\",\n get: function() {\n return this._atom.reportObserved(), this._data.size\n }\n }\n ]) && en(t.prototype, n),\n r && en(t, r),\n e\n )\n })(),\n rn = O(\"ObservableSet\", nn)\n function on(e, t) {\n return (\n (function(e) {\n if (Array.isArray(e)) return e\n })(e) ||\n (function(e, t) {\n var n = [],\n r = !0,\n i = !1,\n o = void 0\n try {\n for (\n var a, u = e[Symbol.iterator]();\n !(r = (a = u.next()).done) && (n.push(a.value), !t || n.length !== t);\n r = !0\n );\n } catch (e) {\n ;(i = !0), (o = e)\n } finally {\n try {\n r || null == u.return || u.return()\n } finally {\n if (i) throw o\n }\n }\n return n\n })(e, t) ||\n (function() {\n throw new TypeError(\"Invalid attempt to destructure non-iterable instance\")\n })()\n )\n }\n function an(e, t) {\n for (var n = 0; n < t.length; n++) {\n var r = t[n]\n ;(r.enumerable = r.enumerable || !1),\n (r.configurable = !0),\n \"value\" in r && (r.writable = !0),\n Object.defineProperty(e, r.key, r)\n }\n }\n var un = (function() {\n function e(t) {\n var n = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : new Map(),\n r = arguments.length > 2 ? arguments[2] : void 0,\n i = arguments.length > 3 ? 
arguments[3] : void 0\n !(function(e, t) {\n if (!(e instanceof t)) throw new TypeError(\"Cannot call a class as a function\")\n })(this, e),\n (this.target = t),\n (this.values = n),\n (this.name = r),\n (this.defaultEnhancer = i),\n (this.keysAtom = new C(r + \".keys\"))\n }\n var t, n, r\n return (\n (t = e),\n (n = [\n {\n key: \"read\",\n value: function(e) {\n return this.values.get(e).get()\n }\n },\n {\n key: \"write\",\n value: function(e, t) {\n var n = this.target,\n r = this.values.get(e)\n if (r instanceof ge) r.set(t)\n else {\n if (Pt(this)) {\n var i = Dt(this, {\n type: \"update\",\n object: this.proxy || n,\n name: e,\n newValue: t\n })\n if (!i) return\n t = i.newValue\n }\n if ((t = r.prepareNewValue(t)) !== Pe.a.UNCHANGED) {\n var o = Nt(this),\n a = o\n ? {\n type: \"update\",\n object: this.proxy || n,\n oldValue: r.value,\n name: e,\n newValue: t\n }\n : null\n r.setNewValue(t), o && Bt(this, a)\n }\n }\n }\n },\n {\n key: \"has\",\n value: function(e) {\n var t = this.pendingKeys || (this.pendingKeys = new Map()),\n n = t.get(e)\n if (n) return n.get()\n var r = !!this.values.get(e)\n return (\n (n = new ve(r, H, \"\".concat(this.name, \".\").concat(A(e), \"?\"), !1)),\n t.set(e, n),\n n.get()\n )\n }\n },\n {\n key: \"addObservableProp\",\n value: function(e, t) {\n var n =\n arguments.length > 2 && void 0 !== arguments[2]\n ? 
arguments[2]\n : this.defaultEnhancer,\n r = this.target\n if ((w(), Pt(this))) {\n var i = Dt(this, {\n object: this.proxy || r,\n name: e,\n type: \"add\",\n newValue: t\n })\n if (!i) return\n t = i.newValue\n }\n var o = new ve(t, n, \"\".concat(this.name, \".\").concat(A(e)), !1)\n this.values.set(e, o),\n (t = o.value),\n Object.defineProperty(r, e, fn(e)),\n this.notifyPropertyAddition(e, t)\n }\n },\n {\n key: \"addComputedProp\",\n value: function(e, t, n) {\n var r = this.target\n ;(n.name = n.name || \"\".concat(this.name, \".\").concat(A(t))),\n this.values.set(t, new ge(n)),\n (e === r || g(e, t)) &&\n Object.defineProperty(\n e,\n t,\n (function(e) {\n return (\n ln[e] ||\n (ln[e] = {\n configurable: Pe.a.computedConfigurable,\n enumerable: !1,\n get: function() {\n return hn(this).read(e)\n },\n set: function(t) {\n hn(this).write(e, t)\n }\n })\n )\n })(t)\n )\n }\n },\n {\n key: \"remove\",\n value: function(e) {\n if (this.values.has(e)) {\n var t = this.target\n if (\n Pt(this) &&\n !Dt(this, { object: this.proxy || t, name: e, type: \"remove\" })\n )\n return\n try {\n Re()\n var n = Nt(this),\n r = this.values.get(e),\n i = r && r.get()\n if (\n (r && r.set(void 0),\n this.keysAtom.reportChanged(),\n this.values.delete(e),\n this.pendingKeys)\n ) {\n var o = this.pendingKeys.get(e)\n o && o.set(!1)\n }\n delete this.target[e]\n var a = n\n ? {\n type: \"remove\",\n object: this.proxy || t,\n oldValue: i,\n name: e\n }\n : null\n n && Bt(this, a)\n } finally {\n Ie()\n }\n }\n }\n },\n {\n key: \"illegalAccess\",\n value: function(e, t) {\n console.warn(\n \"Property '\"\n .concat(t, \"' of '\")\n .concat(\n e,\n \"' was accessed through the prototype chain. 
Use 'decorate' instead to declare the prop or access it statically through it's owner\"\n )\n )\n }\n },\n {\n key: \"observe\",\n value: function(e, t) {\n return Lt(this, e)\n }\n },\n {\n key: \"intercept\",\n value: function(e) {\n return Vt(this, e)\n }\n },\n {\n key: \"notifyPropertyAddition\",\n value: function(e, t) {\n var n = Nt(this),\n r = n\n ? {\n type: \"add\",\n object: this.proxy || this.target,\n name: e,\n newValue: t\n }\n : null\n if ((n && Bt(this, r), this.pendingKeys)) {\n var i = this.pendingKeys.get(e)\n i && i.set(!0)\n }\n this.keysAtom.reportChanged()\n }\n },\n {\n key: \"getKeys\",\n value: function() {\n this.keysAtom.reportObserved()\n var e = [],\n t = !0,\n n = !1,\n r = void 0\n try {\n for (\n var i, o = this.values[Symbol.iterator]();\n !(t = (i = o.next()).done);\n t = !0\n ) {\n var a = on(i.value, 2),\n u = a[0]\n a[1] instanceof ve && e.push(u)\n }\n } catch (e) {\n ;(n = !0), (r = e)\n } finally {\n try {\n t || null == o.return || o.return()\n } finally {\n if (n) throw r\n }\n }\n return e\n }\n }\n ]) && an(t.prototype, n),\n r && an(t, r),\n e\n )\n })()\n function cn(e) {\n var t = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : \"\",\n n = arguments.length > 2 && void 0 !== arguments[2] ? 
arguments[2] : K\n if (Object.prototype.hasOwnProperty.call(e, x)) return e[x]\n p(e) || (t = (e.constructor.name || \"ObservableObject\") + \"@\" + c()),\n t || (t = \"ObservableObject@\" + c())\n var r = new un(e, new Map(), A(t), n)\n return b(e, x, r), r\n }\n var sn = Object.create(null),\n ln = Object.create(null)\n function fn(e) {\n return (\n sn[e] ||\n (sn[e] = {\n configurable: !0,\n enumerable: !0,\n get: function() {\n return this[x].read(e)\n },\n set: function(t) {\n this[x].write(e, t)\n }\n })\n )\n }\n function hn(e) {\n var t = e[x]\n return t || (M(e), e[x])\n }\n var dn = O(\"ObservableObjectAdministration\", un)\n function vn(e) {\n return !!y(e) && (M(e), dn(e[x]))\n }\n function yn(e) {\n return (yn =\n \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator\n ? function(e) {\n return typeof e\n }\n : function(e) {\n return e &&\n \"function\" == typeof Symbol &&\n e.constructor === Symbol &&\n e !== Symbol.prototype\n ? \"symbol\"\n : typeof e\n })(e)\n }\n function pn(e, t) {\n if (\"object\" === yn(e) && null !== e) {\n if (qt(e)) return void 0 !== t && s(!1), e[x].atom\n if (rn(e)) return e[x]\n if ($t(e)) {\n var n = e\n if (void 0 === t) return n._keysAtom\n var r = n._data.get(t) || n._hasMap.get(t)\n return r || s(!1), r\n }\n if ((M(e), t && !e[x] && e[t], vn(e))) {\n if (!t) return s(!1)\n var i = e[x].values.get(t)\n return i || s(!1), i\n }\n if (P(e) || we(e) || Fe(e)) return e\n } else if (\"function\" == typeof e && Fe(e[x])) return e[x]\n return s(!1)\n }\n function bn(e, t) {\n return (\n e || s(\"Expecting some object\"),\n void 0 !== t\n ? bn(pn(e, t))\n : P(e) || we(e) || Fe(e)\n ? e\n : $t(e) || rn(e)\n ? e\n : (M(e), e[x] ? e[x] : void s(!1))\n )\n }\n function mn(e, t) {\n return (void 0 !== t ? pn(e, t) : vn(e) || $t(e) || rn(e) ? bn(e) : pn(e)).name\n }\n function gn(e) {\n return (gn =\n \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator\n ? 
function(e) {\n return typeof e\n }\n : function(e) {\n return e &&\n \"function\" == typeof Symbol &&\n e.constructor === Symbol &&\n e !== Symbol.prototype\n ? \"symbol\"\n : typeof e\n })(e)\n }\n var wn = Object.prototype.toString\n function On(e, t) {\n return Sn(e, t)\n }\n function Sn(e, t, n, r) {\n if (e === t) return 0 !== e || 1 / e == 1 / t\n if (null == e || null == t) return !1\n if (e != e) return t != t\n var i = gn(e)\n return (\n (\"function\" === i || \"object\" === i || \"object\" == gn(t)) &&\n (function(e, t, n, r) {\n ;(e = kn(e)), (t = kn(t))\n var i = wn.call(e)\n if (i !== wn.call(t)) return !1\n switch (i) {\n case \"[object RegExp]\":\n case \"[object String]\":\n return \"\" + e == \"\" + t\n case \"[object Number]\":\n return +e != +e ? +t != +t : 0 == +e ? 1 / +e == 1 / t : +e == +t\n case \"[object Date]\":\n case \"[object Boolean]\":\n return +e == +t\n case \"[object Symbol]\":\n return (\n \"undefined\" != typeof Symbol &&\n Symbol.valueOf.call(e) === Symbol.valueOf.call(t)\n )\n }\n var o = \"[object Array]\" === i\n if (!o) {\n if (\"object\" != gn(e) || \"object\" != gn(t)) return !1\n var a = e.constructor,\n u = t.constructor\n if (\n a !== u &&\n !(\n \"function\" == typeof a &&\n a instanceof a &&\n \"function\" == typeof u &&\n u instanceof u\n ) &&\n \"constructor\" in e &&\n \"constructor\" in t\n )\n return !1\n }\n r = r || []\n var c = (n = n || []).length\n for (; c--; ) if (n[c] === e) return r[c] === t\n if ((n.push(e), r.push(t), o)) {\n if ((c = e.length) !== t.length) return !1\n for (; c--; ) if (!Sn(e[c], t[c], n, r)) return !1\n } else {\n var s,\n l = Object.keys(e)\n if (((c = l.length), Object.keys(t).length !== c)) return !1\n for (; c--; )\n if (((s = l[c]), !_n(t, s) || !Sn(e[s], t[s], n, r))) return !1\n }\n return n.pop(), r.pop(), !0\n })(e, t, n, r)\n )\n }\n function kn(e) {\n return qt(e)\n ? e.slice()\n : S(e) || $t(e)\n ? Array.from(e.entries())\n : k(e) || rn(e)\n ? 
Array.from(e.entries())\n : e\n }\n function _n(e, t) {\n return Object.prototype.hasOwnProperty.call(e, t)\n }\n function An(e) {\n return (e[Symbol.iterator] = En), e\n }\n function En() {\n return this\n }\n n.d(t, \"e\", function() {\n return a\n }),\n n.d(t, \"f\", function() {\n return u\n }),\n n.d(t, \"Y\", function() {\n return c\n }),\n n.d(t, \"R\", function() {\n return s\n }),\n n.d(t, \"hb\", function() {\n return l\n }),\n n.d(t, \"M\", function() {\n return f\n }),\n n.d(t, \"Ib\", function() {\n return h\n }),\n n.d(t, \"Db\", function() {\n return d\n }),\n n.d(t, \"ic\", function() {\n return v\n }),\n n.d(t, \"nb\", function() {\n return y\n }),\n n.d(t, \"ub\", function() {\n return p\n }),\n n.d(t, \"o\", function() {\n return b\n }),\n n.d(t, \"n\", function() {\n return m\n }),\n n.d(t, \"vb\", function() {\n return g\n }),\n n.d(t, \"u\", function() {\n return w\n }),\n n.d(t, \"F\", function() {\n return O\n }),\n n.d(t, \"lb\", function() {\n return S\n }),\n n.d(t, \"mb\", function() {\n return k\n }),\n n.d(t, \"ab\", function() {\n return _\n }),\n n.d(t, \"dc\", function() {\n return A\n }),\n n.d(t, \"X\", function() {\n return E\n }),\n n.d(t, \"ec\", function() {\n return j\n }),\n n.d(t, \"a\", function() {\n return x\n }),\n n.d(t, \"b\", function() {\n return C\n }),\n n.d(t, \"ib\", function() {\n return P\n }),\n n.d(t, \"C\", function() {\n return V\n }),\n n.d(t, \"z\", function() {\n return D\n }),\n n.d(t, \"Bb\", function() {\n return N\n }),\n n.d(t, \"fb\", function() {\n return M\n }),\n n.d(t, \"H\", function() {\n return U\n }),\n n.d(t, \"J\", function() {\n return K\n }),\n n.d(t, \"Wb\", function() {\n return z\n }),\n n.d(t, \"Ob\", function() {\n return H\n }),\n n.d(t, \"Nb\", function() {\n return J\n }),\n n.d(t, \"D\", function() {\n return q.a\n }),\n n.d(t, \"s\", function() {\n return X\n }),\n n.d(t, \"I\", function() {\n return Y\n }),\n n.d(t, \"Mb\", function() {\n return Q\n }),\n n.d(t, \"Fb\", 
function() {\n return te\n }),\n n.d(t, \"A\", function() {\n return re\n }),\n n.d(t, \"B\", function() {\n return ie\n }),\n n.d(t, \"O\", function() {\n return oe\n }),\n n.d(t, \"r\", function() {\n return ue\n }),\n n.d(t, \"q\", function() {\n return ce\n }),\n n.d(t, \"j\", function() {\n return ve\n }),\n n.d(t, \"tb\", function() {\n return ye\n }),\n n.d(t, \"d\", function() {\n return ge\n }),\n n.d(t, \"kb\", function() {\n return we\n }),\n n.d(t, \"g\", function() {\n return be\n }),\n n.d(t, \"l\", function() {\n return me\n }),\n n.d(t, \"c\", function() {\n return Oe\n }),\n n.d(t, \"jb\", function() {\n return Se\n }),\n n.d(t, \"Xb\", function() {\n return ke\n }),\n n.d(t, \"x\", function() {\n return _e\n }),\n n.d(t, \"gc\", function() {\n return Ae\n }),\n n.d(t, \"y\", function() {\n return Ee\n }),\n n.d(t, \"jc\", function() {\n return je\n }),\n n.d(t, \"lc\", function() {\n return Te\n }),\n n.d(t, \"kc\", function() {\n return xe\n }),\n n.d(t, \"bb\", function() {\n return Pe.a\n }),\n n.d(t, \"yb\", function() {\n return Pe.b\n }),\n n.d(t, \"eb\", function() {\n return Ve\n }),\n n.d(t, \"Z\", function() {\n return De\n }),\n n.d(t, \"p\", function() {\n return Ne\n }),\n n.d(t, \"Rb\", function() {\n return Le\n }),\n n.d(t, \"cc\", function() {\n return Re\n }),\n n.d(t, \"N\", function() {\n return Ie\n }),\n n.d(t, \"Sb\", function() {\n return Me\n }),\n n.d(t, \"Kb\", function() {\n return Ue\n }),\n n.d(t, \"Jb\", function() {\n return Ge\n }),\n n.d(t, \"Lb\", function() {\n return Ke\n }),\n n.d(t, \"k\", function() {\n return Je\n }),\n n.d(t, \"Tb\", function() {\n return Xe\n }),\n n.d(t, \"wb\", function() {\n return Fe\n }),\n n.d(t, \"Vb\", function() {\n return Qe\n }),\n n.d(t, \"xb\", function() {\n return Ze\n }),\n n.d(t, \"Zb\", function() {\n return $e\n }),\n n.d(t, \"bc\", function() {\n return et\n }),\n n.d(t, \"ac\", function() {\n return tt\n }),\n n.d(t, \"Yb\", function() {\n return nt\n }),\n n.d(t, 
\"Cb\", function() {\n return it\n }),\n n.d(t, \"w\", function() {\n return at\n }),\n n.d(t, \"m\", function() {\n return ut\n }),\n n.d(t, \"L\", function() {\n return ct\n }),\n n.d(t, \"v\", function() {\n return st\n }),\n n.d(t, \"Gb\", function() {\n return ht\n }),\n n.d(t, \"Hb\", function() {\n return dt\n }),\n n.d(t, \"P\", function() {\n return yt\n }),\n n.d(t, \"V\", function() {\n return pt\n }),\n n.d(t, \"Q\", function() {\n return bt\n }),\n n.d(t, \"W\", function() {\n return mt\n }),\n n.d(t, \"ob\", function() {\n return Ot\n }),\n n.d(t, \"zb\", function() {\n return St\n }),\n n.d(t, \"Ub\", function() {\n return kt\n }),\n n.d(t, \"fc\", function() {\n return _t\n }),\n n.d(t, \"hc\", function() {\n return At\n }),\n n.d(t, \"E\", function() {\n return Ct\n }),\n n.d(t, \"cb\", function() {\n return Pt\n }),\n n.d(t, \"Pb\", function() {\n return Vt\n }),\n n.d(t, \"gb\", function() {\n return Dt\n }),\n n.d(t, \"db\", function() {\n return Nt\n }),\n n.d(t, \"Qb\", function() {\n return Lt\n }),\n n.d(t, \"Eb\", function() {\n return Bt\n }),\n n.d(t, \"G\", function() {\n return Gt\n }),\n n.d(t, \"pb\", function() {\n return qt\n }),\n n.d(t, \"h\", function() {\n return Zt\n }),\n n.d(t, \"qb\", function() {\n return $t\n }),\n n.d(t, \"i\", function() {\n return nn\n }),\n n.d(t, \"sb\", function() {\n return rn\n }),\n n.d(t, \"t\", function() {\n return cn\n }),\n n.d(t, \"rb\", function() {\n return vn\n }),\n n.d(t, \"T\", function() {\n return pn\n }),\n n.d(t, \"S\", function() {\n return bn\n }),\n n.d(t, \"U\", function() {\n return mn\n }),\n n.d(t, \"K\", function() {\n return On\n }),\n n.d(t, \"Ab\", function() {\n return An\n })\n },\n function(e, t, n) {\n \"use strict\"\n ;(function(e, t) {\n var r = n(0)\n function i(e) {\n return (i =\n \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator\n ? 
function(e) {\n return typeof e\n }\n : function(e) {\n return e &&\n \"function\" == typeof Symbol &&\n e.constructor === Symbol &&\n e !== Symbol.prototype\n ? \"symbol\"\n : typeof e\n })(e)\n }\n if (\"undefined\" == typeof Proxy || \"undefined\" == typeof Symbol)\n throw new Error(\n \"[mobx] MobX 5+ requires Proxy and Symbol objects. If your environment doesn't support Symbol or Proxy objects, please downgrade to MobX 4. For React Native Android, consider upgrading JSCore.\"\n )\n \"object\" ===\n (\"undefined\" == typeof __MOBX_DEVTOOLS_GLOBAL_HOOK__\n ? \"undefined\"\n : i(__MOBX_DEVTOOLS_GLOBAL_HOOK__)) &&\n __MOBX_DEVTOOLS_GLOBAL_HOOK__.injectMobx({\n spy: r.Yb,\n extras: { getDebugName: r.U },\n $mobx: r.a\n })\n }.call(this, n(2), n(3)))\n },\n function(e, t) {\n var n\n n = (function() {\n return this\n })()\n try {\n n = n || new Function(\"return this\")()\n } catch (e) {\n \"object\" == typeof window && (n = window)\n }\n e.exports = n\n },\n function(e, t) {\n var n,\n r,\n i = (e.exports = {})\n function o() {\n throw new Error(\"setTimeout has not been defined\")\n }\n function a() {\n throw new Error(\"clearTimeout has not been defined\")\n }\n function u(e) {\n if (n === setTimeout) return setTimeout(e, 0)\n if ((n === o || !n) && setTimeout) return (n = setTimeout), setTimeout(e, 0)\n try {\n return n(e, 0)\n } catch (t) {\n try {\n return n.call(null, e, 0)\n } catch (t) {\n return n.call(this, e, 0)\n }\n }\n }\n !(function() {\n try {\n n = \"function\" == typeof setTimeout ? setTimeout : o\n } catch (e) {\n n = o\n }\n try {\n r = \"function\" == typeof clearTimeout ? clearTimeout : a\n } catch (e) {\n r = a\n }\n })()\n var c,\n s = [],\n l = !1,\n f = -1\n function h() {\n l && c && ((l = !1), c.length ? 
(s = c.concat(s)) : (f = -1), s.length && d())\n }\n function d() {\n if (!l) {\n var e = u(h)\n l = !0\n for (var t = s.length; t; ) {\n for (c = s, s = []; ++f < t; ) c && c[f].run()\n ;(f = -1), (t = s.length)\n }\n ;(c = null),\n (l = !1),\n (function(e) {\n if (r === clearTimeout) return clearTimeout(e)\n if ((r === a || !r) && clearTimeout)\n return (r = clearTimeout), clearTimeout(e)\n try {\n r(e)\n } catch (t) {\n try {\n return r.call(null, e)\n } catch (t) {\n return r.call(this, e)\n }\n }\n })(e)\n }\n }\n function v(e, t) {\n ;(this.fun = e), (this.array = t)\n }\n function y() {}\n ;(i.nextTick = function(e) {\n var t = new Array(arguments.length - 1)\n if (arguments.length > 1)\n for (var n = 1; n < arguments.length; n++) t[n - 1] = arguments[n]\n s.push(new v(e, t)), 1 !== s.length || l || u(d)\n }),\n (v.prototype.run = function() {\n this.fun.apply(null, this.array)\n }),\n (i.title = \"browser\"),\n (i.browser = !0),\n (i.env = {}),\n (i.argv = []),\n (i.version = \"\"),\n (i.versions = {}),\n (i.on = y),\n (i.addListener = y),\n (i.once = y),\n (i.off = y),\n (i.removeListener = y),\n (i.removeAllListeners = y),\n (i.emit = y),\n (i.prependListener = y),\n (i.prependOnceListener = y),\n (i.listeners = function(e) {\n return []\n }),\n (i.binding = function(e) {\n throw new Error(\"process.binding is not supported\")\n }),\n (i.cwd = function() {\n return \"/\"\n }),\n (i.chdir = function(e) {\n throw new Error(\"process.chdir is not supported\")\n }),\n (i.umask = function() {\n return 0\n })\n },\n function(e, t, n) {\n \"use strict\"\n ;(function(e) {\n n.d(t, \"a\", function() {\n return i\n })\n var r = n(0)\n function i(t) {\n Object(r.hb)(t)\n var n = Object(r.H)(!0, function(e, n, i, o, a) {\n var u = i ? (i.initializer ? 
i.initializer.call(e) : i.value) : void 0\n Object(r.t)(e).addObservableProp(n, u, t)\n }),\n i = (void 0 !== e && e.env, n)\n return (i.enhancer = t), i\n }\n }.call(this, n(3)))\n },\n function(e, t, n) {\n \"use strict\"\n ;(function(e) {\n n.d(t, \"a\", function() {\n return u\n }),\n n.d(t, \"b\", function() {\n return c\n })\n var r = n(0)\n var i = function e() {\n !(function(e, t) {\n if (!(e instanceof t))\n throw new TypeError(\"Cannot call a class as a function\")\n })(this, e),\n (this.version = 5),\n (this.UNCHANGED = {}),\n (this.trackingDerivation = null),\n (this.computationDepth = 0),\n (this.runId = 0),\n (this.mobxGuid = 0),\n (this.inBatch = 0),\n (this.pendingUnobservations = []),\n (this.pendingReactions = []),\n (this.isRunningReactions = !1),\n (this.allowStateChanges = !0),\n (this.enforceActions = !1),\n (this.spyListeners = []),\n (this.globalReactionErrorHandlers = []),\n (this.computedRequiresReaction = !1),\n (this.computedConfigurable = !1),\n (this.disableErrorBoundaries = !1),\n (this.suppressReactionErrors = !1)\n },\n o = !0,\n a = !1,\n u = (function() {\n var e = l()\n return (\n e.__mobxInstanceCount > 0 && !e.__mobxGlobals && (o = !1),\n e.__mobxGlobals && e.__mobxGlobals.version !== new i().version && (o = !1),\n o\n ? e.__mobxGlobals\n ? ((e.__mobxInstanceCount += 1),\n e.__mobxGlobals.UNCHANGED || (e.__mobxGlobals.UNCHANGED = {}),\n e.__mobxGlobals)\n : ((e.__mobxInstanceCount = 1), (e.__mobxGlobals = new i()))\n : (setTimeout(function() {\n a ||\n Object(r.R)(\n \"There are multiple, different versions of MobX active. 
Make sure MobX is loaded only once or use `configure({ isolateGlobalState: true })`\"\n )\n }, 1),\n new i())\n )\n })()\n function c() {\n ;(u.pendingReactions.length || u.inBatch || u.isRunningReactions) &&\n Object(r.R)(\n \"isolateGlobalState should be called before MobX is running any reactions\"\n ),\n (a = !0),\n o &&\n (0 == --l().__mobxInstanceCount && (l().__mobxGlobals = void 0),\n (u = new i()))\n }\n var s = {}\n function l() {\n return \"undefined\" != typeof window ? window : void 0 !== e ? e : s\n }\n }.call(this, n(2)))\n },\n function(e, t, n) {\n \"use strict\"\n n.r(t)\n var r = n(1),\n i = document.getElementById(\"add\"),\n o = document.getElementById(\"minus\"),\n a = document.getElementById(\"display\"),\n u = r.default.observable({ name: \"Ivan Fan\", income: 3, debit: 2 })\n r.default.autorun(function() {\n a.innerHTML = \"i'm the content \".concat(u.income)\n }),\n i.addEventListener(\"click\", function() {\n u.income++\n }),\n o.addEventListener(\"click\", function() {\n u.income--\n })\n }\n])\n"},"avg_line_length":{"kind":"number","value":39.9610983982,"string":"39.961098"},"max_line_length":{"kind":"number","value":265,"string":"265"},"alphanum_fraction":{"kind":"number","value":0.2614671019,"string":"0.261467"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":743,"cells":{"hexsha":{"kind":"string","value":"0bb20a1cdedfbffd4e7982cebb075a87cca615b2"},"size":{"kind":"number","value":3731,"string":"3,731"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"src/js/service/keyboard/ShortcutService.js"},"max_stars_repo_name":{"kind":"string","value":"stayqrious/piskel"},"max_stars_repo_head_hexsha":{"kind":"string","value":"a7d43f63b4dc263550e4be2add920f52b11913ee"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":2039,"string":"2,039"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2015-01-01T16:28:53.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-01-14T01:03:19.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/js/service/keyboard/ShortcutService.js"},"max_issues_repo_name":{"kind":"string","value":"stayqrious/piskel"},"max_issues_repo_head_hexsha":{"kind":"string","value":"a7d43f63b4dc263550e4be2add920f52b11913ee"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":319,"string":"319"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2015-01-04T17:01:48.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2017-05-13T00:37:29.000Z"},"max_forks_repo_path":{"kind":"string","value":"src/js/service/keyboard/ShortcutService.js"},"max_forks_repo_name":{"kind":"string","value":"stayqrious/piskel"},"max_forks_repo_head_hexsha":{"kind":"string","value":"a7d43f63b4dc263550e4be2add920f52b11913ee"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":209,"string":"209"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2015-01-17T01:37:33.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-09-17T00:39:33.000Z"},"content":{"kind":"string","value":"(function () {\n var ns = $.namespace('pskl.service.keyboard');\n\n ns.ShortcutService = function () {\n this.shortcuts_ = [];\n };\n\n /**\n * @public\n */\n ns.ShortcutService.prototype.init = function() {\n $(document.body).keydown($.proxy(this.onKeyDown_, this));\n };\n\n /**\n * Add a keyboard shortcut\n * @param {pskl.service.keyboard.Shortcut} shortcut\n * @param {Function} callback should return true to let the original event perform 
its default action\n */\n ns.ShortcutService.prototype.registerShortcut = function (shortcut, callback) {\n if (!(shortcut instanceof ns.Shortcut)) {\n throw 'Invalid shortcut argument, please use instances of pskl.service.keyboard.Shortcut';\n }\n\n if (typeof callback != 'function') {\n throw 'Invalid callback argument, please provide a function';\n }\n\n this.shortcuts_.push({\n shortcut : shortcut,\n callback : callback\n });\n };\n\n ns.ShortcutService.prototype.unregisterShortcut = function (shortcut) {\n var index = -1;\n this.shortcuts_.forEach(function (s, i) {\n if (s.shortcut === shortcut) {\n index = i;\n }\n });\n if (index != -1) {\n this.shortcuts_.splice(index, 1);\n }\n };\n\n /**\n * @private\n */\n ns.ShortcutService.prototype.onKeyDown_ = function(evt) {\n var eventKey = ns.KeyUtils.createKeyFromEvent(evt);\n if (this.isInInput_(evt) || !eventKey) {\n return;\n }\n\n this.shortcuts_.forEach(function (shortcutInfo) {\n shortcutInfo.shortcut.getKeys().forEach(function (shortcutKey) {\n if (!ns.KeyUtils.equals(shortcutKey, eventKey)) {\n return;\n }\n\n var bubble = shortcutInfo.callback(eventKey.key);\n if (bubble !== true) {\n evt.preventDefault();\n }\n $.publish(Events.KEYBOARD_EVENT, [evt]);\n }.bind(this));\n }.bind(this));\n };\n\n ns.ShortcutService.prototype.isInInput_ = function (evt) {\n var targetTagName = evt.target.nodeName.toUpperCase();\n return targetTagName === 'INPUT' || targetTagName === 'TEXTAREA';\n };\n\n ns.ShortcutService.prototype.getShortcutById = function (id) {\n return pskl.utils.Array.find(this.getShortcuts(), function (shortcut) {\n return shortcut.getId() === id;\n });\n };\n\n ns.ShortcutService.prototype.getShortcuts = function () {\n var shortcuts = [];\n ns.Shortcuts.CATEGORIES.forEach(function (category) {\n var shortcutMap = ns.Shortcuts[category];\n Object.keys(shortcutMap).forEach(function (shortcutKey) {\n shortcuts.push(shortcutMap[shortcutKey]);\n });\n });\n return shortcuts;\n };\n\n 
ns.ShortcutService.prototype.updateShortcut = function (shortcut, keyAsString) {\n var key = keyAsString.replace(/\\s/g, '');\n\n var isForbiddenKey = ns.Shortcuts.FORBIDDEN_KEYS.indexOf(key) != -1;\n if (isForbiddenKey) {\n $.publish(Events.SHOW_NOTIFICATION, [{\n 'content': 'Key cannot be remapped (' + keyAsString + ')',\n 'hideDelay' : 5000\n }]);\n } else {\n this.removeKeyFromAllShortcuts_(key);\n shortcut.updateKeys([key]);\n $.publish(Events.SHORTCUTS_CHANGED);\n }\n };\n\n ns.ShortcutService.prototype.removeKeyFromAllShortcuts_ = function (key) {\n this.getShortcuts().forEach(function (s) {\n if (s.removeKeys([key])) {\n $.publish(Events.SHOW_NOTIFICATION, [{\n 'content': 'Shortcut key removed for ' + s.getId(),\n 'hideDelay' : 5000\n }]);\n }\n });\n };\n\n /**\n * Restore the default piskel key for all shortcuts\n */\n ns.ShortcutService.prototype.restoreDefaultShortcuts = function () {\n this.getShortcuts().forEach(function (shortcut) {\n shortcut.restoreDefault();\n });\n $.publish(Events.SHORTCUTS_CHANGED);\n };\n\n})();\n"},"avg_line_length":{"kind":"number","value":28.4809160305,"string":"28.480916"},"max_line_length":{"kind":"number","value":103,"string":"103"},"alphanum_fraction":{"kind":"number","value":0.6285178236,"string":"0.628518"},"score":{"kind":"number","value":3.125,"string":"3.125"}}},{"rowIdx":744,"cells":{"hexsha":{"kind":"string","value":"d97c4eed83940ece811d2fc4ce1151eeaee5d519"},"size":{"kind":"number","value":2730,"string":"2,730"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"src/columns/write_bytes.rs"},"max_stars_repo_name":{"kind":"string","value":"sorairolake/procs"},"max_stars_repo_head_hexsha":{"kind":"string","value":"01eae490b37037a9c059c2ded04ca0a64a5841a7"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/columns/write_bytes.rs"},"max_issues_repo_name":{"kind":"string","value":"sorairolake/procs"},"max_issues_repo_head_hexsha":{"kind":"string","value":"01eae490b37037a9c059c2ded04ca0a64a5841a7"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":24,"string":"24"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2022-01-25T20:30:33.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-31T20:40:52.000Z"},"max_forks_repo_path":{"kind":"string","value":"src/columns/write_bytes.rs"},"max_forks_repo_name":{"kind":"string","value":"doytsujin/procs"},"max_forks_repo_head_hexsha":{"kind":"string","value":"65fb32c1e879727ba2561bbb1d95617945733517"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"use crate::process::ProcessInfo;\nuse crate::util::bytify;\nuse crate::{column_default, Column};\nuse std::cmp;\nuse std::collections::HashMap;\n\npub struct WriteBytes {\n header: String,\n unit: String,\n fmt_contents: HashMap,\n raw_contents: HashMap,\n width: usize,\n}\n\nimpl WriteBytes {\n pub fn new(header: Option) -> Self {\n let header = header.unwrap_or_else(|| String::from(\"Write\"));\n let unit = String::from(\"[B/s]\");\n WriteBytes {\n fmt_contents: HashMap::new(),\n raw_contents: HashMap::new(),\n width: 0,\n header,\n unit,\n }\n }\n}\n\n#[cfg(any(target_os = \"linux\", target_os = \"android\"))]\nimpl Column for WriteBytes {\n fn add(&mut self, proc: &ProcessInfo) {\n let (fmt_content, raw_content) = 
if proc.curr_io.is_some() && proc.prev_io.is_some() {\n let interval_ms = proc.interval.as_secs() + u64::from(proc.interval.subsec_millis());\n let io = (proc.curr_io.as_ref().unwrap().write_bytes\n - proc.prev_io.as_ref().unwrap().write_bytes)\n * 1000\n / interval_ms;\n (bytify(io), io)\n } else {\n (String::from(\"\"), 0)\n };\n\n self.fmt_contents.insert(proc.pid, fmt_content);\n self.raw_contents.insert(proc.pid, raw_content);\n }\n\n column_default!(u64);\n}\n\n#[cfg_attr(tarpaulin, skip)]\n#[cfg(target_os = \"macos\")]\nimpl Column for WriteBytes {\n fn add(&mut self, proc: &ProcessInfo) {\n let (fmt_content, raw_content) = if proc.curr_res.is_some() && proc.prev_res.is_some() {\n let interval_ms = proc.interval.as_secs() + u64::from(proc.interval.subsec_millis());\n let io = (proc.curr_res.as_ref().unwrap().ri_diskio_byteswritten\n - proc.prev_res.as_ref().unwrap().ri_diskio_byteswritten)\n * 1000\n / interval_ms;\n (bytify(io), io)\n } else {\n (String::from(\"\"), 0)\n };\n\n self.fmt_contents.insert(proc.pid, fmt_content);\n self.raw_contents.insert(proc.pid, raw_content);\n }\n\n column_default!(u64);\n}\n\n#[cfg_attr(tarpaulin, skip)]\n#[cfg(target_os = \"windows\")]\nimpl Column for WriteBytes {\n fn add(&mut self, proc: &ProcessInfo) {\n let interval_ms = proc.interval.as_secs() + u64::from(proc.interval.subsec_millis());\n let io = (proc.disk_info.curr_write - proc.disk_info.prev_write) * 1000 / interval_ms;\n\n let raw_content = io;\n let fmt_content = bytify(raw_content);\n\n self.fmt_contents.insert(proc.pid, fmt_content);\n self.raw_contents.insert(proc.pid, raw_content);\n }\n\n 
column_default!(u64);\n}\n"},"avg_line_length":{"kind":"number","value":31.0227272727,"string":"31.022727"},"max_line_length":{"kind":"number","value":97,"string":"97"},"alphanum_fraction":{"kind":"number","value":0.595970696,"string":"0.595971"},"score":{"kind":"number","value":3.125,"string":"3.125"}}},{"rowIdx":745,"cells":{"hexsha":{"kind":"string","value":"7fce45fb82afbd057f10d7053606a05de0d7902f"},"size":{"kind":"number","value":5494,"string":"5,494"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"galley/pkg/runtime/publish/strategy.go"},"max_stars_repo_name":{"kind":"string","value":"pbohman/istio"},"max_stars_repo_head_hexsha":{"kind":"string","value":"6bade8133aadc2b32382256fc5f60d30f99379f5"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":2,"string":"2"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-01-15T09:23:29.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-12-04T13:35:18.000Z"},"max_issues_repo_path":{"kind":"string","value":"galley/pkg/runtime/publish/strategy.go"},"max_issues_repo_name":{"kind":"string","value":"pbohman/istio"},"max_issues_repo_head_hexsha":{"kind":"string","value":"6bade8133aadc2b32382256fc5f60d30f99379f5"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":7,"string":"7"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-04-08T00:11:35.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-09-21T01:49:26.000Z"},"max_forks_repo_path":{"kind":"string","value":"galley/pkg/runtime/publish/strategy.go"},"max_forks_repo_name":{"kind":"string","value":"pbohman/istio"},"max_forks_repo_head_hexsha":{"kind":"string","value":"6bade8133aadc2b32382256fc5f60d30f99379f5"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":5,"string":"5"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2018-01-16T00:38:11.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2019-07-10T19:04:40.000Z"},"content":{"kind":"string","value":"// Copyright 2018 Istio Authors\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\npackage publish\n\nimport (\n\t\"context\"\n\t\"sync\"\n\t\"time\"\n\n\t\"istio.io/istio/galley/pkg/runtime/log\"\n\t\"istio.io/istio/galley/pkg/runtime/monitoring\"\n\t\"istio.io/istio/galley/pkg/util\"\n)\n\nconst (\n\t// Maximum wait time before deciding to publish the events.\n\tdefaultMaxWaitDuration = time.Second\n\n\t// Minimum time distance between two events for deciding on the quiesce point. 
If the time delay\n\t// between two events is larger than this, then we can deduce that we hit a quiesce point.\n\tdefaultQuiesceDuration = time.Second\n\n\t// The frequency for firing the timer events.\n\tdefaultTimerFrequency = 500 * time.Millisecond\n)\n\n// Strategy is a heuristic model for deciding when to publish snapshots. It tries to detect\n// quiesce points for events with a total bounded wait time.\ntype Strategy struct {\n\tmaxWaitDuration time.Duration\n\tquiesceDuration time.Duration\n\ttimerFrequency time.Duration\n\n\t// stateLock protects the internal state of the publishing strategy.\n\tstateLock sync.Mutex\n\n\t// Publish channel is used to trigger the publication of snapshots.\n\tPublish chan struct{}\n\n\t// the time of first event that is received.\n\tfirstEvent time.Time\n\n\t// the time of the latest event that is received.\n\tlatestEvent time.Time\n\n\t// timer that is used for periodically checking for the quiesce point.\n\ttimer *time.Timer\n\n\t// nowFn is a testing hook for overriding time.Now()\n\tnowFn func() time.Time\n\n\t// startTimerFn is a testing hook for overriding the starting of the timer.\n\tstartTimerFn func()\n\n\t// worker manages the lifecycle of the timer worker thread.\n\tworker *util.Worker\n\n\t// resetChan is used to issue a reset to the timer.\n\tresetChan chan struct{}\n\n\t// pendingChanges indicates that there are unpublished changes.\n\tpendingChanges bool\n}\n\n// NewStrategyWithDefaults creates a new strategy with default values.\nfunc NewStrategyWithDefaults() *Strategy {\n\treturn NewStrategy(defaultMaxWaitDuration, defaultQuiesceDuration, defaultTimerFrequency)\n}\n\n// NewStrategy creates a new strategy with the given values.\nfunc NewStrategy(\n\tmaxWaitDuration time.Duration,\n\tquiesceDuration time.Duration,\n\ttimerFrequency time.Duration) *Strategy {\n\n\ts := &Strategy{\n\t\tmaxWaitDuration: maxWaitDuration,\n\t\tquiesceDuration: quiesceDuration,\n\t\ttimerFrequency: timerFrequency,\n\t\tPublish: 
make(chan struct{}, 1),\n\t\tnowFn: time.Now,\n\t\tworker: util.NewWorker(\"runtime publishing strategy\", log.Scope),\n\t\tresetChan: make(chan struct{}, 1),\n\t}\n\ts.startTimerFn = s.startTimer\n\treturn s\n}\n\nfunc (s *Strategy) OnChange() {\n\ts.stateLock.Lock()\n\n\tmonitoring.RecordStrategyOnChange()\n\n\t// Capture the latest event time.\n\ts.latestEvent = s.nowFn()\n\n\tif !s.pendingChanges {\n\t\t// This is the first event after a quiesce, start a timer to periodically check event\n\t\t// frequency and fire the publish event.\n\t\ts.pendingChanges = true\n\t\ts.firstEvent = s.latestEvent\n\n\t\t// Start or reset the timer.\n\t\tif s.timer != nil {\n\t\t\t// Timer has already been started, just reset it now.\n\t\t\t// NOTE: Unlocking the state lock first, to avoid a potential race with\n\t\t\t// the timer thread waiting to enter onTimer.\n\t\t\ts.stateLock.Unlock()\n\t\t\ts.resetChan <- struct{}{}\n\t\t\treturn\n\t\t}\n\t\ts.startTimerFn()\n\t}\n\n\ts.stateLock.Unlock()\n}\n\n// startTimer performs a start or reset on the timer. Called with lock on stateLock.\nfunc (s *Strategy) startTimer() {\n\ts.timer = time.NewTimer(s.timerFrequency)\n\n\teventLoop := func(ctx context.Context) {\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-s.timer.C:\n\t\t\t\tif !s.onTimer() {\n\t\t\t\t\t// We did not publish. 
Reset the timer and try again later.\n\t\t\t\t\ts.timer.Reset(s.timerFrequency)\n\t\t\t\t}\n\t\t\tcase <-s.resetChan:\n\t\t\t\ts.timer.Reset(s.timerFrequency)\n\t\t\tcase <-ctx.Done():\n\t\t\t\t// User requested to stop the timer.\n\t\t\t\ts.timer.Stop()\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\n\t// Start a go routine to listen to the timer.\n\t_ = s.worker.Start(nil, eventLoop)\n}\n\nfunc (s *Strategy) onTimer() bool {\n\ts.stateLock.Lock()\n\tdefer s.stateLock.Unlock()\n\n\tnow := s.nowFn()\n\n\t// If there has been a long time since the first event, or if there was a quiesce since last event,\n\t// then fire publish to create new snapshots.\n\t// Otherwise, reset the timer and get a call again.\n\n\tmaxTimeReached := now.After(s.firstEvent.Add(s.maxWaitDuration))\n\tquiesceTimeReached := now.After(s.latestEvent.Add(s.quiesceDuration))\n\n\tpublished := false\n\tif maxTimeReached || quiesceTimeReached {\n\t\t// Try to send to the channel\n\t\tselect {\n\t\tcase s.Publish <- struct{}{}:\n\t\t\ts.pendingChanges = false\n\t\t\tpublished = true\n\t\tdefault:\n\t\t\t// If the calling code is not draining the publish channel, then we can potentially cause\n\t\t\t// a deadlock here. 
Avoid the deadlock by going through the timer loop again.\n\t\t\tlog.Scope.Warnf(\"Unable to publish to the channel, resetting the timer again to avoid deadlock\")\n\t\t}\n\t}\n\n\tmonitoring.RecordOnTimer(maxTimeReached, quiesceTimeReached, !published)\n\treturn published\n}\n\nfunc (s *Strategy) Close() {\n\ts.worker.Stop()\n}\n"},"avg_line_length":{"kind":"number","value":28.9157894737,"string":"28.915789"},"max_line_length":{"kind":"number","value":100,"string":"100"},"alphanum_fraction":{"kind":"number","value":0.7293410994,"string":"0.729341"},"score":{"kind":"number","value":3.15625,"string":"3.15625"}}},{"rowIdx":746,"cells":{"hexsha":{"kind":"string","value":"f7565039202ff3dc92fbf144641c15f512eaeda9"},"size":{"kind":"number","value":1906,"string":"1,906"},"ext":{"kind":"string","value":"h"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"inc/ftsq/fast_queue.h"},"max_stars_repo_name":{"kind":"string","value":"after5cst/fast-thread-safe-queue"},"max_stars_repo_head_hexsha":{"kind":"string","value":"1abc40d620afe7b476803577b86a86d70f61bc32"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-01-19T19:17:17.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2016-01-19T19:17:17.000Z"},"max_issues_repo_path":{"kind":"string","value":"inc/ftsq/fast_queue.h"},"max_issues_repo_name":{"kind":"string","value":"after5cst/fast-thread-safe-queue"},"max_issues_repo_head_hexsha":{"kind":"string","value":"1abc40d620afe7b476803577b86a86d70f61bc32"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"inc/ftsq/fast_queue.h"},"max_forks_repo_name":{"kind":"string","value":"after5cst/fast-thread-safe-queue"},"max_forks_repo_head_hexsha":{"kind":"string","value":"1abc40d620afe7b476803577b86a86d70f61bc32"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"#ifndef FTSQ_MUTEX_H\n#define FTSQ_MUTEX_H\n\n#include \"ftsq/mutex.h\"\n#include \n#include \n\nnamespace ftsq\n{\n template \n class queue_pop_one\n {\n public:\n typedef std::deque queue_type;\n typedef typename queue_type::size_type size_type;\n\n size_type push(T item)\n {\n std::lock_guard guard(m_mutex);\n m_queue.push_back(std::move(item));\n return m_queue.size();\n }\n\n bool pop(T& item)\n {\n std::lock_guard guard(m_mutex);\n if(m_queue.empty())\n {\n return false;\n }\n item = std::move(m_queue.front());\n m_queue.pop_front();\n return true;\n }\n\n queue_pop_one() {}\n // disable object copy\n queue_pop_one(const queue_pop_one&) = delete;\n void operator=(const queue_pop_one&) = delete;\n private:\n mutex_type m_mutex;\n queue_type m_queue;\n }; //class queue_pop_one\n\n template \n class queue_pop_all\n {\n public:\n typedef std::vector queue_type;\n typedef typename queue_type::size_type size_type;\n\n size_type push(T item)\n {\n std::lock_guard guard(m_mutex);\n m_queue.push_back(std::move(item));\n return m_queue.size();\n }\n\n queue_type pop_all()\n {\n std::lock_guard guard(m_mutex);\n return std::move(m_queue);\n }\n\n queue_pop_all() {}\n // disable object copy\n queue_pop_all(const queue_pop_all&) = delete;\n void operator=(const queue_pop_all&) = delete;\n 
private:\n mutex_type m_mutex;\n queue_type m_queue;\n }; //class queue_pop_all\n}\n#endif // FTSQ_MUTEX_H\n"},"avg_line_length":{"kind":"number","value":25.4133333333,"string":"25.413333"},"max_line_length":{"kind":"number","value":58,"string":"58"},"alphanum_fraction":{"kind":"number","value":0.573976915,"string":"0.573977"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":747,"cells":{"hexsha":{"kind":"string","value":"3f6238e86b0863465a6220c6a98c119e4b3ef3d0"},"size":{"kind":"number","value":4218,"string":"4,218"},"ext":{"kind":"string","value":"swift"},"lang":{"kind":"string","value":"Swift"},"max_stars_repo_path":{"kind":"string","value":"Sources/Constraints/Standard/OptionalConstraint.swift"},"max_stars_repo_name":{"kind":"string","value":"alexcristea/brick-validator"},"max_stars_repo_head_hexsha":{"kind":"string","value":"9696dfe2d2095c8e4be80eafb582b54348dcb36e"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":32,"string":"32"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-02-26T19:09:43.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-12-07T11:05:53.000Z"},"max_issues_repo_path":{"kind":"string","value":"Sources/Constraints/Standard/OptionalConstraint.swift"},"max_issues_repo_name":{"kind":"string","value":"alexcristea/validation-kit"},"max_issues_repo_head_hexsha":{"kind":"string","value":"9696dfe2d2095c8e4be80eafb582b54348dcb36e"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":22,"string":"22"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2017-02-26T23:22:29.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-03-06T13:12:08.000Z"},"max_forks_repo_path":{"kind":"string","value":"Sources/Constraints/Standard/OptionalConstraint.swift"},"max_forks_repo_name":{"kind":"string","value":"alexcristea/validation-kit"},"max_forks_repo_head_hexsha":{"kind":"string","value":"9696dfe2d2095c8e4be80eafb582b54348dcb36e"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":15,"string":"15"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2017-02-26T19:09:44.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-02-10T08:37:00.000Z"},"content":{"kind":"string","value":"import Foundation\n\n/**\n A `Constraint` that accepts an optional input and passes the unwrapped value to an underlying `Constraint`.\n \n ```swift\n enum Failure: Error {\n case required\n case invalidEmail\n }\n ```\n \n ```swift\n let email: String? = \"hello@nsagora.com\"\n let constraint = OptionalConstraint(required: .required) {\n PredicateConstraint(.email, error: .invalidEmail)\n }\n\n let result = constraint.evaluate(with: email)\n ```\n */\npublic struct OptionalConstraint: Constraint {\n \n public typealias InputType = T?\n public typealias ErrorType = E\n \n private let constraint: AnyConstraint\n private let requiredError: E?\n \n /**\n Returns a new `OptionalConstraint` instance.\n \n ```swift\n enum Failure: Error {\n case required\n case invalidEmail\n }\n ```\n \n ```swift\n let email: String? 
= \"hello@nsagora.com\"\n let emailConstraint = PredicateConstraint(.email, error: .invalidEmail)\n let constraint = OptionalConstraint(required: .required, constraint: emailConstraint)\n\n let result = constraint.evaluate(with: email)\n \n - parameter required: An optional `Error` that marks the optional as mandatory.\n - parameter constraint: A `Constraint` to describes the evaluation rule for the unwrapped value of the input.\n */\n public init(required requiredError: E? = nil, constraint: C) where C.InputType == T, C.ErrorType == E {\n self.constraint = constraint.erase()\n self.requiredError = requiredError\n }\n \n \n /**\n Returns a new `OptionalConstraint` instance.\n \n ```swift\n enum Failure: Error {\n case required\n case invalidEmail\n }\n ```\n \n ```swift\n let email: String? = \"hello@nsagora.com\"\n let constraint = OptionalConstraint(required: .required) {\n PredicateConstraint(.email, error: .invalidEmail)\n }\n\n let result = constraint.evaluate(with: email)\n \n - parameter required: An optional `Error` that marks the optional as mandatory.\n - parameter constraint: A closure that dynamically builds a `Constraint` to describes the evaluation rule for the unwrapped value of the input.\n */\n public init(required requiredError: E? = nil, constraintBuilder: () -> C) where C.InputType == T, C.ErrorType == E {\n self.init(required: requiredError, constraint: constraintBuilder())\n }\n \n /**\n Evaluates the unwrapped input on the underlying constraint.\n \n - parameter input: The optional input to be validated.\n - returns: `.failure` with a `Summary` containing the required error when the optional is marked as required and the input is `nil`, `success` when the optional is not marked as required and the input is `nil`, the evaluation result from the underlying constraint otherwise.\n */\n public func evaluate(with input: T?) 
-> Result> {\n \n if let input = input {\n return constraint.evaluate(with: input)\n }\n \n if let requiredError = requiredError {\n return .failure(Summary(errors: [requiredError]))\n }\n \n return .success(())\n }\n}\n\n// MARK: - Constraint modifiers\n\nextension Constraint {\n \n /**\n Returns a new `OptionalConstraint` instance.\n \n ```swift\n enum Failure: Error {\n case required\n case invalidEmail\n }\n ```\n \n ```swift\n let email: String? = \"hello@nsagora.com\"\n let emailConstraint = PredicateConstraint(.email, error: .invalidEmail)\n let constraint = emailConstraint.optional(required: .required)\n\n let result = constraint.evaluate(with: email)\n \n - parameter required: An optional `Error` that marks the optional as mandatory.\n - parameter constraint: A `Constraint` to describes the evaluation rule for the unwrapped value of the input.\n */\n public func `optional`(required requiredError: E? = nil) -> OptionalConstraint where Self.ErrorType == E, Self.InputType == T{\n OptionalConstraint(required: requiredError, constraint: self)\n }\n}\n"},"avg_line_length":{"kind":"number","value":32.6976744186,"string":"32.697674"},"max_line_length":{"kind":"number","value":279,"string":"279"},"alphanum_fraction":{"kind":"number","value":0.652916074,"string":"0.652916"},"score":{"kind":"number","value":3.25,"string":"3.25"}}},{"rowIdx":748,"cells":{"hexsha":{"kind":"string","value":"5a51656f89a2ad7d0edf1573643150e47ab98e91"},"size":{"kind":"number","value":13067,"string":"13,067"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"rosomaxa/src/example.rs"},"max_stars_repo_name":{"kind":"string","value":"PeakBI/ds-reinterpretcat-vrp"},"max_stars_repo_head_hexsha":{"kind":"string","value":"62428fdd5438812ddcf37583a14b9a26bdb43225"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"rosomaxa/src/example.rs"},"max_issues_repo_name":{"kind":"string","value":"PeakBI/ds-reinterpretcat-vrp"},"max_issues_repo_head_hexsha":{"kind":"string","value":"62428fdd5438812ddcf37583a14b9a26bdb43225"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"rosomaxa/src/example.rs"},"max_forks_repo_name":{"kind":"string","value":"PeakBI/ds-reinterpretcat-vrp"},"max_forks_repo_head_hexsha":{"kind":"string","value":"62428fdd5438812ddcf37583a14b9a26bdb43225"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"//! 
This module contains example models and logic to demonstrate practical usage of rosomaxa crate.\n\n#[cfg(test)]\n#[path = \"../tests/unit/example_test.rs\"]\nmod example_test;\n\nuse crate::evolution::*;\nuse crate::get_default_population;\nuse crate::hyper::*;\nuse crate::population::{DominanceOrder, DominanceOrdered, RosomaxaWeighted, Shuffled};\nuse crate::prelude::*;\nuse crate::utils::Noise;\nuse hashbrown::{HashMap, HashSet};\nuse std::any::Any;\nuse std::ops::Deref;\nuse std::sync::Arc;\n\n/// An example objective function.\npub type VectorFunction = Arc f64 + Send + Sync>;\n\n/// An example heuristic context.\npub struct VectorContext {\n objective: Arc,\n population: Box>,\n statistics: HeuristicStatistics,\n environment: Arc,\n state: HashMap>,\n}\n\n/// An example heuristic objective.\npub struct VectorObjective {\n func: VectorFunction,\n}\n\n/// An example heuristic solution.\npub struct VectorSolution {\n /// Solution payload.\n pub data: Vec,\n objective: Arc,\n order: DominanceOrder,\n}\n\nimpl VectorContext {\n /// Creates a new instance of `VectorContext`.\n pub fn new(\n objective: Arc,\n population: Box>,\n environment: Arc,\n ) -> Self {\n Self { objective, population, statistics: Default::default(), environment, state: Default::default() }\n }\n}\n\nimpl HeuristicContext for VectorContext {\n type Objective = VectorObjective;\n type Solution = VectorSolution;\n\n fn objective(&self) -> &Self::Objective {\n &self.objective\n }\n\n fn population(&self) -> &dyn HeuristicPopulation {\n self.population.as_ref()\n }\n\n fn population_mut(\n &mut self,\n ) -> &mut dyn HeuristicPopulation {\n self.population.as_mut()\n }\n\n fn statistics(&self) -> &HeuristicStatistics {\n &self.statistics\n }\n\n fn statistics_mut(&mut self) -> &mut HeuristicStatistics {\n &mut self.statistics\n }\n\n fn environment(&self) -> &Environment {\n self.environment.as_ref()\n }\n}\n\nimpl Stateful for VectorContext {\n type Key = i32;\n\n fn set_state(&mut self, key: 
Self::Key, state: T) {\n self.state.insert(key, Box::new(state));\n }\n\n fn get_state(&self, key: &Self::Key) -> Option<&T> {\n self.state.get(key).and_then(|v| v.downcast_ref::())\n }\n\n fn state_mut T>(&mut self, key: Self::Key, inserter: F) -> &mut T {\n self.state.entry(key).or_insert_with(|| Box::new(inserter())).downcast_mut::().unwrap()\n }\n}\n\nimpl VectorObjective {\n /// Creates a new instance `VectorObjective`.\n pub fn new(func: VectorFunction) -> Self {\n Self { func }\n }\n}\n\nimpl HeuristicObjective for VectorObjective {}\n\nimpl Objective for VectorObjective {\n type Solution = VectorSolution;\n\n fn fitness(&self, solution: &Self::Solution) -> f64 {\n self.func.deref()(solution.data.as_slice())\n }\n}\n\nimpl MultiObjective for VectorObjective {\n fn objectives<'a>(\n &'a self,\n ) -> Box + Send + Sync)> + 'a> {\n let objective: &(dyn Objective + Send + Sync) = self;\n\n Box::new(std::iter::once(objective))\n }\n}\n\nimpl Shuffled for VectorObjective {\n fn get_shuffled(&self, _: &(dyn Random + Send + Sync)) -> Self {\n Self::new(self.func.clone())\n }\n}\n\nimpl HeuristicSolution for VectorSolution {\n fn get_fitness<'a>(&'a self) -> Box + 'a> {\n Box::new(self.objective.objectives().map(move |objective| objective.fitness(self)))\n }\n\n fn deep_copy(&self) -> Self {\n Self::new(self.data.clone(), self.objective.clone())\n }\n}\n\nimpl DominanceOrdered for VectorSolution {\n fn get_order(&self) -> &DominanceOrder {\n &self.order\n }\n\n fn set_order(&mut self, order: DominanceOrder) {\n self.order = order\n }\n}\n\nimpl RosomaxaWeighted for VectorSolution {\n fn weights(&self) -> Vec {\n // TODO:\n // for the sake of experimentation, consider to provide some configuration here to allow\n // usage of some noise, smoothing or optional weights, but not only direct mapping of data.\n self.data.clone()\n }\n}\n\nimpl VectorSolution {\n /// Creates a new instance of `VectorSolution`.\n pub fn new(data: Vec, objective: Arc) -> Self {\n Self { data, 
objective, order: DominanceOrder::default() }\n }\n}\n\n/// An example initial operator\npub struct VectorInitialOperator {\n data: Vec,\n}\n\nimpl VectorInitialOperator {\n /// Creates a new instance of `VectorInitialOperator`.\n pub fn new(data: Vec) -> Self {\n Self { data }\n }\n}\n\nimpl InitialOperator for VectorInitialOperator {\n type Context = VectorContext;\n type Objective = VectorObjective;\n type Solution = VectorSolution;\n\n fn create(&self, context: &Self::Context) -> Self::Solution {\n Self::Solution::new(self.data.clone(), context.objective.clone())\n }\n}\n\n/// Specifies mode of heuristic operator.\npub enum VectorHeuristicOperatorMode {\n /// Adds some noice to all dimensions.\n JustNoise(Noise),\n /// Adds some noice to specific dimensions.\n DimensionNoise(Noise, HashSet),\n}\n\n/// A naive implementation of heuristic search operator in vector space.\nstruct VectorHeuristicOperator {\n mode: VectorHeuristicOperatorMode,\n}\n\nimpl HeuristicOperator for VectorHeuristicOperator {\n type Context = VectorContext;\n type Objective = VectorObjective;\n type Solution = VectorSolution;\n\n fn search(&self, context: &Self::Context, solution: &Self::Solution) -> Self::Solution {\n Self::Solution::new(\n match &self.mode {\n VectorHeuristicOperatorMode::JustNoise(noise) => {\n solution.data.iter().map(|d| *d + noise.add(*d)).collect()\n }\n VectorHeuristicOperatorMode::DimensionNoise(noise, dimens) => solution\n .data\n .iter()\n .enumerate()\n .map(|(idx, d)| if dimens.contains(&idx) { *d + noise.add(*d) } else { *d })\n .collect(),\n },\n context.objective.clone(),\n )\n }\n}\n\ntype TargetInitialOperator = Box<\n dyn InitialOperator + Send + Sync,\n>;\n\ntype TargetHeuristicOperator = Arc<\n dyn HeuristicOperator\n + Send\n + Sync,\n>;\n\n/// Specifies solver solutions.\npub type SolverSolutions = Vec<(Vec, f64)>;\n\n/// An example of the optimization solver to solve trivial problems.\npub struct Solver {\n initial_solutions: Vec>,\n initial_params: 
(usize, f64),\n objective_func: Option,\n max_time: Option,\n max_generations: Option,\n min_cv: Option<(String, usize, f64, bool)>,\n target_proximity: Option<(Vec, f64)>,\n operators: Vec<(TargetHeuristicOperator, String, f64)>,\n}\n\nimpl Default for Solver {\n fn default() -> Self {\n Self {\n initial_solutions: vec![],\n initial_params: (4, 0.05),\n objective_func: None,\n max_time: Some(10),\n max_generations: Some(100),\n min_cv: None,\n target_proximity: None,\n operators: vec![],\n }\n }\n}\n\nimpl Solver {\n /// Sets initial parameters.\n pub fn with_init_params(mut self, max_size: usize, quota: f64) -> Self {\n self.initial_params = (max_size, quota);\n self\n }\n\n /// Sets initial solutions.\n pub fn with_init_solutions(mut self, init_solutions: Vec>) -> Self {\n self.initial_solutions = init_solutions;\n self\n }\n\n // TODO add termination to stop when solution close to some target\n\n /// Sets termination parameters.\n pub fn with_termination(\n mut self,\n max_time: Option,\n max_generations: Option,\n min_cv: Option<(String, usize, f64, bool)>,\n target_proximity: Option<(Vec, f64)>,\n ) -> Self {\n self.max_time = max_time;\n self.max_generations = max_generations;\n self.min_cv = min_cv;\n self.target_proximity = target_proximity;\n\n self\n }\n\n /// Sets search operator.\n pub fn with_operator(mut self, mode: VectorHeuristicOperatorMode, name: &str, probability: f64) -> Self {\n self.operators.push((Arc::new(VectorHeuristicOperator { mode }), name.to_string(), probability));\n self\n }\n\n /// Sets objective function.\n pub fn with_objective_fun(mut self, objective_func: VectorFunction) -> Self {\n self.objective_func = Some(objective_func);\n self\n }\n\n /// Runs the solver using configuration provided through fluent interface methods.\n pub fn solve(self) -> Result<(SolverSolutions, Option), String> {\n let environment = Arc::new(Environment::new_with_time_quota(self.max_time));\n\n // build instances of implementation types from submitted 
data\n let func = self.objective_func.ok_or_else(|| \"objective function must be set\".to_string())?;\n let objective = Arc::new(VectorObjective::new(func));\n let heuristic = Box::new(MultiSelective::new(\n Box::new(DynamicSelective::new(\n self.operators.iter().map(|(op, name, _)| (op.clone(), name.clone())).collect(),\n environment.random.clone(),\n )),\n Box::new(StaticSelective::new(\n self.operators\n .iter()\n .map(|(op, _, probability)| {\n let random = environment.random.clone();\n let probability = *probability;\n let probability_func: HeuristicProbability =\n (Box::new(move |_, _| random.is_hit(probability)), Default::default());\n (op.clone(), probability_func)\n })\n .collect(),\n )),\n ));\n let initial_operators = self\n .initial_solutions\n .into_iter()\n .map(VectorInitialOperator::new)\n .map::<(TargetInitialOperator, _), _>(|o| (Box::new(o), 1))\n .collect();\n\n // create a heuristic context\n let context = VectorContext::new(\n objective.clone(),\n get_default_population::(objective.clone(), environment.clone()),\n environment.clone(),\n );\n\n // create a telemetry which will log population\n let telemetry = Telemetry::new(TelemetryMode::OnlyLogging {\n logger: environment.logger.clone(),\n log_best: 100,\n log_population: 500,\n dump_population: false,\n });\n\n // build evolution config using fluent interface\n let config = EvolutionConfigBuilder::default()\n .with_heuristic(heuristic)\n .with_objective(objective)\n .with_context(context)\n .with_min_cv(self.min_cv, 1)\n .with_max_time(self.max_time)\n .with_max_generations(self.max_generations)\n .with_target_proximity(self.target_proximity)\n .with_initial(self.initial_params.0, self.initial_params.1, initial_operators)\n .with_telemetry(telemetry)\n .build()?;\n\n // solve the problem\n let (solutions, metrics) = EvolutionSimulator::new(config)?.run()?;\n\n let solutions = solutions\n .into_iter()\n .map(|s| {\n let fitness = s.get_fitness().next().expect(\"empty fitness\");\n (s.data, 
fitness)\n })\n .collect();\n\n Ok((solutions, metrics))\n }\n}\n\n/// Creates multidimensional Rosenbrock function, also referred to as the Valley or Banana function.\n/// The function is usually evaluated on the hypercube xi ∈ [-5, 10], for all i = 1, …, d, although\n/// it may be restricted to the hypercube xi ∈ [-2.048, 2.048], for all i = 1, …, d.\npub fn create_rosenbrock_function() -> VectorFunction {\n Arc::new(|input| {\n assert!(input.len() > 1);\n\n input.windows(2).fold(0., |acc, pair| {\n let (x1, x2) = match pair {\n [x1, x2] => (*x1, *x2),\n _ => unreachable!(),\n };\n\n acc + 100. * (x2 - x1.powi(2)).powi(2) + (x1 - 1.).powi(2)\n })\n })\n}\n"},"avg_line_length":{"kind":"number","value":32.4243176179,"string":"32.424318"},"max_line_length":{"kind":"number","value":119,"string":"119"},"alphanum_fraction":{"kind":"number","value":0.6142955537,"string":"0.614296"},"score":{"kind":"number","value":3.234375,"string":"3.234375"}}},{"rowIdx":749,"cells":{"hexsha":{"kind":"string","value":"6533cb2d911f06e68a721247deb37def17dac93b"},"size":{"kind":"number","value":5448,"string":"5,448"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"kedro/extras/datasets/pandas/appendable_excel_dataset.py"},"max_stars_repo_name":{"kind":"string","value":"hfwittmann/kedro"},"max_stars_repo_head_hexsha":{"kind":"string","value":"b0d4fcd8f19b49a7916d78fd09daeb6209a7b6c6"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-11-25T12:33:13.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-11-25T12:33:13.000Z"},"max_issues_repo_path":{"kind":"string","value":"kedro/extras/datasets/pandas/appendable_excel_dataset.py"},"max_issues_repo_name":{"kind":"string","value":"MerelTheisenQB/kedro"},"max_issues_repo_head_hexsha":{"kind":"string","value":"1eaa2e0fa5d80f96e18ea60b9f3d6e6efc161827"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"kedro/extras/datasets/pandas/appendable_excel_dataset.py"},"max_forks_repo_name":{"kind":"string","value":"MerelTheisenQB/kedro"},"max_forks_repo_head_hexsha":{"kind":"string","value":"1eaa2e0fa5d80f96e18ea60b9f3d6e6efc161827"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"``AppendableExcelDataSet`` loads/saves data from/to a local Excel file opened in append mode.\nIt uses pandas to handle the Excel file.\n\"\"\"\nfrom copy import deepcopy\nfrom pathlib import Path, PurePosixPath\nfrom typing import Any, Dict\n\nimport pandas as pd\n\nfrom kedro.io.core import AbstractDataSet, DataSetError\n\n\nclass AppendableExcelDataSet(AbstractDataSet):\n \"\"\"``AppendableExcelDataSet`` loads/saves data from/to a local Excel file opened in\n append mode. It uses pandas to handle the Excel file.\n\n Example adding a catalog entry with\n `YAML API `_:\n\n .. 
code-block:: yaml\n\n >>> # AppendableExcelDataSet creates a new sheet for every dataset\n >>> # ExcelDataSet restricts one dataset per file as it is overwritten\n >>>\n >>> preprocessed_companies:\n >>> type: pandas.AppendableExcelDataSet\n >>> filepath: data/02_intermediate/preprocessed.xlsx # assumes file already exists\n >>> save_args:\n >>> sheet_name: preprocessed_companies\n >>> load_args:\n >>> sheet_name: preprocessed_companies\n >>>\n >>> preprocessed_shuttles:\n >>> type: pandas.AppendableExcelDataSet\n >>> filepath: data/02_intermediate/preprocessed.xlsx\n >>> save_args:\n >>> sheet_name: preprocessed_shuttles\n >>> load_args:\n >>> sheet_name: preprocessed_shuttles\n\n Example using Python API:\n ::\n\n >>> from kedro.extras.datasets.pandas import AppendableExcelDataSet\n >>> from kedro.extras.datasets.pandas import ExcelDataSet\n >>> import pandas as pd\n >>>\n >>> data_1 = pd.DataFrame({'col1': [1, 2], 'col2': [4, 5],\n >>> 'col3': [5, 6]})\n >>>\n >>> data_2 = pd.DataFrame({'col1': [7, 8], 'col2': [5, 7]})\n >>>\n >>> regular_ds = ExcelDataSet(filepath=\"/tmp/test.xlsx\")\n >>> appendable_ds = AppendableExcelDataSet(\n >>> filepath=\"/tmp/test.xlsx\",\n >>> save_args={\"sheet_name\": \"my_sheet\"},\n >>> load_args={\"sheet_name\": \"my_sheet\"}\n >>> )\n >>>\n >>> regular_ds.save(data_1)\n >>> appendable_ds.save(data_2)\n >>> reloaded = appendable_ds.load()\n >>> assert data_2.equals(reloaded)\n\n \"\"\"\n\n DEFAULT_LOAD_ARGS = {\"engine\": \"openpyxl\"}\n DEFAULT_SAVE_ARGS = {\"index\": False}\n\n def __init__(\n self,\n filepath: str,\n load_args: Dict[str, Any] = None,\n save_args: Dict[str, Any] = None,\n ) -> None:\n \"\"\"Creates a new instance of ``AppendableExcelDataSet`` pointing to an existing local\n Excel file to be opened in append mode.\n\n Args:\n filepath: Filepath in POSIX format to an existing local Excel file.\n load_args: Pandas options for loading Excel files.\n Here you can find all available arguments:\n 
https://pandas.pydata.org/pandas-docs/stable/generated/pandas.read_excel.html\n All defaults are preserved, but \"engine\", which is set to \"openpyxl\".\n save_args: Pandas options for saving Excel files.\n Here you can find all available arguments:\n https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_excel.html\n All defaults are preserved, but \"index\", which is set to False.\n If you would like to specify options for the `ExcelWriter`,\n you can include them under \"writer\" key. Here you can\n find all available arguments:\n https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.ExcelWriter.html\n Note: `mode` option of `ExcelWriter` is set to `a` and it can not be overridden.\n \"\"\"\n self._filepath = PurePosixPath(filepath)\n\n # Handle default load and save arguments\n self._load_args = deepcopy(self.DEFAULT_LOAD_ARGS)\n if load_args is not None:\n self._load_args.update(load_args)\n\n save_args = deepcopy(save_args) or {}\n self._save_args = deepcopy(self.DEFAULT_SAVE_ARGS)\n self._writer_args = save_args.pop(\"writer\", {}) # type: Dict[str, Any]\n self._writer_args.setdefault(\"engine\", \"openpyxl\")\n if save_args is not None:\n self._save_args.update(save_args)\n\n # Use only append mode\n self._writer_args[\"mode\"] = \"a\"\n\n def _describe(self) -> Dict[str, Any]:\n return dict(\n filepath=self._filepath,\n load_args=self._load_args,\n save_args=self._save_args,\n writer_args=self._writer_args,\n )\n\n def _load(self) -> pd.DataFrame:\n return pd.read_excel(str(self._filepath), **self._load_args)\n\n def _save(self, data: pd.DataFrame) -> None:\n # pylint: disable=abstract-class-instantiated\n try:\n with pd.ExcelWriter(str(self._filepath), **self._writer_args) as writer:\n data.to_excel(writer, **self._save_args)\n except FileNotFoundError as exc:\n raise DataSetError(\n f\"`{self._filepath}` Excel file not found. 
The file cannot be opened in \"\n f\"append mode.\"\n ) from exc\n\n def _exists(self) -> bool:\n return Path(self._filepath.as_posix()).is_file()\n"},"avg_line_length":{"kind":"number","value":39.7664233577,"string":"39.766423"},"max_line_length":{"kind":"number","value":101,"string":"101"},"alphanum_fraction":{"kind":"number","value":0.6125183554,"string":"0.612518"},"score":{"kind":"number","value":3.03125,"string":"3.03125"}}},{"rowIdx":750,"cells":{"hexsha":{"kind":"string","value":"544ceb2743dbbb4ce474f25f42d566e60e7e9573"},"size":{"kind":"number","value":3812,"string":"3,812"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"Godeps/_workspace/src/github.com/ThomasRooney/gexpect/gexpect_test.go"},"max_stars_repo_name":{"kind":"string","value":"maquanyi/rkt"},"max_stars_repo_head_hexsha":{"kind":"string","value":"d213d00ad591e9b2e1542c3b1615a79bab03633d"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Godeps/_workspace/src/github.com/ThomasRooney/gexpect/gexpect_test.go"},"max_issues_repo_name":{"kind":"string","value":"maquanyi/rkt"},"max_issues_repo_head_hexsha":{"kind":"string","value":"d213d00ad591e9b2e1542c3b1615a79bab03633d"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"Godeps/_workspace/src/github.com/ThomasRooney/gexpect/gexpect_test.go"},"max_forks_repo_name":{"kind":"string","value":"maquanyi/rkt"},"max_forks_repo_head_hexsha":{"kind":"string","value":"d213d00ad591e9b2e1542c3b1615a79bab03633d"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2022-03-22T09:16:50.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-22T09:16:50.000Z"},"content":{"kind":"string","value":"package gexpect\n\nimport (\n\t\"strings\"\n\t\"testing\"\n)\n\nfunc TestHelloWorld(t *testing.T) {\n\tt.Logf(\"Testing Hello World... \")\n\tchild, err := Spawn(\"echo \\\"Hello World\\\"\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\terr = child.Expect(\"Hello World\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n}\n\nfunc TestDoubleHelloWorld(t *testing.T) {\n\tt.Logf(\"Testing Double Hello World... \")\n\tchild, err := Spawn(`sh -c \"echo Hello World ; echo Hello ; echo Hi\"`)\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\terr = child.Expect(\"Hello World\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\terr = child.Expect(\"Hello\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\terr = child.Expect(\"Hi\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n}\n\nfunc TestHelloWorldFailureCase(t *testing.T) {\n\tt.Logf(\"Testing Hello World Failure case... 
\")\n\tchild, err := Spawn(\"echo \\\"Hello World\\\"\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\terr = child.Expect(\"YOU WILL NEVER FIND ME\")\n\tif err != nil {\n\t\treturn\n\t}\n\tt.Fatal(\"Expected an error for TestHelloWorldFailureCase\")\n}\n\nfunc TestBiChannel(t *testing.T) {\n\tt.Logf(\"Testing BiChannel screen... \")\n\tchild, err := Spawn(\"cat\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tsender, reciever := child.AsyncInteractChannels()\n\twait := func(str string) {\n\t\tfor {\n\t\t\tmsg, open := <-reciever\n\t\t\tif !open {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif strings.Contains(msg, str) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\tsender <- \"echo\\n\"\n\twait(\"echo\")\n\tsender <- \"echo2\"\n\twait(\"echo2\")\n\tchild.Close()\n\t// child.Wait()\n}\n\nfunc TestCommandStart(t *testing.T) {\n\tt.Logf(\"Testing Command... \")\n\n\t// Doing this allows you to modify the cmd struct prior to execution, for example to add environment variables\n\tchild, err := Command(\"echo 'Hello World'\")\n\tif err != nil {\n\t\tt.Fatal(err)\n\t}\n\tchild.Start()\n\tchild.Expect(\"Hello World\")\n}\n\nvar regexMatchTests = []struct {\n\tre string\n\tgood string\n\tbad string\n}{\n\t{`a`, `a`, `b`},\n\t{`.b`, `ab`, `ac`},\n\t{`a+hello`, `aaaahello`, `bhello`},\n\t{`(hello|world)`, `hello`, `unknown`},\n\t{`(hello|world)`, `world`, `unknown`},\n}\n\nfunc TestRegexMatch(t *testing.T) {\n\tt.Logf(\"Testing Regular Expression Matching... 
\")\n\tfor _, tt := range regexMatchTests {\n\t\trunTest := func(input string) bool {\n\t\t\tvar match bool\n\t\t\tchild, err := Spawn(\"echo \\\"\" + input + \"\\\"\")\n\t\t\tif err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t\tmatch, err = child.ExpectRegex(tt.re)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t\treturn match\n\t\t}\n\t\tif !runTest(tt.good) {\n\t\t\tt.Errorf(\"Regex Not matching [%#q] with pattern [%#q]\", tt.good, tt.re)\n\t\t}\n\t\tif runTest(tt.bad) {\n\t\t\tt.Errorf(\"Regex Matching [%#q] with pattern [%#q]\", tt.bad, tt.re)\n\t\t}\n\t}\n}\n\nvar regexFindTests = []struct {\n\tre string\n\tinput string\n\tmatches []string\n}{\n\t{`he(l)lo wo(r)ld`, `hello world`, []string{\"hello world\", \"l\", \"r\"}},\n\t{`(a)`, `a`, []string{\"a\", \"a\"}},\n\t{`so.. (hello|world)`, `so.. hello`, []string{\"so.. hello\", \"hello\"}},\n\t{`(a+)hello`, `aaaahello`, []string{\"aaaahello\", \"aaaa\"}},\n\t{`\\d+ (\\d+) (\\d+)`, `123 456 789`, []string{\"123 456 789\", \"456\", \"789\"}},\n}\n\nfunc TestRegexFind(t *testing.T) {\n\tt.Logf(\"Testing Regular Expression Search... \")\n\tfor _, tt := range regexFindTests {\n\t\trunTest := func(input string) []string {\n\t\t\tchild, err := Spawn(\"echo \\\"\" + input + \"\\\"\")\n\t\t\tif err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t\tmatches, err := child.ExpectRegexFind(tt.re)\n\t\t\tif err != nil {\n\t\t\t\tt.Fatal(err)\n\t\t\t}\n\t\t\treturn matches\n\t\t}\n\t\tmatches := runTest(tt.input)\n\t\tif len(matches) != len(tt.matches) {\n\t\t\tt.Fatalf(\"Regex not producing the expected number of patterns.. 
got[%d] ([%s]) expected[%d] ([%s])\",\n\t\t\t\tlen(matches), strings.Join(matches, \",\"),\n\t\t\t\tlen(tt.matches), strings.Join(tt.matches, \",\"))\n\t\t}\n\t\tfor i, _ := range matches {\n\t\t\tif matches[i] != tt.matches[i] {\n\t\t\t\tt.Errorf(\"Regex Expected group [%s] and got group [%s] with pattern [%#q] and input [%s]\",\n\t\t\t\t\ttt.matches[i], matches[i], tt.re, tt.input)\n\t\t\t}\n\t\t}\n\t}\n}\n"},"avg_line_length":{"kind":"number","value":22.8263473054,"string":"22.826347"},"max_line_length":{"kind":"number","value":111,"string":"111"},"alphanum_fraction":{"kind":"number","value":0.5952256034,"string":"0.595226"},"score":{"kind":"number","value":3.296875,"string":"3.296875"}}},{"rowIdx":751,"cells":{"hexsha":{"kind":"string","value":"7828db01df90d87ce8957314f72541d2ebdb2520"},"size":{"kind":"number","value":1260,"string":"1,260"},"ext":{"kind":"string","value":"swift"},"lang":{"kind":"string","value":"Swift"},"max_stars_repo_path":{"kind":"string","value":"Sources/SATSCore/Extensions/SwiftUI/ViewData/ActionSheetViewData.swift"},"max_stars_repo_name":{"kind":"string","value":"healthfitnessnordic/SATSCore-iOS"},"max_stars_repo_head_hexsha":{"kind":"string","value":"66ca055876bdc92c5df250d140e916d1575eab13"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":3,"string":"3"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-05-18T07:31:59.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-20T10:07:32.000Z"},"max_issues_repo_path":{"kind":"string","value":"Sources/SATSCore/Extensions/SwiftUI/ViewData/ActionSheetViewData.swift"},"max_issues_repo_name":{"kind":"string","value":"healthfitnessnordic/SATSCore-iOS"},"max_issues_repo_head_hexsha":{"kind":"string","value":"66ca055876bdc92c5df250d140e916d1575eab13"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":12,"string":"12"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-08-02T08:53:22.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-23T10:44:28.000Z"},"max_forks_repo_path":{"kind":"string","value":"Sources/SATSCore/Extensions/SwiftUI/ViewData/ActionSheetViewData.swift"},"max_forks_repo_name":{"kind":"string","value":"healthfitnessnordic/SATSCore-iOS"},"max_forks_repo_head_hexsha":{"kind":"string","value":"66ca055876bdc92c5df250d140e916d1575eab13"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import SwiftUI\n\npublic struct ActionSheetViewData: Identifiable, Equatable {\n public let id: String\n public let title: String\n public let message: String?\n public let actions: [ActionViewData]\n\n public init(id: String? = nil, title: String, message: String?, actions: [ActionViewData]) {\n self.id = id ?? 
UUID().uuidString\n self.title = title\n self.message = message\n self.actions = actions\n }\n\n public struct ActionViewData: Equatable {\n public let title: String\n public let perform: () -> Void\n\n public init(title: String, perform: @escaping () -> Void) {\n self.title = title\n self.perform = perform\n }\n\n public static func == (lhs: Self, rhs: Self) -> Bool {\n lhs.title == rhs.title\n }\n }\n}\n\npublic extension ActionSheet {\n init(viewData: ActionSheetViewData) {\n var buttons: [Button] = viewData.actions\n .map { action in\n Button.default(Text(action.title), action: action.perform)\n }\n buttons.append(.cancel())\n\n self.init(\n title: Text(viewData.title),\n message: viewData.message.map { Text($0) },\n buttons: buttons\n )\n }\n}\n"},"avg_line_length":{"kind":"number","value":27.3913043478,"string":"27.391304"},"max_line_length":{"kind":"number","value":96,"string":"96"},"alphanum_fraction":{"kind":"number","value":0.5777777778,"string":"0.577778"},"score":{"kind":"number","value":3.109375,"string":"3.109375"}}},{"rowIdx":752,"cells":{"hexsha":{"kind":"string","value":"410aa5cee9cac9a50c2dba28317823f60b93253f"},"size":{"kind":"number","value":794,"string":"794"},"ext":{"kind":"string","value":"c"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"LeetCode/0064_minimum-path-sum/0064_minimum-path-sum.c"},"max_stars_repo_name":{"kind":"string","value":"kenjin/DSAlgo"},"max_stars_repo_head_hexsha":{"kind":"string","value":"f4f58d57eebc5d7d1ce78f842e08cec360f403a4"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":13,"string":"13"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-08-10T08:25:07.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-22T07:47:46.000Z"},"max_issues_repo_path":{"kind":"string","value":"LeetCode/0064_minimum-path-sum/0064_minimum-path-sum.c"},"max_issues_repo_name":{"kind":"string","value":"kenjin/DSAlgo"},"max_issues_repo_head_hexsha":{"kind":"string","value":"f4f58d57eebc5d7d1ce78f842e08cec360f403a4"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"LeetCode/0064_minimum-path-sum/0064_minimum-path-sum.c"},"max_forks_repo_name":{"kind":"string","value":"kenjin/DSAlgo"},"max_forks_repo_head_hexsha":{"kind":"string","value":"f4f58d57eebc5d7d1ce78f842e08cec360f403a4"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":5,"string":"5"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-01-05T01:58:04.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-22T07:47:49.000Z"},"content":{"kind":"string","value":"\n#define MIN(a, b) (a < b ? 
a : b)\n\nint minPathSum(int **grid, int grid_sz, int *grid_col_sz)\n{\n /* sanity check */\n if (grid_sz == 0)\n return 0;\n\n int **dp = malloc(sizeof(int *) * grid_sz);\n int col_sz = grid_col_sz[0], sum = 0;\n for (int i = 0; i < grid_sz; i++) {\n dp[i] = malloc(sizeof(int) * col_sz);\n sum += grid[i][0];\n dp[i][0] = sum;\n }\n sum = 0;\n for (int i = 0; i < col_sz; i++) {\n sum += grid[0][i];\n dp[0][i] = sum;\n }\n\n for (int i = 1; i < grid_sz; i++) {\n for (int j = 1; j < col_sz; j++)\n dp[i][j] = MIN(dp[i - 1][j], dp[i][j - 1]) + grid[i][j];\n }\n\n int ret = dp[grid_sz - 1][col_sz - 1];\n for (int i = 0; i < grid_sz; i++)\n free(dp[i]);\n free(dp);\n\n return ret;\n}"},"avg_line_length":{"kind":"number","value":23.3529411765,"string":"23.352941"},"max_line_length":{"kind":"number","value":68,"string":"68"},"alphanum_fraction":{"kind":"number","value":0.4408060453,"string":"0.440806"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":753,"cells":{"hexsha":{"kind":"string","value":"3ce2becf1f32314524e24bf314d8c9206b194655"},"size":{"kind":"number","value":11946,"string":"11,946"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"resources/[race]/race_random/random_c.lua"},"max_stars_repo_name":{"kind":"string","value":"AfuSensi/MTA-Resources"},"max_stars_repo_head_hexsha":{"kind":"string","value":"e4a0f3981ddc92c8f15c3d93140196c6a8589fa8"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT","0BSD"],"string":"[\n \"MIT\",\n 
\"0BSD\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"resources/[race]/race_random/random_c.lua"},"max_issues_repo_name":{"kind":"string","value":"AfuSensi/MTA-Resources"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e4a0f3981ddc92c8f15c3d93140196c6a8589fa8"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT","0BSD"],"string":"[\n \"MIT\",\n \"0BSD\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"resources/[race]/race_random/random_c.lua"},"max_forks_repo_name":{"kind":"string","value":"AfuSensi/MTA-Resources"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e4a0f3981ddc92c8f15c3d93140196c6a8589fa8"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT","0BSD"],"string":"[\n \"MIT\",\n \"0BSD\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"--Sweeper\nlocal function playRunSound()\n\tlocal sound = playSound(\"files/run.wma\")\n\nend\naddEvent(\"playRunSound\", true)\naddEventHandler(\"playRunSound\", root, playRunSound)\n\n--Launch in air/Send to Heaven\nlocal function playHallelujahSound()\n\tlocal sound = playSound(\"files/hallelujah.wma\")\n\nend\naddEvent(\"playHallelujahSound\", true)\naddEventHandler(\"playHallelujahSound\", root, playHallelujahSound)\n\n--Darkness\nlocal function enableDarkness()\n\tfadeCamera(false, 3, 0, 0, 0)\n\n\tlocal function disableDarkness()\n\t\tfadeCamera(true, 3, 0, 0 , 0)\n\tend\n\tsetTimer(disableDarkness, 6000, 1)\nend\naddEvent(\"serverDarkness\", true)\naddEventHandler(\"serverDarkness\", root, enableDarkness)\n\n--Teleport back\nlocal 
function playTeleportSound()\n\tsound = playSound(\"files/tpsound.wma\")\n\nend\naddEvent(\"playTeleportSound\", true)\naddEventHandler(\"playTeleportSound\", root, playTeleportSound)\n\n--Turn player around\nlocal function playerTurnAround()\n\tif not (getPedOccupiedVehicle(localPlayer) and isElement(getPedOccupiedVehicle(localPlayer))) then\n\t\treturn\n\tend\n\n\tlocal vehicle = getPedOccupiedVehicle(getLocalPlayer())\n\tif not vehicle then\n\t\treturn\n\tend\n\n\tlocal RotX, RotY, RotZ = getElementRotation(vehicle)\n\tsetElementRotation(vehicle, RotX, RotY, RotZ+180)\nend\naddEvent(\"playerTurnAround\", true)\naddEventHandler(\"playerTurnAround\", root, playerTurnAround)\n\n--Massive Slap\nlocal function playerMassiveSlap()\n\tif not (getPedOccupiedVehicle(localPlayer) and isElement(getPedOccupiedVehicle(localPlayer))) then\n\t\treturn\n\tend\n\n\tlocal vehicle = getPedOccupiedVehicle(getLocalPlayer())\n\n\tsetElementVelocity(vehicle, 0, 0, 0.2)\n\tsetElementHealth(vehicle, getElementHealth(vehicle) - 100)\nend\naddEvent(\"playerMassiveSlap\", true)\naddEventHandler(\"playerMassiveSlap\", root, playerMassiveSlap)\n\nlocal function serverM()\n\tif getElementData(localPlayer,\"state\") ~= \"alive\" then\n\t\tlocal sound = playSound(\"files/votesound.wav\")\n\tend\n\nend\naddEvent( \"serverN\", true )\naddEventHandler( \"serverN\", root, serverM )\n\n\nlocal function serverFloat()\n\tlocal vehicle = getPedOccupiedVehicle(localPlayer)\n\tif not vehicle or not isElement(vehicle) then\n\t\treturn\n\tend\n\t\n setVehicleGravity(vehicle, 0, 0, 0)\n setTimer(setVehicleGravity, 15000, 1, vehicle, 0, 0, -1)\nend\naddEvent( \"serverGravityFloat\", true )\naddEventHandler( \"serverGravityFloat\", root, serverFloat )\n\nlocal function serverFloatStairwell()\n\tlocal vehicle = getPedOccupiedVehicle(localPlayer)\n\tif not vehicle or not isElement(vehicle) then\n\t\treturn\n\tend\n\n setVehicleGravity(vehicle, 0, 0, 1)\n setTimer(setVehicleGravity,8000,1,vehicle,0,0,-1 
)\nend\naddEvent( \"serverGravityFloatStairwell\", true )\naddEventHandler( \"serverGravityFloatStairwell\", root, serverFloatStairwell )\n\n\n\nlocal function serverSleepWithFish()\n\tlocal vehicle = getPedOccupiedVehicle(source)\n\tif not vehicle or not isElement(vehicle) then\n\t\treturn\n\tend\n\n for _, object in ipairs(getElementsByType('object')) do\n \tsetElementCollidableWith(vehicle, object, false)\n end\nend\naddEvent( \"serverSleepWithFish\", true )\naddEventHandler( \"serverSleepWithFish\", root, serverSleepWithFish )\n\n\nfunction noBrakes()\n\tlocal time = {\n\t[1] = 10000,\n\t[2] = 12000,\n\t[3] = 14000,\n\t[4] = 16000,\n\t[5] = 20000\n\t} -- revert time\n\tlocal theTime = time[math.random(1,5)]\n\ttoggleControl( \"handbrake\", false )\n\ttoggleControl( \"brake_reverse\", false )\n\texports.messages:outputGameMessage(\"You have no brakes for \"..tostring(theTime/1000)..\" seconds!\",2,255,0,0)\n\n\tsetTimer(function()\n\t\ttoggleControl( \"handbrake\", true )\n\t\ttoggleControl( \"brake_reverse\", true )\n\t\texports.messages:outputGameMessage(\"Your brakes returned!\",2,0,255,0)\n\t\tend, theTime, 1)\nend\naddEvent( \"serverNoBrakes\", true )\naddEventHandler( \"serverNoBrakes\", resourceRoot, noBrakes )\n\nfunction Nuke(Amount)\n\tplayRunSound()\n\texports.messages:outputGameMessage(\"There are \"..tostring(Amount)..\" missiles coming your way!\",2.5,255,0,0)\n\tsetTimer(function()\n\t\tlocal veh = getPedOccupiedVehicle( localPlayer )\n\t\tif veh then\n\t\t\tlocal px,py,pz = getElementPosition( veh )\n\t\t\tsetTimer(function() \n\t\t\t\tcreateProjectile( veh, 19, px, py, pz+100, 1,localPlayer,0,0,0,0,0,-45 )\n\t\t\t\tend, 200, 1)\n\t\t\t\n\t\tend\n\tend,1000,Amount)\n\nend\naddEvent( \"serverNuke\", true )\naddEventHandler( \"serverNuke\", resourceRoot, Nuke )\n\n\n\naddEvent(\"clientRemovePickups\", true)\nfunction c_removePickups()\n\tlocal pickups = exports[\"race\"]:e_getPickups()\n\n\tfor f, u in pairs(pickups) do\n\t\tsetElementPosition(f, 
0,0,-10000) -- Hides colshape to -10000 Z\n\t\tsetElementPosition(u[\"object\"],0,0,-10000) -- Hides pickup to -10000 Z\n\tend\n\n\t\texports.messages:outputGameMessage(\"All pickups are removed!\",2,255,255,255)\nend\n\naddEventHandler(\"clientRemovePickups\", resourceRoot, c_removePickups)\n\naddEvent(\"onRavebreakStart\",true)\naddEvent(\"stopRaveBreak\",true)\nfunction c_ravebreak(t)\n\trb_soundVolumes = {}\n\tlocal rb_sounds = getElementsByType( \"sound\" )\n\tfor _,snd in pairs(rb_sounds) do\n\t\trb_soundVolumes[snd] = getSoundVolume( snd )\n\t\tsetSoundVolume( snd, 0 )\n\tend\n\n\tlocal screenWidth, screenHeight = guiGetScreenSize()\n\traveBreakBrowser = createBrowser(screenWidth, screenHeight, true, true)\n\n\travebreak = playSound(\"files/ravebreak\"..tostring(math.random(1,4))..\".mp3\")\n\n\n\tshakeTimer = setTimer ( function()\n\t\tif getElementData(localPlayer,\"state\") == \"alive\" then -- If player's alive, explode under car, otherwise check for camera pos\n\t\t\tpx,py,pz = getElementPosition(getLocalPlayer())\t\n\t\t\tcreateExplosion(px, py, pz-30, 0, false, 2.5, false)\n\t\telse\n\t\t\tpx,py,pz = getCameraMatrix()\t\n\t\t\tcreateExplosion(px-30, py-30, pz-30, 0, false, 2.5, false)\n\t\tend\n\n\t\tend, 1000, 0 )\n\tcolorTimer = setTimer ( function()\tfadeCamera(false, 0.9, math.random(0,180), math.random(0,180), math.random(0,180) ) end, 250, 0 )\n\tresetTimer = setTimer ( function()\tfadeCamera(true, 0.3 )\tend, 320, 0 )\n\n\n\n\t\n\t\n\t\nend\naddEventHandler(\"onRavebreakStart\",root,c_ravebreak)\n\nfunction renderRaveBreak()\n\t\n\tlocal screenWidth, screenHeight = guiGetScreenSize()\n\tdxDrawImage(0, 0, screenWidth , screenHeight, raveBreakBrowser, 0, 0, 0, tocolor(255,255,255,180), false)\nend\n\naddEventHandler(\"onClientBrowserCreated\", root, \n\tfunction()\n\n\t\tif source ~= raveBreakBrowser then return end\n\t\tloadBrowserURL(raveBreakBrowser, \"http://mta/local/ravebreak.html\")\n\t\t\n\t\t\n\t\taddEventHandler(\"onClientRender\", 
root, renderRaveBreak)\n\tend\n)\n\n\nfunction stopRaveBreak()\n\tstopSound( ravebreak )\n\tkillTimer(shakeTimer)\n\tkillTimer(colorTimer)\n\tkillTimer(resetTimer)\n\tfadeCamera(true, 0.5 )\n\t\n\tfor sound,volume in pairs(rb_soundVolumes) do\n\t\tif isElement(sound) and getElementType(sound) == \"sound\" and tonumber(volume) then\n\t\t\tsetSoundVolume( sound, volume )\n\t\tend\n\tend\n\tremoveEventHandler(\"onClientRender\", root, renderRaveBreak)\n\tif isElement(raveBreakBrowser) then\n\t\tdestroyElement(raveBreakBrowser)\n\tend\nend\naddEventHandler(\"stopRaveBreak\",root,stopRaveBreak)\n\naddCommandHandler(\"freeravebreak\", function()\n\tc_ravebreak()\n\tsetTimer ( stopRaveBreak, 10000, 1 )\nend)\n\n\n-- nuked http://community.mtasa.com/index.php?p=resources&s=details&id=71\nN_loops = 0\nN_cloudRotationAngle = 0 \nNFlashDelay = 0\nstopNFlash = false \n\nfunction FireN ( x, y, z )\n\tlocal sound = playSound3D( \"files/BOMB_SIREN-BOMB_SIREN-247265934.mp3\", x, y, z)\n\tsetSoundMaxDistance(sound, 100)\n\tsetTimer(destroyElement, 3000, 1, sound)\n\tNBeaconX = x --these are for the render function\n\tNBeaconY = y \n\tNBeaconZ = z\n\tN_Cloud = NBeaconZ\t\n setTimer ( function() setTimer ( NExplosion, 170, 35 ) end, 2700, 1 ) -- wait 2700 seconds then 35 loops @ 170ms\n setTimer ( NShot, 500, 1 )\t\nend\naddEvent(\"ClientFireN\",true)\naddEventHandler(\"ClientFireN\", getRootElement(), FireN)\n\nfunction NShot ()\n\tNukeObjectA = createObject ( 16340, NBeaconX, NBeaconY, NBeaconZ + 200 )\n\tNukeObjectB = createObject ( 3865, NBeaconX + 0.072265, NBeaconY + 0.013731, NBeaconZ + 196.153122 )\n\tNukeObjectC = createObject ( 1243, NBeaconX + 0.060547, NBeaconY - 0.017578, NBeaconZ + 189.075554 )\n\tsetElementRotation ( NukeObjectA, math.deg(3.150001), math.deg(0), math.deg(0.245437) )\n\tsetElementRotation ( NukeObjectB, math.deg(-1.575), math.deg(0), math.deg(1.938950) )\n\tsetElementRotation ( NukeObjectC, math.deg(0), math.deg(0), math.deg(-1.767145) )\n\tshotpath = 
NBeaconZ - 200\n\tmoveObject ( NukeObjectA, 5000, NBeaconX, NBeaconY, shotpath, 0, 0, 259.9 ) \n\tmoveObject ( NukeObjectB, 5000, NBeaconX + 0.072265, NBeaconY + 0.013731, shotpath - 3.846878, 0, 0, 259.9 )\n\tmoveObject ( NukeObjectC, 5000, NBeaconX + 0.060547, NBeaconY - 0.017578, shotpath - 10.924446, 0, 0, 259.9 )\nend\n \nfunction NExplosion ()\n\tN_loops = N_loops + 1\t\n\tr = math.random(1.5, 4.5)\n\tangleup = math.random(0, 35999)/100\n\texplosionXCoord = r*math.cos(angleup) + NBeaconX\n\tExplosionYCoord = r*math.sin(angleup) + NBeaconY\t\n\tif N_loops == 1 then\n\t\tN_Cloud = NBeaconZ\n\t\tcreateExplosion ( explosionXCoord, ExplosionYCoord, N_Cloud, 7 )\n\t\tkillXPosRadius = NBeaconX + 35\n\t\tkillXNegRadius = NBeaconX - 35\n\t\tkillYPosRadius = NBeaconY + 35\n\t\tkillYNegRadius = NBeaconY - 35 --+/- 35 x/y\n\t\tkillZPosRadius = NBeaconZ + 28-- +28\n\t\tkillZNegRadius = NBeaconZ - 28-- -28\n\t\tlocal x, y, z = getElementPosition ( localPlayer )\n\t\tif ( x < killXPosRadius ) and ( x > killXNegRadius ) and ( y < killYPosRadius ) and ( y > killYNegRadius ) and \n\t\t( z < killZPosRadius ) and ( z > killZNegRadius ) then\n\t\t\t--triggerServerEvent ( \"serverKillNukedPlayer\", localPlayer )\n\t\tend\n\telseif N_loops == 2 then\n\t\tN_Cloud = NBeaconZ + 4\n\t\tcreateExplosion ( explosionXCoord, ExplosionYCoord, N_Cloud, 7 )\n \tdestroyElement ( NukeObjectA ) --Exploded, get rid of objects\n\t\tdestroyElement ( NukeObjectB )\n\t\tdestroyElement ( NukeObjectC )\n\telseif N_loops > 20 then\n\t\tN_cloudRotationAngle = N_cloudRotationAngle + 22.5\n\t\tif N_explosionLimiter == false then\n\t\t\tN_cloudRadius = 7\n\t\t\texplosionXCoord = N_cloudRadius*math.cos(N_cloudRotationAngle) + NBeaconX --recalculate\n\t\t\tExplosionYCoord = N_cloudRadius*math.sin(N_cloudRotationAngle) + NBeaconY --recalculate\n\t\t\tcreateExplosion ( explosionXCoord, ExplosionYCoord, N_Cloud, 7 )\n\t\t\tN_explosionLimiter = true\n\t\telseif N_explosionLimiter == true 
then\n\t\t\tN_explosionLimiter = false\n\t\tend\n\t\tN_cloudRadius2 = 16\n\t\texplosionXCoord2 = N_cloudRadius2*math.cos(N_cloudRotationAngle) + NBeaconX\n\t\tExplosionYCoord2 = N_cloudRadius2*math.sin(N_cloudRotationAngle) + NBeaconY\n\t\tcreateExplosion ( explosionXCoord2, ExplosionYCoord2, N_Cloud, 7 )\n\telse\n \tN_Cloud = N_Cloud + 4\n \tcreateExplosion ( explosionXCoord, ExplosionYCoord, N_Cloud, 7 )\n end\n\t\n\tif N_loops == 1 then\n\t\tNExplosionFlash = createMarker ( NBeaconX, NBeaconY, NBeaconZ, \"corona\", 0, 255, 255, 255, 255 )\n\t\tN_FlashSize = 1\n\t\taddEventHandler ( \"onClientRender\", root, NFlash )\n\telseif N_loops == 35 then\n\t\tstopNFlash = true \n\tend\t\nend\n\nfunction NFlash () --Corona \"flare\". Grows after cp marker B grows a little\n\tif ( stopNFlash == false ) then\n\t\t\tif N_FlashSize > 60 then --beginning flash must grow fast, then delayed\n\t\t\t\tif NFlashDelay == 2 then\n\t\t\t\t\tN_FlashSize = N_FlashSize + 1\n\t\t\t\t\tNFlashDelay = 0\n\t\t\t\telse\t\n\t\t\t\t\tNFlashDelay = NFlashDelay + 1\t\t\t\t\t\t\t\t\t\n\t\t\t\tend \n\t\t\telse\n\t\t\t\tN_FlashSize = N_FlashSize + 1\t\t\t\n\t\t\tend \n\telse\n\t\tN_FlashSize = N_FlashSize - 1\n\tend\t\n\tsetMarkerSize ( NExplosionFlash, N_FlashSize )\t\t\t\t\t\n\tif N_FlashSize == 0 then\n\t\tremoveEventHandler ( \"onClientRender\", root, NFlash )\n\t\tdestroyElement ( NExplosionFlash )\n\t\tN_loops = 0 --reset stuff\n\t\tN_cloudRotationAngle = 0 --reset stuff\n\t\tstopNFlash = false --reset stuff\n\t\tNFlashDelay = 0 --reset stuff\n\t\t--triggerServerEvent ( \"serverNukeFinished\", getRootElement() )\n\tend\nend\n\nfunction serverLowFPS(limit, duration)\n\tsetTimer(setFPSLimit, 10 * 1000, 1, getFPSLimit() )\n setFPSLimit ( 30 )\nend\naddEvent( \"serverLowFPS\", true )\naddEventHandler( \"serverLowFPS\", resourceRoot, serverLowFPS )\n\n\n-- Quick spectate victims\naddEvent(\"onSpectateVictim\",true)\nfunction spectateVictim(name)\n\n\tif name 
then\n\t\texecuteCommandHandler(\"s\",name)\n\tend\nend\naddEventHandler(\"onSpectateVictim\",resourceRoot,spectateVictim)"},"avg_line_length":{"kind":"number","value":31.0285714286,"string":"31.028571"},"max_line_length":{"kind":"number","value":133,"string":"133"},"alphanum_fraction":{"kind":"number","value":0.7388247112,"string":"0.738825"},"score":{"kind":"number","value":3.078125,"string":"3.078125"}}},{"rowIdx":754,"cells":{"hexsha":{"kind":"string","value":"b063af9621c085fe9d87be3e0f9946a5380e6991"},"size":{"kind":"number","value":1489,"string":"1,489"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"src/identity/bin/password_authenticator/src/keys.rs"},"max_stars_repo_name":{"kind":"string","value":"allansrc/fuchsia"},"max_stars_repo_head_hexsha":{"kind":"string","value":"a2c235b33fc4305044d496354a08775f30cdcf37"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"max_stars_count":{"kind":"number","value":2,"string":"2"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2022-02-24T16:24:29.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-02-25T22:33:10.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/identity/bin/password_authenticator/src/keys.rs"},"max_issues_repo_name":{"kind":"string","value":"allansrc/fuchsia"},"max_issues_repo_head_hexsha":{"kind":"string","value":"a2c235b33fc4305044d496354a08775f30cdcf37"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n 
\"BSD-2-Clause\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2022-03-01T01:12:04.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-01T01:17:26.000Z"},"max_forks_repo_path":{"kind":"string","value":"src/identity/bin/password_authenticator/src/keys.rs"},"max_forks_repo_name":{"kind":"string","value":"allansrc/fuchsia"},"max_forks_repo_head_hexsha":{"kind":"string","value":"a2c235b33fc4305044d496354a08775f30cdcf37"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"// Copyright 2021 The Fuchsia Authors. All rights reserved.\n// Use of this source code is governed by a BSD-style license that can be\n// found in the LICENSE file.\n\nuse {async_trait::async_trait, fidl_fuchsia_identity_account as faccount, thiserror::Error};\n\n#[derive(Debug, Error)]\npub enum KeyError {\n // TODO(zarvox): remove once NullKey support is removed\n // This is only needed for NullKeyDerivation -- once we no longer have a key derivation that\n // would otherwise ignore the password provided, we can simply handle all authentication\n // failures by letting the resulting derived-key simply not match what the partition will\n // require to be unsealed.\n #[error(\"Password did not meet precondition\")]\n PasswordError,\n\n #[error(\"Failed to derive key from password\")]\n KeyDerivationError,\n}\n\n/// A 256-bit key.\npub type Key = [u8; 32];\n\n/// The `KeyDerivation` trait provides a mechanism for deriving a key from a password.\n/// The returned key is suitable for use with a zxcrypt volume.\n\n#[async_trait]\npub trait KeyDerivation {\n /// Derive a key from the given password. 
The returned key will be 256 bits long.\n async fn derive_key(&self, password: &str) -> Result;\n}\n\nimpl From for faccount::Error {\n fn from(e: KeyError) -> Self {\n match e {\n KeyError::PasswordError => faccount::Error::FailedAuthentication,\n KeyError::KeyDerivationError => faccount::Error::Internal,\n }\n }\n}\n"},"avg_line_length":{"kind":"number","value":36.3170731707,"string":"36.317073"},"max_line_length":{"kind":"number","value":96,"string":"96"},"alphanum_fraction":{"kind":"number","value":0.7065144392,"string":"0.706514"},"score":{"kind":"number","value":3.140625,"string":"3.140625"}}},{"rowIdx":755,"cells":{"hexsha":{"kind":"string","value":"0ba3c4d7d4d48cd32673696a0d4ce0dedcefcaca"},"size":{"kind":"number","value":21354,"string":"21,354"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"pootlestuff/watchables.py"},"max_stars_repo_name":{"kind":"string","value":"pootle/pootles_utils"},"max_stars_repo_head_hexsha":{"kind":"string","value":"bb47103e71ccc4fa01269259b73ca1932184af84"},"max_stars_repo_licenses":{"kind":"list like","value":["UPL-1.0"],"string":"[\n \"UPL-1.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"pootlestuff/watchables.py"},"max_issues_repo_name":{"kind":"string","value":"pootle/pootles_utils"},"max_issues_repo_head_hexsha":{"kind":"string","value":"bb47103e71ccc4fa01269259b73ca1932184af84"},"max_issues_repo_licenses":{"kind":"list like","value":["UPL-1.0"],"string":"[\n 
\"UPL-1.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"pootlestuff/watchables.py"},"max_forks_repo_name":{"kind":"string","value":"pootle/pootles_utils"},"max_forks_repo_head_hexsha":{"kind":"string","value":"bb47103e71ccc4fa01269259b73ca1932184af84"},"max_forks_repo_licenses":{"kind":"list like","value":["UPL-1.0"],"string":"[\n \"UPL-1.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\nThis module provides classes that support observers, smart value handling and debug functions\n\nAll changes to values nominate an agent, and observers nominate the agent making changes they\nare interested in.\n\nIt supercedes the pvars module\n\"\"\"\nimport logging, sys, threading, pathlib, math, json\nfrom enum import Enum, auto as enumauto, Flag\n\nclass loglvls(Enum):\n \"\"\"\n A class for logging levels so data is self identfying\n \"\"\"\n VAST = logging.DEBUG-1 \n DEBUG = logging.DEBUG\n INFO = logging.INFO\n WARN = logging.WARN\n ERROR = logging.ERROR\n FATAL = logging.FATAL\n NONE = 0\n\nclass myagents(Flag):\n NONE = 0\n app = enumauto()\n user = enumauto()\n\nclass wflags(Flag):\n NONE = 0\n DISABLED = enumauto()\n\nclass watchable():\n \"\"\"\n provides a 'smart' object that provides basic observer functionality around an object.\n \n Changes to the value can be policed, and updates have to provide an agent that is \n performing the update. Observers can then request to be notified when the value is changed\n by specific agents.\n \"\"\"\n def __init__(self, value, app, flags=wflags.NONE, loglevel=loglvls.INFO):\n \"\"\"\n creates a new watchable. Initialises the internal value and sets an empty observers list\n \n value: the initial value for the object. 
Not validated!\n \n app : the app instance for this. Used for logging and for validating agents\n \"\"\"\n self._val=value\n self.app=app\n self.observers=None\n self.oblock=threading.Lock()\n self.flags=flags\n self.loglevel=loglevel\n self.log(loglvls.DEBUG, 'watchable type %s setup with value %s' % (type(self).__name__, self._val))\n\n def setValue(self, value, agent):\n \"\"\"\n Updates the value of a watchable or the loglevel.\n \n if not a loglevel, this validates and converts (if relevant) the requested value.\n \n If the value is valid and different from the current value, checks for and calls\n any observers interested in changes by the given agent.\n \"\"\"\n if isinstance(value, loglvls):\n self.loglevel = value\n return False\n if isinstance(value, wflags):\n self.flags=value\n return False\n assert isinstance(agent, self.app.agentclass), 'unexpected value %s of type %s in setValue' % (value, type(value).__name__)\n newvalue=self.validValue(value, agent)\n if newvalue != self._val:\n self.notify(newvalue, agent)\n return True\n else:\n self.log(loglvls.DEBUG,'value unchanged (%s)' % self._val)\n return False\n\n def getValue(self):\n return self._val\n\n def validValue(self, value, agent=None):\n \"\"\"\n validates the given value and returns the canonical value which will be stored.\n \n Raise an exception if the value is invalid\n \n 'Real' classes must implement this\n \"\"\"\n raise NotImplementedError()\n\n def notify(self, newvalue, agent):\n if self.observers:\n clist=None\n with self.oblock:\n if agent in self.observers:\n clist=self.observers[agent].copy()\n oldvalue=self._val\n self._val=newvalue\n if clist:\n for ob in clist:\n ob(oldValue=oldvalue, newValue=newvalue, agent=agent, watched=self)\n self.log(loglvls.DEBUG,'value changed (%s)- observers called' % self._val)\n else:\n self._val=newvalue\n self.log(loglvls.DEBUG,'value changed (%s)- no observers' % self._val)\n\n def addNotify(self, callback, agent):\n assert callable(callback)\n 
assert isinstance(agent, self.app.agentclass)\n self.log(loglvls.DEBUG,'added watcher %s' % callback.__name__)\n with self.oblock:\n if self.observers is None:\n self.observers={agent:[callback]}\n elif agent in self.observers:\n self.observers[agent].append(callback)\n else:\n self.observers[agent]=[callback]\n \n def dropNotify(self, callback, agent):\n with self.oblock:\n aglist=self.observers[agent]\n ix = aglist.index(callback)\n aglist.pop(ix)\n\n def log(self, loglevel, *args, **kwargs):\n \"\"\"\n request a logging operation. This does nothing if the given loglevel is < the loglevel set in the object\n \"\"\"\n if loglevel.value >= self.loglevel.value:\n self.app.log(loglevel, *args, **kwargs)\n\nclass textWatch(watchable): \n \"\"\"\n A refinement of watchable for text strings.\n \"\"\"\n def validValue(self, value, agent):\n \"\"\"\n value : the requested new value for the field, can be anything that str() takes, but None will fail.\n \n agent : who asked for then change (ignored here)\n \n returns : the valid new value (this is always a str)\n \n raises : Any error that str() can raise\n \"\"\"\n if value is None:\n raise ValueError('None is not a valid textVar value')\n return str(value)\n\nclass floatWatch(watchable):\n \"\"\"\n A refinement of watchable that restricts the value to numbers - simple floating point.\n \"\"\"\n def __init__(self, *, maxv=sys.float_info.max, minv=-sys.float_info.max, clamp=False, allowNaN=True, **kwargs):\n \"\"\"\n Makes a float given min and max values. 
The value can be set clamped to prevent failures \n \n minv : the lowest allowed value - use 0 to allow only positive numbers\n \n maxv : the highest value allowed\n\n clamp : if True all values that can float() are accepted for updating, but are restricted to be between minv and maxv\n \"\"\"\n self.maxv=float(maxv)\n self.minv=float(minv)\n self.clamp=clamp==True\n self.allowNaN=allowNaN\n super().__init__(**kwargs)\n\n def validValue(self, value, agent):\n \"\"\"\n value : the requested new value for the field, can be anything that float(x) can handle that is between minv and maxv\n - or if clamp is True, any value\n \n agent : who asked for then change (ignored here)\n\n returns : the valid new value (this is always a float)\n \n raises : ValueError if the provided value is invalid\n \"\"\"\n av=float(value)\n if math.isnan(av) and self.allowNaN:\n return av\n if self.clamp:\n return self.minv if av < self.minv else self.maxv if av > self.maxv else av\n if self.minv <= av <= self.maxv:\n return av\n raise ValueError('value {} is outside range {} to {}'.format(value, self.minv, self.maxv))\n\nclass intWatch(watchable):\n \"\"\"\n A refinement of watchable that restricts the field value to integer numbers optionally within a range.\n \"\"\"\n def __init__(self, maxv=None, minv=None, clamp=False, **kwargs):\n \"\"\"\n creates an integer var\n \n maxv: None if unbounded maximum else anything that int() accepts\n \n minv: None if unbounded minimum else anything that int() accepts\n \n clamp: if True then value is clamped to maxv and minv (either can be None for unbounded in either 'direction'\n \"\"\"\n self.maxv=maxv if maxv is None else int(maxv)\n self.minv=minv if minv is None else int(minv)\n self.clamp=clamp==True\n super().__init__(**kwargs)\n \n def validValue(self, value, agent):\n \"\"\"\n value : the requested new value for the field, can be anything that int() can handle that is between minv and maxv\n - or if clamp is True, any value\n \n agent : who 
asked for then change (ignored here)\n\n returns : the valid new value (this is always an int)\n \n raises : ValueError if the provided value is invalid\n \"\"\"\n av=int(value)\n if self.clamp:\n if not self.minv is None and av < self.minv:\n return self.minv\n if not self.maxv is None and av > self.maxv:\n return self.maxv\n return av\n if (self.minv is None or av >= self.minv) and (self.maxv is None or av <= self.maxv):\n return av\n raise ValueError('value {} is outside range {} to {} for watchable'.format(value, self.minv, self.maxv))\n\n def increment(self, agent, count=1):\n incer=int(count)\n newval=self.getValue()+incer\n self.setValue(newval, agent)\n return newval\n\nclass enumWatch(watchable):\n \"\"\"\n a watchable that can only take a specific set of values, and can wrap / clamp values.\n \n It also allows values to be cycled through\n \"\"\"\n def __init__(self, vlist, wrap=True, clamp=False, **kwargs):\n self.wrap=wrap == True\n self.clamp=clamp == True\n self.vlist=vlist\n super().__init__(**kwargs)\n\n def validValue(self, value, agent):\n if not value in self.vlist:\n raise ValueError('value (%s) not valid' % value)\n return value\n\n def getIndex(self):\n return self.vlist.index(self._val)\n\n def increment(self, agent, inc=1):\n newi=self.getIndex()+inc\n if 0 <= newi < len(self.vlist):\n return self.setValue(self.vlist[newi], agent)\n elif self.wrap:\n if newi < 0:\n useval = self.vlist[-1]\n else:\n useval = self.vlist[0]\n elif self.clamp:\n if newi < 0:\n useval = self.vlist[0]\n else:\n useval = self.vlist[-1]\n else:\n raise ValueError('operation exceeds list boundary')\n self.setValue(useval, agent)\n\n def setIndex(self, ival, agent):\n if 0 <= ival < len(self.vlist):\n return self.setValue(self.vlist[ival], agent)\n else:\n raise ValueError('index out of range')\n \nclass btnWatch(watchable):\n \"\"\"\n For simple click buttons that always notify\n \"\"\"\n def setValue(self, value, agent):\n if isinstance(value, loglvls):\n 
self.loglevel = value\n return False\n if isinstance(value, wflags):\n self.flags=value\n return False\n assert isinstance(agent, self.app.agentclass)\n self.notify(self._val, agent)\n return True\n\nclass folderWatch(watchable):\n \"\"\"\n Internally. the value is a pathlib path to a folder (subfolders are created automatically).\n \"\"\"\n def __init__(self, value, **kwargs):\n super().__init__(value=self.validValue(value, None), **kwargs)\n\n def validValue(self, value, agent):\n tp=pathlib.Path(value).expanduser()\n if tp.exists():\n if tp.is_dir():\n return tp\n else:\n raise ValueError('%s is not a folder' % str(tp))\n else:\n tp.mkdir(parents=True, exist_ok=True)\n return tp\n\n def getValue(self):\n return str(self._val)\n\n def getFolder(self):\n return self._val\n\n def currentfilenames(self, includes=None, excludes=None):\n \"\"\"\n returns names of files currently in this folder\n \"\"\"\n return [pp.name for pp in self.getValue().iterdir() if pp.is_file() and \n (True if includes is None else [1 for x in includes if pp.name.endswith(x)]) and \n (True if excludes is None else [1 for x in excludes if not pp.name.endswith(x)])]\n\nclass watchablegroup(object):\n def __init__(self, value, wabledefs, loglevel=None):\n \"\"\"\n value : dict of preferred values for watchables in this activity (e.g. 
from saved settings file)\n \n wabledefs: a list of 5-tuples that define each watchable with the following entries:\n 0: name of the watchable\n 1: class of the watchable\n 2: default value of the watchable\n 3: True if the watchable is returned by fetchsettings (as a dict member)\n 4: kwargs to use when setting up the watchable\n \"\"\"\n self.perslist=[]\n self.loglevel=loglvls.INFO if loglevel is None else loglevel\n for awable in wabledefs:\n ch=self.makeChild(defn=awable, value=awable[2] if value is None else value.get(awable[0], awable[2]))\n if ch is None:\n raise ValueError('child construction failed - see log')\n setattr(self, awable[0], ch)\n if awable[3]:\n self.perslist.append(awable[0])\n\n def makeChild(self, value, defn):\n \"\"\"\n returns a new object with this object as the app using a definition list\n \n value : value for the \n \n defn: a list of 5-tuples that define each watchable with the following entries:\n 0: name of the watchable - not used\n 1: class of the watchable\n 2: default value of the watchable - only used if value is None\n 3: True if then watchable is returned by fetchsettings (as a dict member) - not used\n 4: kwargs to use when setting up the watchable\n \"\"\"\n deflen=len(defn)\n if deflen==4:\n params={}\n elif deflen==5:\n params=defn[4]\n else:\n raise ValueError('there are not 4 or 5 entries in this definition for class %s: %s' % (type(self).__name__, defn))\n try:\n vv=defn[2] if value is None else value\n return defn[1](app=self, value=vv, **params)\n except:\n print('Exception in makeChild for class %s' % defn[1], ('using defn value (%s)' % defn[2]) if value is None else str(vv))\n print('extra keyword args', params)\n print('input values:', value)\n self.log(loglvls.ERROR,'class %s exception making variable %s' % (type(self).__name__, defn[0]), exc_info=True, stack_info=True)\n return None\n\n def fetchsettings(self):\n return {kv: getattr(self,kv).getValue() for kv in self.perslist}\n\n def applysettings(self, 
settings, agent):\n for k,v in settings:\n if k in self.perslist:\n getattr(self, k).setValue(v, agent)\n\nclass watchablesmart(watchablegroup):\n \"\"\"\n This class can act as a complete app, or as a part of an app.\n \n For a complete app:\n sets up logging for the app\n \n for a component of an app:\n passes logging calls up to the app.\n \n value: for the top level (app is None), if a string, this is the file name for json file which should yield a dict with the settings to be applied in construction\n otherwise id should be a dict with the settings\n \n lower levels always expect a dict\n \n app: If app is None, this node is the app, otherwise it should be the app object (which provides logging and save / restore settings\n \"\"\"\n def __init__(self, value, app=None, loglevel=loglvls.INFO, **kwargs):\n if app==None: # this is the real (top level) app\n if loglevel is None or loglevel is loglvls.NONE:\n self.logger=None\n print('%s no logging' % type(self).__name__)\n else:\n self.agentclass=myagents\n self.logger=logging.getLogger(__loader__.name+'.'+type(self).__name__)\n chandler=logging.StreamHandler()\n chandler.setFormatter(logging.Formatter(fmt= '%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s', datefmt= \"%M:%S\"))\n self.logger.addHandler(chandler)\n self.logger.setLevel(loglevel.value)\n self.log(loglvls.INFO,'logging level is %s' % loglevel)\n self.startsettings, lmsg, self.settingsfrom = loadsettings(value)\n self.log(loglvls.INFO, lmsg)\n else:\n self.app=app\n self.agentclass=app.agentclass\n self.startsettings=value\n super().__init__(value=self.startsettings, loglevel=loglevel, **kwargs)\n\n def log(self, level, msg, *args, **kwargs):\n if hasattr(self,'app'):\n if self.loglevel.value <= level.value:\n self.app.log(level, msg, *args, **kwargs)\n else:\n if self.logger:\n self.logger.log(level.value, msg, *args, **kwargs)\n elif level.value >= loglvls.WARN:\n print(msg) \n\n def savesettings(self, 
oldValue, newValue, agent, watched):\n if hasattr(self, 'app'):\n raise ValueError('only the app level can save settings')\n try:\n setts = self.fetchsettings()\n except:\n self.log(loglvls.WARN,'fetchsettings failed', exc_info=True, stack_info=True)\n setts = None\n if not setts is None:\n try:\n settstr=json.dumps(setts, indent=4)\n except:\n self.log(loglvls.WARN,'json conversion of these settings failed', exc_info=True, stack_info=True)\n self.log(loglvls.WARN,str(setts))\n settstr=None\n if not settstr is None:\n try:\n with self.settingsfrom.open('w') as sfo:\n sfo.write(settstr)\n except:\n self.log(loglvls.WARN,'save settings failed to write file', exc_info=True, stack_info=True)\n return\n self.log(loglvls.INFO,'settings saved to file %s' % str(self.settingsfrom))\n\nclass watchablepigpio(watchablesmart):\n \"\"\"\n a root class that adds in pigpio setup to watchablesmart\n \"\"\"\n def __init__(self, app=None, pigp=None, **kwargs):\n \"\"\"\n if the app has a pio attribute, (an instance of pigpio.pi), that is used otherwise one is set up.\n \"\"\"\n if not app is None and hasattr(app,'pio'):\n self.pio=app.pio\n self.mypio=False\n elif pigp is None:\n import pigpio\n ptest=pigpio.pi()\n if not ptest.connected:\n raise ValueError('pigpio failed to initialise')\n self.pio=ptest\n self.mypio=True\n else:\n self.pio=pigp\n self.mypio=False\n if not self.pio.connected:\n raise ValueError('pigpio is not connected')\n super().__init__(app=app, **kwargs)\n\n def close(self):\n if self.mypio:\n self.pio.stop()\n self.mypio=False\n self.pio=None\n\nclass watchableAct(watchablegroup):\n \"\"\"\n An app can have a number of optional activities (that can have their own threads, watched vars etc.\n \n This class provides useful common bits for such activities. 
It provides:\n \n A way to set up the watchable variables for the class, using passed in values (for saved settings for example)\n with defaults if a value isn't passed.\n \n A way to automatically retrieve values for a subset of watchable variables (e.g. to save values as a known config)\n \n logging via the parent app using Python's standard logging module\n \"\"\"\n def __init__(self, app, **kwargs):\n self.app=app\n self.agentclass=app.agentclass\n super().__init__(**kwargs)\n\n def log(self, loglevel, *args, **kwargs):\n \"\"\"\n request a logging operation. This does nothing if the given loglevel is < the loglevel set in the object\n \"\"\"\n if self.loglevel.value <= loglevel.value:\n self.app.log(loglevel, *args, **kwargs)\n\nclass watchableApp(object):\n def __init__(self, agentclass=myagents, loglevel=None):\n self.agentclass=agentclass\n if loglevel is None or loglevel is loglvls.NONE:\n self.logger=None\n print('%s no logging' % type(self).__name__)\n else:\n self.logger=logging.getLogger(__loader__.name+'.'+type(self).__name__)\n chandler=logging.StreamHandler()\n chandler.setFormatter(logging.Formatter(fmt= '%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s', datefmt= \"%M:%S\"))\n self.logger.addHandler(chandler)\n self.logger.setLevel(loglevel.value)\n\n def log(self, level, msg, *args, **kwargs):\n if self.logger:\n self.logger.log(level.value, msg, *args, **kwargs)\n\ndef loadsettings(value):\n if isinstance(value, str):\n spath=pathlib.Path(value).expanduser()\n settingsfrom=spath\n if spath.is_file():\n try:\n with spath.open('r') as spo:\n startsettings=json.load(spo)\n return startsettings, 'app settings loaded from file %s' % spath, spath\n except:\n return {}, 'failed to load settings from %s - default values used' % spath, spath\n else:\n return {}, 'app settings file %s not found - default values used' % str(spath), spath\n elif hasattr(value,'keys'):\n return value, 'using settings from passed 
object', None\n elif value is None:\n return {}, 'settings not specified, default values used', None\n else:\n return {}, 'setings not processed from passed %s' % type(values).__name__, None\n"},"avg_line_length":{"kind":"number","value":38.4756756757,"string":"38.475676"},"max_line_length":{"kind":"number","value":177,"string":"177"},"alphanum_fraction":{"kind":"number","value":0.5949236677,"string":"0.594924"},"score":{"kind":"number","value":3.234375,"string":"3.234375"}}},{"rowIdx":756,"cells":{"hexsha":{"kind":"string","value":"4a67ee91447ce787f0f3b06732979750489cf134"},"size":{"kind":"number","value":826,"string":"826"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"client/src/js/render/clock.js"},"max_stars_repo_name":{"kind":"string","value":"yg-0103/pomodoro-clone"},"max_stars_repo_head_hexsha":{"kind":"string","value":"1ce5a0edf42927afea2fa41a9b89cca7b0f88d28"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-01-15T08:52:00.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-01-15T08:52:00.000Z"},"max_issues_repo_path":{"kind":"string","value":"client/src/js/render/clock.js"},"max_issues_repo_name":{"kind":"string","value":"yg-0103/pomodoro-clone"},"max_issues_repo_head_hexsha":{"kind":"string","value":"1ce5a0edf42927afea2fa41a9b89cca7b0f88d28"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":58,"string":"58"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-01-18T01:23:33.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-01-26T08:39:34.000Z"},"max_forks_repo_path":{"kind":"string","value":"client/src/js/render/clock.js"},"max_forks_repo_name":{"kind":"string","value":"yg-0103/pomodoro-clone"},"max_forks_repo_head_hexsha":{"kind":"string","value":"1ce5a0edf42927afea2fa41a9b89cca7b0f88d28"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":4,"string":"4"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-01-16T03:03:54.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-01-18T17:28:28.000Z"},"content":{"kind":"string","value":"import Pomodoro from '../time';\nimport fetch from '../axios/fetch';\n\nexport default async function () {\n // 서버에서 설정된 시간들을 가져온다.\n try {\n const { long_interval, auto_start } = await fetch.settings();\n // 상태에 따라 어떤시간을 렌더링할지 정한다.\n\n // 설정된 시간이 0분이면 1분을 넣어준다.\n const curTime = (await fetch.curClockTime()) || 1;\n\n const pomodoro = new Pomodoro(curTime, long_interval, auto_start);\n // 설정된 시간을 랜더링한다.\n pomodoro.setTimeText();\n\n const $nav = document.querySelector('.main__btn-group');\n $nav.addEventListener('click', async (e) => {\n if (e.target === e.currentTarget) return;\n // 네비게이션 버튼이 클릭되면 현재 설정된 시간으로 초기화 되고 초기화 된 시간을 다시 랜더링한다.\n pomodoro.minute = await fetch.curClockTime();\n pomodoro.second = 0;\n pomodoro.setTimeText();\n });\n } catch (e) {\n console.error(e);\n 
}\n}\n"},"avg_line_length":{"kind":"number","value":28.4827586207,"string":"28.482759"},"max_line_length":{"kind":"number","value":70,"string":"70"},"alphanum_fraction":{"kind":"number","value":0.6319612591,"string":"0.631961"},"score":{"kind":"number","value":3.0625,"string":"3.0625"}}},{"rowIdx":757,"cells":{"hexsha":{"kind":"string","value":"f01e8e597dc20bba7caf3b9b0fddc57695c216de"},"size":{"kind":"number","value":5316,"string":"5,316"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"train.py"},"max_stars_repo_name":{"kind":"string","value":"ThiruRJST/Deformed-Yolo"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c9eb4e8c090dff0e9fc4f8652897ff2c59dce889"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-09-10T17:20:09.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-09-10T17:20:09.000Z"},"max_issues_repo_path":{"kind":"string","value":"train.py"},"max_issues_repo_name":{"kind":"string","value":"ThiruRJST/Deformed-Yolo"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c9eb4e8c090dff0e9fc4f8652897ff2c59dce889"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-09-10T17:19:54.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-09-11T08:17:14.000Z"},"max_forks_repo_path":{"kind":"string","value":"wandb/run-20210904_163431-3lkn6hoe/files/code/train.py"},"max_forks_repo_name":{"kind":"string","value":"ThiruRJST/Deformed-Yolo"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c9eb4e8c090dff0e9fc4f8652897ff2c59dce889"},"max_forks_repo_licenses":{"kind":"list 
like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from pandas.core.algorithms import mode\n\nimport torch\nimport torch.nn as nn\nfrom albumentations import Compose,Resize,Normalize\nfrom albumentations.pytorch import ToTensorV2\nimport wandb\nimport time\nimport torchvision\nimport torch.nn.functional as F\nimport torch.optim as optim\nfrom torch.cuda.amp import autocast,GradScaler\nimport os\nimport numpy as np\nfrom tqdm import tqdm\n\nfrom callbacks import EarlyStopping\nimport pandas as pd\nfrom torch.utils.data import Dataset, DataLoader\nimport cv2\nimport torch.nn.functional as F\nimport random\n\n\nfrom build_model import Deformed_Darknet53\n\ntorch.manual_seed(2021)\nnp.random.seed(2021)\nrandom.seed(2021)\ntorch.backends.cudnn.benchmark = True\ntorch.backends.cudnn.deterministic = True\nDEVICE = \"cuda:0\" if torch.cuda.is_available() else \"cpu\"\nTOTAL_EPOCHS = 100\nscaler = GradScaler()\nearly_stop = EarlyStopping()\nwandb.init(project='deformed-darknet',entity='tensorthug',name='new-darknet-256x256_32')\n\n\n\nprint(\"***** Loading the Model in {} *****\".format(DEVICE))\n\nModel = Deformed_Darknet53().to(DEVICE)\n\nprint(\"Model Shipped to {}\".format(DEVICE))\n\ndata = pd.read_csv(\"data.csv\")\n\ntrain_loss_fn = nn.BCEWithLogitsLoss()\nval_loss_fn = nn.BCEWithLogitsLoss()\n\noptim = torch.optim.Adam(Model.parameters())\n\nwandb.watch(Model)\n\nclass dog_cat(Dataset):\n def __init__(self,df,mode=\"train\",folds=0,transforms=None):\n super(dog_cat,self).__init__()\n self.df = df\n self.mode = mode\n self.folds = folds\n self.transforms = transforms\n\n if self.mode == \"train\":\n self.data = self.df[self.df.folds != self.folds].reset_index(drop=True)\n \n else:\n self.data = self.df[self.df.folds == self.folds].reset_index(drop=True)\n \n def __len__(self):\n return 
len(self.data)\n \n def __getitem__(self,idx):\n\n img = cv2.imread(self.data.loc[idx,\"Paths\"])\n label = self.data.loc[idx,'Labels']\n\n if self.transforms is not None:\n image = self.transforms(image=img)['image']\n\n \n return image,label\n\n\ndef train_loop(epoch,dataloader,model,loss_fn,optim,device=DEVICE):\n model.train()\n epoch_loss = 0\n epoch_acc = 0\n #start_time = time.time()\n pbar = tqdm(enumerate(dataloader),total=len(dataloader))\n for i,(img,label) in pbar:\n optim.zero_grad()\n\n img = img.to(DEVICE).float()\n label = label.to(DEVICE).float()\n \n #LOAD_TIME = time.time() - start_time\n\n with autocast():\n yhat = model(img)\n #Loss Calculation\n train_loss = loss_fn(input = yhat.flatten(), target = label)\n \n out = (yhat.flatten().sigmoid() > 0.5).float()\n correct = (label == out).float().sum()\n\n scaler.scale(train_loss).backward()\n scaler.step(optim)\n scaler.update()\n\n \n epoch_loss += train_loss.item()\n epoch_acc += correct.item() / out.shape[0]\n\n train_epoch_loss = epoch_loss / len(dataloader)\n train_epoch_acc = epoch_acc / len(dataloader)\n\n wandb.log({\"Training_Loss\":train_epoch_loss})\n wandb.log({\"Training_Acc\":train_epoch_acc})\n \n #print(f\"Epoch:{epoch}/{TOTAL_EPOCHS} Epoch Loss:{epoch_loss / len(dataloader):.4f} Epoch Acc:{epoch_acc / len(dataloader):.4f}\")\n \n return train_epoch_loss,train_epoch_acc\n\ndef val_loop(epoch,dataloader,model,loss_fn,device = DEVICE):\n model.eval()\n val_epoch_loss = 0\n val_epoch_acc = 0\n pbar = tqdm(enumerate(dataloader),total=len(dataloader))\n\n with torch.no_grad():\n for i,(img,label) in pbar:\n img = img.to(device).float()\n label = label.to(device).float()\n\n yhat = model(img)\n val_loss = loss_fn(input=yhat.flatten(),target=label)\n\n out = (yhat.flatten().sigmoid()>0.5).float()\n correct = (label == out).float().sum()\n\n val_epoch_loss += val_loss.item()\n val_epoch_acc += correct.item() / out.shape[0]\n\n val_lossd = val_epoch_loss / len(dataloader)\n val_accd = 
val_epoch_acc / len(dataloader)\n \n wandb.log({\"Val_Loss\":val_lossd,\"Epoch\":epoch})\n wandb.log({\"Val_Acc\":val_accd/len(dataloader),\"Epoch\":epoch})\n\n return val_lossd,val_accd\n\n\n\n\n\n\n \n \n\n\nif __name__ == \"__main__\":\n\n train_per_epoch_loss,train_per_epoch_acc = [],[]\n val_per_epoch_loss,val_per_epoch_acc = [],[]\n train = dog_cat(data,transforms=Compose([Resize(256,256),Normalize(),ToTensorV2()]))\n val = dog_cat(data,mode='val',transforms=Compose([Resize(256,256),Normalize(),ToTensorV2()]))\n\n train_load = DataLoader(train,batch_size=32,shuffle=True,num_workers=4)\n val_load = DataLoader(val,batch_size=32,num_workers=4)\n\n for e in range(TOTAL_EPOCHS):\n train_loss,train_acc = train_loop(e,train_load,Model,train_loss_fn,optim)\n val_loss,val_acc = val_loop(e,val_load,Model,val_loss_fn)\n train_per_epoch_loss.append(train_loss)\n train_per_epoch_acc.append(train_acc)\n val_per_epoch_loss.append(val_loss)\n val_per_epoch_acc.append(val_acc)\n print(f\"TrainLoss:{train_loss:.4f} TrainAcc:{train_acc:.4f}\")\n print(f\"ValLoss:{val_loss:.4f} ValAcc:{val_acc:.4f}\")\n early_stop(Model,val_loss)\n if early_stop.early_stop:\n break\n"},"avg_line_length":{"kind":"number","value":29.0491803279,"string":"29.04918"},"max_line_length":{"kind":"number","value":133,"string":"133"},"alphanum_fraction":{"kind":"number","value":0.6595184349,"string":"0.659518"},"score":{"kind":"number","value":3.234375,"string":"3.234375"}}},{"rowIdx":758,"cells":{"hexsha":{"kind":"string","value":"0ce58d7de1508c5e2496368e37a432c416830c42"},"size":{"kind":"number","value":2183,"string":"2,183"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"lib_dsp/iir/iir/design/iir.py"},"max_stars_repo_name":{"kind":"string","value":"PyGears/lib-dsp"},"max_stars_repo_head_hexsha":{"kind":"string","value":"a4c80882f5188799233dc9108f91faa4bab0ac57"},"max_stars_repo_licenses":{"kind":"list 
like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":3,"string":"3"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-08-26T17:32:33.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-19T02:05:02.000Z"},"max_issues_repo_path":{"kind":"string","value":"pygears_dsp/lib/iir.py"},"max_issues_repo_name":{"kind":"string","value":"bogdanvuk/pygears-dsp"},"max_issues_repo_head_hexsha":{"kind":"string","value":"ca107d3f9e8d02023e9ccd27f7bc95f10b5aa995"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"pygears_dsp/lib/iir.py"},"max_forks_repo_name":{"kind":"string","value":"bogdanvuk/pygears-dsp"},"max_forks_repo_head_hexsha":{"kind":"string","value":"ca107d3f9e8d02023e9ccd27f7bc95f10b5aa995"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":5,"string":"5"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-09-18T18:00:13.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-28T11:07:26.000Z"},"content":{"kind":"string","value":"from pygears import gear, Intf\nfrom pygears.lib import dreg, decouple, saturate, qround\n\n\n@gear\ndef iir_1dsos(din, *, a, b, gain):\n\n # add input gain and init delayed inputs\n zu0 = din * gain\n zu1 = zu0 | dreg(init=0)\n zu2 = zu1 | dreg(init=0)\n\n # perform b coefficient sum\n a1 = (zu1 * b[1]) + (zu2 * b[2])\n a2 = a1 + (zu0 * b[0])\n\n # declare output interface and its type\n y = Intf(a2.dtype)\n\n # init delayed outputs\n zy1 = y | decouple(init=0)\n zy2 = zy1 | dreg(init=0)\n\n # perform a coefficient sum\n b1 = (zy2 * a[2]) + (zy1 * a[1])\n\n # add both sums and set 
output\n y |= (a2 - b1) | qround(fract=a2.dtype.fract) | saturate(t=a2.dtype)\n return y\n\n\n@gear\ndef iir_2tsos(din, *, a, b, gain):\n\n # add input gain\n x = din * gain\n\n # declare output interface and its type\n y = Intf(din.dtype)\n\n # perform first tap multiplication and sum\n z0 = ((x * b[2]) - (y * a[2]))\n\n # delay first sum output\n z0_delayed = z0 | dreg(init=0)\n\n # perform second tap multiplication and sum\n z1 = ((x * b[1]) + z0_delayed - (y * a[1]))\n\n # delay second sum output\n z1_delayed = z1 | decouple(init=0)\n\n # perform final sum and set output\n y |= ((x * b[0]) + z1_delayed) | qround(fract=din.dtype.fract) | saturate(t=din.dtype)\n return y\n\n\n@gear\ndef iir_df1dsos(din, *, a, b, gain, ogain):\n\n # init temp\n temp = din\n\n # add cascades for all b coefficients\n for i in range(len(b)):\n\n # format every cascaded output as input\n temp = temp | iir_1dsos(a=a[i], b=b[i], gain=gain[i]) | qround(fract=din.dtype.fract) | saturate(t=din.dtype)\n\n # add output gain and format as input\n dout = (temp * ogain) | qround(fract=din.dtype.fract) | saturate(t=din.dtype)\n return dout\n\n\n@gear\ndef iir_df2tsos(din, *, a, b, gain, ogain):\n\n # init temp\n temp = din\n\n # add cascades for all b coefficients\n for i in range(len(b)):\n\n # format every cascaded output as input\n temp = temp | iir_2tsos(a=a[i], b=b[i], gain=gain[i])\n\n # add output gain and format as input\n dout = (temp * ogain) | qround(fract=din.dtype.fract) | saturate(t=din.dtype)\n return 
dout\n"},"avg_line_length":{"kind":"number","value":24.2555555556,"string":"24.255556"},"max_line_length":{"kind":"number","value":117,"string":"117"},"alphanum_fraction":{"kind":"number","value":0.6014658727,"string":"0.601466"},"score":{"kind":"number","value":3.28125,"string":"3.28125"}}},{"rowIdx":759,"cells":{"hexsha":{"kind":"string","value":"72352eac308cfa7475d88b80208a2f269df1b337"},"size":{"kind":"number","value":4711,"string":"4,711"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"Light Sword.lua"},"max_stars_repo_name":{"kind":"string","value":"xVoid-xyz/Roblox-Scripts"},"max_stars_repo_head_hexsha":{"kind":"string","value":"7eb176fa654f2ea5fbc6bcccced1b15df7ed82c2"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_stars_count":{"kind":"number","value":70,"string":"70"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-02-09T17:21:32.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-28T12:41:42.000Z"},"max_issues_repo_path":{"kind":"string","value":"Light Sword.lua"},"max_issues_repo_name":{"kind":"string","value":"xVoid-xyz/Roblox-Scripts"},"max_issues_repo_head_hexsha":{"kind":"string","value":"7eb176fa654f2ea5fbc6bcccced1b15df7ed82c2"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_issues_count":{"kind":"number","value":4,"string":"4"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-08-19T22:05:58.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-19T18:58:01.000Z"},"max_forks_repo_path":{"kind":"string","value":"Light 
Sword.lua"},"max_forks_repo_name":{"kind":"string","value":"xVoid-xyz/Roblox-Scripts"},"max_forks_repo_head_hexsha":{"kind":"string","value":"7eb176fa654f2ea5fbc6bcccced1b15df7ed82c2"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_forks_count":{"kind":"number","value":325,"string":"325"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-02-26T22:23:41.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-31T19:36:12.000Z"},"content":{"kind":"string","value":"Player = game:GetService(\"Players\").LocalPlayer \n\nCha = Player.Character\n\nRShoulder = Cha.Torso['Right Shoulder']\n\nTool = Instance.new(\"HopperBin\",Player.Backpack)\n\nTool.Name = \"ice sword\"\n\n\nfunction onKeyDown(key)\n\nkey = key:lower()\n\nif key == \"e\" then\n\nwal = not wal\n\nif wal == true then\n\nwl=Instance.new(\"Part\",workspace)\n\nwl.BrickColor=BrickColor.new(\"Toothpaste\")\n\nwl.Material=\"Ice\"\n\nwl.Size=Vector3.new(10,7,2)\n\nwl.Anchored=true\n\nwl.CFrame=Cha.Torso.CFrame*CFrame.new(0,0,-5)\n\nwl2=wl:Clone()\n\nwl2.Parent=Workspace\n\nwl2.Size=Vector3.new(2,7,10)\n\nwl2.CFrame=Cha.Torso.CFrame*CFrame.new(-5,0,0)\n\nwl3=wl2:Clone()\n\nwl3.Parent=Workspace\n\nwl3.CFrame=Cha.Torso.CFrame*CFrame.new(5,0,0)\n\nwl4=wl:Clone()\n\nwl4.Parent=Workspace\n\nwl4.CFrame=Cha.Torso.CFrame*CFrame.new(0,0,5)\n\nelse\n\nfor i=1,10 do wait()\n\nwl.Transparency=wl.Transparency +.1\n\nwl2.Transparency=wl2.Transparency +.1\n\nwl3.Transparency=wl3.Transparency +.1\n\nwl4.Transparency=wl4.Transparency +.1\n\nwait()\n\nend\n\nwl:remove()\n\nwl2:remove()\n\nwl3:remove()\n\nwl4:remove()\n\nend\n\nend\n\nend\n\n\nfunction onClicked(mouse)\n\nif (not vDebounce) then\n\nvDebounce = true\n\nwa = Instance.new(\"Part\",Char)\n\nwa.Transparency=1\n\nwa.CanCollide = false \n\nwa.Size = Vector3.new(1, 1, 1) \n\nwa:BreakJoints() \n\nWeld3 = Instance.new(\"Weld\",wa) \n\nWeld3.Part0 = 
Blade\n\nWeld3.Part1 = wa\n\nWeld3.C0 = CFrame.new(0, 0, -2) * CFrame.Angles(0, 0, 0)\n\nfunction touch(hit) \n\nif hit.Parent:findFirstChild(\"Humanoid\") ~= nil then \n\nhit.Parent.Humanoid.Health=hit.Parent.Humanoid.Health-5\n\nend end wa.Touched:connect(touch)\n\nanimation = Instance.new(\"Animation\")\n\nanimation.Name = \"SlashAnim\"\n\nanimation.AnimationId = \"http://www.roblox.com/Asset?ID=94161088\"\n\nanimTrack = Cha.Humanoid:LoadAnimation(animation)\n\nanimTrack:Play()\n\nfor i = 1,26 do wait()\n\np = Instance.new(\"Part\",workspace)\n\np.FormFactor=\"Custom\"\n\np.Size=Vector3.new(.5,.5,.5)\n\np.TopSurface = 0 \n\np.BottomSurface = 0 \n\np.BrickColor=BrickColor.new(\"Toothpaste\")\n\np.Transparency=.3\n\np.CanCollide=false\n\np.Anchored=true\n\np.CFrame =(Blade.CFrame*CFrame.new(0,0,-2))*CFrame.Angles(math.random(-3,3),math.random(-3,3),math.random(-3,3))\n\ngame.Debris:AddItem(p,.1)\n\nend \n\nwa:remove()\n\nvDebounce = false\n\nend\n\nend\n\n\nTool.Selected:connect(function(mouse)\n\nmouse.Button1Down:connect(function() onClicked(mouse) end)\n\nmouse.KeyDown:connect(onKeyDown)\n\n--==THE ASSIMBLE==--\n\n\nChar=Instance.new(\"Model\",Cha) -- CHA not CHAR\n\nHandle = Instance.new(\"Part\", Char)\n\nHandle.FormFactor = \"Custom\"\n\nHandle.Size = Vector3.new(1, -1, 1)\n\nHandle.TopSurface = \"Smooth\"\n\nHandle.BottomSurface = \"Smooth\"\n\nHandle.BrickColor = BrickColor.new(\"Toothpaste\")\n\nHandle.Reflectance = 0\n\nHandle:BreakJoints()\n\nHandle.CanCollide=false\n\nMesh = Instance.new(\"SpecialMesh\", Handle)\n\nMesh.MeshType = \"Cylinder\"\n\nMesh.Scale = Vector3.new(1, 1, 1)\n\nHandleWeld = Instance.new(\"Weld\", Char)\n\nHandleWeld.Part0 = Cha[\"Right Arm\"]\n\nHandleWeld.Part1 = Handle\n\nHandleWeld.C0 = CFrame.new(0, -1, 0) * CFrame.Angles(0, math.pi/2, 0)\n\nPower = Instance.new(\"Part\", Char)\n\nPower.FormFactor = \"Custom\"\n\nPower.Size = Vector3.new(1, 1, 1)\n\nPower.TopSurface = \"Smooth\"\n\nPower.BottomSurface = 
\"Smooth\"\n\nPower.BrickColor = BrickColor.new(\"Institutional white\")\n\nPower.Reflectance = 0\n\nPower:BreakJoints()\n\nPower.CanCollide=false\n\nMesh = Instance.new(\"SpecialMesh\", Power)\n\nMesh.MeshType = \"Sphere\"\n\nMesh.Scale = Vector3.new(1, 1, 1)\n\nPowerWeld = Instance.new(\"Weld\", Char)\n\nPowerWeld.Part0 = Cha[\"Right Arm\"]\n\nPowerWeld.Part1 = Power\n\nPowerWeld.C0 = CFrame.new(0, -1, 1) * CFrame.Angles(0, 0, 0)\n\nDetail = Instance.new(\"Part\", Char)\n\nDetail.FormFactor = \"Custom\"\n\nDetail.Size = Vector3.new(1, -1, 1)\n\nDetail.TopSurface = \"Smooth\"\n\nDetail.BottomSurface = \"Smooth\"\n\nDetail.BrickColor = BrickColor.new(\"Institutional white\")\n\nDetail.Reflectance = 0\n\nDetail:BreakJoints()\n\nDetail.CanCollide=false\n\nMesh = Instance.new(\"SpecialMesh\", Detail)\n\nMesh.MeshType = \"Cylinder\"\n\nMesh.Scale = Vector3.new(1, 1, 1)\n\nDetailWeld = Instance.new(\"Weld\", Char)\n\nDetailWeld.Part0 = Cha[\"Right Arm\"]\n\nDetailWeld.Part1 = Detail\n\nDetailWeld.C0 = CFrame.new(0, -1, math.rad(-30)) * CFrame.Angles(0, 0, math.rad(90))\n\nBlade = Instance.new(\"Part\", Char)\n\nBlade.FormFactor = \"Custom\"\n\nBlade.Size = Vector3.new(-1, -2, 4)\n\nBlade.TopSurface = \"Smooth\"\n\nBlade.BottomSurface = \"Smooth\"\n\nBlade.BrickColor = BrickColor.new(\"Institutional white\")\n\nBlade.Reflectance = 0\n\nBlade:BreakJoints()\n\nBlade.CanCollide=false\n\nMesh = Instance.new(\"BlockMesh\", Blade)\n\nMesh.Scale = Vector3.new(1, 1, 1)\n\nBladeWeld = Instance.new(\"Weld\", Char)\n\nBladeWeld.Part0 = Cha[\"Right Arm\"]\n\nBladeWeld.Part1 = Blade\n\nBladeWeld.C0 = CFrame.new(0, -1, -2) * CFrame.Angles(0, 0, 
math.rad(90))\n\nend)\n\nTool.Deselected:connect(function(mouse)\n\nChar:remove()\n\nend)"},"avg_line_length":{"kind":"number","value":15.4459016393,"string":"15.445902"},"max_line_length":{"kind":"number","value":112,"string":"112"},"alphanum_fraction":{"kind":"number","value":0.7142857143,"string":"0.714286"},"score":{"kind":"number","value":3.265625,"string":"3.265625"}}},{"rowIdx":760,"cells":{"hexsha":{"kind":"string","value":"2a898e9ca996780380ac7e002b690ae74c005d3f"},"size":{"kind":"number","value":1462,"string":"1,462"},"ext":{"kind":"string","value":"java"},"lang":{"kind":"string","value":"Java"},"max_stars_repo_path":{"kind":"string","value":"chapter_007/src/main/java/ru/job4j/nonblockingcache/NonBlockingCache.java"},"max_stars_repo_name":{"kind":"string","value":"danailKondov/dkondov"},"max_stars_repo_head_hexsha":{"kind":"string","value":"14b3d2940638b2f69072dbdc0a9d7f8ba1b3748b"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2018-05-24T06:36:30.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2018-05-24T06:36:30.000Z"},"max_issues_repo_path":{"kind":"string","value":"chapter_007/src/main/java/ru/job4j/nonblockingcache/NonBlockingCache.java"},"max_issues_repo_name":{"kind":"string","value":"danailKondov/dkondov"},"max_issues_repo_head_hexsha":{"kind":"string","value":"14b3d2940638b2f69072dbdc0a9d7f8ba1b3748b"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"chapter_007/src/main/java/ru/job4j/nonblockingcache/NonBlockingCache.java"},"max_forks_repo_name":{"kind":"string","value":"danailKondov/dkondov"},"max_forks_repo_head_hexsha":{"kind":"string","value":"14b3d2940638b2f69072dbdc0a9d7f8ba1b3748b"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2018-11-08T23:33:17.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2018-11-08T23:33:17.000Z"},"content":{"kind":"string","value":"package ru.job4j.nonblockingcache;\n\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.function.BiFunction;\n\n/**\n * Class for simple DIY non-blocking cache.\n *\n * @since 11/10/2017\n * @version 1\n */\npublic class NonBlockingCache {\n\n /**\n * Task storage.\n */\n private ConcurrentHashMap storage = new ConcurrentHashMap<>();\n\n /**\n * Adds new task to storage. 
If task with same ID\n * is already present in storage nothing will be added.\n *\n * @param task to add.\n */\n public void add(Task task) {\n storage.putIfAbsent(task.getiD(), task);\n }\n\n /**\n * Deletes task with same ID.\n * @param id of task to remove\n */\n public void delete(int id) {\n storage.remove(id);\n }\n\n /**\n * Updates task in storage.\n * @param task to update\n * @throws OptimisticException if task was already modified\n */\n public void update(Task task) throws OptimisticException {\n\n Task result = storage.computeIfPresent(task.getiD(), new BiFunction() {\n @Override\n public Task apply(Integer integer, Task oldTask) {\n Task result = null;\n if(oldTask.getVersion() + 1 == task.getVersion()) {\n result = task;\n } else {\n throw new OptimisticException();\n }\n return result;\n }\n });\n }\n}\n"},"avg_line_length":{"kind":"number","value":25.2068965517,"string":"25.206897"},"max_line_length":{"kind":"number","value":100,"string":"100"},"alphanum_fraction":{"kind":"number","value":0.5704514364,"string":"0.570451"},"score":{"kind":"number","value":3.109375,"string":"3.109375"}}},{"rowIdx":761,"cells":{"hexsha":{"kind":"string","value":"c3c80a79b861ce9007f26e8463ab326a5ddb90fc"},"size":{"kind":"number","value":1945,"string":"1,945"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"portxo/keygen.go"},"max_stars_repo_name":{"kind":"string","value":"tnakagawa/lit"},"max_stars_repo_head_hexsha":{"kind":"string","value":"57c63ed5cc9584bff083047c8fc0b5be1c4fde2f"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":560,"string":"560"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-11-16T02:10:02.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-26T16:28:58.000Z"},"max_issues_repo_path":{"kind":"string","value":"portxo/keygen.go"},"max_issues_repo_name":{"kind":"string","value":"tnakagawa/lit"},"max_issues_repo_head_hexsha":{"kind":"string","value":"57c63ed5cc9584bff083047c8fc0b5be1c4fde2f"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":374,"string":"374"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-11-29T21:42:49.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-02-16T13:30:44.000Z"},"max_forks_repo_path":{"kind":"string","value":"portxo/keygen.go"},"max_forks_repo_name":{"kind":"string","value":"tnakagawa/lit"},"max_forks_repo_head_hexsha":{"kind":"string","value":"57c63ed5cc9584bff083047c8fc0b5be1c4fde2f"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":126,"string":"126"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2016-12-15T21:26:19.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-02-22T21:23:03.000Z"},"content":{"kind":"string","value":"package portxo\n\nimport (\n\t\"bytes\"\n\t\"encoding/binary\"\n\t\"fmt\"\n)\n\n// KeyGen describes how to get to the key from the master / seed.\n// it can be used with bip44 or other custom schemes (up to 5 levels deep)\n// Depth must be 0 to 5 inclusive. Child indexes of 0 are OK, so we can't just\n// terminate at the first 0.\ntype KeyGen struct {\n\tDepth uint8 `json:\"depth\"` // how many levels of the path to use. 
0 means privkey as-is\n\tStep [5]uint32 `json:\"steps\"` // bip 32 / 44 path numbers\n\tPrivKey [32]byte `json:\"privkey\"` // private key\n}\n\n// Bytes returns the 53 byte serialized key derivation path.\n// always works\nfunc (k KeyGen) Bytes() []byte {\n\tvar buf bytes.Buffer\n\tbinary.Write(&buf, binary.BigEndian, k.Depth)\n\tbinary.Write(&buf, binary.BigEndian, k.Step[0])\n\tbinary.Write(&buf, binary.BigEndian, k.Step[1])\n\tbinary.Write(&buf, binary.BigEndian, k.Step[2])\n\tbinary.Write(&buf, binary.BigEndian, k.Step[3])\n\tbinary.Write(&buf, binary.BigEndian, k.Step[4])\n\tbuf.Write(k.PrivKey[:])\n\treturn buf.Bytes()\n}\n\n// KeyGenFromBytes turns a 53 byte array into a key derivation path. Always works\n// (note a depth > 5 path is invalid, but this just deserializes & doesn't check)\nfunc KeyGenFromBytes(b [53]byte) (k KeyGen) {\n\tbuf := bytes.NewBuffer(b[:])\n\tbinary.Read(buf, binary.BigEndian, &k.Depth)\n\tbinary.Read(buf, binary.BigEndian, &k.Step[0])\n\tbinary.Read(buf, binary.BigEndian, &k.Step[1])\n\tbinary.Read(buf, binary.BigEndian, &k.Step[2])\n\tbinary.Read(buf, binary.BigEndian, &k.Step[3])\n\tbinary.Read(buf, binary.BigEndian, &k.Step[4])\n\tcopy(k.PrivKey[:], buf.Next(32))\n\treturn\n}\n\n// String turns a keygen into a string\nfunc (k KeyGen) String() string {\n\tvar s string\n\t//\ts = fmt.Sprintf(\"\\tkey derivation path: m\")\n\tfor i := uint8(0); i < k.Depth; i++ {\n\t\tif k.Step[i]&0x80000000 != 0 { // high bit means hardened\n\t\t\ts += fmt.Sprintf(\"/%d'\", k.Step[i]&0x7fffffff)\n\t\t} else {\n\t\t\ts += fmt.Sprintf(\"/%d\", k.Step[i])\n\t\t}\n\t}\n\treturn 
s\n}\n"},"avg_line_length":{"kind":"number","value":32.4166666667,"string":"32.416667"},"max_line_length":{"kind":"number","value":96,"string":"96"},"alphanum_fraction":{"kind":"number","value":0.6776349614,"string":"0.677635"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":762,"cells":{"hexsha":{"kind":"string","value":"85bdbc597d8ba42af73b254481cdc2da2315503d"},"size":{"kind":"number","value":1949,"string":"1,949"},"ext":{"kind":"string","value":"h"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"source/Fenghui_Zhang_Core/inc/renderable_object.h"},"max_stars_repo_name":{"kind":"string","value":"9prady9/duotone"},"max_stars_repo_head_hexsha":{"kind":"string","value":"53d5d8daa9a90ca7ca39698766c267d5b03849cb"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"source/Fenghui_Zhang_Core/inc/renderable_object.h"},"max_issues_repo_name":{"kind":"string","value":"9prady9/duotone"},"max_issues_repo_head_hexsha":{"kind":"string","value":"53d5d8daa9a90ca7ca39698766c267d5b03849cb"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"source/Fenghui_Zhang_Core/inc/renderable_object.h"},"max_forks_repo_name":{"kind":"string","value":"9prady9/duotone"},"max_forks_repo_head_hexsha":{"kind":"string","value":"53d5d8daa9a90ca7ca39698766c267d5b03849cb"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n 
\"BSD-3-Clause\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"#ifndef GEOMETRY_RENDERABLE_OBJECT__\n#define GEOMETRY_RENDERABLE_OBJECT__\n\n#include \"topology_object.h\"\n\n#include \n#include \n\nclass Face;\nclass Vertex;\n/**\n * For regular rendering, we need\n * (1) coordinates of each vertex\n * (2) list of faces, each of them has a list of vertices (corners)\n * (3) the normal of each face for planar modeling\n * (4) the normal of each corner for polygonal modeling\n * (5) the material of the object, or each face\n * (6) color of the object, or each face\n * (7) texture\n *\n * Many of these do not need to be stored with faces or vertices. We\n * can use property maps instead. These non-core properties are not\n * critical, i.e., they can be replace or reconstructed easily.\n *\n * The Topological Object keeps tracks of the core component\n * (1) vertices (2) edges (3) rotations\n * --- Good!\n * OR\n * (1) vertices (2) faces with vertex-lists.\n * --- not good enough, we could have multiple edges between vertices.\n * OR\n * DLFL --- the current presentation is bad, but can be fixed.\n *\n * Hence it needs\n * (1) add/delete vertices\n * (2) add/delete edges\n */\n\n// Object class.\nclass RenderableObject : public TopologyObject {\npublic:\n RenderableObject();\n\n std::set GetFaces();\n float* GetVertexCoordinates(Vertex* v);\n float* GetFaceNormal(Face* f);\n void ReComputeFaces();\n\n bool SetCoords(Vertex* v, float*);\n\nprotected:\n std::set faces_;\n // property map to store coordinates.\n // vertex_ID -> coordinates.\n // TODO: when the vertex is being removed, we have to delete coords pointers.\n std::map coords_;\n // face_ID -> normal.\n // TODO: when the face is being removed, we have to delete normal pointers.\n std::map normals_;\n // vertex_ID -> vertex_ID -> face_ID.\n std::map > face_map_;\n // 
TODO: We need to know if the faces are up to date.\n};\n\n#endif // GEOMETRY_RENDERABLE_OBJECT__\n"},"avg_line_length":{"kind":"number","value":29.5303030303,"string":"29.530303"},"max_line_length":{"kind":"number","value":79,"string":"79"},"alphanum_fraction":{"kind":"number","value":0.694202155,"string":"0.694202"},"score":{"kind":"number","value":3.0625,"string":"3.0625"}}},{"rowIdx":763,"cells":{"hexsha":{"kind":"string","value":"6298cfef83d50e3c288ccabb25265909198db02e"},"size":{"kind":"number","value":1400,"string":"1,400"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"rust/robot-name/src/lib.rs"},"max_stars_repo_name":{"kind":"string","value":"TheTonttu/exercism-solutions"},"max_stars_repo_head_hexsha":{"kind":"string","value":"25420fc86d4b4a12e45f14f7472546f10f8864ea"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"rust/robot-name/src/lib.rs"},"max_issues_repo_name":{"kind":"string","value":"TheTonttu/exercism-solutions"},"max_issues_repo_head_hexsha":{"kind":"string","value":"25420fc86d4b4a12e45f14f7472546f10f8864ea"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"rust/robot-name/src/lib.rs"},"max_forks_repo_name":{"kind":"string","value":"TheTonttu/exercism-solutions"},"max_forks_repo_head_hexsha":{"kind":"string","value":"25420fc86d4b4a12e45f14f7472546f10f8864ea"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"use once_cell::sync::Lazy;\nuse rand::Rng;\nuse std::collections::HashSet;\nuse std::sync::Mutex;\n\nstatic NAME_REGISTRY: Lazy>> = Lazy::new(|| Mutex::new(HashSet::new()));\n\n#[derive(Default)]\npub struct Robot {\n name: String,\n}\n\nimpl Robot {\n pub fn new() -> Self {\n Self {\n name: gen_unique_name(),\n }\n }\n\n pub fn name(&self) -> &str {\n self.name.as_str()\n }\n\n pub fn reset_name(&mut self) {\n unregister_name(&self.name);\n self.name = gen_unique_name();\n }\n}\n\n// Unregister name when robot goes out of scope.\nimpl Drop for Robot {\n fn drop(&mut self) {\n unregister_name(&self.name);\n }\n}\n\nfn unregister_name(name: &str) {\n NAME_REGISTRY.lock().unwrap().remove(name);\n}\n\nfn gen_unique_name() -> String {\n let mut registry = NAME_REGISTRY.lock().unwrap();\n loop {\n let new_name = gen_random_name();\n if registry.insert(new_name.clone()) {\n return new_name;\n }\n }\n}\n\nfn gen_random_name() -> String {\n const LETTER_COUNT: usize = 2;\n const NUMBER_COUNT: usize = 3;\n\n let mut rng = rand::thread_rng();\n\n let letters: String = (0..LETTER_COUNT)\n .map(|_| rng.gen_range('A'..='Z'))\n .collect();\n let numbers: String = (0..NUMBER_COUNT)\n .map(|_| rng.gen_range('0'..='9'))\n .collect();\n\n [letters, 
numbers].concat()\n}\n"},"avg_line_length":{"kind":"number","value":21.2121212121,"string":"21.212121"},"max_line_length":{"kind":"number","value":94,"string":"94"},"alphanum_fraction":{"kind":"number","value":0.5807142857,"string":"0.580714"},"score":{"kind":"number","value":3.21875,"string":"3.21875"}}},{"rowIdx":764,"cells":{"hexsha":{"kind":"string","value":"71c93cb15243802170fe3df03855d238e9ee1949"},"size":{"kind":"number","value":3557,"string":"3,557"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"http4k-aws/src/test/kotlin/org/http4k/aws/AwsRealChunkKeyContentsIfRequiredTest.kt"},"max_stars_repo_name":{"kind":"string","value":"savagematt/http4k"},"max_stars_repo_head_hexsha":{"kind":"string","value":"9cc8ef11121bfbe10a1cd0ca58a17885c297af52"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"http4k-aws/src/test/kotlin/org/http4k/aws/AwsRealChunkKeyContentsIfRequiredTest.kt"},"max_issues_repo_name":{"kind":"string","value":"savagematt/http4k"},"max_issues_repo_head_hexsha":{"kind":"string","value":"9cc8ef11121bfbe10a1cd0ca58a17885c297af52"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"http4k-aws/src/test/kotlin/org/http4k/aws/AwsRealChunkKeyContentsIfRequiredTest.kt"},"max_forks_repo_name":{"kind":"string","value":"savagematt/http4k"},"max_forks_repo_head_hexsha":{"kind":"string","value":"9cc8ef11121bfbe10a1cd0ca58a17885c297af52"},"max_forks_repo_licenses":{"kind":"list 
like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package org.http4k.aws\n\nimport com.natpryce.hamkrest.assertion.assertThat\nimport com.natpryce.hamkrest.containsSubstring\nimport com.natpryce.hamkrest.equalTo\nimport org.http4k.client.ApacheClient\nimport org.http4k.core.BodyMode\nimport org.http4k.core.HttpHandler\nimport org.http4k.core.Method.DELETE\nimport org.http4k.core.Method.GET\nimport org.http4k.core.Method.PUT\nimport org.http4k.core.Request\nimport org.http4k.core.Status.Companion.NO_CONTENT\nimport org.http4k.core.Status.Companion.OK\nimport org.http4k.core.then\nimport org.http4k.filter.ChunkKeyContentsIfRequired\nimport org.http4k.filter.ClientFilters\nimport org.http4k.filter.DebuggingFilters\nimport org.http4k.filter.Payload\nimport org.junit.jupiter.api.Disabled\nimport org.junit.jupiter.api.Test\n\nclass AwsRealChunkKeyContentsIfRequiredTest : AbstractAwsRealS3TestCase() {\n\n @Test\n fun `default usage`() {\n val requestBodyMode = BodyMode.Memory\n bucketLifecycle(ClientFilters.ChunkKeyContentsIfRequired(requestBodyMode = requestBodyMode)\n .then(awsClientFilter(Payload.Mode.Signed))\n .then(DebuggingFilters.PrintResponse())\n .then(ApacheClient(requestBodyMode = requestBodyMode)))\n }\n\n @Test\n @Disabled\n fun `streaming usage`() {\n val requestBodyMode = BodyMode.Stream\n bucketLifecycle(ClientFilters.ChunkKeyContentsIfRequired(requestBodyMode = requestBodyMode)\n .then(awsClientFilter(Payload.Mode.Unsigned))\n .then(DebuggingFilters.PrintResponse())\n .then(ApacheClient(requestBodyMode = requestBodyMode)))\n }\n\n private fun bucketLifecycle(client: HttpHandler) {\n val aClient = aClient()\n\n val contentOriginal = (1..10 * 1024 * 1024).map { 'a' }.joinToString(\"\")\n\n assertThat(\n \"Bucket should not exist in root listing\",\n 
aClient(Request(GET, s3Root)).bodyString(),\n !containsSubstring(bucketName))\n assertThat(\n \"Put of bucket should succeed\",\n aClient(Request(PUT, bucketUrl)).status,\n equalTo(OK))\n assertThat(\n \"Bucket should exist in root listing\",\n aClient(Request(GET, s3Root)).bodyString(),\n containsSubstring(bucketName))\n assertThat(\n \"Key should not exist in bucket listing\",\n aClient(Request(GET, bucketUrl)).bodyString(),\n !containsSubstring(key))\n\n client(Request(PUT, keyUrl)\n .body(contentOriginal.byteInputStream(), contentOriginal.length.toLong()))\n\n assertThat(\n \"Key should appear in bucket listing\",\n aClient(Request(GET, bucketUrl)).bodyString(),\n containsSubstring(key))\n assertThat(\n \"Key contents should be as expected\",\n aClient(Request(GET, keyUrl)).bodyString().length,\n equalTo(contentOriginal.length))\n assertThat(\n \"Delete of key should succeed\",\n aClient(Request(DELETE, keyUrl)).status,\n equalTo(NO_CONTENT))\n assertThat(\n \"Key should no longer appear in bucket listing\",\n aClient(Request(GET, bucketUrl)).bodyString(),\n !containsSubstring(key))\n assertThat(\n \"Delete of bucket should succeed\",\n aClient(Request(DELETE, bucketUrl)).status,\n equalTo(NO_CONTENT))\n assertThat(\n \"Bucket should no longer exist in root listing\",\n aClient(Request(GET, s3Root)).bodyString(),\n !containsSubstring(bucketName))\n 
}\n}\n"},"avg_line_length":{"kind":"number","value":37.4421052632,"string":"37.442105"},"max_line_length":{"kind":"number","value":99,"string":"99"},"alphanum_fraction":{"kind":"number","value":0.6682597695,"string":"0.66826"},"score":{"kind":"number","value":3.03125,"string":"3.03125"}}},{"rowIdx":765,"cells":{"hexsha":{"kind":"string","value":"df971cb7f86de38ee4e25e3e47bf4158c7fece5a"},"size":{"kind":"number","value":1723,"string":"1,723"},"ext":{"kind":"string","value":"ts"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"src/commands/takeoff/destroy-project.ts"},"max_stars_repo_name":{"kind":"string","value":"Takeoff-Env/takeoff"},"max_stars_repo_head_hexsha":{"kind":"string","value":"5cfac571159c3ee7815582c6888096422460a015"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":101,"string":"101"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-09-26T03:23:23.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-01-23T05:06:49.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/commands/takeoff/destroy-project.ts"},"max_issues_repo_name":{"kind":"string","value":"Takeoff-Env/takeoff"},"max_issues_repo_head_hexsha":{"kind":"string","value":"5cfac571159c3ee7815582c6888096422460a015"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":18,"string":"18"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2017-09-27T17:02:01.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2018-12-24T11:02:37.000Z"},"max_forks_repo_path":{"kind":"string","value":"src/commands/takeoff/destroy-project.ts"},"max_forks_repo_name":{"kind":"string","value":"Takeoff-Env/takeoff"},"max_forks_repo_head_hexsha":{"kind":"string","value":"5cfac571159c3ee7815582c6888096422460a015"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":12,"string":"12"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2017-10-11T16:59:19.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2018-05-16T12:46:33.000Z"},"content":{"kind":"string","value":"import { CommandResult, TakeoffCommand } from 'commands';\nimport { TakeoffCmdParameters } from 'takeoff';\nimport { ExitCode } from 'task';\n\n/**\n * Destroys an project in a non-reversable way\n */\nexport = ({\n shell,\n args,\n opts,\n rcFile,\n pathExists,\n printMessage,\n runCommand,\n}: TakeoffCmdParameters): TakeoffCommand => ({\n args: '',\n command: 'destroy',\n description:\n 'Destroys the docker containers for a project. Can also optionally remove the folder, this operation cannot be reversed.',\n group: 'takeoff',\n options: [\n {\n description: 'Also removes the directory, otherwise only docker images and volumes are destroyed',\n option: '-r, --remove-dir',\n },\n ],\n handler(): CommandResult {\n const [project]: string[] = args.length > 0 ? 
args : ['default'];\n\n printMessage(`Destroying project ${project}`);\n\n const envDir = `${rcFile.rcRoot}/projects/${project}`;\n\n if (!pathExists(envDir)) {\n return { code: ExitCode.Error, fail: `The project ${project} doesn't exist` };\n }\n\n const runCmd = runCommand(`docker-compose -f docker/docker-compose.yml down --rmi all`, envDir);\n\n if (runCmd.code !== 0) {\n return { extra: runCmd.stderr, code: runCmd.code, fail: `Error destroying ${project}` };\n }\n\n if (opts['r'] || opts['remove-dir']) {\n printMessage(`Removing folder ${envDir}`);\n const removeFolder = shell.rm('-rf', `${envDir}`);\n if (removeFolder.code !== 0) {\n return { extra: removeFolder.stderr, code: removeFolder.code, fail: `Error deleting ${project}` };\n }\n printMessage(`Folder ${envDir} removed`);\n }\n\n return { code: ExitCode.Success, success: `Successfully destroyed ${project}` };\n },\n});\n"},"avg_line_length":{"kind":"number","value":30.2280701754,"string":"30.22807"},"max_line_length":{"kind":"number","value":126,"string":"126"},"alphanum_fraction":{"kind":"number","value":0.6395821242,"string":"0.639582"},"score":{"kind":"number","value":3.1875,"string":"3.1875"}}},{"rowIdx":766,"cells":{"hexsha":{"kind":"string","value":"d29a64249662933eed11e9ead4c0a07c4caae362"},"size":{"kind":"number","value":1040,"string":"1,040"},"ext":{"kind":"string","value":"php"},"lang":{"kind":"string","value":"PHP"},"max_stars_repo_path":{"kind":"string","value":"app/Role.php"},"max_stars_repo_name":{"kind":"string","value":"apurv4193/RYEC-Backend"},"max_stars_repo_head_hexsha":{"kind":"string","value":"8682463777afad323d30f832693d5802f00c1dcd"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"app/Role.php"},"max_issues_repo_name":{"kind":"string","value":"apurv4193/RYEC-Backend"},"max_issues_repo_head_hexsha":{"kind":"string","value":"8682463777afad323d30f832693d5802f00c1dcd"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"app/Role.php"},"max_forks_repo_name":{"kind":"string","value":"apurv4193/RYEC-Backend"},"max_forks_repo_head_hexsha":{"kind":"string","value":"8682463777afad323d30f832693d5802f00c1dcd"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":" 0) {\n $updateData = [];\n foreach ($this->fillable as $field) {\n if (array_key_exists($field, $data)) {\n $updateData[$field] = $data[$field];\n }\n }\n return Role::where('id', $data['id'])->update($updateData);\n } else {\n return Role::create($data);\n }\n }\n\n public function getAllRoles()\n {\n $roles = Role::where('status', '<>', Config::get('constant.DELETED_FLAG'))\n ->orderBy('id', 'DESC')\n ->paginate(Config::get('constant.ADMIN_RECORD_PER_PAGE'));\n\n return $roles;\n }\n\n 
\n}\n"},"avg_line_length":{"kind":"number","value":25.3658536585,"string":"25.365854"},"max_line_length":{"kind":"number","value":90,"string":"90"},"alphanum_fraction":{"kind":"number","value":0.4932692308,"string":"0.493269"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":767,"cells":{"hexsha":{"kind":"string","value":"9cd8960e7fa0ed1792d1b9fe84ef85aa1dd1c2fa"},"size":{"kind":"number","value":1422,"string":"1,422"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"frameworks/cocos2d-x/cocos/scripting/lua-bindings/auto/api/WorldClock.lua"},"max_stars_repo_name":{"kind":"string","value":"TshineZheng/DragonbonesCocos2dx"},"max_stars_repo_head_hexsha":{"kind":"string","value":"cf5e251092d23161dd4876353fa26dfe6425ff18"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":6,"string":"6"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-12-28T08:38:00.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2019-03-28T04:51:54.000Z"},"max_issues_repo_path":{"kind":"string","value":"frameworks/cocos2d-x/cocos/scripting/lua-bindings/auto/api/WorldClock.lua"},"max_issues_repo_name":{"kind":"string","value":"TshineZheng/DragonbonesCocos2dx"},"max_issues_repo_head_hexsha":{"kind":"string","value":"cf5e251092d23161dd4876353fa26dfe6425ff18"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":2,"string":"2"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2017-02-10T03:48:11.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2017-03-03T10:14:35.000Z"},"max_forks_repo_path":{"kind":"string","value":"frameworks/cocos2d-x/cocos/scripting/lua-bindings/auto/api/WorldClock.lua"},"max_forks_repo_name":{"kind":"string","value":"TshineZheng/DragonbonesCocos2dx"},"max_forks_repo_head_hexsha":{"kind":"string","value":"cf5e251092d23161dd4876353fa26dfe6425ff18"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":5,"string":"5"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2017-04-20T07:31:39.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-01-16T15:38:14.000Z"},"content":{"kind":"string","value":"\n--------------------------------\n-- @module WorldClock\n-- @extend IAnimateble\n-- @parent_module db\n\n--------------------------------\n-- \n-- @function [parent=#WorldClock] clear \n-- @param self\n-- @return WorldClock#WorldClock self (return value: db.WorldClock)\n \n--------------------------------\n-- \n-- @function [parent=#WorldClock] contains \n-- @param self\n-- @param #db.IAnimateble value\n-- @return bool#bool ret (return value: bool)\n \n--------------------------------\n-- \n-- @function [parent=#WorldClock] advanceTime \n-- @param self\n-- @param #float passedTime\n-- @return WorldClock#WorldClock self (return value: db.WorldClock)\n \n--------------------------------\n-- \n-- @function [parent=#WorldClock] remove \n-- @param self\n-- @param #db.Armature armature\n-- @return WorldClock#WorldClock self (return value: db.WorldClock)\n \n--------------------------------\n-- \n-- @function [parent=#WorldClock] add \n-- @param self\n-- @param #db.Armature armature\n-- @return WorldClock#WorldClock self (return value: 
db.WorldClock)\n \n--------------------------------\n-- \n-- @function [parent=#WorldClock] getInstance \n-- @param self\n-- @return WorldClock#WorldClock ret (return value: db.WorldClock)\n \n--------------------------------\n-- \n-- @function [parent=#WorldClock] WorldClock \n-- @param self\n-- @return WorldClock#WorldClock self (return value: db.WorldClock)\n \nreturn nil\n"},"avg_line_length":{"kind":"number","value":26.3333333333,"string":"26.333333"},"max_line_length":{"kind":"number","value":67,"string":"67"},"alphanum_fraction":{"kind":"number","value":0.5485232068,"string":"0.548523"},"score":{"kind":"number","value":3.40625,"string":"3.40625"}}},{"rowIdx":768,"cells":{"hexsha":{"kind":"string","value":"9d84bf32c2b80a5f28d98f3e880852db8d2e82e4"},"size":{"kind":"number","value":1611,"string":"1,611"},"ext":{"kind":"string","value":"swift"},"lang":{"kind":"string","value":"Swift"},"max_stars_repo_path":{"kind":"string","value":"SwiftBooster/Classes/Extension/Helpers/JSON.swift"},"max_stars_repo_name":{"kind":"string","value":"3pehrbehroozi/SwiftBooster"},"max_stars_repo_head_hexsha":{"kind":"string","value":"33651a5718fb2d8334ba6551facd1d02c3cf2001"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"SwiftBooster/Classes/Extension/Helpers/JSON.swift"},"max_issues_repo_name":{"kind":"string","value":"3pehrbehroozi/SwiftBooster"},"max_issues_repo_head_hexsha":{"kind":"string","value":"33651a5718fb2d8334ba6551facd1d02c3cf2001"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"SwiftBooster/Classes/Extension/Helpers/JSON.swift"},"max_forks_repo_name":{"kind":"string","value":"3pehrbehroozi/SwiftBooster"},"max_forks_repo_head_hexsha":{"kind":"string","value":"33651a5718fb2d8334ba6551facd1d02c3cf2001"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"//\n// JSON.swift\n// SwiftBooster\n//\n// Created by Sepehr Behroozi on 4/17/19.\n// Copyright © 2019 ayantech.ir. All rights reserved.\n//\n\nimport Foundation\n\n/// Typealias for [String: Any]\npublic typealias JSONObject = [String: Any]\n\n/// Typealias for [Any]\npublic typealias JSONArray = [Any]\n\npublic func getValue(input: Any?, subscripts: Any...) -> T? {\n \n var extractingValue = input\n \n subscripts.forEach { (key) in\n if let intKey = key as? Int {\n extractingValue = (extractingValue as? [Any])?[intKey]\n }\n if let stringKey = key as? String {\n extractingValue = (extractingValue as? [String: Any])?[stringKey]\n }\n }\n \n switch T.self {\n case is Int.Type:\n let result = extractingValue as? Int ?? (extractingValue as? String)?.toInt()\n return result as? T\n case is Double.Type:\n let result = extractingValue as? Double ?? (extractingValue as? String)?.toDouble()\n return result as? T\n case is String.Type:\n let result = extractingValue as? String ?? (extractingValue as? Int)?.toString() ?? (extractingValue as? Double)?.toString()\n if result == \"null\" {\n return nil\n } else {\n return result as? T\n }\n case is Bool.Type:\n var result = extractingValue as? Bool ?? (extractingValue as? 
String)?.toBool()\n if result == nil {\n if let resultInt = extractingValue as? Int {\n result = resultInt == 1\n }\n }\n return result as? T\n default:\n return extractingValue as? T\n }\n}\n"},"avg_line_length":{"kind":"number","value":28.7678571429,"string":"28.767857"},"max_line_length":{"kind":"number","value":132,"string":"132"},"alphanum_fraction":{"kind":"number","value":0.5847299814,"string":"0.58473"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":769,"cells":{"hexsha":{"kind":"string","value":"d3bbf78c213e4cecf5fe1674415c41ebcc0c8580"},"size":{"kind":"number","value":1079,"string":"1,079"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"csv.lua"},"max_stars_repo_name":{"kind":"string","value":"prototux/haproxy-summary"},"max_stars_repo_head_hexsha":{"kind":"string","value":"db04e6d9416388c1c0f4b386288111cf2b23764b"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-01-28T17:19:03.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2017-01-28T17:19:03.000Z"},"max_issues_repo_path":{"kind":"string","value":"csv.lua"},"max_issues_repo_name":{"kind":"string","value":"prototux/haproxy-summary"},"max_issues_repo_head_hexsha":{"kind":"string","value":"db04e6d9416388c1c0f4b386288111cf2b23764b"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n 
\"BSD-2-Clause\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"csv.lua"},"max_forks_repo_name":{"kind":"string","value":"prototux/haproxy-summary"},"max_forks_repo_head_hexsha":{"kind":"string","value":"db04e6d9416388c1c0f4b386288111cf2b23764b"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"-- Small CSV helper\n-- Source: http://lua-users.org/wiki/LuaCsv\nlocal csv = {}\n\nfunction csv.parse(line, sep)\n local res = {}\n local pos = 1\n sep = sep or ','\n while true do\n local c = string.sub(line,pos,pos)\n if (c == \"\") then break end\n if (c == '\"') then\n local txt = \"\"\n repeat\n local startp,endp = string.find(line,'^%b\"\"',pos)\n txt = txt..string.sub(line,startp+1,endp-1)\n pos = endp + 1\n c = string.sub(line,pos,pos)\n if (c == '\"') then txt = txt..'\"' end\n until (c ~= '\"')\n table.insert(res,txt)\n assert(c == sep or c == \"\")\n pos = pos + 1\n else\n local startp,endp = string.find(line,sep,pos)\n if (startp) then\n table.insert(res,string.sub(line,pos,startp-1))\n pos = endp + 1\n else\n table.insert(res,string.sub(line,pos))\n break\n end\n end\n end\n return res\nend\n\nreturn 
csv\n"},"avg_line_length":{"kind":"number","value":27.6666666667,"string":"27.666667"},"max_line_length":{"kind":"number","value":65,"string":"65"},"alphanum_fraction":{"kind":"number","value":0.453197405,"string":"0.453197"},"score":{"kind":"number","value":3.171875,"string":"3.171875"}}},{"rowIdx":770,"cells":{"hexsha":{"kind":"string","value":"df8d1ae2242076bdf69f9fa487f491b863946bc4"},"size":{"kind":"number","value":6998,"string":"6,998"},"ext":{"kind":"string","value":"tsx"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"app/components/zaposleni/components/zaposleniModal/zaposleniModal.tsx"},"max_stars_repo_name":{"kind":"string","value":"BogMil/racunovodja"},"max_stars_repo_head_hexsha":{"kind":"string","value":"1ba95eafb2a04056ea279f7a93fb1b034564060a"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-09-19T19:21:58.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-09-19T19:21:58.000Z"},"max_issues_repo_path":{"kind":"string","value":"app/components/zaposleni/components/zaposleniModal/zaposleniModal.tsx"},"max_issues_repo_name":{"kind":"string","value":"BogMil/racunovodja"},"max_issues_repo_head_hexsha":{"kind":"string","value":"1ba95eafb2a04056ea279f7a93fb1b034564060a"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":3,"string":"3"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-01-28T21:01:59.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-02-08T17:50:37.000Z"},"max_forks_repo_path":{"kind":"string","value":"app/components/zaposleni/components/zaposleniModal/zaposleniModal.tsx"},"max_forks_repo_name":{"kind":"string","value":"BogMil/racunovodja"},"max_forks_repo_head_hexsha":{"kind":"string","value":"1ba95eafb2a04056ea279f7a93fb1b034564060a"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import React from 'react';\nimport { Button, Modal, Form, Row, Col } from 'react-bootstrap';\nimport { useSelector, useDispatch } from 'react-redux';\nimport {\n close,\n updateZaposleniState,\n setErrors\n} from './zaposleniModal.actions';\nimport { AppStore } from '../../../../reducers';\nimport { reloadEmployees } from '../../zaposleni.actions';\nimport * as Service from '../../zaposleni.service';\nimport { handleResponse } from '../../../../utils/responseHandler';\nimport { CREATE_MODE, EDIT_MODE } from '../../../../constants/modalModes';\nimport { ErrorText } from '../../../common/errorText';\nimport { User } from '../../../auth/auth.store.types';\n\nexport default function ZaposleniModalComponent() {\n const dispatch = useDispatch();\n\n const { zaposleni, mode, show, title, opstine, errors } = useSelector(\n (state: AppStore) => {\n return state.zaposleniPage.zaposleniModal;\n }\n );\n\n const { prava_pristupa } = useSelector((state: AppStore) => {\n return state.auth.user as User;\n });\n\n const handleClose = () => {\n dispatch(close());\n };\n\n const handleChange = (e: any) => {\n let value = e.target.value;\n let name = e.target.name;\n\n 
if (name == 'active') value = e.target.checked;\n\n dispatch(updateZaposleniState(name, value));\n };\n\n const handleSave = async () => {\n if (mode == CREATE_MODE)\n handleResponse(\n await Service.createEmployee(zaposleni),\n () => {\n dispatch(reloadEmployees());\n dispatch(close());\n },\n () => {},\n (response: any) => {\n dispatch(setErrors(response.data.errors));\n }\n );\n else if (mode == EDIT_MODE)\n handleResponse(\n await Service.updateEmployee(zaposleni),\n () => {\n dispatch(reloadEmployees());\n dispatch(close());\n },\n () => {},\n (response: any) => {\n dispatch(setErrors(response.data.errors));\n }\n );\n };\n\n return (\n \n \n {title}\n \n\n \n
\n \n \n \n JMBG\n \n \n \n \n \n \n Broj zaposlenog\n \n \n \n \n \n \n \n \n Prezime\n \n \n \n \n \n \n Ime\n \n \n \n \n \n \n \n \n Broj računa\n \n \n \n \n \n {prava_pristupa.opiro && (\n \n Opština stanovanja\n \n <>\n \n {opstine.map(opstina => {\n return (\n \n );\n })}\n \n \n \n \n )}\n \n \n \n \n \n Email\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n Email 2 (opciono)\n \n \n \n \n \n
\n
\n\n \n \n \n \n );\n}\n"},"avg_line_length":{"kind":"number","value":30.2943722944,"string":"30.294372"},"max_line_length":{"kind":"number","value":74,"string":"74"},"alphanum_fraction":{"kind":"number","value":0.4386967705,"string":"0.438697"},"score":{"kind":"number","value":3.1875,"string":"3.1875"}}},{"rowIdx":771,"cells":{"hexsha":{"kind":"string","value":"d8d02339b668d288a2c120877157fe869cbd3ed5"},"size":{"kind":"number","value":7623,"string":"7,623"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"soccar/gamestates/lobby.lua"},"max_stars_repo_name":{"kind":"string","value":"Dummiesman/KissMP-Soccar"},"max_stars_repo_head_hexsha":{"kind":"string","value":"07eec8951a117daac4f74ecf596ff8f537f78ac9"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":2,"string":"2"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-04-25T20:01:14.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-04-26T02:31:40.000Z"},"max_issues_repo_path":{"kind":"string","value":"soccar/gamestates/lobby.lua"},"max_issues_repo_name":{"kind":"string","value":"Dummiesman/KissMP-Soccar"},"max_issues_repo_head_hexsha":{"kind":"string","value":"07eec8951a117daac4f74ecf596ff8f537f78ac9"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"soccar/gamestates/lobby.lua"},"max_forks_repo_name":{"kind":"string","value":"Dummiesman/KissMP-Soccar"},"max_forks_repo_head_hexsha":{"kind":"string","value":"07eec8951a117daac4f74ecf596ff8f537f78ac9"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"local M = {}\nM.name = \"Lobby\"\n\nlocal readyTimer = 0\nlocal lobbyTeamMap = {}\n\n-- team related stuff\nlocal function getFirstIdOnTeam(team)\n for client_id, team2 in pairs(lobbyTeamMap) do\n if team2 == team then return client_id end\n end\n return nil\nend\n\nlocal function getTeamMemberCount(team)\n local c = 0\n for _,team2 in pairs(lobbyTeamMap) do\n if team2 == team then c = c + 1 end\n end\n return c\nend\n\nlocal function allClientsOnTeams()\n local cc = 0\n local ctc = 0\n for client_id, connection in pairs(getConnections()) do\n if lobbyTeamMap[client_id] then ctc = ctc + 1 end\n cc = cc + 1\n end\n return cc == ctc\nend\n\nlocal function getClientsTableWithoutTeam()\n local t = {}\n for client_id, connection in pairs(getConnections()) do\n if not lobbyTeamMap[client_id] then table.insert(t, client_id) end\n end\n return t\nend\n\nlocal function checkTeamFull(team)\n local limit = TEAM_LIMITS[team]\n if not limit then return true end\n if limit < 0 then return false end\n return getTeamMemberCount(team) >= limit\nend\n\nlocal function setTeam(client, team)\n local currentTeam = lobbyTeamMap[client:getID()]\n local newTeamName = TEAM_NAMES[team]\n lobbyTeamMap[client:getID()] = team\n \n if currentTeam and currentTeam ~= team then\n local currentTeamName = TEAM_NAMES[currentTeam]\n sendChatMessage(client, \"Changed team from \" .. currentTeamName .. \" to \" .. newTeamName .. \".\", {r=1,g=1})\n elseif currentTeam and currentTeam == team then\n sendChatMessage(client, \"You're already on the \" .. newTeamName .. \" team.\", {r=1,g=1})\n else\n sendChatMessage(client, \"Set team to \" .. newTeamName .. 
\".\", {r=1,g=1})\n end\nend\n\n-- game start function\nlocal function startGame()\n -- first off, move someone off their team if \n -- the other team is empty\n local cc = getConnectionCount()\n if cc > 1 then\n local rc = getTeamMemberCount(TEAM_RED)\n local bc = getTeamMemberCount(TEAM_BLUE)\n if rc == cc or bc == cc then\n -- We must reassign someone\n if rc == cc then\n local id = getFirstIdOnTeam(TEAM_RED)\n lobbyTeamMap[id] = TEAM_BLUE\n sendChatMessage(getConnection(id), \"*** Your team has been reassigned because everyone was on one team. Your new team is Blue ***\", {r=1,g=1})\n else\n local id = getFirstIdOnTeam(TEAM_BLUE)\n lobbyTeamMap[id] = TEAM_RED\n sendChatMessage(getConnection(id), \"*** Your team has been reassigned because everyone was on one team Your new team is Red ***\", {r=1,g=1})\n end\n end\n end\n\n -- clear existing game participants leftover from any previous runs\n GameData.reset()\n \n -- add everyone to participants list\n for client_id, _ in pairs(getConnections()) do\n local participant = GameData.createPlayer(client_id)\n GameData.participants[client_id] = participant\n GameData.teams[lobbyTeamMap[client_id]].participants[client_id] = participant\n GameData.participants[client_id].team = lobbyTeamMap[client_id]\n end\n \n -- remove players 2nd+ vehicles\n local removeVehiclesTable = {}\n for client_id, _ in pairs(getConnections()) do\n local vc = 0\n for vehicle_id, vehicle in pairs(vehicles) do\n if vehicle:getData():getOwner() == client_id and vehicle:getData():getID() ~= GameData.ballVehicleId then\n vc = vc + 1\n if vc > 1 then\n table.insert(removeVehiclesTable, vehicle)\n end\n end\n end\n end\n for _, vehicle in pairs(removeVehiclesTable) do\n vehicle:remove()\n end\n \n -- move to running state\n StateManager.switchToState(GAMESTATE_RUNNING)\nend\n\n-- state stuff\nlocal function onPlayerDisconnected(client_id)\n lobbyTeamMap[client_id] = nil\nend\n\nlocal function onEnterState()\n lobbyTeamMap = {}\n readyTimer = 
0\nend\n\nlocal function onChatMessage(client_id, message)\n local messageLower = message:lower()\n \n -- debug\n if GameData.DEBUG_MODE then\n if message == \"/s\" then startGame() return \"\" end\n end\n \n -- team assignment\n if messageLower == \"/team blue\" or messageLower == \"/blue\" then\n if not checkTeamFull(TEAM_BLUE) then\n setTeam(getConnection(client_id), TEAM_BLUE)\n else\n sendChatMessage(getConnection(client_id), \"This team is full\", {r=1})\n end\n return \"\"\n end\n if messageLower == \"/team red\" or messageLower == \"/red\" then\n if not checkTeamFull(TEAM_RED) then\n setTeam(getConnection(client_id), TEAM_RED)\n else\n sendChatMessage(getConnection(client_id), \"This team is full\", {r=1})\n end\n return \"\"\n end\n if messageLower == \"/random\" then\n local r = math.random()\n local attemptTeam = nil\n local alternateTeam = nil\n if r > 0.5 then\n attemptTeam = TEAM_RED\n alternateTeam = TEAM_BLUE\n else\n attemptTeam = TEAM_BLUE\n alternateTeam = TEAM_RED\n end\n \n if checkTeamFull(attemptTeam) then\n attemptTeam = alternateTeam\n end\n \n if checkTeamFull(attemptTeam) then\n -- can't assign any team?\n sendChatMessage(getConnection(client_id), \"All teams are full\", {r=1})\n else\n sendChatMessage(getConnection(client_id), \"The randomizer assigns you to the \" .. TEAM_NAMES[attemptTeam] .. 
\" team.\", {r=1, g=1})\n setTeam(getConnection(client_id), attemptTeam)\n end \n return \"\"\n end\n \n -- ball assignment\n if messageLower == \"/setball\" or messageLower == \"/ball\" then\n -- get clients active vehicle and set it as ballVehicleId\n local client = getConnection(client_id)\n local vehicleId = vehicleIdWrapper(client:getCurrentVehicle())\n if not vehicleId then\n sendChatMessage(getConnection(client_id), \"Failed to set ball vehicle\", {r=1})\n return \"\"\n end\n \n local vehicle = vehicles[vehicleId]\n if not vehicle then \n sendChatMessage(getConnection(client_id), \"Failed to set ball vehicle\", {r=1})\n return \"\"\n end \n \n sendChatMessage(getConnection(client_id), \"Ball vehicle set\", {g=1})\n GameData.ballVehicleId = vehicle:getData():getID()\n return \"\"\n end\nend\n\nlocal function update(dt)\n local ready = allClientsOnTeams()\n local connectionCount = getConnectionCount()\n if ready and connectionCount >= 2 then\n -- if the timer is 0, we've just entered ready state. Notify clients.\n local startTime = GameData.DEBUG_MODE and 5 or 10\n if readyTimer == 0 then\n broadcastChatMessageAndToast(\"The game will start in \" .. tostring(startTime) .. \" second(s)\", {r=1,g=1})\n end\n readyTimer = readyTimer + dt\n \n -- start game after timer ends\n if readyTimer > startTime then\n startGame()\n end\n else\n -- if the timer is not 0, we *were* in ready state, and something happened\n if readyTimer ~= 0 then\n broadcastChatMessageAndToast(\"Start timer interrupted. All clients are no longer ready.\")\n end\n \n -- notify players that they need a team\n local lobbyNotifTimer = StateManager.timeInState % 60\n local lobbyNotifTimerNext = (StateManager.timeInState + dt) % 60\n if lobbyNotifTimerNext < lobbyNotifTimer then \n broadcastChatMessage(\"In lobby mode. 
Waiting for all players to assign a team.\")\n \n -- get the players who have no team\n local noTeamMap = getClientsTableWithoutTeam()\n local noTeamNameMap = {}\n for _,id in pairs(noTeamMap) do\n table.insert(noTeamNameMap, getConnection(id):getName())\n end\n broadcastChatMessage(\"The following players have not assigned a team yet: \" .. strTableToStr(noTeamNameMap), {r=1})\n end\n \n --\n readyTimer = 0\n end\nend\n\nM.onEnterState = onEnterState\nM.onChatMessage = onChatMessage\nM.onPlayerDisconnected = onPlayerDisconnected\nM.update = update\n\nreturn M"},"avg_line_length":{"kind":"number","value":30.987804878,"string":"30.987805"},"max_line_length":{"kind":"number","value":150,"string":"150"},"alphanum_fraction":{"kind":"number","value":0.6858192313,"string":"0.685819"},"score":{"kind":"number","value":3.375,"string":"3.375"}}},{"rowIdx":772,"cells":{"hexsha":{"kind":"string","value":"e7f06cecae55d479e6604b53a295b76a9bdf0276"},"size":{"kind":"number","value":5005,"string":"5,005"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"backend/tests/unit/protocols/application/test_lists.py"},"max_stars_repo_name":{"kind":"string","value":"pez-globo/pufferfish-software"},"max_stars_repo_head_hexsha":{"kind":"string","value":"b42fecd652731dd80fbe366e95983503fced37a4"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-10-20T23:47:23.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-10-20T23:47:23.000Z"},"max_issues_repo_path":{"kind":"string","value":"backend/tests/unit/protocols/application/test_lists.py"},"max_issues_repo_name":{"kind":"string","value":"pez-globo/pufferfish-software"},"max_issues_repo_head_hexsha":{"kind":"string","value":"b42fecd652731dd80fbe366e95983503fced37a4"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":242,"string":"242"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-10-23T06:44:01.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-01-28T05:50:45.000Z"},"max_forks_repo_path":{"kind":"string","value":"backend/tests/unit/protocols/application/test_lists.py"},"max_forks_repo_name":{"kind":"string","value":"pez-globo/pufferfish-vent-software"},"max_forks_repo_head_hexsha":{"kind":"string","value":"f1e5e47acf1941e7c729adb750b85bf26c38b274"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-04-12T02:10:18.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-04-12T02:10:18.000Z"},"content":{"kind":"string","value":"\"\"\"Test the functionality of protocols.application.states classes.\"\"\"\n\nfrom ventserver.protocols.application import lists\nfrom ventserver.protocols.protobuf import mcu_pb as pb\n\n\ndef test_send_new_elements() -> None:\n \"\"\"Test adding new elements to a list for sending.\"\"\"\n example_sequence = [\n lists.UpdateEvent(new_elements=[pb.LogEvent(id=i)])\n for i in range(20)\n ]\n\n 
synchronizer = lists.SendSynchronizer(\n segment_type=pb.NextLogEvents,\n max_len=10, max_segment_len=5\n )\n assert synchronizer.output() is None\n for update_event in example_sequence:\n synchronizer.input(update_event)\n\n assert synchronizer.output() is None\n\n # The first 10 events should've been discarded\n for next_expected in range(10):\n synchronizer.input(lists.UpdateEvent(next_expected=next_expected))\n output = synchronizer.output()\n assert isinstance(output, pb.NextLogEvents)\n assert output.next_expected == next_expected\n assert output.total == 10\n assert output.remaining == 10\n for (i, event) in enumerate(output.elements):\n assert event.id == 10 + i\n\n # Segments should be returned as requested\n for next_expected in range(10, 20):\n synchronizer.input(lists.UpdateEvent(next_expected=next_expected))\n output = synchronizer.output()\n assert isinstance(output, pb.NextLogEvents)\n assert output.next_expected == next_expected\n assert output.total == 10\n assert output.remaining == 10 - (next_expected - 10)\n for (i, event) in enumerate(output.elements):\n assert event.id == next_expected + i\n if next_expected <= 15:\n assert len(output.elements) == 5\n else:\n assert len(output.elements) == 5 - (next_expected - 15)\n\n # New elements should be in the segment resulting from a repeated request\n assert synchronizer.output() is None\n synchronizer.input(lists.UpdateEvent(\n new_elements=[pb.LogEvent(id=20)], next_expected=19\n ))\n output = synchronizer.output()\n assert isinstance(output, pb.NextLogEvents)\n assert output.next_expected == 19\n assert output.total == 10\n assert output.remaining == 2\n for (i, event) in enumerate(output.elements):\n assert event.id == 19 + i\n assert len(output.elements) == 2\n\n# TODO: add a test where we send all events, then reset expected event to 0.\n# All events should be sent again.\n\n\ndef test_receive_new_elements() -> None:\n \"\"\"Test adding new elements to a list from receiving.\"\"\"\n 
example_sequence = [\n pb.NextLogEvents(\n session_id=0, elements=[pb.LogEvent(id=i) for i in range(0, 5)]\n ),\n pb.NextLogEvents(\n session_id=0, elements=[pb.LogEvent(id=i) for i in range(5, 10)]\n ),\n pb.NextLogEvents(\n session_id=0, elements=[pb.LogEvent(id=i) for i in range(7, 11)]\n ),\n pb.NextLogEvents(\n session_id=0, elements=[pb.LogEvent(id=i) for i in range(0, 4)]\n ),\n pb.NextLogEvents(session_id=1),\n pb.NextLogEvents(\n session_id=1, elements=[pb.LogEvent(id=i) for i in range(0, 4)]\n ),\n ]\n\n synchronizer: lists.ReceiveSynchronizer[pb.LogEvent] = \\\n lists.ReceiveSynchronizer()\n assert synchronizer.output() is None\n for segment in example_sequence:\n synchronizer.input(segment)\n\n update_event = synchronizer.output()\n assert update_event is not None\n assert update_event.session_id == 0\n assert update_event.next_expected == 5\n assert len(update_event.new_elements) == 5\n for (i, element) in enumerate(update_event.new_elements):\n assert element.id == i\n\n update_event = synchronizer.output()\n assert update_event is not None\n assert update_event.session_id == 0\n assert update_event.next_expected == 10\n assert len(update_event.new_elements) == 5\n for (i, element) in enumerate(update_event.new_elements):\n assert element.id == 5 + i\n\n update_event = synchronizer.output()\n assert update_event is not None\n assert update_event.session_id == 0\n assert update_event.next_expected == 11\n assert len(update_event.new_elements) == 1\n assert update_event.new_elements[0].id == 10\n\n update_event = synchronizer.output()\n assert update_event is not None\n assert update_event.session_id == 0\n assert update_event.next_expected == 11\n assert len(update_event.new_elements) == 0\n\n update_event = synchronizer.output()\n assert update_event is not None\n assert update_event.session_id == 1\n assert update_event.next_expected == 0\n assert len(update_event.new_elements) == 0\n\n update_event = synchronizer.output()\n assert update_event is 
not None\n assert update_event.session_id == 1\n assert update_event.next_expected == 4\n assert len(update_event.new_elements) == 4\n for (i, element) in enumerate(update_event.new_elements):\n assert element.id == i\n"},"avg_line_length":{"kind":"number","value":36.5328467153,"string":"36.532847"},"max_line_length":{"kind":"number","value":77,"string":"77"},"alphanum_fraction":{"kind":"number","value":0.6755244755,"string":"0.675524"},"score":{"kind":"number","value":3.296875,"string":"3.296875"}}},{"rowIdx":773,"cells":{"hexsha":{"kind":"string","value":"e754832dab77d8c61522e8d0299d1eb2b720fed1"},"size":{"kind":"number","value":22157,"string":"22,157"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"source/actions.js"},"max_stars_repo_name":{"kind":"string","value":"RahavLussato/redux-react-firebase"},"max_stars_repo_head_hexsha":{"kind":"string","value":"f66b2ca8b0b39b5a51e0538e7e2ca43ab25d1556"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":287,"string":"287"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-01-13T12:20:08.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-02-21T03:15:31.000Z"},"max_issues_repo_path":{"kind":"string","value":"source/actions.js"},"max_issues_repo_name":{"kind":"string","value":"enkuush-ca/redux-react-firebase"},"max_issues_repo_head_hexsha":{"kind":"string","value":"004bfd20d60e6ac8c5793c7aa66b161ae6fc9f03"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":60,"string":"60"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-01-26T15:01:45.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2018-10-18T22:34:14.000Z"},"max_forks_repo_path":{"kind":"string","value":"source/actions.js"},"max_forks_repo_name":{"kind":"string","value":"enkuush-ca/redux-react-firebase"},"max_forks_repo_head_hexsha":{"kind":"string","value":"004bfd20d60e6ac8c5793c7aa66b161ae6fc9f03"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":48,"string":"48"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2016-01-18T17:38:37.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-03-23T23:46:45.000Z"},"content":{"kind":"string","value":"\nimport {\n SET,\n SET_REQUESTED,\n SET_PROFILE,\n LOGIN,\n LOGOUT,\n LOGIN_ERROR,\n PERMISSION_DENIED_ERROR,\n START,\n INIT_BY_PATH\n// NO_VALUE\n} from './constants'\n\nimport { Promise } from 'es6-promise'\nimport _ from 'lodash'\n\nconst getWatchPath = (event, path) => event + ':' + ((getCleanPath(path).substring(0, 1) === '/') ? 
'' : '/') + getCleanPath(path)\n\nconst setWatcher = (firebase, event, path, ConnectId='Manual') => {\n const id = getWatchPath(event, path);\n\n firebase._.watchers[id] = firebase._.watchers[id] || {};\n\n if (Object.keys(firebase._.watchers[id]).includes(ConnectId)) {\n firebase._.watchers[id][ConnectId]++\n } else {\n firebase._.watchers[id][ConnectId] = 1\n }\n\n return firebase._.watchers[id]\n}\n\nconst cleanOnceWatcher = (firebase, dispatch, event, path, ConnectId) => {\n const id = getWatchPath(event, path);\n\n if (firebase._.watchers[id]) {\n if (firebase._.watchers[id][ConnectId] <= 1) {\n delete firebase._.watchers[id][ConnectId];\n\n if (Object.keys(firebase._.watchers[id]).length === 0) {\n delete firebase._.watchers[id];\n }\n } else if (firebase._.watchers[id][ConnectId]) {\n firebase._.watchers[id][ConnectId]--\n }\n }\n\n if(firebase._.shouldClearAfterOnce[id]) {\n for (let clean of firebase._.shouldClearAfterOnce[id]) {\n firebase.database().ref().child(clean.path).off(clean.event);\n if(!clean.isSkipClean){\n dispatch({\n type: INIT_BY_PATH,\n path: clean.path\n })\n }\n }\n\n delete firebase._.shouldClearAfterOnce[id];\n }\n\n return firebase._.watchers[id]\n}\n\nconst getWatcherCount = (firebase, event, path) => {\n const id = getWatchPath(event, path);\n const watchers = firebase._.watchers[id];\n\n return watchers && Object.keys(watchers).length\n}\n\nconst getCleanPath = (path) => {\n let pathSplitted = path.split('#');\n return pathSplitted[0];\n}\n\nconst unsetWatcher = (firebase, dispatch, event, path, ConnectId='Manual', isSkipClean=false, isNewQuery=false) => {\n const id = getWatchPath(event, path);\n const onceEvent = getWatchPath('once', path);\n path = path.split('#')[0]\n\n if ((firebase._.watchers[id] && firebase._.watchers[id][ConnectId] <= 1) || isNewQuery || ConnectId === 'CleanAll') {\n var aggregationId = getWatchPath('child_aggregation', path);\n\n if (firebase._.timeouts && firebase._.timeouts[aggregationId]) {\n 
clearTimeout(firebase._.timeouts[aggregationId]);\n firebase._.timeouts[aggregationId] = undefined;\n }\n\n ConnectId !== 'CleanAll' && delete firebase._.watchers[id][ConnectId];\n\n const countWatchers = ConnectId !== 'CleanAll' ? Object.keys(firebase._.watchers[id]).length : 0;\n\n if (countWatchers === 0 || isNewQuery) {\n countWatchers === 0 && delete firebase._.watchers[id];\n\n if (event!='once'){\n if (!firebase._.watchers[onceEvent]) {\n event !== 'all' && firebase.database().ref().child(path).off(event);\n if(!isSkipClean){\n dispatch({\n type: INIT_BY_PATH,\n path\n })\n }\n } else {\n firebase._.shouldClearAfterOnce[onceEvent] = firebase._.shouldClearAfterOnce[onceEvent] || [];\n firebase._.shouldClearAfterOnce[onceEvent].push({path, event, isSkipClean});\n }\n }\n }\n } else if (firebase._.watchers[id] && firebase._.watchers[id][ConnectId]) {\n firebase._.watchers[id][ConnectId]--\n }\n}\n\nexport const isWatchPath = (firebase, dispatch, event, path) => {\n const id = getWatchPath(event, path);\n let isWatch = false;\n\n if (firebase._.watchers[id] > 0) {\n isWatch = true;\n }\n\n return isWatch;\n}\n\nfunction isNumeric(n) {\n return !isNaN(n - parseFloat(n));\n}\n\nexport const watchEvent = (firebase, dispatch, event, path, ConnectId='Manual', isListenOnlyOnDelta=false,\n isAggregation=false, setFunc=undefined, setOptions=undefined) => {\n\n if (path) {\n\n\n const isNewQuery = path.includes('#')\n const isNewSet = setOptions !== undefined\n let queryParams = []\n\n if (isNewQuery) {\n let pathSplitted = path.split('#')\n path = pathSplitted[0]\n queryParams = pathSplitted[1].split('&')\n }\n\n const watchPath = path\n const counter = getWatcherCount(firebase, event, watchPath)\n\n if (counter > 0) {\n if (isNewQuery || isNewSet) {\n unsetWatcher(firebase, dispatch, event, path, ConnectId, false, isNewQuery || isNewSet)\n } else {\n setWatcher(firebase, event, watchPath, ConnectId)\n return\n }\n }\n\n setWatcher(firebase, event, watchPath, 
ConnectId)\n\n let query = firebase.database().ref().child(path)\n\n if (isNewQuery) {\n let doNotParse = false\n\n queryParams.forEach((param) => {\n param = param.split('=')\n switch (param[0]) {\n case 'doNotParse':\n doNotParse = true\n break\n case 'orderByValue':\n query = query.orderByValue()\n doNotParse = true\n break\n case 'orderByPriority':\n query = query.orderByPriority()\n doNotParse = true\n break\n case 'orderByKey':\n query = query.orderByKey()\n doNotParse = true\n break\n case 'orderByChild':\n query = query.orderByChild(param[1])\n break\n case 'limitToFirst':\n query = query.limitToFirst(parseInt(param[1]))\n break\n case 'limitToLast':\n query = query.limitToLast(parseInt(param[1]))\n break\n case 'equalTo':\n let equalToParam = (!doNotParse && isNumeric(param[1])) ? parseFloat(param[1]) || (param[1] === '0' ? 0 : param[1]) : param[1]\n equalToParam = equalToParam === 'null' ? null : equalToParam\n query = param.length === 3\n ? query.equalTo(equalToParam, param[2])\n : query.equalTo(equalToParam)\n break\n case 'startAt':\n let startAtParam = (!doNotParse && isNumeric(param[1])) ? parseFloat(param[1]) || (param[1] === '0' ? 0 : param[1]) : param[1]\n startAtParam = startAtParam === 'null' ? null : startAtParam\n query = param.length === 3\n ? query.startAt(startAtParam, param[2])\n : query.startAt(startAtParam)\n break\n case 'endAt':\n let endAtParam = (!doNotParse && isNumeric(param[1])) ? parseFloat(param[1]) || (param[1] === '0' ? 0 : param[1]) : param[1]\n endAtParam = endAtParam === 'null' ? null : endAtParam\n query = param.length === 3\n ? 
query.endAt(endAtParam, param[2])\n : query.endAt(endAtParam)\n break\n default:\n break\n }\n })\n }\n\n const runQuery = (q, e, p) => {\n dispatch({\n type: START,\n timestamp: Date.now(),\n requesting: true,\n requested: false,\n path\n })\n\n let aggregationId = getWatchPath('child_aggregation', path);\n\n if (e === 'once') {\n q.once('value')\n .then(snapshot => {\n cleanOnceWatcher(firebase, dispatch, event, watchPath, ConnectId)\n if (snapshot.val() !== null) {\n if (setFunc) {\n setFunc(snapshot, 'value', dispatch, setOptions);\n dispatch({\n type: SET_REQUESTED,\n path: p,\n key: snapshot.key,\n timestamp: Date.now(),\n requesting: false,\n requested: true\n });\n } else {\n dispatch({\n type: SET,\n path: p,\n data: snapshot.val(),\n snapshot: Object.assign(snapshot, {_event: 'value'}),\n key: snapshot.key,\n timestamp: Date.now(),\n requesting: false,\n requested: true,\n isChild: false,\n isMixSnapshot: false,\n isMergeDeep: false\n })\n }\n }\n }, dispatchPermissionDeniedError)\n } else if (e === 'child_added' && isListenOnlyOnDelta) {\n let newItems = false;\n\n q.on(e, snapshot => {\n if (!newItems) return;\n\n let tempSnapshot = Object.assign(snapshot, {_event: e});\n\n if (isAggregation) {\n if (!firebase._.timeouts[aggregationId]) {\n firebase._.aggregatedData[aggregationId] = {}\n firebase._.aggregatedSnapshot[aggregationId] = {}\n firebase._.timeouts[aggregationId] = setTimeout(() => {\n dispatchBulk(p, aggregationId)\n }, 1000);\n }\n\n firebase._.aggregatedData[aggregationId][snapshot.key] = snapshot.val()\n firebase._.aggregatedSnapshot[aggregationId][snapshot.key] = tempSnapshot;\n } else {\n if (setFunc) {\n setFunc(snapshot, 'child_added', dispatch, setOptions);\n dispatch({\n type: SET_REQUESTED,\n path: p,\n key: snapshot.key,\n timestamp: Date.now(),\n requesting: false,\n requested: true\n });\n } else {\n dispatch({\n type: SET,\n path: p,\n data: snapshot.val(),\n snapshot: tempSnapshot,\n key: snapshot.key,\n timestamp: 
Date.now(),\n requesting: false,\n requested: true,\n isChild: true,\n isMixSnapshot: true,\n isMergeDeep: false\n })\n }\n }\n }, dispatchPermissionDeniedError)\n\n q.once('value')\n .then(snapshot => {\n newItems = true;\n if (snapshot.val() !== null) {\n if (setFunc) {\n setFunc(snapshot, 'value', dispatch, setOptions);\n dispatch({\n type: SET_REQUESTED,\n path: p,\n key: snapshot.key,\n timestamp: Date.now(),\n requesting: false,\n requested: true\n });\n } else {\n dispatch({\n type: SET,\n path: p,\n data: snapshot.val(),\n snapshot: Object.assign(snapshot, {_event: 'value'}),\n key: snapshot.key,\n timestamp: Date.now(),\n requesting: false,\n requested: true,\n isChild: false,\n isMixSnapshot: true,\n isMergeDeep: false\n })\n }\n }\n }, dispatchPermissionDeniedError)\n } else {\n q.on(e, snapshot => {\n let data = (e === 'child_removed') ? '_child_removed' : snapshot.val();\n let tempSnapshot = Object.assign(snapshot, {_event: e});\n\n if (e !== 'value' && isAggregation) {\n if (!firebase._.timeouts[aggregationId]) {\n firebase._.aggregatedData[aggregationId] = {}\n firebase._.aggregatedSnapshot[aggregationId] = {}\n firebase._.timeouts[aggregationId] = setTimeout(() => {\n dispatchBulk(p, aggregationId)\n }, 1000);\n }\n\n firebase._.aggregatedData[aggregationId][snapshot.key] = data\n firebase._.aggregatedSnapshot[aggregationId][snapshot.key] = tempSnapshot\n } else {\n if (setFunc) {\n setFunc(tempSnapshot, e, dispatch, setOptions);\n\n } else {\n dispatch({\n type: SET,\n path: p,\n data,\n snapshot: tempSnapshot,\n key: snapshot.key,\n timestamp: Date.now(),\n requesting: false,\n requested: true,\n isChild: e !== 'value',\n isMixSnapshot: isListenOnlyOnDelta,\n isMergeDeep: false\n })\n }\n }\n }, (permError) => dispatchPermissionDeniedError(permError, p))\n }\n }\n\n const dispatchBulk = (p, aggregationId) => {\n if (setFunc) {\n setFunc(firebase._.aggregatedSnapshot[aggregationId], 'aggregated', dispatch, setOptions);\n dispatch({\n type: 
SET_REQUESTED,\n path: p,\n key: '_NONE',\n timestamp: Date.now(),\n requesting: false,\n requested: true\n });\n } else {\n dispatch({\n type: SET,\n path: p,\n data: firebase._.aggregatedData[aggregationId],\n snapshot: firebase._.aggregatedSnapshot[aggregationId],\n key: '_NONE',\n timestamp: Date.now(),\n requesting: false,\n requested: true,\n isChild: false,\n isMixSnapshot: true,\n isMergeDeep: true\n })\n }\n\n firebase._.timeouts[aggregationId] = undefined\n }\n\n const dispatchPermissionDeniedError = (permError, p) => {\n if (permError && permError.code === 'PERMISSION_DENIED' &&\n permError.message && !permError.message.includes('undefined')) {\n\n dispatch({\n type: PERMISSION_DENIED_ERROR,\n data: undefined,\n snapshot: {val: () => undefined},\n path: p,\n timestamp: Date.now(),\n requesting: false,\n requested: true,\n permError\n })\n }\n\n throw permError\n }\n\n runQuery(query, event, path)\n }\n}\n\nexport const unWatchEvent = (firebase, dispatch, event, path, ConnectId, isSkipClean=false) => {\n unsetWatcher(firebase, dispatch, event, path, ConnectId, isSkipClean)\n}\n\nexport const watchEvents = (firebase, dispatch, events, ConnectId='Manual') =>\n events.forEach(event => watchEvent(firebase, dispatch, event.name, event.path, ConnectId, event.isListenOnlyOnDelta, event.isAggregation, event.setFunc, event.setOptions))\n\nexport const unWatchEvents = (firebase, dispatch, events, ConnectId='Manual', isUnmount=false) =>\n events.forEach(event => unWatchEvent(firebase, dispatch, event.name, event.path, ConnectId, isUnmount ? 
!!event.isSkipCleanOnUnmount : event.isSkipClean))\n\nconst dispatchLoginError = (dispatch, authError) =>\n dispatch({\n type: LOGIN_ERROR,\n authError\n })\n\n\n\nconst dispatchLogin = (dispatch, auth) =>\n dispatch({\n type: LOGIN,\n auth,\n authError: null\n })\n\nconst unWatchUserProfile = (firebase) => {\n const authUid = firebase._.authUid\n const userProfile = firebase._.config.userProfile\n if (firebase._.profileWatch) {\n firebase.database().ref().child(`${userProfile}/${authUid}`).off('value', firebase._.profileWatch)\n firebase._.profileWatch = null\n }\n}\n\nconst watchUserProfile = (dispatch, firebase) => {\n const authUid = firebase._.authUid\n const userProfile = firebase._.config.userProfile\n unWatchUserProfile(firebase)\n if (firebase._.config.userProfile) {\n firebase._.profileWatch = firebase.database().ref().child(`${userProfile}/${authUid}`).on('value', snap => {\n dispatch({\n type: SET_PROFILE,\n profile: snap.val()\n })\n })\n }\n}\n\nconst createLoginPromise = (firebase, credentials) => {\n const auth = firebase.auth()\n if (_.isString(credentials)) {\n return auth.signInWithCustomToken(credentials)\n } else if (_.has(credentials, \"email\") && _.has(credentials, \"password\")) {\n return auth.signInWithEmailAndPassword(email, password)\n } else {\n return Promise.reject(new Error(`Malformed credentials or unsupported way of logging in: ${credentials}`))\n }\n}\n\nexport const login = (dispatch, firebase, credentials) => {\n return new Promise((resolve, reject) => {\n dispatchLoginError(dispatch, null)\n\n createLoginPromise(firebase, credentials)\n .then(resolve)\n .catch(err => {\n dispatchLoginError(dispatch, err)\n reject(err)\n });\n })\n}\n\nexport const init = (dispatch, firebase) => {\n firebase.auth().onAuthStateChanged(authData => {\n if (!authData) {\n return dispatch({type: LOGOUT})\n }\n\n firebase._.authUid = authData.uid\n watchUserProfile(dispatch, firebase)\n\n if (!!firebase._.firebasePendingEvents) {\n for (let key of 
Object.keys(firebase._.firebasePendingEvents)) {\n watchEvents(firebase, dispatch, firebase._.firebasePendingEvents[key], key);\n }\n\n firebase._.firebasePendingEvents = undefined\n }\n\n dispatchLogin(dispatch, authData)\n });\n\n // Run onAuthStateChanged if it exists in config\n if (firebase._.config.onAuthStateChanged) {\n firebase._.config.onAuthStateChanged(authData, firebase)\n }\n}\n\nexport const logout = (dispatch, firebase, preserve = [], remove = []) => {\n firebase.auth().signOut()\n dispatch({type: LOGOUT, preserve, remove})\n firebase._.authUid = null\n unWatchUserProfile(firebase)\n}\n\nexport const createUser = (dispatch, firebase, credentials, profile) =>\n new Promise((resolve, reject) => {\n dispatchLoginError(dispatch, null)\n firebase.auth().createUserWithEmailAndPassword(credentials.email, credentials.password)\n .then((userData) => {\n if (profile && firebase._.config.userProfile) {\n firebase.database().ref().child(`${firebase._.config.userProfile}/${userData.uid}`).set(profile)\n }\n\n login(dispatch, firebase, credentials)\n .then(() => resolve(userData.uid))\n .catch(err => reject(err))\n })\n .catch(err => {\n dispatchLoginError(dispatch, err)\n return reject(err)\n })\n })\n\nexport const resetPassword = (dispatch, firebase, email) => {\n dispatchLoginError(dispatch, null)\n return firebase.auth().sendPasswordResetEmail(email).catch((err) => {\n if (err) {\n switch (err.code) {\n case 'INVALID_USER':\n dispatchLoginError(dispatch, new Error('The specified user account does not exist.'))\n break\n default:\n dispatchLoginError(dispatch, err)\n }\n return\n }\n })\n}\n\nexport default { watchEvents, unWatchEvents, init, logout, createUser, resetPassword, isWatchPath 
}\n"},"avg_line_length":{"kind":"number","value":38.3339100346,"string":"38.33391"},"max_line_length":{"kind":"number","value":175,"string":"175"},"alphanum_fraction":{"kind":"number","value":0.4631042109,"string":"0.463104"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":774,"cells":{"hexsha":{"kind":"string","value":"f16a4c0abcbe9f3811a1d34fd2b925a33028ae43"},"size":{"kind":"number","value":2477,"string":"2,477"},"ext":{"kind":"string","value":"rb"},"lang":{"kind":"string","value":"Ruby"},"max_stars_repo_path":{"kind":"string","value":"app/controllers/metrics_controller.rb"},"max_stars_repo_name":{"kind":"string","value":"tkowark/repmine"},"max_stars_repo_head_hexsha":{"kind":"string","value":"6d358e1178892fb715ece18e5bc5722c6eb882c9"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":3,"string":"3"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-10-24T18:49:46.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-12-22T17:35:32.000Z"},"max_issues_repo_path":{"kind":"string","value":"app/controllers/metrics_controller.rb"},"max_issues_repo_name":{"kind":"string","value":"tkowark/repmine"},"max_issues_repo_head_hexsha":{"kind":"string","value":"6d358e1178892fb715ece18e5bc5722c6eb882c9"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":7,"string":"7"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-05-02T14:26:41.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2016-05-03T13:52:31.000Z"},"max_forks_repo_path":{"kind":"string","value":"app/controllers/metrics_controller.rb"},"max_forks_repo_name":{"kind":"string","value":"tkowark/repmine"},"max_forks_repo_head_hexsha":{"kind":"string","value":"6d358e1178892fb715ece18e5bc5722c6eb882c9"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-05-09T13:48:43.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2020-05-09T13:48:43.000Z"},"content":{"kind":"string","value":"class MetricsController < ApplicationController\n autocomplete :tag, :name, :class_name => 'ActsAsTaggableOn::Tag'\n\n def create\n @metric = Metric.new\n @metric.save(validate: false)\n redirect_to metric_path(@metric)\n end\n\n def show\n @metric = Metric.find(params[:id])\n @measurable_groups = Metric.grouped([@metric.id]).merge(Pattern.grouped){|key, val1, val2| val1 + val2}\n @existing_connections = []\n @metric.metric_nodes.each do |node|\n node.children.each do |child|\n @existing_connections << {:source => node.id, :target => child.id}\n end\n end\n @title = @metric.name.blank? ? \"New metric\" : \"Metric '#{@metric.name}'\"\n end\n\n def update\n metric = Metric.find(params[:id])\n if metric.update_attributes(params[:metric])\n flash[:notice] = \"Successfully saved metric!\"\n render json: {}\n else\n flash[:error] = \"Could not save metric!
#{metric.errors.full_messages.join(\"
\")}\"\n render json: {}, :status => :unprocessable_entity\n end\n end\n\n def create_connection\n source = MetricNode.find(params[:source_id])\n target = MetricNode.find(params[:target_id])\n target.parent = source\n target.save(validate: false)\n render :nothing => true, :status => 200, :content_type => 'text/html'\n end\n\n def destroy_connection\n begin\n source = MetricNode.find(params[:source_id])\n target = MetricNode.find(params[:target_id])\n target.parent = nil\n target.save\n rescue Exception => e\n end\n render :nothing => true, :status => 200, :content_type => 'text/html'\n end\n\n def download_csv\n repository = Repository.find(params[:repository_id])\n metric = Metric.find(params[:metrics].first)\n metric.calculate(repository)\n send_data(\n File.open(metric.metrics_path(\"csv\", repository)).read,\n :type => 'text/csv; charset=utf-8; header=present',\n :filename => metric.fancy_metric_file_name(repository)\n )\n end\n\n def create_node\n metric = Metric.find(params[:metric_id])\n measurable = Measurable.find(params[:pattern_id])\n node = metric.create_node(measurable)\n render :partial => \"metric_nodes/show\", :layout => false, :locals => {:node => node}\n end\n\n def create_operator\n metric = Metric.find(params[:metric_id])\n node = MetricOperatorNode.create(:operator_cd => params[:operator])\n metric.metric_nodes << node\n render :partial => \"metric_nodes/show\", :layout => false, :locals => {:node => node}\n 
end\nend"},"avg_line_length":{"kind":"number","value":32.5921052632,"string":"32.592105"},"max_line_length":{"kind":"number","value":107,"string":"107"},"alphanum_fraction":{"kind":"number","value":0.6645135244,"string":"0.664514"},"score":{"kind":"number","value":3.296875,"string":"3.296875"}}},{"rowIdx":775,"cells":{"hexsha":{"kind":"string","value":"12e2ad4fc6525e0a31ce4b9546d519cb7efc7a48"},"size":{"kind":"number","value":2551,"string":"2,551"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"koma-core-api/common/src/koma/internal/default/generated/matrix/DefaultIntMatrixFactory.kt"},"max_stars_repo_name":{"kind":"string","value":"drmoose/koma"},"max_stars_repo_head_hexsha":{"kind":"string","value":"765dfb206cada4b682a94e140a40ba6c6e95667b"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":233,"string":"233"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-05-03T16:54:08.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-12-04T03:20:04.000Z"},"max_issues_repo_path":{"kind":"string","value":"koma-core-api/common/src/koma/internal/default/generated/matrix/DefaultIntMatrixFactory.kt"},"max_issues_repo_name":{"kind":"string","value":"drmoose/koma"},"max_issues_repo_head_hexsha":{"kind":"string","value":"765dfb206cada4b682a94e140a40ba6c6e95667b"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":70,"string":"70"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2017-05-07T20:07:37.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-08-11T20:33:13.000Z"},"max_forks_repo_path":{"kind":"string","value":"koma-core-api/common/src/koma/internal/default/generated/matrix/DefaultIntMatrixFactory.kt"},"max_forks_repo_name":{"kind":"string","value":"drmoose/koma"},"max_forks_repo_head_hexsha":{"kind":"string","value":"765dfb206cada4b682a94e140a40ba6c6e95667b"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":31,"string":"31"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2017-05-18T09:04:56.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-05-07T22:40:26.000Z"},"content":{"kind":"string","value":"/**\n * THIS FILE IS AUTOGENERATED, DO NOT MODIFY. 
EDIT THE FILES IN templates/\n * AND RUN ./gradlew :codegen INSTEAD!\n */\n\npackage koma.internal.default.generated.matrix\n\nimport koma.*\nimport koma.matrix.*\nimport koma.extensions.*\nimport koma.internal.notImplemented\nimport koma.internal.getRng\nimport koma.internal.syncNotNative\n\nclass DefaultIntMatrixFactory: MatrixFactory> {\n override fun zeros(rows: Int, cols: Int) \n = DefaultIntMatrix(rows, cols)\n\n override fun create(data: IntRange): Matrix {\n val input = data.map { it.toInt() }\n val out = DefaultIntMatrix(1, input.size)\n input.forEachIndexed { idx, ele -> out[idx] = ele }\n return out\n } \n override fun create(data: DoubleArray): Matrix {\n val out = DefaultIntMatrix(1, data.size)\n data.forEachIndexed { idx, ele -> out[idx] = ele.toInt() }\n return out\n }\n\n override fun create(data: Array): Matrix {\n val out = DefaultIntMatrix(data.size, data[0].size)\n data.forEachIndexed { rowIdx, row ->\n row.forEachIndexed { colIdx, ele -> \n out[rowIdx, colIdx] = ele.toInt()\n }\n }\n return out\n }\n\n override fun ones(rows: Int, cols: Int): Matrix\n = zeros(rows, cols).fill {_,_-> 1.toInt()}\n\n override fun eye(size: Int): Matrix \n = eye(size, size)\n override fun eye(rows: Int, cols: Int): Matrix\n = zeros(rows, cols)\n .fill {row,col->if (row==col) 1.toInt() else 0.toInt() }\n\n\n override fun rand(rows: Int, cols: Int): Matrix {\n val array = zeros(rows, cols)\n val rng = getRng()\n syncNotNative(rng) {\n array.fill { _, _ -> rng.nextDoubleUnsafe().toInt() }\n }\n return array;\n }\n\n\n override fun randn(rows: Int, cols: Int): Matrix {\n val array = zeros(rows, cols)\n val rng = getRng()\n syncNotNative(rng) {\n array.fill { _, _ -> rng.nextGaussianUnsafe().toInt() }\n }\n return array;\n }\n\n\n override fun arange(start: Double, stop: Double, increment: Double): Matrix {\n error(notImplemented)\n }\n\n override fun arange(start: Double, stop: Double): Matrix {\n error(notImplemented)\n }\n\n override fun arange(start: Int, stop: Int, 
increment: Int): Matrix {\n error(notImplemented)\n }\n\n override fun arange(start: Int, stop: Int): Matrix {\n error(notImplemented)\n }\n}\n"},"avg_line_length":{"kind":"number","value":29.3218390805,"string":"29.321839"},"max_line_length":{"kind":"number","value":86,"string":"86"},"alphanum_fraction":{"kind":"number","value":0.596628773,"string":"0.596629"},"score":{"kind":"number","value":3.15625,"string":"3.15625"}}},{"rowIdx":776,"cells":{"hexsha":{"kind":"string","value":"e8fc72a77fdd416f9afae8ddad6132491cc5fabf"},"size":{"kind":"number","value":4405,"string":"4,405"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"c4/system/history.py"},"max_stars_repo_name":{"kind":"string","value":"Brewgarten/c4-system-manager"},"max_stars_repo_head_hexsha":{"kind":"string","value":"6fdec33ced4b1cb32d82a24cd168447a899b7e10"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"c4/system/history.py"},"max_issues_repo_name":{"kind":"string","value":"Brewgarten/c4-system-manager"},"max_issues_repo_head_hexsha":{"kind":"string","value":"6fdec33ced4b1cb32d82a24cd168447a899b7e10"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2017-10-17T21:51:40.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2017-10-17T21:51:40.000Z"},"max_forks_repo_path":{"kind":"string","value":"c4/system/history.py"},"max_forks_repo_name":{"kind":"string","value":"Brewgarten/c4-system-manager"},"max_forks_repo_head_hexsha":{"kind":"string","value":"6fdec33ced4b1cb32d82a24cd168447a899b7e10"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\nCopyright (c) IBM 2015-2017. All Rights Reserved.\nProject name: c4-system-manager\nThis project is licensed under the MIT License, see LICENSE\n\"\"\"\nfrom abc import ABCMeta, abstractmethod\n\n\nclass DeviceHistory(object):\n \"\"\"\n Device manager history\n \"\"\"\n __metaclass__ = ABCMeta\n\n @abstractmethod\n def add(self, node, name, status, ttl=None):\n \"\"\"\n Add status for device manager with specified name on specified node\n\n :param node: node name\n :type node: str\n :param name: device manager name\n :type name: str\n :param status: status\n :type status: :class:`DeviceManagerStatus`\n :param ttl: time to live (in seconds), infinite by default\n :type ttl: int\n \"\"\"\n\n @abstractmethod\n def get(self, node, name, limit=None):\n \"\"\"\n Get status history for device manager with specified name on specified node\n\n :param node: node name\n :type node: str\n :param name: device manager name\n :type name: str\n :param limit: number of statuses to return\n :type limit: int\n :returns: list of history entries\n :rtype: [:class:`Entry`]\n \"\"\"\n\n @abstractmethod\n def getAll(self):\n \"\"\"\n Get status history for all device managers on all nodes\n\n :returns: 
list of history entries\n :rtype: [:class:`Entry`]\n \"\"\"\n\n @abstractmethod\n def getLatest(self, node, name):\n \"\"\"\n Get latest status for device manager with specified name on specified node\n\n :param node: node name\n :type node: str\n :param name: device manager name\n :type name: str\n :returns: history entry\n :rtype: :class:`Entry`\n \"\"\"\n\n @abstractmethod\n def remove(self, node=None, name=None):\n \"\"\"\n Remove status history for device managers with specified names on specified nodes.\n\n node and name:\n remove history for specific device on a specific node\n\n node and no name\n remove history for all devices on a specific node\n\n no node and name\n remove history for specific device on all nodes\n\n no node and no name\n remove history for all devices on all nodes\n\n :param node: node name\n :type node: str\n :param name: device manager name\n :type name: str\n \"\"\"\n\nclass Entry(object):\n \"\"\"\n History entry with timestamp and status information\n\n :param timestamp: datetime instance\n :type timestamp: :class:`Datetime`\n :param status: status\n :type status: :class:`SystemManagerStatus` or :class:`DeviceManagerStatus`\n \"\"\"\n def __init__(self, timestamp, status):\n self.timestamp = timestamp\n self.status = status\n\nclass NodeHistory(object):\n \"\"\"\n System manager history\n \"\"\"\n __metaclass__ = ABCMeta\n\n @abstractmethod\n def add(self, node, status, ttl=None):\n \"\"\"\n Add status for system manager with on specified node\n\n :param node: node name\n :type node: str\n :param status: status\n :type status: :class:`SystemManagerStatus`\n :param ttl: time to live (in seconds), infinite by default\n :type ttl: int\n \"\"\"\n\n @abstractmethod\n def get(self, node, limit=None):\n \"\"\"\n Get status history for system manager on specified node\n\n :param node: node name\n :type node: str\n :param limit: number of statuses to return\n :type limit: int\n :returns: list of history entries\n :rtype: [:class:`Entry`]\n 
\"\"\"\n\n @abstractmethod\n def getAll(self):\n \"\"\"\n Get status history for all system managers on all nodes\n\n :returns: list of history entries\n :rtype: [:class:`Entry`]\n \"\"\"\n\n @abstractmethod\n def getLatest(self, node):\n \"\"\"\n Get latest status for system manager on specified node\n\n :param node: node name\n :type node: str\n :returns: history entry\n :rtype: :class:`Entry`\n \"\"\"\n\n @abstractmethod\n def remove(self, node=None):\n \"\"\"\n Remove status history for system managers on specified nodes.\n\n node:\n remove history for specific node\n\n no node\n remove history for all nodes\n\n :param node: node name\n :type node: str\n \"\"\"\n"},"avg_line_length":{"kind":"number","value":26.0650887574,"string":"26.065089"},"max_line_length":{"kind":"number","value":90,"string":"90"},"alphanum_fraction":{"kind":"number","value":0.5956867196,"string":"0.595687"},"score":{"kind":"number","value":3.078125,"string":"3.078125"}}},{"rowIdx":777,"cells":{"hexsha":{"kind":"string","value":"e7ea5fbf2a5ea893fa5d02bc075a60e6e8983358"},"size":{"kind":"number","value":4580,"string":"4,580"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"app/request.py"},"max_stars_repo_name":{"kind":"string","value":"angelakarenzi5/News-Highlight"},"max_stars_repo_head_hexsha":{"kind":"string","value":"3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62"},"max_stars_repo_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n \"Unlicense\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"app/request.py"},"max_issues_repo_name":{"kind":"string","value":"angelakarenzi5/News-Highlight"},"max_issues_repo_head_hexsha":{"kind":"string","value":"3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62"},"max_issues_repo_licenses":{"kind":"list 
like","value":["Unlicense"],"string":"[\n \"Unlicense\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"app/request.py"},"max_forks_repo_name":{"kind":"string","value":"angelakarenzi5/News-Highlight"},"max_forks_repo_head_hexsha":{"kind":"string","value":"3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62"},"max_forks_repo_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n \"Unlicense\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from app import app\nimport urllib.request,json\nfrom .models import source\nfrom .models import article\n\nSource = source.Source\nArticle = article.Article\n\n# Getting api key\napi_key = app.config['NEWS_API_KEY']\n\n\n# Getting the source base url\nbase_url = app.config[\"SOURCE_API_BASE_URL\"]\narticle_url = app.config[\"ARTICLE_API_BASE_URL\"]\n\n\ndef process_results(source_list):\n '''\n Function that processes the source result and transform them to a list of Objects\n\n Args:\n source_list: A list of dictionaries that contain source details\n\n Returns :\n source_results: A list of source objects\n '''\n source_results = []\n for source_item in source_list:\n id = source_item.get('id')\n name = source_item.get('name')\n description= source_item.get('description')\n url = source_item.get('url')\n category = source_item.get('category')\n language = source_item.get('language')\n country = source_item.get('country')\n\n if url:\n source_object = Source(id,name,description,url,category,language,country)\n source_results.append(source_object)\n\n return source_results\n\ndef get_sources(category):\n '''\n Function that gets the json response to our url request\n '''\n get_sources_url = base_url.format(category,api_key)\n\n with 
urllib.request.urlopen(get_sources_url) as url:\n get_sources_data = url.read()\n get_sources_response = json.loads(get_sources_data)\n\n source_results = None\n\n if get_sources_response['sources']:\n source_results_list = get_sources_response['sources']\n source_results = process_results(source_results_list)\n\n return source_results\n\ndef get_articles(category):\n '''\n Function that gets the json response to our url request\n '''\n get_articles_url = article_url.format(category,api_key)\n\n with urllib.request.urlopen(get_articles_url) as url:\n get_articles_data = url.read()\n get_articles_response = json.loads(get_articles_data)\n\n article_results = None\n\n if get_articles_response['articles']:\n article_results_list = get_articles_response['articles']\n article_results = process_results(article_results_list)\n\n return article_results\n\n\ndef get_source(id):\n get_sources_details_url = article_url.format(id,api_key)\n\n with urllib.request.urlopen(get_sources_details_url) as url:\n source_details_data = url.read()\n source_details_response = json.loads(source_details_data)\n\n source_object = None\n if source_details_response:\n id = source_details_response.get('id')\n name = source_details_response.get('name')\n description = source_details_response.get('description')\n url = source_details_response.get('url')\n category = source_details_response.get('category')\n language = source_details_response.get('language')\n country = source_details_response.get('country')\n\n\n source_object = Source(id,name,description,url,category,language,country)\n\n return source_object\n\n\ndef process_articles(article_list):\n '''\n Function that processes the article result and transform them to a list of Objects\n\n Args:\n article_list: A list of dictionaries that contain article details\n\n Returns :\n article_results: A list of article objects\n '''\n article_results = []\n for article_item in article_list:\n author = article_item.get('author')\n title = 
article_item.get('title')\n description= article_item.get('description')\n url =article_item.get('url')\n urlToImage = article_item.get('urlToImage')\n publishedAt = article_item.get('publishedAt')\n content = article_item.get('content')\n\n if url:\n article_object =Article(author,title,description, url, urlToImage,publishedAt,content)\n article_results.append(article_object)\n\n return article_results\n\ndef get_articles(source):\n '''\n Function that gets the json response to our url request\n '''\n get_articles_url = article_url.format(source,api_key)\n\n \n with urllib.request.urlopen(get_articles_url) as url:\n get_articles_data = url.read()\n get_articles_response = json.loads(get_articles_data)\n\n article_results = None\n\n if get_articles_response['articles']:\n article_results_list = get_articles_response['articles']\n article_results = process_articles(article_results_list)\n\n return article_results"},"avg_line_length":{"kind":"number","value":31.156462585,"string":"31.156463"},"max_line_length":{"kind":"number","value":98,"string":"98"},"alphanum_fraction":{"kind":"number","value":0.6919213974,"string":"0.691921"},"score":{"kind":"number","value":3.140625,"string":"3.140625"}}},{"rowIdx":778,"cells":{"hexsha":{"kind":"string","value":"16c5ad86cd82ede7749039f69f9eab6aa6ad6753"},"size":{"kind":"number","value":1381,"string":"1,381"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"stopwatch-core/src/main/java/com/danielbostwick/stopwatch/core/service/DefaultStopwatchService.kt"},"max_stars_repo_name":{"kind":"string","value":"bostwick/android-stopwatch"},"max_stars_repo_head_hexsha":{"kind":"string","value":"94735805b22592f077a40b5578d661983a57b42a"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n 
\"BSD-3-Clause\"\n]"},"max_stars_count":{"kind":"number","value":29,"string":"29"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2015-02-25T23:04:31.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-02-17T11:28:06.000Z"},"max_issues_repo_path":{"kind":"string","value":"stopwatch-core/src/main/java/com/danielbostwick/stopwatch/core/service/DefaultStopwatchService.kt"},"max_issues_repo_name":{"kind":"string","value":"bostwick/android-stopwatch"},"max_issues_repo_head_hexsha":{"kind":"string","value":"94735805b22592f077a40b5578d661983a57b42a"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_issues_count":{"kind":"number","value":4,"string":"4"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-04-28T18:25:07.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2018-02-15T08:44:15.000Z"},"max_forks_repo_path":{"kind":"string","value":"stopwatch-core/src/main/java/com/danielbostwick/stopwatch/core/service/DefaultStopwatchService.kt"},"max_forks_repo_name":{"kind":"string","value":"bostwick/android-stopwatch"},"max_forks_repo_head_hexsha":{"kind":"string","value":"94735805b22592f077a40b5578d661983a57b42a"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_forks_count":{"kind":"number","value":21,"string":"21"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2015-02-25T23:04:28.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-12-15T05:51:26.000Z"},"content":{"kind":"string","value":"package com.danielbostwick.stopwatch.core.service\n\nimport com.danielbostwick.stopwatch.core.model.Stopwatch\nimport com.danielbostwick.stopwatch.core.model.StopwatchState.PAUSED\nimport com.danielbostwick.stopwatch.core.model.StopwatchState.STARTED\nimport org.joda.time.DateTime\nimport org.joda.time.Duration\nimport 
org.joda.time.Interval\n\n\nclass DefaultStopwatchService : StopwatchService {\n override fun create() = Stopwatch(PAUSED, DateTime.now(), Duration.ZERO)\n\n override fun start(stopwatch: Stopwatch, startedAt: DateTime) = when (stopwatch.state) {\n PAUSED -> Stopwatch(STARTED, DateTime.now(), stopwatch.offset)\n STARTED -> stopwatch\n }\n\n override fun pause(stopwatch: Stopwatch, pausedAt: DateTime) = when (stopwatch.state) {\n PAUSED -> stopwatch\n STARTED -> Stopwatch(PAUSED, DateTime.now(),\n newOffset(stopwatch.offset, stopwatch.startedAt, pausedAt))\n }\n\n override fun reset(stopwatch: Stopwatch) = create()\n\n override fun timeElapsed(stopwatch: Stopwatch, now: DateTime): Duration = when (stopwatch.state) {\n PAUSED -> stopwatch.offset\n STARTED -> stopwatch.offset.plus(Interval(stopwatch.startedAt, now).toDuration())\n }\n\n private fun newOffset(existingOffset: Duration, startedAt: DateTime, pausedAt: DateTime) =\n existingOffset.plus(Interval(startedAt, pausedAt).toDuration())\n}\n"},"avg_line_length":{"kind":"number","value":39.4571428571,"string":"39.457143"},"max_line_length":{"kind":"number","value":102,"string":"102"},"alphanum_fraction":{"kind":"number","value":0.7385952209,"string":"0.738595"},"score":{"kind":"number","value":3.21875,"string":"3.21875"}}},{"rowIdx":779,"cells":{"hexsha":{"kind":"string","value":"5b175b219a0692a2df8cd6b1cedfce902d4856fd"},"size":{"kind":"number","value":2932,"string":"2,932"},"ext":{"kind":"string","value":"c"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"src/pack.c"},"max_stars_repo_name":{"kind":"string","value":"macton/shannon-fano"},"max_stars_repo_head_hexsha":{"kind":"string","value":"e82a26939f34180aff25f5676dd9cc6392b78367"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n 
\"BSD-3-Clause\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/pack.c"},"max_issues_repo_name":{"kind":"string","value":"macton/shannon-fano"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e82a26939f34180aff25f5676dd9cc6392b78367"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/pack.c"},"max_forks_repo_name":{"kind":"string","value":"macton/shannon-fano"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e82a26939f34180aff25f5676dd9cc6392b78367"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-06-13T12:15:36.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2019-06-13T12:15:36.000Z"},"content":{"kind":"string","value":"#include \"main.h\"\n\nstatic ptab ptable[MAPSIZE];\nstatic char codes[MAPSIZE][256];\n\nvoid\npack(const char *input, const char *output)\n{\n#ifdef STAT\n\tclock_t time1, time2;\n\ttime1 = clock();\n#endif\n\tint c, i, j;\n\n\tFILE *infile = fopen(input, \"r\");\n\tassert(infile);\n\n\tint size = ptablebuild(infile, ptable);\n\n\tencode(0, size - 1);\n\n\tprintf(\"code table size: %d\\n\", size);\n\n#ifdef STAT\n\tFILE *codetable = fopen(\"codetable\", \"wb\");\n\tassert(codetable);\n\tfor (i = 0; i < size; ++i)\n\t\t{\n\t\t\tfprintf(codetable, \"%c %s %f \\n\", ptable[i].ch, codes[ptable[i].ch], ptable[i].p);\n\t\t\tprintf(\"%c->%s\\n\", ptable[i].ch, 
codes[ptable[i].ch]);\n\t\t}\n\tfclose(codetable);\n#endif\n\t\n\tfor (i = 0; i < size; ++i)\n\t\tprintf(\"%c->%s\\n\", ptable[i].ch, codes[ptable[i].ch]);\n\t\n\tFILE *outfile = fopen(output, \"wb\");\n\tassert(outfile);\n\t\n\tputc(size - 1, outfile);\n\n\tbuffer buff;\n\tbuff.size = buff.v = 0;\n\n\tchar codesize[8], codebit[8], *ch;\n\tfor (i = 0; i < size; ++i)\n\t\t{\n\t\t\tc = ptable[i].ch;\n\t\t\tchartobit(c, codebit);\n\t\t\tfor (j = 0; j < 8; ++j)\n\t\t\t\twritebit(outfile, &buff, codebit[j]); // 8 bits of the code\n\n\t\t\tchartobit(strlen(codes[c]) - 1, codesize);\n\t\t\tfor (j = 0; j < 8; ++j)\n\t\t\t\twritebit(outfile, &buff, codesize[j]); // size of code\n\t\t\t\n\t\t\tj = -1;\n\t\t\tch = codes[c];\n\t\t\twhile (ch[++j] != '\\0')\n\t\t\t\twritebit(outfile, &buff, ch[j]); // code\n\t\t}\n\n\tfseek(infile, 0, SEEK_SET);\n\t\n\twhile ((c = getc(infile)) != EOF)\n\t\t{\n\t\t\tch = codes[c];\n\t\t\tj = -1;\n\t\t\twhile (ch[++j] != '\\0')\n\t\t\t\twritebit(outfile, &buff, ch[j]);\n\t\t}\n\tif (buff.size != 8)\n\t\tputc(buff.v, outfile);\n\t\n\tputc(buff.size, outfile);\n\n\tfclose(outfile);\n\tfclose(infile);\n\n#ifdef STAT\n\ttime2 = clock();\n\tprintf(\"time:%f\\n\", (double)(time2 - time1) / (double)CLOCKS_PER_SEC);\n#endif\n}\n\nint\nptablebuild(FILE *infile, ptab ptable[])\n{\n\tint freq_table[MAPSIZE], i, c;\n\n\tunsigned long total = 0;\n\tfor (i = 0; i < MAPSIZE; ++i)\n\t\t\tfreq_table[i] = 0;\n\n\twhile ((c = getc(infile)) != EOF)\n\t\t{\n\t\t\tfreq_table[c]++;\n\t\t\ttotal++;\n\t\t}\n\n\tdouble ftot = (double)total;\n\n\tint size = 0;\n\tfor (i = 0; i < MAPSIZE; ++i)\n\t\t{\n\t\t\tif (!freq_table[i])\n\t\t\t\tcontinue;\n\t\t\tptable[size].ch = i;\n\t\t\tptable[size].p = (double)freq_table[i] / ftot;\n\t\t\tsize++;\n\t\t}\n\t\n\tquicksort(ptable, 0, size);\n\treturn size;\n}\n\nvoid\nencode(int li, int ri)\n{\n\tif (li == ri)\n\t\treturn;\n\n\tint i, isp;\n\tfloat p, phalf;\n\n\tif (ri - li == 1)\n\t\t{\n\t\t\tcharcat(codes[ptable[li].ch], 
'0');\n\t\t\tcharcat(codes[ptable[ri].ch], '1');\n\t\t}\n\telse\n\t\t{\n\t\t\tphalf = 0;\n\t\t\tfor(i = li; i <= ri; ++i)\n\t\t\t\tphalf += ptable[i].p;\n\n\t\t\tp = 0;\n\t\t\tisp = -1;\n\t\t\tphalf *= 0.5f;\n\t\t\tfor(i = li; i <= ri; ++i)\n\t\t\t\t{\n\t\t\t\t\tif(p <= phalf)\n\t\t\t\t\t\tcharcat(codes[ptable[i].ch], '0');\n\t\t\t\t\telse\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tcharcat(codes[ptable[i].ch], '1');\n\t\t\t\t\t\t\tif(isp < 0)\n\t\t\t\t\t\t\t\tisp = i;\n\t\t\t\t\t\t}\n\t\t\t\t\tp += ptable[i].p;\n\t\t\t\t}\n\n\t\t\tif (isp < 0)\n\t\t\t\tisp = li + 1;\n\n\t\t\tencode(li, isp - 1);\n\t\t\tencode(isp, ri);\n\n\t\t}\n\n}\n\nvoid\ncharcat(char s[], char t)\n{\n\tint i = 0;\n\twhile (s[i] != '\\0')\n\t\ti++;\n\ts[i++] = t;\n\ts[i++] = '\\0';\n}\n"},"avg_line_length":{"kind":"number","value":16.9479768786,"string":"16.947977"},"max_line_length":{"kind":"number","value":85,"string":"85"},"alphanum_fraction":{"kind":"number","value":0.536834925,"string":"0.536835"},"score":{"kind":"number","value":3.21875,"string":"3.21875"}}},{"rowIdx":780,"cells":{"hexsha":{"kind":"string","value":"3ef74a45487ef684eda81fe738c3e8bee9e2a584"},"size":{"kind":"number","value":2861,"string":"2,861"},"ext":{"kind":"string","value":"h"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"src/ScoreTracker.h"},"max_stars_repo_name":{"kind":"string","value":"syi47/spacebilliards"},"max_stars_repo_head_hexsha":{"kind":"string","value":"39a0a55761917144920a0a5ac4ff145a83d69a55"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/ScoreTracker.h"},"max_issues_repo_name":{"kind":"string","value":"syi47/spacebilliards"},"max_issues_repo_head_hexsha":{"kind":"string","value":"39a0a55761917144920a0a5ac4ff145a83d69a55"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/ScoreTracker.h"},"max_forks_repo_name":{"kind":"string","value":"syi47/spacebilliards"},"max_forks_repo_head_hexsha":{"kind":"string","value":"39a0a55761917144920a0a5ac4ff145a83d69a55"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"/* Copyright 2009 Tatham Johnson\r\n\r\n Licensed under the Apache License, Version 2.0 (the \"License\");\r\n you may not use this file except in compliance with the License.\r\n You may obtain a copy of the License at\r\n\r\n http://www.apache.org/licenses/LICENSE-2.0\r\n\r\n Unless required by applicable law or agreed to in writing, software\r\n distributed under the License is distributed on an \"AS IS\" BASIS,\r\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n See the License for the specific language governing permissions and\r\n limitations under the License.\r\n*/\r\n\r\n#pragma once\r\n#include \r\n#include \r\n\r\nclass Score\r\n{\r\npublic:\r\n\t///Constructor\r\n\t/** @param time Time, in Milliseconds, of the score\r\n\t@param name The name of the 
Player who owns the score\r\n\t**/\r\n\tScore(int time, const std::string& name)\r\n\t\t: m_Time(time), m_Name(name) {}\r\n\tint Time() const { return m_Time; }\t///< The time of the score\r\n\tconst std::string& Name() const { return m_Name; }\t///< The name of the player who owns the score\r\n\tbool operator<(const Score& rvalue) const { return m_Time < rvalue.Time(); }\t///< Used to sort the scores\r\nprivate:\r\n\tint m_Time;\r\n\tstd::string m_Name;\r\n};\r\n\r\n\r\nclass ScoreTracker\r\n{\r\npublic:\r\n\t///Constructor\r\n\t/** @param fileName The location of the file to save and load scores to/from\r\n\t**/\r\n\tScoreTracker(const std::string& fileName);\r\n\t///Destructor\r\n\t~ScoreTracker(void);\r\n\r\n\t///Saves the scores to the file\r\n\tvoid save();\r\n\t///Loads the scores from the file\r\n\tvoid load();\r\n\r\n\t///Adds a score to the current scores.\r\n\t/** Note: does not automatically save the scores\r\n\t**/\r\n\tvoid addScore(int time, const std::string& name);\r\n\r\n\t///Rates a score against the current high scores\r\n\t/**\r\n\t@param time The time to test against the current high scores\r\n\t@return The high score that the time would displace, or -1 if not a high score\r\n\t**/\r\n\tint rateScore(int time);\r\n\r\n\t///Gets the number of scores currently stored\r\n\t/** @return The number of scores\r\n\t**/\r\n\tint count() const {return m_Scores.size(); }\r\n\t///Clears all the high scores\r\n\t/** This will clear all the scores stored inside the file as well\r\n\t**/\r\n\tvoid clearScores();\r\n\r\n\t///Gets the score at the given index\r\n\tconst Score& at(int index) const;\r\n\t///Gets the score at the given index, using square bracket operators\r\n\tconst Score& operator[](int index) const { return at(index); }\r\n\r\n\t///Returns the index of the last score added\r\n\tint lastScoreIndex() { return m_LastScoreIndex; }\r\n\r\nprivate:\r\nprivate:\r\n\t///Sorts the scores lowest (best) to highest (worst)\r\n\tvoid 
sortScores();\r\n\r\n\t///Removes scores from memory, but does not delete scores from disk\r\n\tvoid removeCachedScores();\r\n\r\nprivate:\r\n\tstd::string m_FileName;\r\n\tstd::vector m_Scores;\r\n\tint m_LastScoreIndex;\r\n\ttypedef std::vector::iterator ScoreIterator;\r\n};\r\n"},"avg_line_length":{"kind":"number","value":29.8020833333,"string":"29.802083"},"max_line_length":{"kind":"number","value":107,"string":"107"},"alphanum_fraction":{"kind":"number","value":0.6927647676,"string":"0.692765"},"score":{"kind":"number","value":3.265625,"string":"3.265625"}}},{"rowIdx":781,"cells":{"hexsha":{"kind":"string","value":"72ba66a1ffade0abd7d9777fb505ec5a703b5441"},"size":{"kind":"number","value":4797,"string":"4,797"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"layout/stack.lua"},"max_stars_repo_name":{"kind":"string","value":"ErikRoelofs/renderer"},"max_stars_repo_head_hexsha":{"kind":"string","value":"f0702d05752859a6d097f39e08ed97256e677f09"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-09-26T18:49:36.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2016-09-26T18:49:36.000Z"},"max_issues_repo_path":{"kind":"string","value":"layout/stack.lua"},"max_issues_repo_name":{"kind":"string","value":"ErikRoelofs/looky"},"max_issues_repo_head_hexsha":{"kind":"string","value":"f0702d05752859a6d097f39e08ed97256e677f09"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"layout/stack.lua"},"max_forks_repo_name":{"kind":"string","value":"ErikRoelofs/looky"},"max_forks_repo_head_hexsha":{"kind":"string","value":"f0702d05752859a6d097f39e08ed97256e677f09"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"local renderChildren = function(self) \n self:renderBackground()\n\n local locX, locY = self:startCoordsBasedOnGravity()\n\n for k, v in ipairs(self.children) do\n love.graphics.push() \n love.graphics.translate( self.scaffold[v][1], self.scaffold[v][2])\n v:render()\n love.graphics.pop()\n end\nend\n\nlocal function scaffoldViews(self)\n local hTilt, vTilt \n local tilt = function (number, direction)\n if self.tiltDirection[direction] == \"start\" then\n return (self.tiltAmount[direction] * (#self.children-1)) - (self.tiltAmount[direction] * number)\n elseif self.tiltDirection[direction] == \"none\" then\n return 0\n elseif self.tiltDirection[direction] == \"end\" then\n return self.tiltAmount[direction] * number\n end\n end\n\n local locX, locY = self:startCoordsBasedOnGravity()\n\n for k, v in ipairs(self.children) do \n self.scaffold[v] = { locX + tilt(k-1, 1), locY + tilt(k-1, 2) }\n end\n \nend\n\nlocal function layout(self, children)\n local maxWidth = self:availableWidth()\n local maxHeight = self:availableHeight()\n for k, v in ipairs(children) do\n local childWidth, childHeight\n if v:desiredWidth() == \"fill\" then\n childWidth = maxWidth\n else\n childWidth = math.min(maxWidth, v:desiredWidth())\n end\n \n if v:desiredHeight() == \"fill\" then\n childHeight = maxHeight\n else\n childHeight 
= math.min(maxHeight, v:desiredHeight())\n end\n \n v:setDimensions(childWidth, childHeight)\n \n end\n \n for k, v in ipairs(children) do \n v:layoutingPass()\n end\n self:scaffoldViews()\nend\n\nlocal function containerWidth(self) \n local width = 0\n for k, v in ipairs(self.children) do\n if v:desiredWidth() == \"fill\" then\n return \"fill\"\n else\n if v:desiredWidth() > width then\n width = v:desiredWidth()\n end\n end\n end\n return width + (self.tiltAmount[1] * #self.children)\nend\n\nlocal function containerHeight(self)\n local height = 0\n for k, v in ipairs(self.children) do\n if v:desiredHeight() == \"fill\" then\n return \"fill\"\n else\n if v:desiredHeight() > height then\n height = v:desiredHeight()\n end\n end\n end\n height = height + (self.tiltAmount[2] * #self.children)\n return height\nend\n\nlocal function clickShouldTargetChild(self, x, y, child)\n local relativeX = x - self.scaffold[v][1]\n local relativeY = y - self.scaffold[v][2]\n return relativeX > 0 and relativeY > 0 and \n relativeX < child:getGrantedWidth() and relativeY < child:getGrantedHeight()\nend\n\nlocal function signalTargetedChildren(self, signal, payload) \n for i, v in ipairs(self:getChildren()) do\n if clickShouldTargetChild(self, payload.x, payload.y, child) then\n local thisPayload = { x = payload.x - self.scaffold[child][1] , y = payload.y - self.scaffold[child][2] }\n v:receiveSignal(signal, thisPayload)\n end\n end\nend\n\nreturn function(looky)\n return {\n build = function (options)\n local base = looky:makeBaseLayout(options)\n base.renderCustom = renderChildren \n base.layoutingPass = function(self) layout(self, self.children) end \n base.contentWidth = containerWidth\n base.contentHeight = containerHeight\n base.tiltDirection = options.tiltDirection or {\"none\", \"none\"}\n base.tiltAmount = options.tiltAmount or {0,0}\n base.scaffoldViews = scaffoldViews\n base.scaffold = {} \n base.getLocationOffset = getLocationOffset\n \n if not options.signalHandlers then\n 
options.signalHandlers = {}\n if not options.signalHandlers.leftclick then\n options.signalHandlers.leftclick = signalTargetedChildren\n end\n end\n base.signalHandlers = options.signalHandlers\n \n base.update = function(self, dt)\n for k, v in ipairs(self.children) do\n v:update(dt)\n end\n end\n \n base.translateCoordsToChild = function(self, child, x, y) \n return x - self.scaffold[child][1], y - self.scaffold[child][2]\n end\n base.translateCoordsFromChild = function(self, child, x, y)\n return x + self.scaffold[child][1], y + self.scaffold[child][2]\n end\n\n return base\n end,\n schema = looky:extendSchema(\"base\", {\n tiltAmount = { \n required = false, \n schemaType = \"table\", \n options = { \n { required = true, schemaType = \"number\" }, \n { required = true, schemaType = \"number\" }, \n }\n },\n tiltDirection = { \n required = false, \n schemaType = \"table\", \n options = { \n { required = true, schemaType = \"fromList\", list = { \"start\", \"none\", \"end\" } }, \n { required = true, schemaType = \"fromList\", list = { \"start\", \"none\", \"end\" } } \n }\n }\n })\n }\nend"},"avg_line_length":{"kind":"number","value":29.6111111111,"string":"29.611111"},"max_line_length":{"kind":"number","value":111,"string":"111"},"alphanum_fraction":{"kind":"number","value":0.6312278507,"string":"0.631228"},"score":{"kind":"number","value":3.34375,"string":"3.34375"}}},{"rowIdx":782,"cells":{"hexsha":{"kind":"string","value":"77e57aae25476783412603e36caaa852e987551e"},"size":{"kind":"number","value":1174,"string":"1,174"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"contract/src/geohash.rs"},"max_stars_repo_name":{"kind":"string","value":"enigmampc/safetraceV2"},"max_stars_repo_head_hexsha":{"kind":"string","value":"6d0598e75c109cab0300e67366074656b9d9b64a"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":4,"string":"4"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-11-22T08:37:05.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-01-21T09:20:04.000Z"},"max_issues_repo_path":{"kind":"string","value":"contract/src/geohash.rs"},"max_issues_repo_name":{"kind":"string","value":"enigmampc/safetraceV2"},"max_issues_repo_head_hexsha":{"kind":"string","value":"6d0598e75c109cab0300e67366074656b9d9b64a"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"contract/src/geohash.rs"},"max_forks_repo_name":{"kind":"string","value":"enigmampc/safetraceV2"},"max_forks_repo_head_hexsha":{"kind":"string","value":"6d0598e75c109cab0300e67366074656b9d9b64a"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"use cosmwasm_std::{StdError, StdResult};\nuse geohash::{encode, Coordinate};\n\nuse schemars::JsonSchema;\nuse serde::{Deserialize, Serialize};\n\nconst PRECISION: usize = 9usize;\n\n/// return the geohash to a precision degree specified by `PRECISION`.\n/// 7 ~ 76m\n/// 8 ~ 20m\n/// 9 ~ 7m\n/// 10 ~ 1m\npub fn ghash(x: f64, y: f64) -> StdResult {\n encode(\n Coordinate {\n x, // lng\n y, // lat\n },\n PRECISION,\n )\n .map_err(|_| StdError::generic_err(format!(\"Cannot encode data to geohash ({}, {})\", x, y)))\n}\n\npub fn neighbors(geohash: &String) -> StdResult> {\n let mut all: Vec = vec![];\n\n let positions = geohash::neighbors(geohash)\n .map_err(|_| StdError::generic_err(\"Failed to decode geohash\"))?;\n\n 
all.push(positions.n);\n all.push(positions.ne);\n all.push(positions.e);\n all.push(positions.se);\n all.push(positions.s);\n all.push(positions.sw);\n all.push(positions.w);\n all.push(positions.nw);\n\n Ok(all)\n}\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\npub struct GeoLocationTime {\n pub geohash: String,\n pub timestamp_ms: u64,\n}\n"},"avg_line_length":{"kind":"number","value":24.4583333333,"string":"24.458333"},"max_line_length":{"kind":"number","value":96,"string":"96"},"alphanum_fraction":{"kind":"number","value":0.6252129472,"string":"0.625213"},"score":{"kind":"number","value":3.203125,"string":"3.203125"}}},{"rowIdx":783,"cells":{"hexsha":{"kind":"string","value":"dfdf0a0793736d8413235dfb32e0be3aa6c6d834"},"size":{"kind":"number","value":2778,"string":"2,778"},"ext":{"kind":"string","value":"ts"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"projects/ng-translation/src/lib/components/ng-trans-subcontent/ng-trans-subcontent.component.spec.ts"},"max_stars_repo_name":{"kind":"string","value":"wjx774326739/ng-translation"},"max_stars_repo_head_hexsha":{"kind":"string","value":"adc0c53a14bb40cf256f75b082a0a3eb69cea37b"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":3,"string":"3"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2022-02-28T14:30:53.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-02-28T14:53:39.000Z"},"max_issues_repo_path":{"kind":"string","value":"projects/ng-translation/src/lib/components/ng-trans-subcontent/ng-trans-subcontent.component.spec.ts"},"max_issues_repo_name":{"kind":"string","value":"wjx774326739/ng-translation"},"max_issues_repo_head_hexsha":{"kind":"string","value":"adc0c53a14bb40cf256f75b082a0a3eb69cea37b"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":3,"string":"3"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2022-03-02T13:04:02.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-11T14:45:22.000Z"},"max_forks_repo_path":{"kind":"string","value":"projects/ng-translation/src/lib/components/ng-trans-subcontent/ng-trans-subcontent.component.spec.ts"},"max_forks_repo_name":{"kind":"string","value":"bigBear713/ng-translation"},"max_forks_repo_head_hexsha":{"kind":"string","value":"adc0c53a14bb40cf256f75b082a0a3eb69cea37b"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import { ChangeDetectorRef, Component, TemplateRef, ViewChild } from '@angular/core';\nimport { ComponentFixture, TestBed } from '@angular/core/testing';\nimport { NgTransTestingModule } from '../../testing';\nimport { NgTransSubcontentComponent } from './ng-trans-subcontent.component';\n\n@Component({\n selector: 'mock-tpl-ref',\n template: `\n {{content}}\n\n \n

{{item}}

\n
\n `,\n})\nexport class MockTplRefComponent {\n @ViewChild('tplRef') tplRef!: TemplateRef;\n @ViewChild('tplRefWithList') tplRefWithList!: TemplateRef;\n\n content = 'mock templateRef content';\n}\n\ndescribe('Component: NgTransSubcontent', () => {\n let component: NgTransSubcontentComponent;\n let fixture: ComponentFixture;\n let hostEle: HTMLElement;\n\n beforeEach(async () => {\n await TestBed.configureTestingModule({\n imports: [NgTransTestingModule],\n declarations: [MockTplRefComponent]\n })\n .compileComponents();\n });\n\n beforeEach(() => {\n fixture = TestBed.createComponent(NgTransSubcontentComponent);\n component = fixture.componentInstance;\n fixture.detectChanges();\n hostEle = fixture.debugElement.nativeElement;\n });\n\n it('should be created', () => {\n expect(component).toBeTruthy();\n });\n\n it('the content is a string value', () => {\n const content = 'test content';\n component.content = content;\n\n detectChanges();\n\n expect(hostEle.textContent?.trim()).toEqual(content);\n });\n\n it('the content is a templateRef type value', () => {\n const mockTplRefFixture = TestBed.createComponent(MockTplRefComponent);\n const mockTplRefComp = mockTplRefFixture.componentInstance;\n mockTplRefFixture.detectChanges();\n\n const content = mockTplRefComp.tplRef;\n component.content = content;\n\n detectChanges();\n\n expect(hostEle.textContent?.trim()).toEqual(mockTplRefComp.content);\n });\n\n it('the content is a templateRef type value with string list param', () => {\n const mockList = ['mock list 1', 'mock list 2'];\n\n const mockTplRefFixture = TestBed.createComponent(MockTplRefComponent);\n const mockTplRefComp = mockTplRefFixture.componentInstance;\n mockTplRefFixture.detectChanges();\n\n const content = mockTplRefComp.tplRefWithList;\n component.content = content;\n component.list = mockList;\n\n detectChanges();\n\n const listFromDom = Array.from(hostEle.querySelectorAll('p')).map(item => item.textContent?.trim());\n 
expect(listFromDom).toEqual(mockList);\n });\n\n function detectChanges() {\n const changeDR = fixture.componentRef.injector.get(ChangeDetectorRef);\n changeDR.markForCheck();\n fixture.detectChanges();\n }\n});\n"},"avg_line_length":{"kind":"number","value":30.1956521739,"string":"30.195652"},"max_line_length":{"kind":"number","value":104,"string":"104"},"alphanum_fraction":{"kind":"number","value":0.7037437005,"string":"0.703744"},"score":{"kind":"number","value":3.171875,"string":"3.171875"}}},{"rowIdx":784,"cells":{"hexsha":{"kind":"string","value":"4f9a40ddaeef2dfe752121a12c272ab3436c571f"},"size":{"kind":"number","value":1978,"string":"1,978"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"hammerspoon/init.lua"},"max_stars_repo_name":{"kind":"string","value":"inoc603/dotfiles"},"max_stars_repo_head_hexsha":{"kind":"string","value":"a7c7592a59d51bb45a8d7977e51293ac2e845e1b"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-04-20T13:24:50.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2017-04-20T13:24:50.000Z"},"max_issues_repo_path":{"kind":"string","value":"hammerspoon/init.lua"},"max_issues_repo_name":{"kind":"string","value":"inoc603/dotfiles"},"max_issues_repo_head_hexsha":{"kind":"string","value":"a7c7592a59d51bb45a8d7977e51293ac2e845e1b"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"hammerspoon/init.lua"},"max_forks_repo_name":{"kind":"string","value":"inoc603/dotfiles"},"max_forks_repo_head_hexsha":{"kind":"string","value":"a7c7592a59d51bb45a8d7977e51293ac2e845e1b"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"hs.hotkey.bind({\"cmd\", \"ctrl\"}, \"r\", function()\n hs.reload()\nend)\nhs.alert.show(\"Config loaded\")\n\nfunction posX(screen)\n x, y = screen:position()\n return x\nend\n\nfunction screenAtCenter()\n local screens = hs.screen.allScreens()\n table.sort(screens, function(a, b) return posX(a) < posX(b) end)\n return screens[math.ceil(#screens/2)]\nend\n\nlocal wf=hs.window.filter\n\nfunction startsWith(str, start)\n return str:sub(1, #start) == start\nend\n\nlocal alacrittyPrefix = \"alacritty-\"\n\nlocal appCache = {}\n\nfunction moveToCenter(w)\n if startsWith(w:title(), alacrittyPrefix) then\n\tw:moveToScreen(screenAtCenter(), 0)\n end\nend\n\n-- when alacritty windows is created or focused by hot key, make sure it's in the center screen.\nlocal alacritty = wf.new(false):setAppFilter('Alacritty', {allowTitles=1})\nalacritty:subscribe(wf.windowCreated, moveToCenter)\nalacritty:subscribe(wf.windowFocused, moveToCenter)\n\nfunction launchAlacritty(title, commands)\n title = alacrittyPrefix .. title\n\n app = appCache[title]\n\n if app == nil then\n\tapp = hs.window.get(title)\n\tappCache[title] = app\n end\n\n if app == nil then\n params = {\"-t\", title, \"--config-file\", os.getenv(\"HOME\") .. 
\"/.alacritty.yml\"}\n if commands then\n table.insert(params, \"-e\")\n for i, v in ipairs(commands) do\n table.insert(params, v)\n end\n end\n\n hs.task.new(\n\t\t\"/Applications/Alacritty.app/Contents/MacOS/alacritty\",\n\t\tfunction()\n\t\t\tprint(\"STOPPED\", title)\n\t\t\tappCache[title] = nil\n\t\tend,\n\t\tparams\n\t):start()\n else\n app:focus()\n end\nend\n\n-- ssh to devbox and attach to the last used tmux session.\nhs.hotkey.bind({\"cmd\", \"ctrl\"}, \"k\", function()\n launchAlacritty(\"remote\", {\"ssh\", \"t\"})\nend)\n\n-- attach to the last used tmux session or create one from home directory if there is none.\nhs.hotkey.bind({\"cmd\", \"ctrl\"}, \"l\", function()\n launchAlacritty(\"local\", {\"zsh\", \"--login\", \"-i\", \"-c\", \"ta\"})\nend)\n"},"avg_line_length":{"kind":"number","value":25.0379746835,"string":"25.037975"},"max_line_length":{"kind":"number","value":96,"string":"96"},"alphanum_fraction":{"kind":"number","value":0.6516683519,"string":"0.651668"},"score":{"kind":"number","value":3.484375,"string":"3.484375"}}},{"rowIdx":785,"cells":{"hexsha":{"kind":"string","value":"161a26670648f222d07a3cc7d9d0d73373cc4f96"},"size":{"kind":"number","value":9445,"string":"9,445"},"ext":{"kind":"string","value":"ts"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"src/blockchain/transactions.ts"},"max_stars_repo_name":{"kind":"string","value":"OasisDEX/xDex"},"max_stars_repo_head_hexsha":{"kind":"string","value":"e75d9fe69a9f9ceda4097546c9f6a8308b599a8d"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/blockchain/transactions.ts"},"max_issues_repo_name":{"kind":"string","value":"OasisDEX/xDex"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e75d9fe69a9f9ceda4097546c9f6a8308b599a8d"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2022-02-21T14:14:10.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-02-22T06:16:16.000Z"},"max_forks_repo_path":{"kind":"string","value":"src/blockchain/transactions.ts"},"max_forks_repo_name":{"kind":"string","value":"OasisDEX/xDex"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e75d9fe69a9f9ceda4097546c9f6a8308b599a8d"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-08-01T16:29:45.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-08-01T16:29:45.000Z"},"content":{"kind":"string","value":"/*\n * Copyright (C) 2020 Maker Ecosystem Growth Holdings, INC.\n */\n\nimport * as _ from 'lodash';\nimport { fromPairs } from 'ramda';\nimport { bindNodeCallback, combineLatest, fromEvent, merge, Observable, of, Subject, timer } from 'rxjs';\nimport { takeWhileInclusive } from 'rxjs-take-while-inclusive';\nimport { ajax } from 'rxjs/ajax';\nimport { catchError, filter, first, map, mergeMap, scan, shareReplay, startWith, switchMap } from 'rxjs/operators';\n\nimport { UnreachableCaseError } from '../utils/UnreachableCaseError';\nimport { account$, context$, onEveryBlock$ } from 
'./network';\nimport { web3 } from './web3';\n\nexport enum TxStatus {\n WaitingForApproval = 'WaitingForApproval',\n CancelledByTheUser = 'CancelledByTheUser',\n Propagating = 'Propagating',\n WaitingForConfirmation = 'WaitingForConfirmation',\n Success = 'Success',\n Error = 'Error',\n Failure = 'Failure',\n}\n\nexport function isDone(state: TxState) {\n return [TxStatus.CancelledByTheUser, TxStatus.Error, TxStatus.Failure, TxStatus.Success].indexOf(state.status) >= 0;\n}\n\nexport function isDoneButNotSuccessful(state: TxState) {\n return [TxStatus.CancelledByTheUser, TxStatus.Error, TxStatus.Failure].indexOf(state.status) >= 0;\n}\n\nexport function isSuccess(state: TxState) {\n return TxStatus.Success === state.status;\n}\n\nexport function getTxHash(state: TxState): string | undefined {\n if (\n state.status === TxStatus.Success ||\n state.status === TxStatus.Failure ||\n state.status === TxStatus.Error ||\n state.status === TxStatus.WaitingForConfirmation\n ) {\n return state.txHash;\n }\n return undefined;\n}\n\nexport enum TxRebroadcastStatus {\n speedup = 'speedup',\n cancel = 'cancel',\n}\n\nexport type TxState = {\n account: string;\n txNo: number;\n networkId: string;\n meta: any;\n start: Date;\n end?: Date;\n lastChange: Date;\n dismissed: boolean;\n} & (\n | {\n status: TxStatus.WaitingForApproval;\n }\n | {\n status: TxStatus.CancelledByTheUser;\n error: any;\n }\n | {\n status: TxStatus.WaitingForConfirmation | TxStatus.Propagating;\n txHash: string;\n broadcastedAt: Date;\n }\n | {\n status: TxStatus.Success;\n txHash: string;\n blockNumber: number;\n receipt: any;\n confirmations: number;\n safeConfirmations: number;\n rebroadcast?: TxRebroadcastStatus;\n }\n | {\n status: TxStatus.Failure;\n txHash: string;\n blockNumber: number;\n receipt: any;\n }\n | {\n status: TxStatus.Error;\n txHash: string;\n error: any;\n }\n);\n\nlet txCounter: number = 1;\n\ntype NodeCallback = (i: I, callback: (err: any, r: R) => any) => any;\n\ninterface 
TransactionReceiptLike {\n transactionHash: string;\n status: boolean;\n blockNumber: number;\n}\n\ntype GetTransactionReceipt = NodeCallback;\n\ninterface TransactionLike {\n hash: string;\n nonce: number;\n input: string;\n blockHash: string;\n}\n\ntype GetTransaction = NodeCallback;\n\nfunction txRebroadcastStatus({ hash, nonce, input }: TransactionLike) {\n return combineLatest(externalNonce2tx$, onEveryBlock$).pipe(\n map(([externalNonce2tx]) => {\n if (externalNonce2tx[nonce] && externalNonce2tx[nonce].hash !== hash) {\n return [\n externalNonce2tx[nonce].hash,\n input === externalNonce2tx[nonce].callData ? TxRebroadcastStatus.speedup : TxRebroadcastStatus.cancel,\n ];\n }\n return [hash, undefined];\n }),\n ) as Observable<[string, undefined | TxRebroadcastStatus]>;\n}\n\nexport function send(\n account: string,\n networkId: string,\n meta: any,\n method: (...args: any[]) => any, // Any contract method\n): Observable {\n const common = {\n account,\n networkId,\n meta,\n txNo: txCounter += 1,\n start: new Date(),\n lastChange: new Date(),\n };\n\n function successOrFailure(\n txHash: string,\n receipt: TransactionReceiptLike,\n rebroadcast: TxRebroadcastStatus | undefined,\n ): Observable {\n const end = new Date();\n\n if (!receipt.status) {\n // TODO: failure should be confirmed!\n return of({\n ...common,\n txHash,\n receipt,\n end,\n lastChange: end,\n blockNumber: receipt.blockNumber,\n status: TxStatus.Failure,\n } as TxState);\n }\n\n // TODO: error handling!\n return combineLatest(context$, onEveryBlock$).pipe(\n mergeMap(([context, blockNumber]) =>\n of({\n ...common,\n txHash,\n receipt,\n end,\n rebroadcast,\n lastChange: new Date(),\n blockNumber: receipt.blockNumber,\n status: TxStatus.Success,\n confirmations: Math.max(0, blockNumber - receipt.blockNumber),\n safeConfirmations: context.safeConfirmations,\n } as TxState),\n ),\n takeWhileInclusive((state) => state.status === TxStatus.Success && state.confirmations < state.safeConfirmations),\n 
);\n }\n\n const promiEvent = method();\n const result: Observable = merge(fromEvent(promiEvent, 'transactionHash'), promiEvent).pipe(\n map((txHash: string) => [txHash, new Date()]),\n first(),\n mergeMap(([txHash, broadcastedAt]: [string, Date]) =>\n timer(0, 1000).pipe(\n switchMap(() => bindNodeCallback(web3.eth.getTransaction as GetTransaction)(txHash)),\n filter((transaction) => !!transaction),\n first(),\n mergeMap(\n (transaction: TransactionLike) =>\n (txRebroadcastStatus(transaction).pipe(\n switchMap(([hash, rebroadcast]) =>\n bindNodeCallback(web3.eth.getTransactionReceipt as GetTransactionReceipt)(hash).pipe(\n filter((receipt) => receipt && !!receipt.blockNumber),\n mergeMap((receipt) => successOrFailure(hash, receipt, rebroadcast)),\n ),\n ),\n first(),\n startWith({\n ...common,\n broadcastedAt,\n txHash,\n status: TxStatus.WaitingForConfirmation,\n } as TxState),\n catchError((error) => {\n return of({\n ...common,\n error,\n txHash: transaction.hash,\n end: new Date(),\n lastChange: new Date(),\n status: TxStatus.Error,\n } as TxState);\n }),\n ) as any) as Observable,\n ),\n startWith({\n ...common,\n broadcastedAt,\n txHash,\n status: TxStatus.Propagating,\n } as TxState),\n ),\n ),\n startWith({\n ...common,\n status: TxStatus.WaitingForApproval,\n }),\n shareReplay(1),\n catchError((error) => {\n if ((error.message as string).indexOf('User denied transaction signature') === -1) {\n console.error(error);\n }\n return of({\n ...common,\n error,\n end: new Date(),\n lastChange: new Date(),\n status: TxStatus.CancelledByTheUser,\n });\n }),\n );\n result.subscribe((state) => transactionObserver.next({ state, kind: 'newTx' }));\n\n return result;\n}\n\ninterface NewTransactionChange {\n kind: 'newTx';\n state: TxState;\n}\n\ninterface DismissedChange {\n kind: 'dismissed';\n txNo: number;\n}\n\nexport const transactionObserver: Subject = new Subject();\n\ntype TransactionsChange = NewTransactionChange | DismissedChange;\n\nexport const 
transactions$: Observable = combineLatest(\n transactionObserver.pipe(\n scan((transactions: TxState[], change: TransactionsChange) => {\n switch (change.kind) {\n case 'newTx': {\n const newState = change.state;\n const result = [...transactions];\n const i = result.findIndex((t) => t.txNo === newState.txNo);\n if (i >= 0) {\n result[i] = newState;\n } else {\n result.push(newState);\n }\n return result;\n }\n case 'dismissed': {\n const result = [...transactions];\n const i = result.findIndex((t) => t.txNo === change.txNo);\n\n result[i].dismissed = true;\n\n return result;\n }\n default:\n throw new UnreachableCaseError(change);\n }\n }, []),\n ),\n account$,\n context$,\n).pipe(\n map(([transactions, account, context]) =>\n transactions.filter((t: TxState) => t.account === account && t.networkId === context.id),\n ),\n startWith([]),\n shareReplay(1),\n);\n\ninterface ExternalNonce2tx {\n [nonce: number]: { hash: string; callData: string };\n}\nconst externalNonce2tx$: Observable = combineLatest(\n context$,\n account$,\n onEveryBlock$.pipe(first()),\n onEveryBlock$,\n).pipe(\n switchMap(([context, account, firstBlock]) =>\n ajax({\n url:\n `${context.etherscan.apiUrl}?module=account` +\n `&action=txlist` +\n `&address=${account}` +\n `&startblock=${firstBlock}` +\n `&sort=desc` +\n `&apikey=${context.etherscan.apiKey}`,\n }),\n ),\n map(({ response }) => response.result),\n map((transactions: Array<{ hash: string; nonce: string; input: string }>) =>\n fromPairs(\n _.map(\n transactions,\n (tx) => [tx.nonce, { hash: tx.hash, callData: tx.input }] as [string, { hash: string; callData: string }],\n ),\n ),\n ),\n catchError((error) => {\n console.error(error);\n return of({});\n }),\n 
shareReplay(1),\n);\n"},"avg_line_length":{"kind":"number","value":27.4563953488,"string":"27.456395"},"max_line_length":{"kind":"number","value":120,"string":"120"},"alphanum_fraction":{"kind":"number","value":0.6028586554,"string":"0.602859"},"score":{"kind":"number","value":3.125,"string":"3.125"}}},{"rowIdx":786,"cells":{"hexsha":{"kind":"string","value":"24c6dec269d7c632d90e9801d7fd371e425dba33"},"size":{"kind":"number","value":14669,"string":"14,669"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"make-plural.go"},"max_stars_repo_name":{"kind":"string","value":"gotnospirit/makeplural"},"max_stars_repo_head_hexsha":{"kind":"string","value":"a5f48d94d976801ab2251014c9a626d1c86d7e22"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-04-02T01:08:35.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-04-02T01:08:35.000Z"},"max_issues_repo_path":{"kind":"string","value":"make-plural.go"},"max_issues_repo_name":{"kind":"string","value":"gotnospirit/makeplural"},"max_issues_repo_head_hexsha":{"kind":"string","value":"a5f48d94d976801ab2251014c9a626d1c86d7e22"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"make-plural.go"},"max_forks_repo_name":{"kind":"string","value":"gotnospirit/makeplural"},"max_forks_repo_head_hexsha":{"kind":"string","value":"a5f48d94d976801ab2251014c9a626d1c86d7e22"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n 
\"BSD-2-Clause\"\n]"},"max_forks_count":{"kind":"number","value":2,"string":"2"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-04-12T07:23:21.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2020-04-01T08:17:57.000Z"},"content":{"kind":"string","value":"package main\n\nimport (\n\t\"encoding/json\"\n\t\"flag\"\n\t\"fmt\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"os\"\n\t\"sort\"\n\t\"strconv\"\n\t\"strings\"\n\t\"text/template\"\n\t\"time\"\n)\n\ntype (\n\tSource interface {\n\t\tCulture() string\n\t\tCultureId() string\n\t\tCode() string\n\t}\n\n\tTest interface {\n\t\ttoString() string\n\t}\n\n\tFuncSource struct {\n\t\tculture, vars, impl string\n\t}\n\n\tUnitTestSource struct {\n\t\tculture string\n\t\ttests []Test\n\t}\n\n\tUnitTest struct {\n\t\tordinal bool\n\t\texpected, value string\n\t}\n\n\tOp struct {\n\t\tprevious_logic, left, operator, right, next_logic string\n\t}\n)\n\nfunc (x FuncSource) Culture() string {\n\treturn x.culture\n}\n\nfunc (x FuncSource) CultureId() string {\n\treturn sanitize(x.culture)\n}\n\nfunc (x FuncSource) Code() string {\n\tresult := \"\"\n\tif \"\" != x.vars {\n\t\tresult += x.vars + \"\\n\"\n\t}\n\tresult += x.impl\n\treturn result\n}\n\nfunc (x UnitTestSource) Culture() string {\n\treturn x.culture\n}\n\nfunc (x UnitTestSource) CultureId() string {\n\treturn sanitize(x.culture)\n}\n\nfunc (x UnitTestSource) Code() string {\n\tvar result []string\n\tfor _, child := range x.tests {\n\t\tresult = append(result, \"\\t\\t\"+child.toString())\n\t}\n\treturn strings.Join(result, \"\\n\")\n}\n\nfunc (x UnitTest) toString() string {\n\treturn fmt.Sprintf(\n\t\t\"testNamedKey(t, fn, %s, `%s`, `%s`, %v)\",\n\t\tx.value,\n\t\tx.expected,\n\t\tfmt.Sprintf(\"fn(\"+x.value+\", %v)\", x.ordinal),\n\t\tx.ordinal,\n\t)\n}\n\nfunc sanitize(input string) string {\n\tvar result string\n\tfor _, char := range input {\n\t\tswitch {\n\t\tcase char >= 'a' && char <= 'z', char >= 'A' && char <= 
'Z':\n\t\t\tresult += string(char)\n\t\t}\n\t}\n\treturn result\n}\n\nfunc (x Op) conditions() []string {\n\tvar result []string\n\n\tconditions := strings.Split(x.right, \",\")\n\tfor _, condition := range conditions {\n\t\tpos := strings.Index(condition, \"..\")\n\n\t\tif -1 != pos {\n\t\t\tlower_bound, upper_bound := condition[:pos], condition[pos+2:]\n\t\t\tlb, _ := strconv.Atoi(lower_bound)\n\t\t\tub, _ := strconv.Atoi(upper_bound)\n\n\t\t\tr := rangeCondition(x.left, lb, ub, x.operator)\n\t\t\tresult = append(result, r...)\n\t\t} else {\n\t\t\tresult = append(result, fmt.Sprintf(\"%s %s %s\", x.left, x.operator, condition))\n\t\t}\n\t}\n\treturn result\n}\n\nfunc get(url, key string, headers *string) (map[string]map[string]string, error) {\n\tfmt.Print(\"GET \", url)\n\n\tresponse, err := http.Get(url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer response.Body.Close()\n\n\tif 200 != response.StatusCode {\n\t\treturn nil, fmt.Errorf(response.Status)\n\t}\n\n\tcontents, err := ioutil.ReadAll(response.Body)\n\n\tvar document map[string]map[string]json.RawMessage\n\terr = json.Unmarshal([]byte(contents), &document)\n\tif nil != err {\n\t\treturn nil, err\n\t}\n\n\tif _, ok := document[\"supplemental\"]; !ok {\n\t\treturn nil, fmt.Errorf(\"Data does not appear to be CLDR data\")\n\t}\n\t*headers += fmt.Sprintf(\"//\\n// URL: %s\\n\", url)\n\n\t{\n\t\tvar version map[string]string\n\t\terr = json.Unmarshal(document[\"supplemental\"][\"version\"], &version)\n\t\tif nil != err {\n\t\t\treturn nil, err\n\t\t}\n\t\t*headers += fmt.Sprintf(\"// %s\\n\", version[\"_number\"])\n\t}\n\n\t{\n\t\tvar generation map[string]string\n\t\terr = json.Unmarshal(document[\"supplemental\"][\"generation\"], &generation)\n\t\tif nil != err {\n\t\t\treturn nil, err\n\t\t}\n\t\t*headers += fmt.Sprintf(\"// %s\\n\", generation[\"_date\"])\n\t}\n\n\tvar data map[string]map[string]string\n\terr = json.Unmarshal(document[\"supplemental\"][\"plurals-type-\"+key], &data)\n\tif nil != 
err {\n\t\treturn nil, err\n\t}\n\treturn data, nil\n}\n\nfunc rangeCondition(varname string, lower, upper int, operator string) []string {\n\tvar result []string\n\tfor i := lower; i <= upper; i++ {\n\t\tresult = append(result, fmt.Sprintf(\"%s %s %d\", varname, operator, i))\n\t}\n\treturn result\n}\n\nfunc pattern2code(input string, ptr_vars *[]string) []string {\n\tleft, short, operator, logic := \"\", \"\", \"\", \"\"\n\n\tvar ops []Op\n\tbuf := \"\"\nloop:\n\tfor _, char := range input {\n\t\tswitch char {\n\t\tdefault:\n\t\t\tbuf += string(char)\n\n\t\tcase '@':\n\t\t\tbreak loop\n\n\t\tcase ' ':\n\n\t\tcase '=':\n\t\t\tif \"\" != buf {\n\t\t\t\tleft, operator, buf = buf, \"==\", \"\"\n\t\t\t\tshort = toVar(left, ptr_vars)\n\t\t\t}\n\n\t\tcase '!':\n\t\t\tleft, operator, buf = buf, \"!=\", \"\"\n\t\t\tshort = toVar(left, ptr_vars)\n\t\t}\n\n\t\tif \"\" != buf {\n\t\t\tpos := strings.Index(buf, \"and\")\n\n\t\t\tif -1 != pos {\n\t\t\t\tops = append(ops, Op{logic, short, operator, buf[:pos], \"AND\"})\n\t\t\t\tbuf, left, operator, logic = \"\", \"\", \"\", \"AND\"\n\t\t\t} else {\n\t\t\t\tpos = strings.Index(buf, \"or\")\n\n\t\t\t\tif -1 != pos {\n\t\t\t\t\tops = append(ops, Op{logic, short, operator, buf[:pos], \"OR\"})\n\t\t\t\t\tbuf, left, operator, logic = \"\", \"\", \"\", \"OR\"\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif \"\" != buf {\n\t\tops = append(ops, Op{logic, short, operator, buf, \"\"})\n\t}\n\n\tif 1 == len(ops) {\n\t\tconditions := ops[0].conditions()\n\t\tif \"==\" == ops[0].operator {\n\t\t\treturn conditions\n\t\t} else {\n\t\t\treturn []string{strings.Join(conditions, \" && \")}\n\t\t}\n\t}\n\n\tvar result []string\n\tvar buffer []string\n\n\tbuffer_length := 0\n\tfor _, o := range ops {\n\t\tconditions := o.conditions()\n\t\tlogic = o.previous_logic\n\t\tnextLogic := o.next_logic\n\t\toperator := o.operator\n\n\t\tif \"OR\" == logic && buffer_length > 0 {\n\t\t\tresult = append(result, strings.Join(buffer, \", \"))\n\t\t\tbuffer = 
[]string{}\n\t\t\tbuffer_length = 0\n\t\t}\n\n\t\tif (\"\" == logic && \"OR\" == nextLogic) || (\"OR\" == logic && \"OR\" == nextLogic) || (\"OR\" == logic && \"\" == nextLogic) {\n\t\t\tif \"==\" == operator {\n\t\t\t\tbuffer = append(buffer, conditions...)\n\t\t\t} else {\n\t\t\t\tbuffer = append(buffer, strings.Join(conditions, \" && \"))\n\t\t\t}\n\t\t\tbuffer_length = len(buffer)\n\t\t} else if \"AND\" == logic && (\"AND\" == nextLogic || \"\" == nextLogic) {\n\t\t\tif \"==\" == operator {\n\t\t\t\tbuffer[buffer_length-1] += \" && \" + joinOr(conditions)\n\t\t\t} else {\n\t\t\t\tbuffer[buffer_length-1] += \" && \" + strings.Join(conditions, \" && \")\n\t\t\t}\n\t\t} else if \"\" == logic && \"AND\" == nextLogic {\n\t\t\tif \"==\" == operator {\n\t\t\t\tbuffer = append(buffer, joinOr(conditions))\n\t\t\t} else {\n\t\t\t\tbuffer = append(buffer, strings.Join(conditions, \" && \"))\n\t\t\t}\n\t\t\tbuffer_length = len(buffer)\n\t\t} else if \"OR\" == logic && \"AND\" == nextLogic {\n\t\t\tif \"==\" == operator {\n\t\t\t\tif len(conditions) > 1 {\n\t\t\t\t\tbuffer = append(buffer, joinOr(conditions))\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = append(buffer, conditions...)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tbuffer = append(buffer, strings.Join(conditions, \" && \"))\n\t\t\t}\n\t\t\tbuffer_length = len(buffer)\n\t\t} else if \"AND\" == logic && \"OR\" == nextLogic {\n\t\t\tif \"==\" == operator {\n\t\t\t\tbuffer[buffer_length-1] += \" && \" + joinOr(conditions)\n\t\t\t} else {\n\t\t\t\tbuffer[buffer_length-1] += \" && \" + strings.Join(conditions, \" && \")\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(buffer) > 0 {\n\t\tif \"OR\" == logic {\n\t\t\tresult = append(result, buffer...)\n\t\t} else {\n\t\t\tresult = append(result, strings.Join(buffer, \" && \"))\n\t\t}\n\t}\n\treturn result\n}\n\nfunc joinOr(data []string) string {\n\tif len(data) > 1 {\n\t\treturn \"(\" + strings.Join(data, \" || \") + \")\"\n\t}\n\treturn data[0]\n}\n\nfunc rule2code(key string, data map[string]string, 
ptr_vars *[]string, padding string) string {\n\tif input, ok := data[\"pluralRule-count-\"+key]; ok {\n\t\tresult := \"\"\n\n\t\tif \"other\" == key {\n\t\t\tif 1 == len(data) {\n\t\t\t\treturn padding + \"return \\\"other\\\"\\n\"\n\t\t\t}\n\t\t\tresult += padding + \"default:\\n\"\n\t\t} else {\n\t\t\tcases := pattern2code(input, ptr_vars)\n\t\t\tresult += \"\\n\" + padding + \"case \" + strings.Join(cases, \", \") + \":\\n\"\n\t\t}\n\t\tresult += padding + \"\\treturn \\\"\" + key + \"\\\"\\n\"\n\t\treturn result\n\t}\n\treturn \"\"\n}\n\nfunc map2code(data map[string]string, ptr_vars *[]string, padding string) string {\n\tif 1 == len(data) {\n\t\treturn rule2code(\"other\", data, ptr_vars, padding)\n\t}\n\tresult := padding + \"switch {\\n\"\n\tresult += rule2code(\"other\", data, ptr_vars, padding)\n\tresult += rule2code(\"zero\", data, ptr_vars, padding)\n\tresult += rule2code(\"one\", data, ptr_vars, padding)\n\tresult += rule2code(\"two\", data, ptr_vars, padding)\n\tresult += rule2code(\"few\", data, ptr_vars, padding)\n\tresult += rule2code(\"many\", data, ptr_vars, padding)\n\tresult += padding + \"}\\n\"\n\treturn result\n}\n\nfunc splitValues(input string) []string {\n\tvar result []string\n\n\tpos := -1\n\tfor idx, char := range input {\n\t\tswitch {\n\t\tcase (char >= '0' && char <= '9') || '.' 
== char:\n\t\t\tif -1 == pos {\n\t\t\t\tpos = idx\n\t\t\t}\n\n\t\t// Inutile de générer un interval lorsque l'on rencontre '~' :)\n\t\tcase ' ' == char || ',' == char || '~' == char:\n\t\t\tif -1 != pos {\n\t\t\t\tresult = append(result, input[pos:idx])\n\t\t\t\tpos = -1\n\t\t\t}\n\t\t}\n\t}\n\n\tif -1 != pos {\n\t\tresult = append(result, input[pos:])\n\t}\n\treturn result\n}\n\nfunc pattern2test(expected, input string, ordinal bool) []Test {\n\tvar result []Test\n\n\tpatterns := strings.Split(input, \"@\")\n\tfor _, pattern := range patterns {\n\t\tif strings.HasPrefix(pattern, \"integer\") {\n\t\t\tfor _, value := range splitValues(pattern[8:]) {\n\t\t\t\tresult = append(result, UnitTest{ordinal, expected, value})\n\t\t\t}\n\t\t} else if strings.HasPrefix(pattern, \"decimal\") {\n\t\t\tfor _, value := range splitValues(pattern[8:]) {\n\t\t\t\tresult = append(result, UnitTest{ordinal, expected, \"\\\"\" + value + \"\\\"\"})\n\t\t\t}\n\t\t}\n\t}\n\treturn result\n}\n\nfunc map2test(ordinals, plurals map[string]string) []Test {\n\tvar result []Test\n\n\tfor _, rule := range []string{\"one\", \"two\", \"few\", \"many\", \"zero\", \"other\"} {\n\t\tif input, ok := ordinals[\"pluralRule-count-\"+rule]; ok {\n\t\t\tresult = append(result, pattern2test(rule, input, true)...)\n\t\t}\n\n\t\tif input, ok := plurals[\"pluralRule-count-\"+rule]; ok {\n\t\t\tresult = append(result, pattern2test(rule, input, false)...)\n\t\t}\n\t}\n\treturn result\n}\n\nfunc culture2code(ordinals, plurals map[string]string, padding string) (string, string, []Test) {\n\tvar code string\n\tvar vars []string\n\n\tif nil == ordinals {\n\t\tcode = map2code(plurals, &vars, padding)\n\t} else {\n\t\tcode = padding + \"if ordinal {\\n\"\n\t\tcode += map2code(ordinals, &vars, padding+\"\\t\")\n\t\tcode += padding + \"}\\n\\n\"\n\t\tcode += map2code(plurals, &vars, padding)\n\t}\n\ttests := map2test(ordinals, plurals)\n\n\tstr_vars := \"\"\n\tmax := len(vars)\n\n\tif max > 0 {\n\t\t// 
http://unicode.org/reports/tr35/tr35-numbers.html#Operands\n\t\t//\n\t\t// Symbol\tValue\n\t\t// n\t absolute value of the source number (integer and decimals).\n\t\t// i\t integer digits of n.\n\t\t// v\t number of visible fraction digits in n, with trailing zeros.\n\t\t// w\t number of visible fraction digits in n, without trailing zeros.\n\t\t// f\t visible fractional digits in n, with trailing zeros.\n\t\t// t\t visible fractional digits in n, without trailing zeros.\n\t\tvar_f := varname('f', vars)\n\t\tvar_i := varname('i', vars)\n\t\tvar_n := varname('n', vars)\n\t\tvar_v := varname('v', vars)\n\t\tvar_t := varname('t', vars)\n\t\tvar_w := varname('w', vars)\n\n\t\tif \"_\" != var_f || \"_\" != var_v || \"_\" != var_t || \"_\" != var_w {\n\t\t\tstr_vars += padding + fmt.Sprintf(\"%s, %s, %s, %s, %s, %s := finvtw(value)\\n\", var_f, var_i, var_n, var_v, var_t, var_w)\n\t\t} else {\n\t\t\tif \"_\" != var_n {\n\t\t\t\tif \"_\" != var_i {\n\t\t\t\t\tstr_vars += padding + \"flt := float(value)\\n\"\n\t\t\t\t\tstr_vars += padding + \"n := math.Abs(flt)\\n\"\n\t\t\t\t\tstr_vars += padding + \"i := int64(flt)\\n\"\n\t\t\t\t} else {\n\t\t\t\t\tstr_vars += padding + \"n := math.Abs(float(value))\\n\"\n\t\t\t\t}\n\t\t\t} else if \"_\" != var_i {\n\t\t\t\tstr_vars += padding + \"i := int64(float(value))\\n\"\n\t\t\t}\n\t\t}\n\n\t\tfor i := 0; i < max; i += 2 {\n\t\t\tk := vars[i]\n\t\t\tv := vars[i+1]\n\n\t\t\tif k != v {\n\t\t\t\tstr_vars += padding + k + \" := \" + v + \"\\n\"\n\t\t\t}\n\t\t}\n\t}\n\treturn str_vars, code, tests\n}\n\nfunc addVar(varname, expr string, ptr_vars *[]string) string {\n\texists := false\n\tfor i := 0; i < len(*ptr_vars); i += 2 {\n\t\tif (*ptr_vars)[i] == varname {\n\t\t\texists = true\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif !exists {\n\t\t*ptr_vars = append(*ptr_vars, varname, expr)\n\t}\n\treturn varname\n}\n\nfunc toVar(expr string, ptr_vars *[]string) string {\n\tvar varname string\n\n\tif pos := strings.Index(expr, \"%\"); -1 != pos 
{\n\t\tk, v := expr[:pos], expr[pos+1:]\n\t\tvarname = k + v\n\t\tif \"n\" == k {\n\t\t\texpr = \"mod(n, \" + v + \")\"\n\t\t} else {\n\t\t\texpr = k + \" % \" + v\n\t\t}\n\t} else {\n\t\tvarname = expr\n\t}\n\treturn addVar(varname, expr, ptr_vars)\n}\n\nfunc varname(char uint8, vars []string) string {\n\tfor i := 0; i < len(vars); i += 2 {\n\t\tif char == vars[i][0] {\n\t\t\treturn string(char)\n\t\t}\n\t}\n\treturn \"_\"\n}\n\nfunc createGoFiles(headers string, ptr_plurals, ptr_ordinals *map[string]map[string]string) error {\n\tvar cultures []string\n\tif \"*\" == *user_culture {\n\t\t// On sait que len(ordinals) <= len(plurals)\n\t\tfor culture, _ := range *ptr_plurals {\n\t\t\tcultures = append(cultures, culture)\n\t\t}\n\t} else {\n\t\tfor _, culture := range strings.Split(*user_culture, \",\") {\n\t\t\tculture = strings.TrimSpace(culture)\n\n\t\t\tif _, ok := (*ptr_plurals)[culture]; !ok {\n\t\t\t\treturn fmt.Errorf(\"Aborted, `%s` not found...\", culture)\n\t\t\t}\n\t\t\tcultures = append(cultures, culture)\n\t\t}\n\t}\n\tsort.Strings(cultures)\n\n\tif 0 == len(cultures) {\n\t\treturn fmt.Errorf(\"Not enough data to create source...\")\n\t}\n\n\tvar items []Source\n\tvar tests []Source\n\n\tfor _, culture := range cultures {\n\t\tfmt.Print(culture)\n\n\t\tplurals := (*ptr_plurals)[culture]\n\n\t\tif nil == plurals {\n\t\t\tfmt.Println(\" \\u2717 - Plural not defined\")\n\t\t} else if _, ok := plurals[\"pluralRule-count-other\"]; !ok {\n\t\t\tfmt.Println(\" \\u2717 - Plural missing mandatory `other` choice...\")\n\t\t} else {\n\t\t\tordinals := (*ptr_ordinals)[culture]\n\t\t\tif nil != ordinals {\n\t\t\t\tif _, ok := ordinals[\"pluralRule-count-other\"]; !ok {\n\t\t\t\t\tfmt.Println(\" \\u2717 - Ordinal missing the mandatory `other` choice...\")\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tvars, code, unit_tests := culture2code(ordinals, plurals, \"\\t\\t\")\n\t\t\titems = append(items, FuncSource{culture, vars, code})\n\n\t\t\tfmt.Println(\" 
\\u2713\")\n\n\t\t\tif len(unit_tests) > 0 {\n\t\t\t\ttests = append(tests, UnitTestSource{culture, unit_tests})\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(tests) > 0 {\n\t\terr := createSource(\"plural_test.tmpl\", \"plural/func_test.go\", headers, tests)\n\t\tif nil != err {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn createSource(\"plural.tmpl\", \"plural/func.go\", headers, items)\n}\n\nfunc createSource(tmpl_filepath, dest_filepath, headers string, items []Source) error {\n\tsource, err := template.ParseFiles(tmpl_filepath)\n\tif nil != err {\n\t\treturn err\n\t}\n\n\tfile, err := os.Create(dest_filepath)\n\tif nil != err {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\n\treturn source.Execute(file, struct {\n\t\tHeaders string\n\t\tTimestamp string\n\t\tItems []Source\n\t}{\n\t\theaders,\n\t\ttime.Now().Format(time.RFC1123Z),\n\t\titems,\n\t})\n}\n\nvar user_culture = flag.String(\"culture\", \"*\", \"Culture subset\")\n\nfunc main() {\n\tflag.Parse()\n\n\tvar headers string\n\n\tordinals, err := get(\"https://github.com/unicode-cldr/cldr-core/raw/master/supplemental/ordinals.json\", \"ordinal\", &headers)\n\tif nil != err {\n\t\tfmt.Println(\" \\u2717\")\n\t\tfmt.Println(err)\n\t} else {\n\t\tfmt.Println(\" \\u2713\")\n\n\t\tplurals, err := get(\"https://github.com/unicode-cldr/cldr-core/raw/master/supplemental/plurals.json\", \"cardinal\", &headers)\n\t\tif nil != err {\n\t\t\tfmt.Println(\" \\u2717\")\n\t\t\tfmt.Println(err)\n\t\t} else {\n\t\t\tfmt.Println(\" \\u2713\")\n\n\t\t\terr = createGoFiles(headers, &plurals, &ordinals)\n\t\t\tif nil != err {\n\t\t\t\tfmt.Println(err, \"(╯°□°)╯︵ ┻━┻\")\n\t\t\t} else {\n\t\t\t\tfmt.Println(\"Succeed 
(ッ)\")\n\t\t\t}\n\t\t}\n\t}\n}\n"},"avg_line_length":{"kind":"number","value":23.4704,"string":"23.4704"},"max_line_length":{"kind":"number","value":125,"string":"125"},"alphanum_fraction":{"kind":"number","value":0.6026313996,"string":"0.602631"},"score":{"kind":"number","value":3.234375,"string":"3.234375"}}},{"rowIdx":787,"cells":{"hexsha":{"kind":"string","value":"149e40eb324765f72b30ac300899143823cc1062"},"size":{"kind":"number","value":1109,"string":"1,109"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"app/src/main/java/com/mapswithme/maps/search/BookingFilterParams.kt"},"max_stars_repo_name":{"kind":"string","value":"dnemov/omim.kt"},"max_stars_repo_head_hexsha":{"kind":"string","value":"8b75114193e141aee14fcbc207a208c4a39de1db"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-03-06T13:56:02.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-03-06T13:56:02.000Z"},"max_issues_repo_path":{"kind":"string","value":"app/src/main/java/com/mapswithme/maps/search/BookingFilterParams.kt"},"max_issues_repo_name":{"kind":"string","value":"dnemov/omim.kt"},"max_issues_repo_head_hexsha":{"kind":"string","value":"8b75114193e141aee14fcbc207a208c4a39de1db"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"app/src/main/java/com/mapswithme/maps/search/BookingFilterParams.kt"},"max_forks_repo_name":{"kind":"string","value":"dnemov/omim.kt"},"max_forks_repo_head_hexsha":{"kind":"string","value":"8b75114193e141aee14fcbc207a208c4a39de1db"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package com.mapswithme.maps.search\n\nimport android.os.Parcel\nimport android.os.Parcelable\nimport com.mapswithme.util.ConnectionState\nimport kotlinx.android.parcel.Parcelize\n\n@Parcelize\nclass BookingFilterParams (val mCheckinMillisec: Long,\n val mCheckoutMillisec: Long,\n vararg val mRooms: Room) : Parcelable {\n @Parcelize\n class Room(val mAdultsCount: Int, val mAgeOfChild: Int) : Parcelable {\n constructor(adultsCount: Int) : this(adultsCount, NO_CHILDREN)\n\n companion object {\n // This value is corresponds to AvailabilityParams::Room::kNoChildren in core.\n const val NO_CHILDREN = -1\n @JvmField\n val DEFAULT = Room(2)\n }\n }\n\n class Factory {\n fun createParams(\n checkIn: Long,\n checkOut: Long,\n vararg rooms: Room\n ): BookingFilterParams? 
{\n return if (ConnectionState.isConnected) BookingFilterParams(\n checkIn,\n checkOut,\n *rooms\n ) else null\n }\n }\n}"},"avg_line_length":{"kind":"number","value":29.972972973,"string":"29.972973"},"max_line_length":{"kind":"number","value":90,"string":"90"},"alphanum_fraction":{"kind":"number","value":0.5933273219,"string":"0.593327"},"score":{"kind":"number","value":3.078125,"string":"3.078125"}}},{"rowIdx":788,"cells":{"hexsha":{"kind":"string","value":"cb493c42819d31b414ebda897c1b87d112dc3574"},"size":{"kind":"number","value":1718,"string":"1,718"},"ext":{"kind":"string","value":"swift"},"lang":{"kind":"string","value":"Swift"},"max_stars_repo_path":{"kind":"string","value":"XLsn0wQuora/Classes/APIManager/APIManager.swift"},"max_stars_repo_name":{"kind":"string","value":"XLsn0w/XLsn0wQuora"},"max_stars_repo_head_hexsha":{"kind":"string","value":"93d803a321b1696d0507df8294581c85058a2a3e"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":10,"string":"10"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-10-25T08:49:59.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2018-06-16T01:21:24.000Z"},"max_issues_repo_path":{"kind":"string","value":"XLsn0wQuora/Classes/APIManager/APIManager.swift"},"max_issues_repo_name":{"kind":"string","value":"XLsn0w/XLsn0wQuora"},"max_issues_repo_head_hexsha":{"kind":"string","value":"93d803a321b1696d0507df8294581c85058a2a3e"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"XLsn0wQuora/Classes/APIManager/APIManager.swift"},"max_forks_repo_name":{"kind":"string","value":"XLsn0w/XLsn0wQuora"},"max_forks_repo_head_hexsha":{"kind":"string","value":"93d803a321b1696d0507df8294581c85058a2a3e"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2018-03-05T07:21:09.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2018-03-05T07:21:09.000Z"},"content":{"kind":"string","value":"\n\nimport Foundation\nimport Moya\n\n//: URL基地址\nlet BASE_URL = \"http://english.6ag.cn/\"\n\nenum APIManager {\n case getLaunchImg\n case getNewsList\n case getMoreNews(String)\n case getThemeList\n case getThemeDesc(Int)\n case getNewsDesc(Int)\n}\n\nextension APIManager: TargetType {\n /// The target's base `URL`.\n var baseURL: URL {\n return URL.init(string: \"http://news-at.zhihu.com/api/\")!\n }\n \n /// The path to be appended to `baseURL` to form the full `URL`.\n var path: String {\n switch self {\n case .getLaunchImg:\n return \"7/prefetch-launch-images/750*1142\"\n case .getNewsList:\n return \"4/news/latest\"\n case .getMoreNews(let date):\n return \"4/news/before/\" + date\n case .getThemeList:\n return \"4/themes\"\n case .getThemeDesc(let id):\n return \"4/theme/\\(id)\"\n case .getNewsDesc(let id):\n return \"4/news/\\(id)\"\n }\n }\n \n /// The HTTP method used in the request.\n var method: Moya.Method {\n return .get\n }\n \n /// The parameters to be incoded in the request.\n var parameters: [String: Any]? 
{\n return nil\n }\n \n /// The method used for parameter encoding.\n var parameterEncoding: ParameterEncoding {\n return URLEncoding.default\n }\n \n /// Provides stub data for use in testing.\n var sampleData: Data {\n return \"\".data(using: String.Encoding.utf8)!\n }\n \n /// The type of HTTP task to be performed.\n var task: Task {\n return .request\n }\n \n /// Whether or not to perform Alamofire validation. Defaults to `false`.\n var validate: Bool {\n return false\n }\n}\n"},"avg_line_length":{"kind":"number","value":23.8611111111,"string":"23.861111"},"max_line_length":{"kind":"number","value":76,"string":"76"},"alphanum_fraction":{"kind":"number","value":0.5820721769,"string":"0.582072"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":789,"cells":{"hexsha":{"kind":"string","value":"3334b81c461f0f87292473253147e7b45b17a48f"},"size":{"kind":"number","value":1705,"string":"1,705"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"scripts/wsi_bot_show_regions.py"},"max_stars_repo_name":{"kind":"string","value":"higex/qpath"},"max_stars_repo_head_hexsha":{"kind":"string","value":"0377f2fdadad6e02ecde8ba2557fe9b957280fa1"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":6,"string":"6"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-03-18T19:17:42.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2019-05-05T14:57:31.000Z"},"max_issues_repo_path":{"kind":"string","value":"WSItk/tools/wsi_bot_show_regions.py"},"max_issues_repo_name":{"kind":"string","value":"vladpopovici/WSItk"},"max_issues_repo_head_hexsha":{"kind":"string","value":"02db9dbf1148106a576d7b4dd7965c73607efdae"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"WSItk/tools/wsi_bot_show_regions.py"},"max_forks_repo_name":{"kind":"string","value":"vladpopovici/WSItk"},"max_forks_repo_head_hexsha":{"kind":"string","value":"02db9dbf1148106a576d7b4dd7965c73607efdae"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":4,"string":"4"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2015-11-29T14:47:25.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2019-11-28T03:16:39.000Z"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\"\"\"\nSHOW_REGIONS\n\nEmphasizes some regions in the image, by decreasing the importance of the rest.\n\n@author: vlad\n\"\"\"\n\nfrom __future__ import (absolute_import, division, print_function, unicode_literals)\nfrom builtins import *\n\nimport argparse as opt\nimport skimage.io\nimport numpy as np\n\nfrom util.storage import ModelPersistence\nfrom util.visualization import enhance_patches\n\n__author__ = 'vlad'\n__version__ = 0.1\n\n\ndef main():\n p = opt.ArgumentParser(description=\"\"\"\n Emphasizes the patches with a given code (from BoT) by reducing the contrast of the rest of the image.\n \"\"\"\n )\n p.add_argument('image', action='store', help='image file name')\n p.add_argument('res_image', action='store', help='name of the resulting image')\n p.add_argument('bot_result', action='store', help='a file with BoT coding for regions')\n p.add_argument('bot_code', action='store', help='the code of the regions to be emphasized', type=int)\n p.add_argument('-g', '--gamma', action='store', nargs=1, type=float,\n help='the gamma level of the background regions',\n default=0.2)\n args = p.parse_args()\n\n img = skimage.io.imread(args.image)\n regs = []\n 
with ModelPersistence(args.bot_result, 'r', format='pickle') as d:\n block_codes = d['l1_codes']\n regs = d['regs']\n\n #print(block_codes)\n #print(args.bot_code)\n # filter regions of interest:\n roi = [ regs[k] for k in np.where(np.array(block_codes, dtype=np.int) == args.bot_code)[0] ]\n\n #print(roi)\n\n img = enhance_patches(img, roi, _gamma=args.gamma)\n\n skimage.io.imsave(args.res_image, img)\n\n return\n\n\nif __name__ == '__main__':\n main()"},"avg_line_length":{"kind":"number","value":28.8983050847,"string":"28.898305"},"max_line_length":{"kind":"number","value":106,"string":"106"},"alphanum_fraction":{"kind":"number","value":0.6744868035,"string":"0.674487"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":790,"cells":{"hexsha":{"kind":"string","value":"042014a0d273822380e8703c8cba04e1914fcb3d"},"size":{"kind":"number","value":5230,"string":"5,230"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"Develop/app.js"},"max_stars_repo_name":{"kind":"string","value":"PopSizzle/TeamProfileGenerator"},"max_stars_repo_head_hexsha":{"kind":"string","value":"5cfa650fc37824f934211b676fcd5c7a5984fe8d"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Develop/app.js"},"max_issues_repo_name":{"kind":"string","value":"PopSizzle/TeamProfileGenerator"},"max_issues_repo_head_hexsha":{"kind":"string","value":"5cfa650fc37824f934211b676fcd5c7a5984fe8d"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-05-11T10:37:20.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-05-11T10:37:20.000Z"},"max_forks_repo_path":{"kind":"string","value":"Develop/app.js"},"max_forks_repo_name":{"kind":"string","value":"PopSizzle/TeamProfileGenerator"},"max_forks_repo_head_hexsha":{"kind":"string","value":"5cfa650fc37824f934211b676fcd5c7a5984fe8d"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"const Manager = require(\"./lib/Manager\");\nconst Engineer = require(\"./lib/Engineer\");\nconst Intern = require(\"./lib/Intern\");\nconst inquirer = require(\"inquirer\");\nconst path = require(\"path\");\nconst fs = require(\"fs\");\nlet employees = [];\n\nconst OUTPUT_DIR = path.resolve(__dirname, \"output\");\nconst outputPath = path.join(OUTPUT_DIR, \"team.html\");\n\nconst render = require(\"./lib/htmlRenderer\");\n\n// Welcome message\nfunction beginTeam () {\n\n console.log(\"Welcome to the team creator!\")\n console.log(\"We will now begin to construct your team profile\");\n console.log(\"------------------------------------------\")\n\n enterTeamMember();\n}\n\n// Function for adding a team member\nfunction enterTeamMember() {\n // Inquirer prompt for basic details\n inquirer.prompt([\n {\n type: \"input\",\n message: \"Please enter the name of your employee.\",\n name: \"name\"\n },\n {\n type: \"input\",\n message: \"Please enter the id of your employee.\",\n name: \"id\"\n },\n {\n type: \"input\",\n message: \"Please enter the email of your employee.\",\n name: \"email\"\n },\n {\n type: \"list\",\n message: \"Please select this employee's role on your team.\",\n name: 
\"role\",\n choices: [\n \"Manager\",\n \"Engineer\",\n \"Intern\",\n ]\n }\n ])\n .then(function(response) {\n // Switch case for different classes of employees\n switch(response.role) {\n // If manager class\n case \"Manager\":\n\n // Check if there is already a manager\n let isManager = employees.filter(employee => employee.getRole() === \"Manager\");\n console.log(isManager);\n console.log(isManager.length);\n // If there is a manager go back and try again\n if(isManager.length > 0){\n console.log(\"Your team already has a manager, please go back and select a different option.\")\n return nextStep();\n }\n\n inquirer.prompt([\n {\n type: \"input\",\n message: \"Please enter your manager's office number.\",\n name: \"officeNumber\"\n }\n ])\n .then(function(response1){\n response.officeNumber = response1.officeNumber;\n console.log(response);\n const manager = new Manager(response.name, response.id, response.email, response.officeNumber);\n employees.push(manager);\n nextStep();\n })\n break;\n // If Engineer class\n case \"Engineer\":\n inquirer.prompt([\n {\n type: \"input\",\n message: \"Please enter your Engineer's Github username.\",\n name: \"github\"\n }\n ])\n .then(function(response1){\n response.github = response1.github;\n console.log(response);\n const engineer = new Engineer(response.name, response.id, response.email, response.github);\n employees.push(engineer);\n nextStep();\n })\n break;\n // If neither, must be intern class\n default:\n inquirer.prompt([\n {\n type: \"input\",\n message: \"Please enter your Intern's school.\",\n name: \"school\"\n }\n ])\n .then(function(response1){\n response.school = response1.school;\n console.log(response);\n const intern = new Intern(response.name, response.id, response.email, response.school);\n employees.push(intern);\n nextStep();\n })\n } \n }) \n}\n\n// Function for switching between adding team members, printing team, and exiting.\nfunction nextStep() {\n inquirer.prompt([\n {\n type: \"list\",\n 
message: \"what would you like to do now?\",\n name: \"continue\",\n choices: [\n \"Add another employee\",\n \"Print my team to an html file\",\n \"Exit\"\n ]\n\n }\n ])\n .then(function(response){\n // Switch case to handle inquirer response\n switch(response.continue) {\n // Add another employee\n case \"Add another employee\":\n enterTeamMember();\n break;\n // Print the team\n case \"Print my team to an html file\":\n \n console.log(employees);\n // Using the render function, write the employees to the html templates.\n fs.writeFile(\"./output/index.html\", render(employees), function(err) {\n\n if (err) {\n return console.log(err);\n }\n \n console.log(\"Success!\");\n \n });\n break;\n // Exit the app\n default:\n return;\n }\n })\n}\n\nbeginTeam();\n"},"avg_line_length":{"kind":"number","value":31.3173652695,"string":"31.317365"},"max_line_length":{"kind":"number","value":111,"string":"111"},"alphanum_fraction":{"kind":"number","value":0.4845124283,"string":"0.484512"},"score":{"kind":"number","value":3.1875,"string":"3.1875"}}},{"rowIdx":791,"cells":{"hexsha":{"kind":"string","value":"1e581aecf2f32077037563a16d8b0ce759776e2a"},"size":{"kind":"number","value":2765,"string":"2,765"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"CountDownLua.lua"},"max_stars_repo_name":{"kind":"string","value":"yuzh0816/Count-Down"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c546915bea08a1a43356380095ad2a0d2047ce75"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-12-18T01:07:41.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-12-18T01:07:41.000Z"},"max_issues_repo_path":{"kind":"string","value":"CountDownLua.lua"},"max_issues_repo_name":{"kind":"string","value":"yuzh0816/Count-Down"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c546915bea08a1a43356380095ad2a0d2047ce75"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"CountDownLua.lua"},"max_forks_repo_name":{"kind":"string","value":"yuzh0816/Count-Down"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c546915bea08a1a43356380095ad2a0d2047ce75"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"PROPERTIES = {year=0, month=0, day=0, hour=0, min=0, sec=0}\ntotalTime = 0\nstartTime = 0\nisWorkOvertime = false\n\nYYYY = 2021\nMM = 1\nDD = 5\nH = 8\nM = 0\nS = 0\n\nfunction Initialize()\n\n\tstringDate = tolua.cast(SKIN:GetMeter(\"Date\"), \"CMeterString\")\n\tstringHour = tolua.cast(SKIN:GetMeter(\"Hour\"), \"CMeterString\")\n\tstringMinute = tolua.cast(SKIN:GetMeter(\"Minute\"), \"CMeterString\")\n\tstringSecond = tolua.cast(SKIN:GetMeter(\"Second\"), \"CMeterString\")\n\tstringmSecond = tolua.cast(SKIN:GetMeter(\"mSecond\"), \"CMeterString\")\n\n\tstartTime = os.time(getStartWorkTime())\n\tcountdownTime = getOffWorkTime()\n\ttotalTime = os.time(countdownTime)-startTime\n\tprogress = 0\n\t\nend -- function 
Initialize\n\nfunction Update()\n\t\n\tlocal rLeft = os.time(countdownTime) - os.time()\n\tif rLeft < 0 then\n\t\trLeft = 0\n\tend\n\t\n\tlocal dLeft = math.floor(rLeft/60/60/24)\n\tlocal hLeft = math.floor(rLeft/60/60)%24\n\tlocal mLeft = math.floor(rLeft/60)%60\n\tlocal sLeft = math.floor(rLeft)%60\n\tlocal msLeft = math.floor(1000-(os.clock()*1000)%1000)\n\n\tif rLeft == 0 then\n\t\tstringmSecond:SetText(0)\n\telse\n\t\tstringmSecond:SetText(msLeft)\n\tend\n\t\n\tif totalTime > 0 and progress <= 1 then\n\t\tprogress = (os.time()-startTime)/totalTime\n\t\tlocal progressWidth = getMeterWidth() * progress\n\t\tprogressMeter = SKIN:GetMeter(\"progress\")\n\t\tprogressMeter:SetW(progressWidth)\n\t\t\n\t\tlocal color = getCurrentColor(progress)\n\t\t--myMeter:SetSolidColor(color)\n\t\t--myMeter:SetOption('SolidColor', color)\n\tend\n\t\n\tstringDate:SetText(dLeft)\n\tstringHour:SetText(hLeft)\n\tstringMinute:SetText(mLeft)\n\tstringSecond:SetText(sLeft)\n\nend -- function Update\n\nfunction getMeterWidth()\n\tlocal meterWidth = SKIN:GetMeter(\"Note\"):GetW() \n\t\t+ SKIN:GetMeter(\"Date\"):GetW()\n\t\t+ SKIN:GetMeter(\"Hour\"):GetW()\n\t\t+ SKIN:GetMeter(\"Minute\"):GetW()\n\t\t+ SKIN:GetMeter(\"Second\"):GetW()\n\treturn meterWidth\nend\n\nfunction getOffWorkTime()\n\tlocal w = os.date(\"%w\")\n\tlocal hour = 21\n\tif w == \"5\" then\n\t\thour = 18\n\tend\n\tif isWorkOvertime == false then\n\t\thour = 18\n\tend\n\t\n\treturn {year=YYYY, month=MM, day=DD, hour=H, min=M, sec=S}\nend\n\nfunction getStartWorkTime()\n\treturn {year=2020, month=12, day=16, hour=15, min=33, sec=35}\nend\n\nfunction getCurrentColor(progress)\n\tlocal startR = 30\n\tlocal startG = 199\n\tlocal startB = 230\n\t\n\tlocal endR = 146\n\tlocal endG = 185\n\tlocal endB = 1\n\t\n\tlocal currentR = getCurrentValue(startR, endR, progress)\n\tlocal currentG = getCurrentValue(startG, endG, progress)\n\tlocal currentB = getCurrentValue(startB, endB, progress)\n\t\n\tlocal RGB = {}\n RGB.r = currentR\n 
RGB.g = currentG\n RGB.b = currentB\n\treturn RGB\nend\n\nfunction getCurrentValue(startValue, endValue, progress)\n\tlocal left = endValue - startValue\n\tif left == 0 then\n\t\treturn startValue\n\tend\n\t\n\tlocal currentValue = startValue + left * progress\n\treturn currentValue\nend"},"avg_line_length":{"kind":"number","value":23.2352941176,"string":"23.235294"},"max_line_length":{"kind":"number","value":69,"string":"69"},"alphanum_fraction":{"kind":"number","value":0.7139240506,"string":"0.713924"},"score":{"kind":"number","value":3.40625,"string":"3.40625"}}},{"rowIdx":792,"cells":{"hexsha":{"kind":"string","value":"ebaa870ae82fce283159efbef5235534c057cc7e"},"size":{"kind":"number","value":1271,"string":"1,271"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"src/sinks/aws_lambda/run.rs"},"max_stars_repo_name":{"kind":"string","value":"savaki/oura"},"max_stars_repo_head_hexsha":{"kind":"string","value":"05527037c8ba2e2810554684f492339487b14a19"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":124,"string":"124"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-12-04T11:13:09.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-26T10:51:19.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/sinks/aws_lambda/run.rs"},"max_issues_repo_name":{"kind":"string","value":"savaki/oura"},"max_issues_repo_head_hexsha":{"kind":"string","value":"05527037c8ba2e2810554684f492339487b14a19"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":95,"string":"95"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-12-14T05:44:16.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-31T22:38:28.000Z"},"max_forks_repo_path":{"kind":"string","value":"src/sinks/aws_lambda/run.rs"},"max_forks_repo_name":{"kind":"string","value":"savaki/oura"},"max_forks_repo_head_hexsha":{"kind":"string","value":"05527037c8ba2e2810554684f492339487b14a19"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":16,"string":"16"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-12-09T19:07:55.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-28T16:01:05.000Z"},"content":{"kind":"string","value":"use aws_sdk_lambda::{types::Blob, Client};\nuse serde_json::json;\nuse std::sync::Arc;\n\nuse crate::{model::Event, pipelining::StageReceiver, utils::Utils, Error};\n\nasync fn invoke_lambda_function(\n client: Arc,\n function_name: &str,\n event: &Event,\n) -> Result<(), Error> {\n let body = json!(event).to_string();\n\n let req = client\n .invoke()\n .function_name(function_name)\n .payload(Blob::new(body));\n\n let res = req.send().await?;\n\n log::trace!(\"Lambda invoke response: {:?}\", res);\n\n Ok(())\n}\n\npub fn writer_loop(\n input: StageReceiver,\n client: Client,\n function_name: &str,\n utils: Arc,\n) -> Result<(), Error> {\n let client = Arc::new(client);\n\n let rt = tokio::runtime::Builder::new_current_thread()\n .enable_time()\n .enable_io()\n .build()?;\n\n for event in input.iter() {\n // notify the pipeline where we are\n utils.track_sink_progress(&event);\n\n let client = client.clone();\n\n let result = rt.block_on(invoke_lambda_function(client, function_name, &event));\n\n if let Err(err) = result {\n log::error!(\"unrecoverable error invoking lambda 
function: {:?}\", err);\n return Err(err);\n }\n }\n\n Ok(())\n}\n"},"avg_line_length":{"kind":"number","value":23.1090909091,"string":"23.109091"},"max_line_length":{"kind":"number","value":88,"string":"88"},"alphanum_fraction":{"kind":"number","value":0.5924468922,"string":"0.592447"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":793,"cells":{"hexsha":{"kind":"string","value":"f06f16ee399ccb9faac16cda8b08d3cc4df552cb"},"size":{"kind":"number","value":1480,"string":"1,480"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"projectenv/main/forms.py"},"max_stars_repo_name":{"kind":"string","value":"rzsaglam/project-env"},"max_stars_repo_head_hexsha":{"kind":"string","value":"f4c02b15cf924ba5d69d8a4a89efcc686b73aa9c"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"projectenv/main/forms.py"},"max_issues_repo_name":{"kind":"string","value":"rzsaglam/project-env"},"max_issues_repo_head_hexsha":{"kind":"string","value":"f4c02b15cf924ba5d69d8a4a89efcc686b73aa9c"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"projectenv/main/forms.py"},"max_forks_repo_name":{"kind":"string","value":"rzsaglam/project-env"},"max_forks_repo_head_hexsha":{"kind":"string","value":"f4c02b15cf924ba5d69d8a4a89efcc686b73aa9c"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from django import forms\nfrom django.contrib.auth import models\nfrom django.db.models.base import Model\nfrom django.forms import ModelForm, fields\nfrom .models import Paint\nfrom django import forms\nfrom django.contrib.auth.models import User\nfrom django.contrib.auth.forms import UserCreationForm, AuthenticationForm\n\n\nclass StockForm(forms.ModelForm):\n class Meta:\n model = Paint\n fields = \"__all__\"\n\n\nclass PaintForm(forms.ModelForm):\n class Meta:\n model = Paint\n fields = \"__all__\"\n\n def save(self, commit=True):\n paint = super(PaintForm, self).save(commit=False)\n if commit:\n paint.save()\n return paint\n\n\nclass NewUserForm(UserCreationForm):\n username = forms.CharField(max_length=200, required=True, widget=forms.TextInput(\n attrs={'class': 'input-group-text'}))\n\n class Meta:\n model = User\n fields = (\"username\", \"password1\", \"password2\")\n\n def save(self, commit=True):\n user = super(NewUserForm, self).save(commit=False)\n if commit:\n user.save()\n return user\n\n\nclass LoginForm(AuthenticationForm):\n username = forms.CharField(max_length=200, required=True, widget=forms.TextInput(\n attrs={'class': 'input-group-text'}))\n password = forms.CharField(max_length=200, required=True, widget=forms.TextInput(\n attrs={'class': 'input-group-text'}))\n\n class Meta:\n model = User\n fields = (\"username\", 
\"password\")\n"},"avg_line_length":{"kind":"number","value":27.9245283019,"string":"27.924528"},"max_line_length":{"kind":"number","value":85,"string":"85"},"alphanum_fraction":{"kind":"number","value":0.6675675676,"string":"0.667568"},"score":{"kind":"number","value":3.28125,"string":"3.28125"}}},{"rowIdx":794,"cells":{"hexsha":{"kind":"string","value":"3302f95944549893e6c718830b8f06c614895c10"},"size":{"kind":"number","value":8700,"string":"8,700"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Python/cs611python.py"},"max_stars_repo_name":{"kind":"string","value":"david145/CS6112018"},"max_stars_repo_head_hexsha":{"kind":"string","value":"7a74c239bf5157507594157b5871c9d0c70fcc23"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Python/cs611python.py"},"max_issues_repo_name":{"kind":"string","value":"david145/CS6112018"},"max_issues_repo_head_hexsha":{"kind":"string","value":"7a74c239bf5157507594157b5871c9d0c70fcc23"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2018-10-29T17:41:08.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2018-10-29T17:41:08.000Z"},"max_forks_repo_path":{"kind":"string","value":"Python/cs611python.py"},"max_forks_repo_name":{"kind":"string","value":"david145/CS6112018"},"max_forks_repo_head_hexsha":{"kind":"string","value":"7a74c239bf5157507594157b5871c9d0c70fcc23"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"print(\"\\n\")\nprint(\"PythonExercises-v2 by David Bochan\")\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 1 ===\")\n\nprint(\"\\n\")\nprint(\"(a) 5 / 3 = \" + str(5 / 3))\nprint(\"=> with python3 you can receive a float even if you divide two \\\nintegers\")\n\nprint(\"\\n\")\nprint(\"(b) 5 % 3 = \" + str(5 % 3))\nprint(\"=> % is the modulus which divides left hand operand by right hand \\\noperand and returns remainder\")\n\nprint(\"\\n\")\nprint(\"(c) 5.0 / 3 = \" + str(5.0 / 3))\nprint(\"=> outputs a float number.. there is no difference if a plain 5 or 5.0 \\\nis used\")\n\nprint(\"\\n\")\nprint(\"(d) 5 / 3.0 = \" + str(5 / 3.0))\nprint(\"=> outputs a float number.. there is no difference if a plain 3 or 3.0 \\\nis used\")\n\nprint(\"\\n\")\nprint(\"(e) 5.2 % 3 = \" + str(5.2 % 3))\nprint(\"=> % is the modulus which divides left hand operand by right hand \\\noperand and returns remainder\")\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 2 ===\")\n\nprint(\"\\n\")\nprint(\"(a) 2000.3 ** 200 = ...\")\ntry:\n print(str(2000.3 ** 200))\nexcept OverflowError as e:\n print(\"=> The python3 interpreter throws a OverflowError \" + str(e))\n\nprint(\"\\n\")\nprint(\"(b) 1.0 + 1.0 - 1.0 = \" + str(1.0 + 1.0 - 1.0))\nprint(\"=> Addition and substraction of float values which results in another \\\nfloat value\")\n\nprint(\"\\n\")\nprint(\"(c) 1.0 + 1.0e20 - 1.0e20 = \" + str(1.0 + 1.0e20 - 1.0e20))\nprint(\"=> 1.0 + 1.0e20 is rounded as close as possible, which is 1.0e20 and \\\nafter substraction of it again it results in 0.0\")\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 3 ===\")\n\nprint(\"\\n\")\nprint(\"(a) float(123) = \" + str(float(123)))\nprint(\"=> Takes the integer value 123 as input and casts it to the float \\\nvalue 123.0\")\n\nprint(\"\\n\")\nprint(\"(b) float('123') = \" + 
str(float('123')))\nprint(\"=> Takes the string '123' as input and casts it to the float value \\\n123.0\")\n\nprint(\"\\n\")\nprint(\"(c) float('123.23') = \" + str(float('123.23')))\nprint(\"=> Takes the string '123.23' as input and casts it to the float value \\\n123.23\")\n\nprint(\"\\n\")\nprint(\"(d) int(123.23) = \" + str(int(123.23)))\nprint(\"=> Takes the float 123.23 as input and casts it to the integer value \\\n123\")\n\nprint(\"\\n\")\nprint(\"(e) int('123.23') = ...\")\ntry:\n int('123.23')\nexcept ValueError as e:\n print(\"=> The int() function can't cast a string to float to int and thus \\\nthrows a ValueError (\" + str(e) + \")\")\n\nprint(\"\\n\")\nprint(\"(f) int(float('123.23')) = \" + str(int(float(123.23))))\nprint(\"=> As we cast the string to float first, we can use it as a input to \\\nthe int() function and receive a integer\")\n\nprint(\"\\n\")\nprint(\"(g) str(12) = \" + str(12))\nprint(\"=> Takes the integer 12 as input and casts it to the string '12'\")\n\nprint(\"\\n\")\nprint(\"(h) str(12.2) = \" + str(12.2))\nprint(\"=> Takes the float 12.2 as input and casts it to the string '12.2'\")\n\nprint(\"\\n\")\nprint(\"(i) bool('a') = \" + str(bool('a')))\nprint(\"=> Because an actual value (the character 'a') is passed to the bool() \\\nfunction, True is returned\")\n\nprint(\"\\n\")\nprint(\"(j) bool(0) = \" + str(bool(0)))\nprint(\"=> The boolean value False equals 0 in python, thus False is returned\")\n\nprint(\"\\n\")\nprint(\"(k) bool(0.1) = \" + str(bool(0.1)))\nprint(\"=> Because a value != 0 is provided in the bool() function, \\\nit returns True\")\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 4 ===\")\n\nprint(\"\\n\")\nprint(\"range(5) = {}\".format(range(5)))\nprint(\"=> range(5) returns a sequence of integers from 0 to 4. for i in \\\nrange(5) is consequently iterating over the sequence of integers\")\n\nprint(\"\\n\")\nprint(\"type(range(5)) = {}\".format(type(range(5))))\nprint(\"=> The type function returns an object's class. 
For range(5) the class \\\nrange is returned\")\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 5 ===\")\n\nprint(\"\\n\")\n\ndef div_by_number(numbers_list, max_found):\n number_found = 0\n x = 1\n\n while number_found < max_found:\n for number in numbers_list:\n if x % number == 0:\n print(x)\n number_found = number_found + 1\n \n x = x + 1\n \nnumbers_list = [5, 7, 11]\nprint(\"div_by_number({}, 20)\\n\".format(numbers_list))\ndiv_by_number(numbers_list, 20)\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 6 ===\")\n\nprint(\"\\n\")\nprint(\"(a) & (b)\\n\")\n\ndef is_prime(n):\n if n <= 3:\n return n > 1\n elif n % 2 == 0 or n % 3 == 0:\n return False\n \n i = 5\n\n while i * i <= n:\n if n % i == 0 or n % (i + 2) == 0:\n return False\n i = i + 6\n \n return True\n\nprint(\"is_prime(0) = {}\\n\".format(is_prime(0)))\nprint(\"is_prime(1) = {}\\n\".format(is_prime(1)))\nprint(\"is_prime(3) = {}\\n\".format(is_prime(3)))\nprint(\"is_prime(7) = {}\\n\".format(is_prime(7)))\nprint(\"is_prime(8) = {}\\n\".format(is_prime(8)))\nprint(\"is_prime(112331) = {}\".format(is_prime(112331)))\n\ndef primes_up_to(n):\n\n primes = []\n\n for i in range(0, n):\n if is_prime(i):\n primes.append(i)\n\n return primes\n\nprint(\"\\n(c) primes_up_to(100) = {}\".format(primes_up_to(100)))\n\ndef first_primes(n):\n\n primes = []\n i = 0\n\n while len(primes) < n:\n if is_prime(i):\n primes.append(i)\n \n i = i + 1\n\n return primes\n\nprint(\"\\n(d) first_primes(12) = {}\".format(first_primes(12)))\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 7 ===\")\n\nprint(\"\\n\")\nprint(\"(a) print_elements(elements_list)\\n\")\n\ndef print_elements(elements):\n for element in elements:\n print(element)\n\nelements_list = [12, \"abc\", 92.2, \"hello\"]\n\nprint_elements(elements_list)\n\nprint(\"\\n(b) print_elements_reverse(elements_list)\\n\")\n\ndef print_elements_reverse(elements):\n for element in elements[::-1]:\n print(element)\n\nprint_elements_reverse(elements_list)\n\nprint(\"\\n(c) 
len_elements(elements_list)\\n\")\n\ndef len_elements(elements):\n count = 0\n\n for _ in elements:\n count = count + 1\n\n return count\n\nprint(\"len_elements(elements_list) = {}\".format(len_elements(elements_list)))\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 8 ===\")\n\na = [12, \"abc\", 92.2, \"hello\"]\n\nprint(\"\\n\")\nprint(\"(a) a = {}\".format(a))\n\nprint(\"\\n(b) b = a\")\n\nb = a\n\nprint(\"\\n(c) b[1] = 'changed'\")\n\nb[1] = \"changed\"\n\nprint(\"\\n(d) a = {}\".format(a))\nprint(\"=> b is binding to the same object as a, so when b[1] was changed \\\na[1] also shows the change\")\n\nprint(\"\\n(e) c = a[:]\")\n\nc = a[:]\n\nprint(\"\\n(f) c[2] = 'also changed'\")\n\nc[2] = \"also changed\"\n\nprint(\"\\n(g) a = {}\".format(a))\nprint(\"=> A copy of the list a was created with a[:] and assigned to c, thus \\\na[2] did not change when c[2] changed\")\n\ndef set_first_elem_to_zero(l):\n if len(l) > 0:\n l[0] = 0\n\n return l\n\nnumbers = [12, 21, 214, 3]\n\nprint(\"\\n...\")\n\nprint(\"\\nnumbers = {}\".format(numbers))\nprint(\"set_first_elem_to_zero(numbers) = \\\n{}\".format(set_first_elem_to_zero(numbers)))\nprint(\"numbers = {}\".format(numbers))\nprint(\"=> The original list also changed, even though we did not assign \\\nthe returned list to it (same binding)\")\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 9 ===\")\n\nelements = [[1,3], [3,6]]\n\nprint(\"\\n\")\nprint(\"elements = {}\".format(elements))\n\nflat_list = lambda l: [element for sublist in l for element in sublist]\n\nprint(\"flat_list(elements) = {}\".format(flat_list(elements)))\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 10 ===\")\n\nimport matplotlib.pyplot as plt\nimport numpy as np\n\nt = np.arange(0.0, 2.0, 0.01)\ns = np.sin(t - 2) ** 2 * np.e ** (-t ** 2)\n\nfig, ax = plt.subplots()\nax.plot(t, s)\n\nax.set(xlabel='x', ylabel='y',\n title='Exercise 10')\nplt.show()\n\nprint(\"\\n\")\nprint(\"See Figure_1.png\")\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 11 ===\")\n\ndef 
product_iteration(numbers):\n product = 0\n\n if len(numbers) > 0:\n product = numbers.pop()\n\n for number in numbers:\n product = product * number\n\n return product\n\nfrom functools import reduce \n\ndef product_recursive(numbers):\n if len(numbers) > 0:\n return reduce((lambda x, y: x * y), numbers)\n else:\n return 0\n\nnumbers = [21, 12, 10, 128, 2]\nempty_list = []\n\nprint(\"\\n\")\nprint(\"product_iteration(numbers) = {}\".format(product_iteration(numbers)))\nprint(\"product_iteration(empty_list) = \\\n{}\".format(product_iteration(empty_list)))\n\nnumbers = [21, 12, 10, 128, 2]\n\nprint(\"\\n\")\nprint(\"product_recursive(numbers) = {}\".format(product_recursive(numbers)))\nprint(\"product_recursive(empty_list) = \\\n{}\".format(product_recursive(empty_list)))\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 12 ===\")\n\nprint(\"\\n\\nGood to know!\")\n\nprint(\"\\n\")\nprint(\"=== EXERCISE 13 ===\")\n\ndef read_file(filename):\n with open(filename, 'r') as myfile:\n data=myfile.read().replace('\\n', '')\n\n return data\n\nfile_content = read_file(\"emails.txt\")\n\nprint(\"\\n\\nread_file('emails.txt')\\n\\n{}\".format(file_content))\n\nimport re\n\ndef extract_email(string):\n match = re.findall(r'[\\w\\.-]+@[\\w\\.-]+\\.\\w+', string)\n\n return 
match\n\nprint(\"\\nextract_email(file_content)\\\n\\n\\n{}\".format(extract_email(file_content)))"},"avg_line_length":{"kind":"number","value":23.1382978723,"string":"23.138298"},"max_line_length":{"kind":"number","value":79,"string":"79"},"alphanum_fraction":{"kind":"number","value":0.608045977,"string":"0.608046"},"score":{"kind":"number","value":3.21875,"string":"3.21875"}}},{"rowIdx":795,"cells":{"hexsha":{"kind":"string","value":"c3da8241c82bf2dfbd9560002b07070e56d88b16"},"size":{"kind":"number","value":3040,"string":"3,040"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"agingMap_test.go"},"max_stars_repo_name":{"kind":"string","value":"520MianXiangDuiXiang520/agingMap"},"max_stars_repo_head_hexsha":{"kind":"string","value":"baf954f604bef9c0e3a9040e5fa331bf736495d4"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"agingMap_test.go"},"max_issues_repo_name":{"kind":"string","value":"520MianXiangDuiXiang520/agingMap"},"max_issues_repo_head_hexsha":{"kind":"string","value":"baf954f604bef9c0e3a9040e5fa331bf736495d4"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"agingMap_test.go"},"max_forks_repo_name":{"kind":"string","value":"520MianXiangDuiXiang520/agingMap"},"max_forks_repo_head_hexsha":{"kind":"string","value":"baf954f604bef9c0e3a9040e5fa331bf736495d4"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package agingMap\n\nimport (\n\t\"fmt\"\n\t\"math/rand\"\n\t\"sync\"\n\t\"testing\"\n\t\"time\"\n)\n\nfunc ExampleAgingMap_Delete() {\n\tam := NewAgingMap()\n\tam.Store(\"key\", \"value\", time.Second)\n\tam.Delete(\"key\")\n}\n\nfunc ExampleAgingMap_Store() {\n\tam := NewAgingMap()\n\tam.Store(\"key\", \"value\", time.Second)\n}\n\nfunc ExampleAgingMap_Load() {\n\tam := NewAgingMap()\n\tch := make(chan string, 10)\n\tfor i := 0; i < 10; i++ {\n\t\tgo func(i int) {\n\t\t\tfor {\n\t\t\t\tkey := fmt.Sprintf(\"%d: %d\", i, time.Now().UnixNano())\n\t\t\t\tch <- key\n\t\t\t\tam.Store(key, i, time.Second)\n\t\t\t\ttime.Sleep(time.Duration(rand.Int63n(2000)) * time.Millisecond)\n\t\t\t}\n\t\t}(i)\n\t}\n\tfor i := 0; i < 10; i++ {\n\t\tgo func(i int) {\n\t\t\tfor {\n\t\t\t\tkey := <-ch\n\t\t\t\tval, ok := am.Load(key)\n\t\t\t\tfmt.Println(val, ok)\n\t\t\t}\n\t\t}(i)\n\t}\n\tfor {\n\t\tkey := <-ch\n\t\tval, ok := am.Load(key)\n\t\tfmt.Println(val, ok)\n\t}\n}\n\nfunc TestAgingMap(t *testing.T) {\n\taMap := NewWithLazyDelete()\n\taMap.Store(\"key\", \"val\", time.Second)\n\ttime.Sleep(time.Second)\n\tv, ok := aMap.Load(\"key\")\n\tif ok || v != nil {\n\t\tt.Error(\"get expired data\")\n\t}\n}\n\nfunc TestAgingMap_AutoDelete(t *testing.T) {\n\taMap := NewBaseAgingMap(time.Second, 1)\n\tfor i := 0; i < 7; i++ {\n\t\taMap.Store(i, \"val\", time.Second)\n\t}\n\ttime.Sleep(time.Second * 2)\n\tfor i := 0; i < 7; i++ {\n\t\tv, ok := aMap._map.Load(i)\n\t\tif ok || v != nil {\n\t\t\tt.Error(\"get expired data\")\n\t\t}\n\t}\n}\n\nfunc TestAgingMap_LoadOrStore(t *testing.T) {\n\taMap := NewBaseAgingMap(time.Second, 1)\n\t_, _, stored := aMap.LoadOrStore(\"key\", 1, time.Second)\n\tif !stored {\n\t\tt.Errorf(\"第一次未存储\")\n\t}\n\tv, _, stored := aMap.LoadOrStore(\"key\", 1, 
time.Second)\n\tif v != 1 || stored {\n\t\tt.Errorf(\"第二次存储\")\n\t}\n\ttime.Sleep(time.Second)\n\t_, _, stored = aMap.LoadOrStore(\"key\", 1, time.Second)\n\tif !stored {\n\t\tt.Errorf(\"第一次未存储\")\n\t}\n}\n\nfunc TestAgingMap_LoadOrStore_concurrent(t *testing.T) {\n\taMap := NewBaseAgingMap(time.Second, 1)\n\twg := sync.WaitGroup{}\n\tfor i := 0; i < 100; i++ {\n\t\tvar v1, v2 interface{}\n\t\tvar s1, s2 bool\n\t\twg.Add(2)\n\t\tgo func(i int) {\n\t\t\tdefer wg.Done()\n\t\t\tv1, _, s1 = aMap.LoadOrStore(i, fmt.Sprintf(\"F%d\", i), time.Second)\n\t\t}(i)\n\t\tgo func(i int) {\n\t\t\tdefer wg.Done()\n\t\t\tv2, _, s2 = aMap.LoadOrStore(i, fmt.Sprintf(\"S%d\", i), time.Second)\n\t\t}(i)\n\t\twg.Wait()\n\t\tif v1 != v2 {\n\t\t\tt.Errorf(\"两次值一样, V1 = %v, V2 = %v\", v1, v2)\n\t\t}\n\t\tif s1 && s2 {\n\t\t\tt.Errorf(\"true true\")\n\t\t}\n\t\tif !(s1 || s2) {\n\t\t\tt.Errorf(\"false false\")\n\t\t}\n\t}\n}\n\nfunc TestAgingMap_Store(t *testing.T) {\n\taMap := NewBaseAgingMap(time.Minute, 0.5)\n\tgo func() {\n\t\tfor i := 0; i < 7; i++ {\n\t\t\taMap.Store(i, \"val\", time.Second*10)\n\t\t\tfmt.Println(\"Store: \", i)\n\t\t\ttime.Sleep(10 * time.Second)\n\t\t}\n\t}()\n\ttime.Sleep(45 * time.Second)\n\taMap.Range(func(k, v interface{}) bool {\n\t\tfmt.Println(k, v)\n\t\treturn true\n\t})\n\n\tfmt.Println(\"------\")\n\ttime.Sleep(20 * time.Second)\n\taMap.Range(func(k, v interface{}) bool {\n\t\tfmt.Println(k, v)\n\t\treturn true\n\t})\n\n}\n\nfunc TestAgingMap_LoadWithDeadline(t *testing.T) {\n\tam := NewAgingMap()\n\tam.Store(1, 2, time.Minute)\n\tfor i := 0; i < 70; i++ {\n\t\tfmt.Println(am.LoadWithDeadline(1))\n\t\ttime.Sleep(time.Second * 
10)\n\t}\n}\n"},"avg_line_length":{"kind":"number","value":20,"string":"20"},"max_line_length":{"kind":"number","value":70,"string":"70"},"alphanum_fraction":{"kind":"number","value":0.5904605263,"string":"0.590461"},"score":{"kind":"number","value":3.328125,"string":"3.328125"}}},{"rowIdx":796,"cells":{"hexsha":{"kind":"string","value":"078126e28455007d4256937f05d51acba62cf889"},"size":{"kind":"number","value":3027,"string":"3,027"},"ext":{"kind":"string","value":"swift"},"lang":{"kind":"string","value":"Swift"},"max_stars_repo_path":{"kind":"string","value":"LNSideMenu/Classes/LNPanelViewController.swift"},"max_stars_repo_name":{"kind":"string","value":"luannguyenkhoa/LNSideMenu"},"max_stars_repo_head_hexsha":{"kind":"string","value":"ef22b77871ad5dc22e3725438c6b0ee83db34e95"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":112,"string":"112"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-03-22T12:02:20.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-01-31T03:22:28.000Z"},"max_issues_repo_path":{"kind":"string","value":"LNSideMenu/Classes/LNPanelViewController.swift"},"max_issues_repo_name":{"kind":"string","value":"luannguyenkhoa/LNSideMenu"},"max_issues_repo_head_hexsha":{"kind":"string","value":"ef22b77871ad5dc22e3725438c6b0ee83db34e95"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":19,"string":"19"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-08-04T06:32:08.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2020-04-14T10:34:59.000Z"},"max_forks_repo_path":{"kind":"string","value":"LNSideMenu/Classes/LNPanelViewController.swift"},"max_forks_repo_name":{"kind":"string","value":"luannguyenkhoa/LNSideMenu"},"max_forks_repo_head_hexsha":{"kind":"string","value":"ef22b77871ad5dc22e3725438c6b0ee83db34e95"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":18,"string":"18"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2016-07-19T22:22:43.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2019-11-01T20:45:23.000Z"},"content":{"kind":"string","value":"//\n// LNPanelViewController.swift\n// LNSideMenuEffect\n//\n// Created by Luan Nguyen on 6/22/16.\n// Copyright © 2016 Luan Nguyen. 
All rights reserved.\n//\n\nimport UIKit\n\npublic final class LNPanelViewController: UIViewController {\n \n // MARK: Properties\n fileprivate var items: [String] = []\n fileprivate var didInit = false\n weak var delegate: LNSMDelegate?\n var position: Position = .left\n var isTranslucent = false {\n didSet {\n updateFrame()\n }\n }\n // MARK: Colors\n public var menuBgColor = LNColor.bgView.color\n public var itemBgColor = LNColor.bgItem.color\n public var highlightColor = LNColor.highlight.color\n public var titleColor = LNColor.title.color\n \n lazy var sideMenuView: LNSideMenuView = LNSideMenuView()\n \n convenience init(items: Array, menuPosition: Position, highlightCellAtIndex: Int = Int.max) {\n self.init()\n self.items = items\n self.position = menuPosition\n self.sideMenuView.indexOfDefaultCellHighlight = highlightCellAtIndex\n }\n \n override public func viewDidLoad() {\n super.viewDidLoad()\n \n self.view.backgroundColor = .clear\n self.view.autoresizingMask = [.flexibleHeight, .flexibleWidth]\n }\n \n public override func viewWillAppear(_ animated: Bool) {\n super.viewWillAppear(animated)\n if !didInit {\n didInit = true\n initialSideMenu()\n }\n }\n /**\n Initial side menu with components\n */\n fileprivate func initialSideMenu() {\n sideMenuView.items = items\n _ = setViewFrame()\n \n // Config colors\n sideMenuView.bgColor = menuBgColor\n sideMenuView.titleColor = titleColor\n sideMenuView.itemBgColor = itemBgColor\n sideMenuView.highlightColor = highlightColor\n \n // Setup menu\n sideMenuView.setupMenu(view, position: position)\n sideMenuView.delegate = self\n }\n \n internal func setViewFrame() -> Bool {\n // Set frame for view\n let distance: CGFloat = isTranslucent ? 
0 : 44 + UIApplication.shared.statusBarFrame.size.height\n if view.y != distance {\n view.y = distance\n view.height = screenHeight - view.y\n return true\n }\n return false\n }\n \n internal func updateFrame() {\n // Just refresh side menu iff the view frame has already changed\n if setViewFrame() {\n sideMenuView.refreshMenuWithFrame(view.frame, translucent: isTranslucent)\n }\n }\n \n // Moving all items out of container view bounds before performing animation\n internal func prepareForAnimation() {\n sideMenuView.prepareForAnimation()\n }\n \n internal func animateContents(completion: @escaping Completion) {\n // Animate items when it's about diplayed\n sideMenuView.animateContents(completion: completion)\n }\n \n internal func transitionToView() {\n // TODO: implementing set contentViewController effection\n }\n \n}\n\nextension LNPanelViewController: LNSMDelegate {\n func didSelectItemAtIndex(SideMenu: LNSideMenuView, index: Int) {\n // Forward did select item at index action\n delegate?.didSelectItemAtIndex(SideMenu: SideMenu, index: index)\n }\n}\n"},"avg_line_length":{"kind":"number","value":27.5181818182,"string":"27.518182"},"max_line_length":{"kind":"number","value":103,"string":"103"},"alphanum_fraction":{"kind":"number","value":0.7076313181,"string":"0.707631"},"score":{"kind":"number","value":3.125,"string":"3.125"}}},{"rowIdx":797,"cells":{"hexsha":{"kind":"string","value":"233695968d2c7784c04d5fabcfb8fca3500d015f"},"size":{"kind":"number","value":708,"string":"708"},"ext":{"kind":"string","value":"sql"},"lang":{"kind":"string","value":"SQL"},"max_stars_repo_path":{"kind":"string","value":"sqls/access methods.sql"},"max_stars_repo_name":{"kind":"string","value":"tomi/presentation-postgres-indexes"},"max_stars_repo_head_hexsha":{"kind":"string","value":"d2be24142e989f67bc40a2781b17ecbb652b8128"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":2,"string":"2"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-07-02T06:40:59.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2019-08-01T17:40:12.000Z"},"max_issues_repo_path":{"kind":"string","value":"sqls/access methods.sql"},"max_issues_repo_name":{"kind":"string","value":"tomi/presentation-postgres-indexes"},"max_issues_repo_head_hexsha":{"kind":"string","value":"d2be24142e989f67bc40a2781b17ecbb652b8128"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":2,"string":"2"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-07-17T08:00:41.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-05-09T05:13:00.000Z"},"max_forks_repo_path":{"kind":"string","value":"sqls/access methods.sql"},"max_forks_repo_name":{"kind":"string","value":"tomi/presentation-postgres-indexes"},"max_forks_repo_head_hexsha":{"kind":"string","value":"d2be24142e989f67bc40a2781b17ecbb652b8128"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"\n------ Scanning techniques ------\nDROP INDEX IF EXISTS t_a_idx;\n\n-- Sequential scan\nEXPLAIN(costs off) SELECT * FROM t WHERE a = 5;\n\n-- Add index\nCREATE INDEX t_a_idx ON t(a);\n\n-- Index scan\nEXPLAIN(costs off) SELECT * FROM t WHERE a = 5;\n\n-- Bitmap scan\nEXPLAIN(costs off) SELECT * FROM t WHERE a <= 100;\n\n-- Index only-scan\nEXPLAIN(costs off) SELECT a FROM t WHERE a = 5;\n\n------ Partial indexes ------\nCREATE INDEX IF NOT EXISTS t_c_idx ON t(c);\nEXPLAIN(costs off) SELECT * FROM t WHERE c;\nEXPLAIN(costs off) SELECT * FROM t WHERE NOT c;\n-- Check number of pages\nSELECT 
relpages FROM pg_class WHERE relname='t_c_idx';\n\nDROP INDEX IF EXISTS t_c_idx;\nCREATE INDEX IF NOT EXISTS t_c_idx ON t(c) WHERE c;\n\n\n\n"},"avg_line_length":{"kind":"number","value":22.125,"string":"22.125"},"max_line_length":{"kind":"number","value":54,"string":"54"},"alphanum_fraction":{"kind":"number","value":0.6963276836,"string":"0.696328"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":798,"cells":{"hexsha":{"kind":"string","value":"e51e96650379da19d23b73ca6b7e943b66e5d48a"},"size":{"kind":"number","value":2592,"string":"2,592"},"ext":{"kind":"string","value":"ts"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"src/chat/api/chat.gateway.ts"},"max_stars_repo_name":{"kind":"string","value":"ArmNem/fullstackDev2021-Backend-master"},"max_stars_repo_head_hexsha":{"kind":"string","value":"ba6287756af8510b5b431ab5d7d6ae3ed04cfa77"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/chat/api/chat.gateway.ts"},"max_issues_repo_name":{"kind":"string","value":"ArmNem/fullstackDev2021-Backend-master"},"max_issues_repo_head_hexsha":{"kind":"string","value":"ba6287756af8510b5b431ab5d7d6ae3ed04cfa77"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/chat/api/chat.gateway.ts"},"max_forks_repo_name":{"kind":"string","value":"ArmNem/fullstackDev2021-Backend-master"},"max_forks_repo_head_hexsha":{"kind":"string","value":"ba6287756af8510b5b431ab5d7d6ae3ed04cfa77"},"max_forks_repo_licenses":{"kind":"list 
like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import {\n ConnectedSocket,\n MessageBody,\n OnGatewayConnection,\n OnGatewayDisconnect,\n SubscribeMessage,\n WebSocketGateway,\n WebSocketServer,\n} from '@nestjs/websockets';\nimport { Socket } from 'socket.io';\nimport { ChatService } from '../core/services/chat.service';\nimport { WelcomeDto } from './dto/welcome.dto';\nimport {\n IChatService,\n IChatServiceProvider,\n} from '../core/primary-ports/chat.service.interface';\nimport { Inject } from '@nestjs/common';\nimport { JoinChatDto } from './dto/join-chat.dto';\nimport { ChatClientModule } from '../core/models/chat.client.module';\n\n@WebSocketGateway()\nexport class ChatGateway implements OnGatewayConnection, OnGatewayDisconnect {\n constructor(\n @Inject(IChatServiceProvider) private chatService: IChatService,\n ) {}\n\n @WebSocketServer() server;\n\n @SubscribeMessage('message')\n handleChatEvent(\n @MessageBody() message: string,\n @ConnectedSocket() client: Socket,\n ): void {\n const chatMessage = this.chatService.newMessage(message, client.id);\n this.server.emit('newmessages', chatMessage);\n }\n\n @SubscribeMessage('typing')\n handleTypingEvent(\n @MessageBody() typing: boolean,\n @ConnectedSocket() client: Socket,\n ): void {\n const chatClient = this.chatService.updateTyping(typing, client.id);\n if (chatClient) {\n this.server.emit('clientTyping', chatClient);\n }\n }\n\n @SubscribeMessage('joinchat')\n async handleJoinChatEvent(\n @MessageBody() joinChatClientDto: JoinChatDto,\n @ConnectedSocket() client: Socket,\n ): Promise {\n try {\n let chatClient: ChatClientModule = JSON.parse(\n JSON.stringify(joinChatClientDto),\n );\n chatClient = await this.chatService.newClient(chatClient);\n const chatClients = await this.chatService.getClients();\n const welcome: 
WelcomeDto = {\n clients: chatClients,\n messages: this.chatService.getMessages(),\n client: chatClient,\n };\n client.emit('welcome', welcome);\n this.server.emit('clients', chatClients);\n } catch (e) {\n client.error(e.message);\n }\n }\n\n async handleConnection(client: Socket, ...args: any[]): Promise {\n console.log('Client Connect', client.id);\n client.emit('allMessages', this.chatService.getMessages());\n this.server.emit('clients', await this.chatService.getClients());\n }\n\n async handleDisconnect(client: Socket): Promise {\n await this.chatService.delete(client.id);\n this.server.emit('clients', this.chatService.getClients());\n console.log('Client Disconnect', await this.chatService.getClients());\n }\n}\n"},"avg_line_length":{"kind":"number","value":30.8571428571,"string":"30.857143"},"max_line_length":{"kind":"number","value":78,"string":"78"},"alphanum_fraction":{"kind":"number","value":0.6975308642,"string":"0.697531"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":799,"cells":{"hexsha":{"kind":"string","value":"dd540f79ba514c8330c098b284a6473469eed5ba"},"size":{"kind":"number","value":2285,"string":"2,285"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"deepfence_agent/tools/apache/scope/probe/process/walker_darwin.go"},"max_stars_repo_name":{"kind":"string","value":"tuapuikia/ThreatMapper"},"max_stars_repo_head_hexsha":{"kind":"string","value":"22c473e133e2a57a402f27a12d44e1787a2895cc"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":1281,"string":"1,281"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-04-08T17:07:21.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-31T11:22:16.000Z"},"max_issues_repo_path":{"kind":"string","value":"deepfence_agent/tools/apache/scope/probe/process/walker_darwin.go"},"max_issues_repo_name":{"kind":"string","value":"tuapuikia/ThreatMapper"},"max_issues_repo_head_hexsha":{"kind":"string","value":"22c473e133e2a57a402f27a12d44e1787a2895cc"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":180,"string":"180"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-04-06T15:40:16.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-31T02:19:34.000Z"},"max_forks_repo_path":{"kind":"string","value":"probe/process/walker_darwin.go"},"max_forks_repo_name":{"kind":"string","value":"Pradeepkumarbk/scope11"},"max_forks_repo_head_hexsha":{"kind":"string","value":"0d87f2b54fe8f291fec0d13ccda5d9db3c91c273"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":148,"string":"148"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-04-08T21:38:39.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-30T18:04:50.000Z"},"content":{"kind":"string","value":"package process\n\nimport (\n\t\"fmt\"\n\t\"os/exec\"\n\t\"strconv\"\n\t\"strings\"\n)\n\n// NewWalker returns a Darwin (lsof-based) walker.\nfunc NewWalker(_ string, _ bool) Walker {\n\treturn &walker{}\n}\n\ntype walker struct{}\n\nconst (\n\tlsofBinary = \"lsof\"\n\tlsofFields = \"cn\" // parseLSOF() depends on the order\n\tnetstatBinary = \"netstat\"\n)\n\n// These functions copied from procspy.\n\n// IsProcInAccept 
returns true if the process has a at least one thread\n// blocked on the accept() system call\nfunc IsProcInAccept(procRoot, pid string) (ret bool) {\n\t// Not implemented on darwin\n\treturn false\n}\n\nfunc (walker) Walk(f func(Process, Process)) error {\n\toutput, err := exec.Command(\n\t\tlsofBinary,\n\t\t\"-i\", // only Internet files\n\t\t\"-n\", \"-P\", // no number resolving\n\t\t\"-w\", // no warnings\n\t\t\"-F\", lsofFields, // \\n based output of only the fields we want.\n\t).CombinedOutput()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tprocesses, err := parseLSOF(string(output))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor _, process := range processes {\n\t\tf(process, Process{})\n\t}\n\treturn nil\n}\n\nfunc parseLSOF(output string) (map[string]Process, error) {\n\tvar (\n\t\tprocesses = map[string]Process{} // Local addr -> Proc\n\t\tprocess Process\n\t)\n\tfor _, line := range strings.Split(output, \"\\n\") {\n\t\tif len(line) <= 1 {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar (\n\t\t\tfield = line[0]\n\t\t\tvalue = line[1:]\n\t\t)\n\t\tswitch field {\n\t\tcase 'p':\n\t\t\tpid, err := strconv.Atoi(value)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"invalid 'p' field in lsof output: %#v\", value)\n\t\t\t}\n\t\t\tprocess.PID = pid\n\n\t\tcase 'c':\n\t\t\tprocess.Name = value\n\n\t\tcase 'n':\n\t\t\t// 'n' is the last field, with '-F cn'\n\t\t\t// format examples:\n\t\t\t// \"192.168.2.111:44013->54.229.241.196:80\"\n\t\t\t// \"[2003:45:2b57:8900:1869:2947:f942:aba7]:55711->[2a00:1450:4008:c01::11]:443\"\n\t\t\t// \"*:111\" <- a listen\n\t\t\taddresses := strings.SplitN(value, \"->\", 2)\n\t\t\tif len(addresses) != 2 {\n\t\t\t\t// That's a listen entry.\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tprocesses[addresses[0]] = Process{\n\t\t\t\tPID: process.PID,\n\t\t\t\tName: process.Name,\n\t\t\t}\n\n\t\tdefault:\n\t\t\treturn nil, fmt.Errorf(\"unexpected lsof field: %c in %#v\", field, value)\n\t\t}\n\t}\n\treturn processes, nil\n}\n\n// GetDeltaTotalJiffies 
returns 0 - darwin doesn't have jiffies.\nfunc GetDeltaTotalJiffies() (uint64, float64, error) {\n\treturn 0, 0.0, nil\n}\n"},"avg_line_length":{"kind":"number","value":21.3551401869,"string":"21.35514"},"max_line_length":{"kind":"number","value":83,"string":"83"},"alphanum_fraction":{"kind":"number","value":0.629321663,"string":"0.629322"},"score":{"kind":"number","value":3.25,"string":"3.25"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":7,"numItemsPerPage":100,"numTotalItems":831747,"offset":700,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1ODMyNjAwOCwic3ViIjoiL2RhdGFzZXRzL2Rldm5naG8vdGhlLXN0YWNrLW1pbmktZWR1IiwiZXhwIjoxNzU4MzI5NjA4LCJpc3MiOiJodHRwczovL2h1Z2dpbmdmYWNlLmNvIn0.pzhsbNbgQKpHsui4MHJmTHRKCyspQZqjM9-jx5vfSrNJUcAhT9QsVYKlKvB_mRJCP6lIbEPYcaLT26qeTZwBDA","displayUrls":true},"discussionsStats":{"closed":1,"open":0,"total":1},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
hexsha
stringlengths
40
40
size
int64
140
1.03M
ext
stringclasses
94 values
lang
stringclasses
21 values
max_stars_repo_path
stringlengths
3
663
max_stars_repo_name
stringlengths
4
120
max_stars_repo_head_hexsha
stringlengths
40
78
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
368k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
3
663
max_issues_repo_name
stringlengths
4
120
max_issues_repo_head_hexsha
stringlengths
40
78
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
116k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
3
663
max_forks_repo_name
stringlengths
4
135
max_forks_repo_head_hexsha
stringlengths
40
78
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
140
1.03M
avg_line_length
float64
2.32
23.1k
max_line_length
int64
11
938k
alphanum_fraction
float64
0.01
1
score
float32
3
4.25
755d2297789780b296868ad03108d1b89dd09ff7
3,427
h
C
networkit/cpp/graph/RandomMaximumSpanningForest.h
maxvogel/NetworKit-mirror2
02a1805a4eda56fbdd647852afcfac26bcb77099
[ "MIT" ]
null
null
null
networkit/cpp/graph/RandomMaximumSpanningForest.h
maxvogel/NetworKit-mirror2
02a1805a4eda56fbdd647852afcfac26bcb77099
[ "MIT" ]
null
null
null
networkit/cpp/graph/RandomMaximumSpanningForest.h
maxvogel/NetworKit-mirror2
02a1805a4eda56fbdd647852afcfac26bcb77099
[ "MIT" ]
null
null
null
#ifndef RANDOMMAXIMUMSPANNINGFOREST_H #define RANDOMMAXIMUMSPANNINGFOREST_H #include "Graph.h" #include <limits> #include "../structures/UnionFind.h" #include "../auxiliary/Log.h" #include "../auxiliary/Random.h" #include "../base/Algorithm.h" namespace NetworKit { /** * Computes a random maximum-weight spanning forest using Kruskal's algorithm by randomizing the order of edges of the same weight. */ class RandomMaximumSpanningForest : public Algorithm { public: /** * Initialize the random maximum-weight spanning forest algorithm, uses edge weights. * * @param G The input graph. */ RandomMaximumSpanningForest(const Graph &G); /** * Initialize the random maximum-weight spanning forest algorithm using an attribute as edge weight. * * This copies the attribute values, the supplied attribute vector is not stored. * * @param G The input graph. * @param attribute The attribute to use, can be either of type edgeweight (double) or count (uint64), internally all values are handled as double. */ template <typename A> RandomMaximumSpanningForest(const Graph &G, const std::vector<A> &attribute); /** * Execute the algorithm. */ virtual void run() override; /** * Get a boolean attribute that indicates for each edge if it is part of the calculated maximum-weight spanning forest. * * This attribute is only calculated and can thus only be request if the supplied graph has edge ids. * * @param move If the attribute shall be moved out of the algorithm instance. * @return The vector with the boolean attribute for each edge. */ std::vector<bool> getAttribute(bool move = false); /** * Checks if the edge (@a u, @a v) is part of the calculated maximum-weight spanning forest. * * @param u The first node of the edge to check * @param v The second node of the edge to check * @return If the edge is part of the calculated maximum-weight spanning forest. */ bool inMSF(node u, node v) const; /** * Checks if the edge with the id @a eid is part of the calculated maximum-weight spanning forest. 
* * @param eid The id of the edge to check. * @return If the edge is part of the calculated maximum-weight spanning forest. */ bool inMSF(edgeid eid) const; /** * Gets the calculated maximum-weight spanning forest as graph. * * @param move If the graph shall be moved out of the algorithm instance. * @return The calculated maximum-weight spanning forest. */ Graph getMSF(bool move = false); /** * @return false - this algorithm is not parallelized */ virtual bool isParallel() const override; /** * @return The name of this algorithm. */ virtual std::string toString() const override; private: struct weightedEdge { double attribute; node u; node v; edgeid eid; index rand; bool operator>(const weightedEdge &other) const { return (attribute > other.attribute) || (attribute == other.attribute && (rand > other.rand || (rand == other.rand && (u > other.u || (u == other.u && v > other.v))))); }; weightedEdge(node u, node v, double attribute, edgeid eid = 0) : attribute(attribute), u(u), v(v), eid(eid), rand(Aux::Random::integer()) {}; }; const Graph &G; std::vector<weightedEdge> weightedEdges; Graph msf; std::vector<bool> msfAttribute; bool hasWeightedEdges; bool hasMSF; bool hasAttribute; }; } // namespace NetworKit #endif // RANDOMMAXIMUMSPANNINGFOREST_H
28.322314
148
0.707324
3.421875
ddf4c5e584b013355f49f6671a07671377687302
6,660
php
PHP
app/Http/Controllers/Api/QuestionController.php
saif22nemr/doctor_appointment
7136714b9778cae821488893ad281657663f636a
[ "MIT" ]
null
null
null
app/Http/Controllers/Api/QuestionController.php
saif22nemr/doctor_appointment
7136714b9778cae821488893ad281657663f636a
[ "MIT" ]
null
null
null
app/Http/Controllers/Api/QuestionController.php
saif22nemr/doctor_appointment
7136714b9778cae821488893ad281657663f636a
[ "MIT" ]
null
null
null
<?php namespace App\Http\Controllers\Api; use App\Http\Controllers\Controller; use App\Models\Activity; use App\Models\Choose; use App\Models\Question; use Illuminate\Http\Request; class QuestionController extends ApiController { /** * Display a listing of the resource. * * @return \Illuminate\Http\Response */ public function index(Request $request) { $request->validate([ 'question' => 'min:1', ]); if($request->has('question')){ $questions = Question::where('question' , 'LIKE' ,'%'.$request->question.'%'); } else{ $questions = Question::where('id' , '!=' , 0); } $questions = $questions->orderBy('created_at' , 'asc')->with('chooses')->get(); return $this->showAll($questions); } /** * Store a newly created resource in storage. * * @param \Illuminate\Http\Request $request * @return \Illuminate\Http\Response */ public function store(Request $request) { $request->validate([ 'question' => 'required|min:1|max:1000', 'reason' => 'min:1,0', 'is_many' => 'min:1,0', 'answer_type' => 'required|in:checkbox,selectbox,text,textarea', ]); $type = ['checkbox' => 2, 'text' => 1 , 'selectbox' => 3 , 'textarea' => 4]; // validate data; if($request->answer_type == 'checkbox' or $request->answer_type == 'selectbox'){ $request->validate([ 'chooses' => 'required|array|min:2' ]); $reason = $request->has('reason') ? $request->reason : 0; $isMany = $request->has('is_many') ? 
$request->is_many : 0; if(!$request->has('chooses')){ foreach($request->chooses as $choose){ if(mb_strlen($choose , 'utf8') < 2){ return $this->errorResponse(trans('application.error_choose')); } } } }else{ $reason = 0; $isMany = 0; } // store $data = $request->only([ 'question' ]); $data['answer_type'] = $type[$request->answer_type]; $data['reason'] = $reason ; $data['is_many'] = $isMany; $question = Question::create($data); if($request->answer_type == 'checkbox' or $request->answer_type == 'selectbox'){ foreach($request->chooses as $value){ Choose::create([ 'question_id' =>$question->id, 'choose' => $value ]); } } $activity = new Activity([ 'type' => 'create', 'description' => trans('activity.create_question'), 'user_id' => $this->user->id, 'related_id' => $question->id ]); $question->chooses ; $question->activities()->save($activity); return $this->successResponse([ 'success' => true, 'message' => trans('app.create_successfully'), 'data' => $question ]); } /** * Display the specified resource. * * @param \App\Models\Question $question * @return \Illuminate\Http\Response */ public function show(Question $question) { $question->chooses; return $this->showOne($question); } /** * Update the specified resource in storage. 
* * @param \Illuminate\Http\Request $request * @param \App\Models\Question $question * @return \Illuminate\Http\Response */ public function update(Request $request, Question $question) { $request->validate([ 'question' => 'min:1|max:1000', 'reason' => 'min:1,0', 'is_many' => 'min:1,0', 'chooses' => 'array|min:2' ]); if($request->has('question')){ $question->question = $request->question; } $type = ['checkbox' => 2, 'text' => 1 , 'selectbox' => 3 , 'textarea' => 4]; // validate data; if($request->answer_type == 'checkbox' or $request->answer_type == 'selectbox'){ if($request->has('reason')) $question->reason = $request->reason; if($request->has('is_many')) $question->is_many = $request->is_many; if($request->has('chooses')){ foreach($request->chooses as $choose){ if(mb_strlen($choose , 'utf8') < 2){ return $this->errorResponse(trans('application.error_choose')); } } }else{ return $this->errorResponse(trans('application.error_choose')); } } // store $question->save(); if($request->has('chooses')){ $question->chooses()->delete(); if($request->answer_type == 'checkbox' or $request->answer_type == 'selectbox'){ foreach($request->chooses as $value){ Choose::create([ 'question_id' =>$question->id, 'choose' => $value ]); } } } $activity = new Activity([ 'type' => 'edit', 'description' => trans('activity.edit_question'), 'user_id' => $this->user->id, 'related_id' => $question->id ]); $question->chooses ; $question->activities()->save($activity); return $this->successResponse([ 'success' => true, 'message' => trans('app.edit_successfully'), 'data' => $question ]); } /** * Remove the specified resource from storage. 
* * @param \App\Models\Question $question * @return \Illuminate\Http\Response */ public function destroy(Question $question) { $question->delete(); $activity = new Activity([ 'type' => 'delete', 'description' => trans('activity.delete_question'), 'user_id' => $this->user->id, 'related_id' => $question->id ]); $question->chooses ; $question->activities()->save($activity); return $this->successResponse([ 'success' => true, 'message' => trans('app.delete_successfully'), 'data' => $question ]); } }
32.019231
92
0.479279
3
3b8b108e2d8b2f63c1fffe690e1f75cdfac67f3a
4,833
h
C
src/heap.h
LLNL/gecko
490ab7d9b7b4e0f007e10d2af2b022b96d427fee
[ "BSD-3-Clause" ]
11
2020-01-21T15:31:16.000Z
2021-07-24T05:28:33.000Z
src/heap.h
LLNL/gecko
490ab7d9b7b4e0f007e10d2af2b022b96d427fee
[ "BSD-3-Clause" ]
null
null
null
src/heap.h
LLNL/gecko
490ab7d9b7b4e0f007e10d2af2b022b96d427fee
[ "BSD-3-Clause" ]
null
null
null
#ifndef DYNAMIC_HEAP_H #define DYNAMIC_HEAP_H #include <algorithm> #include <functional> #include <map> #include <vector> template < typename T, // data type typename P, // priority type class C = std::less<P>, // comparator for priorities class M = std::map<T, unsigned int> // maps type T to unsigned integer > class DynamicHeap { public: DynamicHeap(size_t count = 0); ~DynamicHeap() {} void insert(T data, P priority); void update(T data, P priority); bool top(T& data); bool top(T& data, P& priority); bool pop(); bool extract(T& data); bool extract(T& data, P& priority); bool erase(T data); bool find(T data) const; bool find(T data, P& priority) const; bool empty() const { return heap.empty(); } size_t size() const { return heap.size(); } private: struct HeapEntry { HeapEntry(P p, T d) : priority(p), data(d) {} P priority; T data; }; std::vector<HeapEntry> heap; M index; C lower; void ascend(unsigned int i); void descend(unsigned int i); void swap(unsigned int i, unsigned int j); bool ordered(unsigned int i, unsigned int j) const { return !lower(heap[i].priority, heap[j].priority); } unsigned int parent(unsigned int i) const { return (i - 1) / 2; } unsigned int left(unsigned int i) const { return 2 * i + 1; } unsigned int right(unsigned int i) const { return 2 * i + 2; } }; template < typename T, typename P, class C, class M > DynamicHeap<T, P, C, M>::DynamicHeap(size_t count) { heap.reserve(count); } template < typename T, typename P, class C, class M > void DynamicHeap<T, P, C, M>::insert(T data, P priority) { if (index.find(data) != index.end()) update(data, priority); else { unsigned int i = (unsigned int)heap.size(); heap.push_back(HeapEntry(priority, data)); ascend(i); } } template < typename T, typename P, class C, class M > void DynamicHeap<T, P, C, M>::update(T data, P priority) { unsigned int i = index[data]; heap[i].priority = priority; ascend(i); descend(i); } template < typename T, typename P, class C, class M > bool DynamicHeap<T, P, C, M>::top(T& 
data) { if (!heap.empty()) { data = heap[0].data; return true; } else return false; } template < typename T, typename P, class C, class M > bool DynamicHeap<T, P, C, M>::top(T& data, P& priority) { if (!heap.empty()) { data = heap[0].data; priority = heap[0].priority; return true; } else return false; } template < typename T, typename P, class C, class M > bool DynamicHeap<T, P, C, M>::pop() { if (!heap.empty()) { T data = heap[0].data; swap(0, (unsigned int)heap.size() - 1); index.erase(data); heap.pop_back(); if (!heap.empty()) descend(0); return true; } else return false; } template < typename T, typename P, class C, class M > bool DynamicHeap<T, P, C, M>::extract(T& data) { if (!heap.empty()) { data = heap[0].data; return pop(); } else return false; } template < typename T, typename P, class C, class M > bool DynamicHeap<T, P, C, M>::extract(T& data, P& priority) { if (!heap.empty()) { data = heap[0].data; priority = heap[0].priority; return pop(); } else return false; } template < typename T, typename P, class C, class M > bool DynamicHeap<T, P, C, M>::erase(T data) { if (index.find(data) == index.end()) return false; unsigned int i = index[data]; swap(i, heap.size() - 1); index.erase(data); heap.pop_back(); if (i < heap.size()) { ascend(i); descend(i); } return true; } template < typename T, typename P, class C, class M > bool DynamicHeap<T, P, C, M>::find(T data) const { return index.find(data) != index.end(); } template < typename T, typename P, class C, class M > bool DynamicHeap<T, P, C, M>::find(T data, P& priority) const { typename M::const_iterator p; if ((p = index.find(data)) == index.end()) return false; unsigned int i = p->second; priority = heap[i].priority; return true; } template < typename T, typename P, class C, class M > void DynamicHeap<T, P, C, M>::ascend(unsigned int i) { for (unsigned int j; i && !ordered(j = parent(i), i); i = j) swap(i, j); index[heap[i].data] = i; } template < typename T, typename P, class C, class M > void 
DynamicHeap<T, P, C, M>::descend(unsigned int i) { for (unsigned int j, k; (j = ((k = left(i)) < heap.size() && !ordered(i, k) ? k : i), j = ((k = right(i)) < heap.size() && !ordered(j, k) ? k : j)) != i; i = j) swap(i, j); index[heap[i].data] = i; } template < typename T, typename P, class C, class M > void DynamicHeap<T, P, C, M>::swap(unsigned int i, unsigned int j) { std::swap(heap[i], heap[j]); index[heap[i].data] = i; } #endif
22.584112
76
0.607283
3.171875
85d3cd80b6ce2f0d4aa048730fff1790a86a4633
5,748
js
JavaScript
src/tunnel/udp/index.js
blackcoffeecat/ngc-tunnel
fe104928a48dcededaaf43b92e4aab4da2ea1553
[ "MIT" ]
null
null
null
src/tunnel/udp/index.js
blackcoffeecat/ngc-tunnel
fe104928a48dcededaaf43b92e4aab4da2ea1553
[ "MIT" ]
null
null
null
src/tunnel/udp/index.js
blackcoffeecat/ngc-tunnel
fe104928a48dcededaaf43b92e4aab4da2ea1553
[ "MIT" ]
null
null
null
import dgram from 'dgram'; import {EventEmitter} from 'events'; import tcp from 'net'; import pLimit from 'p-limit'; import {v4 as uuid} from 'uuid'; import Queue from 'yocto-queue'; import {closeCon, forClose, hasProp, onExit} from '../common.js'; EventEmitter.setMaxListeners(0); export function createUdpSender({ sender, port, address, receiver, key, timeout = 2e3, before, after, }) { let chunkSize = 0; let sendBuffer = Buffer.alloc(0); let limit = pLimit(1); let callback; let timer; function onReceiveMsg(msg) { msg = msg + ''; msg .split('\n') .filter(Boolean) .forEach(line => { let [type, rKey] = line.split(':'); if (type === 'receive' && rKey === key) callback?.(); }); } function nextChunk() { if (!sendBuffer.length) return null; let next = sendBuffer.slice(0, chunkSize); sendBuffer = sendBuffer.slice(chunkSize); return next; } function send(buf) { if (!chunkSize) chunkSize = sender.getSendBufferSize(); sendBuffer = Buffer.concat([sendBuffer, buf]); if (!callback) { before?.(); limit(sendNext).then(() => after?.()); } } function sendNext() { return new Promise(resolve => { const next = nextChunk(); const thisCallback = isFail => { if (isFail) { timer = setTimeout(thisCallback, timeout, true); sender.send(next, port, address); return; } callback = null; if (timer) { clearTimeout(timer); timer = null; } if (sendBuffer.length) return sendNext().then(resolve); resolve(); }; callback = thisCallback; timer = setTimeout(thisCallback, timeout, true); sender.send(next, port, address); }); } function end() { limit(() => { receiver.off('message', onReceiveMsg); sender.send(Buffer.alloc(0), port, address); }); } receiver.on('message', onReceiveMsg); return {send, end}; } export function serveUdp({port}) { const servMap = {}; const serv = dgram .createSocket('udp4') .bind(port, '0.0.0.0') .once('listening', () => console.log('UDP server listening on port', port)) .on('message', (msg, {port, address}) => { if (!msg?.length) return; msg = msg + ''; const {pull, close} = 
msg.split('\n').reduce((ret, line) => { const [type, port] = line.split(':'); ret[type] = port; return ret; }, {}); if (pull) handlePull(pull, handleCon(port, address)); if (close) servMap[close]?.close(); }); function handleCon(cliPort, cliAddress) { return function handle({con, key}) { let dataServ = dgram.createSocket('udp4'); dataServ.send(`ready:${key}\n`, cliPort, cliAddress); dataServ.once('message', (ready, {port, address}) => { const {send, end} = createUdpSender({ sender: dataServ, receiver: serv, port, address, key, timeout: 100, }); con.on('data', send).once('close', end); dataServ.on('message', buf => { serv.send(`receive:${key}\n`, cliPort, cliAddress); if (!buf.length) return con.end(); con.write(buf); }); con.resume(); }); }; } function handlePull(port, handler) { if (servMap[port]) { const {renew, que} = servMap[port]; renew(); while (que.size) handler(que.dequeue()); return; } console.log('udp createServer', port); let que = new Queue(); let server = tcp .createServer({allowHalfOpen: true, pauseOnConnect: true}, con => { let key = uuid(); que.enqueue({con, key}); }) .listen(port); let timer; let context = { que, renew() { if (timer) clearTimeout(timer); timer = setTimeout(() => { timer = null; context?.close(); }, 5e3); }, close() { server?.close(() => { console.log('udp closeServer', port); que = null; server = null; servMap[port] = null; context = null; }); }, }; servMap[port] = context; servMap[port].renew(); } } export function connectUdp({port, host, targetHost, serverPort, targetPort}) { let cliPort = port, cliAddress = host; const client = dgram.createSocket('udp4'); let interval = setInterval(() => { client.send(`pull:${serverPort}\n`, port, host); }, 1e3); client .on('message', (msg, {port, address}) => { if (!msg?.length) return; msg = msg + ''; msg .split('\n') .filter(Boolean) .forEach(line => { const [type, key] = line.split(':'); if (type !== 'ready') return; handleOpen(key, port, address); }); }) .once('close', () => { 
clearInterval(interval); }); forClose(client, callback => { const me = dgram.createSocket('udp4'); me.send(`close:${serverPort}\n`, port, host, callback); }); onExit(() => closeCon(client)); return client; function handleOpen(key, port, address) { let proxy = tcp.connect({allowHalfOpen: true, port: targetPort, host: targetHost}); let data = dgram.createSocket('udp4'); const {send, end} = createUdpSender({ sender: data, receiver: client, key, port, address, }); proxy.on('data', send).once('close', end); data .on('message', buf => { client.send(`receive:${key}\n`, cliPort, cliAddress); if (!buf.length) return proxy.end(); proxy.write(buf); }) .send('ready', port, address); } }
23.654321
87
0.543145
3.328125
9c4c5cfde77550de3c72209f72ee5dd795ccf5f3
1,239
js
JavaScript
tests/pages/Login.spec.js
DiegoVictor/tindev-app
d69e3249117a12125227d12ccc430d747dbbe1ae
[ "MIT" ]
null
null
null
tests/pages/Login.spec.js
DiegoVictor/tindev-app
d69e3249117a12125227d12ccc430d747dbbe1ae
[ "MIT" ]
null
null
null
tests/pages/Login.spec.js
DiegoVictor/tindev-app
d69e3249117a12125227d12ccc430d747dbbe1ae
[ "MIT" ]
null
null
null
import React from 'react'; import { render, fireEvent, act } from '@testing-library/react-native'; import AsyncStorage from '@react-native-community/async-storage'; import faker from 'faker'; import MockAdapter from 'axios-mock-adapter'; import api from '~/services/api'; import Login from '~/pages/Login'; import { UserContext } from '~/contexts/User'; describe('Login page', () => { const apiMock = new MockAdapter(api); it('should be able to login', async () => { const setUser = jest.fn(); const { getByTestId, getByPlaceholderText } = render( <UserContext.Provider value={{ setUser, }} > <Login /> </UserContext.Provider> ); const id = faker.random.number(); const token = faker.random.uuid(); apiMock.onPost('developers').reply(200, { developer: { _id: id }, token }); fireEvent.changeText( getByPlaceholderText('Digite seu usuáro no Github'), faker.internet.userName() ); await act(async () => { fireEvent.press(getByTestId('submit')); }); expect(await AsyncStorage.getItem('tindev_user')).toBe( JSON.stringify({ id, token }) ); expect(setUser).toHaveBeenCalledWith({ id, token }); }); });
26.934783
79
0.632768
3.046875
189d39b549542bab9b583804d53bb8cb0aa0e344
4,071
rb
Ruby
lib/synced_memory_store/subscriber.rb
shiftcommerce/synced_memory_store
1fe627d8a785a801618da01275d3c142204d8dd8
[ "MIT" ]
null
null
null
lib/synced_memory_store/subscriber.rb
shiftcommerce/synced_memory_store
1fe627d8a785a801618da01275d3c142204d8dd8
[ "MIT" ]
null
null
null
lib/synced_memory_store/subscriber.rb
shiftcommerce/synced_memory_store
1fe627d8a785a801618da01275d3c142204d8dd8
[ "MIT" ]
null
null
null
require 'redis' require 'thread' module SyncedMemoryStore class Subscriber include Singleton def initialize self.mutex = Mutex.new self.subscriptions = [] self.subscribed = false end def subscribe(cache_instance) subscriptions << cache_instance unless subscriptions.include?(cache_instance) log("SyncedMemoryStore instance #{cache_instance.uuid} registered for updates") end def configure(logger: nil) mutex.synchronize do next self if configured self.logger = logger self.configured = true self end end def reset! self.subscriptions = [] end def start(wait: false) mutex.synchronize do next self if started start_thread if wait wait_for_subscription end self.started = true self end end def start_thread self.thread = Thread.new do begin redis.subscribe(:synced_memory_store_writes, :synced_memory_store_deletes, :synced_memory_store_clears) do |on| on.subscribe do |channel, subscriptions| log("Subscribed to channel #{channel}") self.subscribed = true end on.message do |channel, message| send("on_#{channel}".to_sym, message) redis.unsubscribe if message == "exit" end on.unsubscribe do |channel, subscriptions| log("Unsubscribed from channel #{channel}") self.subscribed = false end end rescue Redis::BaseConnectionError => error puts "#{error}, retrying in 1s" sleep 1 retry rescue Exception => ex raise end end end private def log(msg) return if logger.nil? 
if logger.respond_to?(:tagged) logger.tagged("synced_memory_store") { logger.info msg } else logger.info msg end end def on_synced_memory_store_writes(message) message_decoded = Marshal.load(message) subscribers_informed = 0 subscriptions.each do |cache_instance| next if cache_instance.uuid == message_decoded[:sender_uuid] cache_instance.write_from_subscriber(message_decoded[:key], message_decoded[:entry], silent: true, persist: false, **message_decoded[:options]) subscribers_informed += 1 end log("Write to key #{message_decoded[:key]} shared with #{subscribers_informed} subscribers") unless subscribers_informed == 0 end def on_synced_memory_store_deletes(message) message_decoded = Marshal.load(message) subscribers_informed = 0 subscriptions.each do |cache_instance| next if cache_instance.uuid == message_decoded[:sender_uuid] cache_instance.delete(message_decoded[:key], silent: true, persist: false) subscribers_informed += 1 end log("Delete key #{message_decoded[:key]} shared with #{subscribers_informed} subscribers") unless subscribers_informed == 0 end def on_synced_memory_store_clears(message) message_decoded = Marshal.load(message) subscribers_informed = 0 subscriptions.each do |cache_instance| next if cache_instance.uuid == message_decoded[:sender_uuid] cache_instance.clear(silent: true, persist: false) subscribers_informed += 1 end log("Clear call shared with #{subscribers_informed} subscribers") unless subscribers_informed == 0 end def subscribed? subscribed end def wait_for_subscription start = Time.now while Time.now < (start + 10.seconds) break if subscribed? sleep 0.1 end raise "Could not subscribe to redis in 10 seconds" unless subscribed? end def redis @redis ||= Redis.new end attr_accessor :thread, :subscriptions, :subscribed, :logger, :configured, :started, :mutex private_class_method :initialize private_class_method :new end end redis = Redis.new
28.87234
151
0.644805
3.03125
16af4628219100a1a9442fe663ae8bc821b184e6
6,738
ts
TypeScript
examples/bri-1/base-example/src/mods/avail/avail.ts
Meuko/baseline-commit-mgr
158713d7e57a88ba40c7d8621b47e04951d3172f
[ "CC0-1.0" ]
null
null
null
examples/bri-1/base-example/src/mods/avail/avail.ts
Meuko/baseline-commit-mgr
158713d7e57a88ba40c7d8621b47e04951d3172f
[ "CC0-1.0" ]
4
2021-03-26T10:03:58.000Z
2021-09-02T20:23:57.000Z
examples/bri-1/base-example/src/mods/avail/avail.ts
Meuko/baseline-ganache
158713d7e57a88ba40c7d8621b47e04951d3172f
[ "CC0-1.0" ]
1
2021-03-08T14:47:52.000Z
2021-03-08T14:47:52.000Z
import { fileReader } from "../../utils/utils"; import { FileStructure, FileContentStructure, SupplierType, Contents, } from "../types"; // Current test values, these are supposed to be sourced from // a single source of truth. const fileImporter = async (folders: string[]) => { const fs = require("fs"); if (folders.length === 0) Promise.reject("0x0"); // Queue all our contents for later processing let contentQueue: Promise<Contents>[] = []; await new Promise(async (resolve: any, reject: any) => { for (const folder of folders) { // Loop through all folders await new Promise((resolve: any, reject: any) => { fs.readdir(`src/mods/avail/${folder}`, (error: any, files: any) => { if (error) reject(error); if (files) { for (const file of files) { // Request file information of each file switch (folder) { case "ports": { contentQueue.push( fileReader( `src/mods/avail/${folder}/${file}`, SupplierType.PORT ) ); break; } case "spares": { contentQueue.push( fileReader( `src/mods/avail/${folder}/${file}`, SupplierType.SPARE ) ); break; } case "technicians": { contentQueue.push( fileReader( `src/mods/avail/${folder}/${file}`, SupplierType.TECHNICIAN ) ); break; } case "vessels": { contentQueue.push( fileReader( `src/mods/avail/${folder}/${file}`, SupplierType.VESSEL ) ); break; } default: { throw "Are you sure you're passing in the right folder name?"; break; } } } resolve(contentQueue); } }); }); } resolve(contentQueue); }); return contentQueue; }; let suitabilityFinder = ( suppInfo: number[], timeWindow: number[], taskLength: number ): number[] => { // TODO::(Hamza) --> Move this into its own function :) // Perform availability calculation and save result in either the "ERP" system or temp storage. 
if (!suppInfo) { return new Array(30).fill(0); } let keys_of_interest: Array<number> = new Array(30).fill(0); loop1: for (var [index, day] of suppInfo.entries()) { // We only care about the days starting from the start date if (index >= timeWindow[0] - 1) { // From starting day; check for each date if we have an available day if (Number(day) !== 0) { // If available day is found, start looping from now into the future untill we hit // the task_length cap -- ci = current index loop2: for (let ci = index; ci != index + Number(taskLength); ci++) { // Don't start day checks for task lengths which are too long for our current calendar. if ( index + Number(taskLength) >= suppInfo.length || index + Number(taskLength) >= timeWindow[1] ) { break loop2; } // Stop counting available days if we hit a 0. if (Number(suppInfo[ci]) === 0) { break loop2; } // For current index + n, incerement its index. keys_of_interest[index] += 1; } } } } let indices_available: Array<number> = []; let final_availability: Array<number> = new Array(30).fill(0); // Now check if we've encountered any days with enough available days in succession. // If yes, add them to the indices_available array. for (let i = 0; i < keys_of_interest.length; i++) { if (keys_of_interest[i] == taskLength) { indices_available.push(i); } } // For each available index, set the corresponding day to true. for (var a of indices_available) { final_availability[a] = a + 1; } return final_availability; }; const supplierResolver = (suppliers: SupplierType[]): string[] => { // Pretend like you didn't see this one. return suppliers.reduce( (pVal: any, cVal: any) => [ ...pVal, SupplierType[cVal].toLocaleLowerCase() + "s", ], [] ); }; export const requestAvailability = async ( suppliers: SupplierType[], timeWindow: number[], taskLength: number ): Promise<FileStructure[]> => { // Retrieve all file contents, this is an array of promises. 
const folderInfo = await fileImporter(supplierResolver(suppliers)); let suppFormatted: FileStructure[] = []; if (folderInfo.length === 0) return Promise.reject(); // Work on the values await Promise.all(folderInfo).then(async (res: Contents[]) => { new Promise((resolve, reject) => { res.map((entry: Contents) => { // @TODO Reformat this so that suitabilityFinder only accepts an array of days. const entryTag: SupplierType = entry.fileTag; // entryData in its raw form: // '0,0,0,4,5,6,7,0,0,0,11,12,13,14,15,16,17,0,0,0,0,22,23,24,25,26,27,0,0,0\n // 4\n // 140000\n // Vessel25' let entryData: string = entry.fileContents.split("\n"); //FileContentStructure let entryAvailability = suitabilityFinder( entryData[0].split(",").map((day: string) => parseInt(day) || 0), timeWindow, taskLength ); // Reconstruct our new object with our actual availability entryData // supplierId: string; // supplierCost: number; // supplierReputation: number; // supplierAvailability: number[]; const tempFCS: FileContentStructure = { supplierId: entryData[3], supplierCost: parseInt(entryData[2]) || -1, supplierReputation: parseInt(entryData[1]) || -1, supplierAvailability: entryAvailability, }; suppFormatted.push({ _type: entryTag, _metaData: { timeWindow: timeWindow, taskLength: taskLength, }, _content: tempFCS, } as FileStructure); }); // Formatted all our suppliers, let's send it back and return it for further processing. resolve(suppFormatted); }).then((e: any) => { Promise.resolve(suppFormatted); }); }); return suppFormatted; };
32.085714
97
0.540813
3.1875
2a04b5e61133f1109d87cb57307dd2d7839de70c
1,844
sql
SQL
tool-box/audit-log-detection-samples/queries/modify_policy.sql
pradeepsavadi/treasure-boxes
d169605492b68a96ca1bac7d162ed6907a946fb5
[ "MIT" ]
52
2019-07-27T07:59:29.000Z
2022-02-20T16:32:35.000Z
tool-box/audit-log-detection-samples/queries/modify_policy.sql
pradeepsavadi/treasure-boxes
d169605492b68a96ca1bac7d162ed6907a946fb5
[ "MIT" ]
62
2019-07-23T08:18:32.000Z
2022-02-10T01:48:24.000Z
tool-box/audit-log-detection-samples/queries/modify_policy.sql
isabella232/treasure-boxes
a7eb9a6416a71d4e0af8e6a9a2f5ed2a49d7961f
[ "MIT" ]
54
2019-07-23T14:22:09.000Z
2022-01-19T21:28:21.000Z
with source as ( select td_time_format(time, 'yyyy-MM-dd HH:mm:ss', 'JST') as time ,id ,resource_id as policy_id ,resource_name as policy_name ,new_value ,old_value from access where td_interval(time, '-1d', 'JST') and event_name = 'permission_policy_modify' and resource_name = 'test_ysmr_user' ) ,old as ( select time ,policy_id ,policy_name ,id ,k as item ,array_sort(array_agg(json_extract_scalar(v2, '$.operation'))) as value from ( select time ,policy_id ,policy_name ,id ,k, v from source cross join unnest(cast(json_parse(old_value) as map<varchar, array<json>>)) as t(k, v) ) t cross join unnest(v) as t(v2) group by 1,2,3,4,5 ) ,new as ( select time ,policy_id ,policy_name ,id ,k as item ,array_sort(array_agg(json_extract_scalar(v2, '$.operation'))) as value from ( select time ,policy_id ,policy_name ,id ,k, v from source cross join unnest(cast(json_parse(new_value) as map<varchar, array<json>>)) as t(k, v) ) t cross join unnest(v) as t(v2) group by 1,2,3,4,5 ) select COALESCE(new.time, old.time) as time ,COALESCE(new.id, old.id) as id ,COALESCE(new.policy_id, old.policy_id) as policy_id ,COALESCE(new.policy_name, old.policy_name) as policy_name ,COALESCE(new.item, old.item) as item ,new.value as new_value ,old.value as old_value from new full outer join old on new.id = old.id and new.item = old.item where COALESCE(new.item, old.item) != 'Integrations' and ( array_join(new.value, ',') != array_join(old.value, ',') or array_join(new.value, ',') is null or array_join(old.value, ',') is null ) order by id
18.626263
81
0.610629
3.109375
f01f18aee33b562489b0d767d928372db30a59d2
4,501
js
JavaScript
js/jquery.engage.js
jatin-dwebguys/jquery-engage
3258eae4ea8f61cb52424abd6fc0bcb6d65528b5
[ "Unlicense", "MIT" ]
5
2015-05-03T13:46:11.000Z
2019-06-13T16:02:19.000Z
js/jquery.engage.js
jatin-dwebguys/jquery-engage
3258eae4ea8f61cb52424abd6fc0bcb6d65528b5
[ "Unlicense", "MIT" ]
null
null
null
js/jquery.engage.js
jatin-dwebguys/jquery-engage
3258eae4ea8f61cb52424abd6fc0bcb6d65528b5
[ "Unlicense", "MIT" ]
2
2015-05-17T00:57:39.000Z
2015-08-27T09:06:32.000Z
/* * Project: Engage - Engage you readers better with a dynamic social footer * Author: Cedric Dugas, http://www.position-relative.net * License: MIT */ ;(function ( $, window, document, undefined ) { var pluginName = "engage", defaults = { scrollhide : false, offset : 0, contents : ["comment", "share", "newsletter"] }; function Plugin( element, options ) { this.element = element; this.options = $.extend( {}, defaults, options ); this._defaults = defaults; this._name = pluginName; this.init(); } Plugin.prototype = { init: function() { this.loadContent(); this.loadScroller(); }, allContent : [], loadContent : function () { var self = this, colClass = "column" + this.options.contents.length, time = this.getTimes(), $el = $(self.element); this.distanceTop = ($el.offset().top + $el.height()) - ($(window).height()/2)-100 + this.options.offset; $(document).on("click.engage", "#footerEngageContainer .btn_x", function(){ self.hideFull(); return false; }); $.each(this.options.contents, function(i, type){ var contents = $.engage.contents; if(contents[type] && contents[type].init){ contents[type].options = $.extend( {}, contents[type].defaults, self.options[type] ); var $content = $(contents[type].init(time)).addClass(colClass); self.allContent.push($content); } }); }, loadScroller : function () { var self = this; $(window).on("scroll.engage", function() { if ($(window).scrollTop() > self.distanceTop) { self.show(); }else{ if(self.options.scrollhide) self.hide(); } }); }, getTimes : function () { var today = new Date(); var curHr = today.getHours(); var time = ""; if(curHr<10){ time = "morning"; }else if(curHr<20){ time = "defaults"; }else{ time = "night"; } return time; }, show : function(){ if(!$("#footerEngageContainer").length) { var containerClass = "containerColumn" + this.options.contents.length, $content = $(this.getHTML(containerClass)), self = this; $.each(this.allContent, function(i , html){ $content.find("#footerEngage").append(html); if(i !== 
(self.allContent.length -1)){ $content.find("#footerEngage").append("<div class='separator'></div>"); } }); $("body").append($content); $("#footerEngage").animate({ marginTop:0 }); $(document).trigger("engage.show"); } }, hideFull : function () { this.noshow = true; this.destroy(); this.hide(); }, hide : function () { if(!$("#footerEngageContainer").length) return false; $("#footerEngage").animate({ marginTop:205 }, function(){ $(document).trigger("engage.hide"); $("#footerEngageContainer").remove(); }); }, getHTML : function(containerClass){ var text = "<div id='footerEngageContainer'>\ <div id='footerEngage' class='"+containerClass+"'>\ <a href='#' class='btn_x'>&#215;</a>\ </div>\ </div>"; return text; }, destroy : function () { $(document).trigger("engage.destroy"); $(document).off("click.engage"); $(window).off("scroll.engage"); } }; $[pluginName] = { contents : {} }; $.fn[pluginName] = function ( options ) { return this.each(function () { if (!$.data(this, "plugin_" + pluginName)) { $.data(this, "plugin_" + pluginName, new Plugin( this, options )); } }); }; })( jQuery, window, document );
34.623077
122
0.457232
3.046875
bd6b55304dedb6d35b28cc8ea4986cdd2dab2b5e
1,124
rs
Rust
src/bin/dev_server.rs
fstephany/yogurt
64f20ec1aaa403f782666576bbdc64a2a0d1685a
[ "MIT" ]
null
null
null
src/bin/dev_server.rs
fstephany/yogurt
64f20ec1aaa403f782666576bbdc64a2a0d1685a
[ "MIT" ]
null
null
null
src/bin/dev_server.rs
fstephany/yogurt
64f20ec1aaa403f782666576bbdc64a2a0d1685a
[ "MIT" ]
null
null
null
//! This program starts a webserver on port 8080. It exposes a static file server //! that can be used to develop Yogurt without hitting a real server. extern crate iron; extern crate staticfile; extern crate mount; use std::path::Path; use std::process::exit; use iron::Iron; use staticfile::Static; use mount::Mount; fn main() { let port = 8080; let address = "127.0.0.1"; let path: &Path = Path::new("./dev-server-files"); if !path.exists() { println!("Path {:?} does not exist.", path); exit(1) } if !path.is_dir() { println!("Path {:?} is not a directory.", path); exit(1) } let mut mount: Mount = Mount::new(); mount.mount("/", Static::new(path)); match Iron::new(mount).http((address, port)) { Ok(_) => { println!("Starting up http-server, serving path {:?}", path); println!("Available on:"); println!(" http://{}:{}", address, port); println!("Hit CTRL-C to stop the server") } Err(err) => { println!("{}", err); exit(1) } } }
24.977778
81
0.545374
3.09375
9c097ac4bf003dcdeb7d23372ddf0026c9172bf3
1,381
kt
Kotlin
src/test/kotlin/testutils/TestSource.kt
md-5/proguard-core
6cd532a03c82eb27b14920e38b44130500acdf5d
[ "Apache-2.0" ]
169
2020-06-02T10:20:06.000Z
2022-03-22T11:33:08.000Z
src/test/kotlin/testutils/TestSource.kt
md-5/proguard-core
6cd532a03c82eb27b14920e38b44130500acdf5d
[ "Apache-2.0" ]
23
2020-06-02T20:21:00.000Z
2022-01-14T16:47:48.000Z
src/test/kotlin/testutils/TestSource.kt
md-5/proguard-core
6cd532a03c82eb27b14920e38b44130500acdf5d
[ "Apache-2.0" ]
25
2020-06-16T12:53:56.000Z
2022-01-16T08:34:26.000Z
/* * ProGuard -- shrinking, optimization, obfuscation, and preverification * of Java bytecode. * * Copyright (c) 2002-2021 Guardsquare NV */ package testutils import com.tschuchort.compiletesting.SourceFile import org.intellij.lang.annotations.Language import java.io.File import java.lang.IllegalStateException sealed class TestSource { companion object { fun fromFile(file: File): TestSource { return when { file.isJavaFile() -> JavaSource(file.name, file.readText()) file.isKotlinFile() -> KotlinSource(file.name, file.readText()) else -> FileSource(file) } } } abstract fun asSourceFile(): SourceFile } class JavaSource(val filename: String, @Language("Java") val contents: String) : TestSource() { override fun asSourceFile() = SourceFile.java(filename, contents) } class KotlinSource(val filename: String, @Language("Kotlin") val contents: String) : TestSource() { override fun asSourceFile() = SourceFile.kotlin(filename, contents) } class FileSource(val file: File) : TestSource() { override fun asSourceFile(): SourceFile = SourceFile.fromPath(file) } class AssemblerSource(val filename: String, val contents: String) : TestSource() { override fun asSourceFile(): SourceFile = throw IllegalStateException("Should not be called") }
31.386364
99
0.692252
3.171875
dd0827d373f0d233d93fcc8ae90d1d1b6877ec8e
5,188
go
Go
internal/dnsmasq/dnsmasq_test.go
alankristen2004/blacklist
7cddd567a92bcea119f0b1ba5f300a031caeb108
[ "BSD-2-Clause-FreeBSD" ]
1
2021-05-16T13:30:32.000Z
2021-05-16T13:30:32.000Z
internal/dnsmasq/dnsmasq_test.go
alankristen2004/blacklist
7cddd567a92bcea119f0b1ba5f300a031caeb108
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
internal/dnsmasq/dnsmasq_test.go
alankristen2004/blacklist
7cddd567a92bcea119f0b1ba5f300a031caeb108
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
package dnsmasq import ( "encoding/json" "errors" "fmt" "io" "io/ioutil" "path/filepath" "testing" . "github.com/smartystreets/goconvey/convey" ) func TestConfigFile(t *testing.T) { Convey("Testing ConfigFile()", t, func() { var ( b []byte dir = "../testdata/etc/dnsmasq.d/" err error files []string r io.Reader ) Convey("Testing with a dnsmasq entries loaded from files", func() { files, err = filepath.Glob(dir + "*.conf") So(err, ShouldBeNil) for _, f := range files { Convey("Parsing file: "+f, func() { if r, err = ConfigFile(f); err != nil { Printf("cannot open configuration file %s!", f) } b, _ = ioutil.ReadAll(r) c := make(Conf) ip := "0.0.0.0" So(c.Parse(&Mapping{Contents: b}), ShouldBeNil) for k := range c { So(c.Redirect(k, ip), ShouldBeTrue) } }) } }) Convey("Testing a misdirected dnsmasq address entry...", func() { c := make(Conf) ip := "0.0.0.0" k := "address=/www.google.com/0.0.0.0" So(c.Parse(&Mapping{Contents: []byte(k)}), ShouldBeNil) So(c.Redirect(k, ip), ShouldBeFalse) }) }) } func BenchmarkFetchHost(b *testing.B) { for n := 0; n < b.N; n++ { fetchHost("www.microsoft.com", "0.0.0.0") } } func TestFetchHost(t *testing.T) { tests := []struct { conf Conf exp bool ip string key string name string }{ { ip: "0.0.0.0", key: "badguy_s.com", conf: Conf{"badguys.com": Host{IP: "0.0.0.0", Server: false}}, exp: false, name: "badguys.com", }, { ip: "127.0.0.1", key: "localhoster", conf: Conf{"localhost": Host{IP: "127.0.0.1", Server: false}}, exp: false, name: "localhoster", }, { ip: "127.0.0.1", key: "localhost", conf: Conf{"localhost": Host{IP: "#", Server: true}}, exp: true, name: "localServer", }, { ip: "127.0.0.1", key: "localhost", conf: Conf{"localhost": Host{IP: "127.0.0.1", Server: false}}, exp: true, name: "localhost", }, { ip: "127.0.0.1", exp: false, name: "no name", }, { ip: "::1", key: "localhost", conf: Conf{"localhost": Host{IP: "127.0.0.1", Server: false}}, exp: true, name: "localhost IPv6", }, } Convey("Testing String()", t, func() { for _, tt := 
range tests { Convey("current test "+tt.name, func() { So(fetchHost(tt.key, tt.ip), ShouldEqual, tt.exp) So(tt.conf.Redirect(tt.key, tt.ip), ShouldEqual, tt.exp) }) } }) } func TestMatchIP(t *testing.T) { tests := []struct { exp bool ip string ips []string name string }{ {name: "Fail with IPv4", exp: false, ip: "0.0.0.0", ips: []string{"192.150.200.1", "72.65.23.17", "204.78.13.40"}}, {name: "Fail with IPv6", exp: false, ip: "0.0.0.0", ips: []string{"0.0.0.0", "0.0.0.0", "fe80::7a8a:20ff:fe44:390d"}}, {name: "Loopback and unspecified", exp: false, ip: "0.0.0.0", ips: []string{"0.0.0.0", "127.0.0.1", "0.0.0.0"}}, {name: "Normal specified", exp: true, ip: "192.167.2.2", ips: []string{"192.167.2.2", "192.167.2.2", "192.167.2.2"}}, {name: "Normal unspecified", exp: true, ip: "0.0.0.0", ips: []string{"0.0.0.0", "0.0.0.0", "0.0.0.0"}}, } Convey("Testing matchIP() with:", t, func() { for _, tt := range tests { Convey(tt.name, func() { fmt.Println(matchIP(tt.ip, tt.ips)) So(matchIP(tt.ip, tt.ips), ShouldEqual, tt.exp) }) } }) } func TestParse(t *testing.T) { tests := []struct { act string Host err error exp string name string reader Mapping }{ { Host: Host{ IP: "127.0.0.1", Server: false, }, act: `{"badguys.com":{"IP":"0.0.0.0"}}`, err: nil, exp: "127.0.0.1", name: "badguys.com", reader: Mapping{Contents: []byte(`address=/badguys.com/0.0.0.0`)}, }, { Host: Host{ IP: "127.0.0.1", Server: true, }, act: `{"xrated.com":{"IP":"0.0.0.0","Server":true}}`, err: nil, exp: "127.0.0.1", name: "xrated.com", reader: Mapping{Contents: []byte(`server=/xrated.com/0.0.0.0`)}, }, { act: `{}`, err: errors.New("no dnsmasq configuration mapping entries found"), exp: "127.0.0.1", name: "No dnsmasq entry", reader: Mapping{Contents: []byte(`# All files in this directory will be read by dnsmasq as # configuration files, except if their names end in # ".dpkg-dist",".dpkg-old" or ".dpkg-new" # # This can be changed by editing /etc/default/dnsmasq`)}, }, } Convey("Conf map should show each map 
entry", t, func() { c := make(Conf) for _, tt := range tests { Convey("current test: "+tt.name, func() { if err := c.Parse(&tt.reader); err != nil { So(err.Error(), ShouldEqual, tt.err.Error()) } j, err := json.Marshal(c) So(err, ShouldBeNil) So(string(j), ShouldEqual, tt.act) }) } }) } func TestString(t *testing.T) { tests := []struct { conf Conf exp string }{ { conf: Conf{"badguys.com": Host{IP: "0.0.0.0", Server: false}}, exp: `{"badguys.com":{"IP":"0.0.0.0"}}`, }, { exp: `null`, }, } Convey("Testing String()", t, func() { for _, tt := range tests { So(tt.conf.String(), ShouldEqual, tt.exp) } }) }
22.955752
120
0.553778
3.09375
b2b0256c939c52a13bd5a7af9879c33ddb297de5
3,847
rs
Rust
ic-cron-rs/src/types.rs
seniorjoinu/ic-cron
33c9cdc794ade5d49381e904005bef3813336f69
[ "MIT" ]
29
2021-08-16T22:25:32.000Z
2022-03-27T15:57:00.000Z
ic-cron-rs/src/types.rs
seniorjoinu/ic-cron
33c9cdc794ade5d49381e904005bef3813336f69
[ "MIT" ]
2
2021-10-03T01:31:44.000Z
2021-12-08T11:33:56.000Z
ic-cron-rs/src/types.rs
seniorjoinu/ic-cron
33c9cdc794ade5d49381e904005bef3813336f69
[ "MIT" ]
3
2021-10-12T09:24:23.000Z
2021-12-05T18:47:04.000Z
use std::cmp::{max, min, Ordering}; use std::collections::BinaryHeap; use ic_cdk::export::candid::{ decode_one, encode_one, CandidType, Deserialize, Result as CandidResult, }; pub type TaskId = u64; #[derive(Clone, CandidType, Deserialize)] pub struct Task { pub kind: u8, pub data: Vec<u8>, } #[derive(Clone, CandidType, Deserialize)] pub enum Iterations { Infinite, Exact(u64), } #[derive(Clone, CandidType, Deserialize)] pub struct SchedulingInterval { pub duration_nano: u64, pub iterations: Iterations, } #[derive(Clone, CandidType, Deserialize)] pub struct ScheduledTask { pub id: TaskId, pub payload: Task, pub scheduled_at: u64, pub rescheduled_at: Option<u64>, pub scheduling_interval: SchedulingInterval, } impl ScheduledTask { pub fn new<TaskPayload: CandidType>( id: TaskId, kind: u8, payload: TaskPayload, scheduled_at: u64, rescheduled_at: Option<u64>, scheduling_interval: SchedulingInterval, ) -> CandidResult<Self> { let task = Task { kind, data: encode_one(payload).unwrap(), }; Ok(Self { id, payload: task, scheduled_at, rescheduled_at, scheduling_interval, }) } pub fn get_payload<'a, T>(&'a self) -> CandidResult<T> where T: Deserialize<'a> + CandidType, { decode_one(&self.payload.data) } pub fn get_kind(&self) -> u8 { self.payload.kind } } pub struct TaskTimestamp { pub task_id: TaskId, pub timestamp: u64, } impl PartialEq for TaskTimestamp { fn eq(&self, other: &Self) -> bool { self.timestamp.eq(&other.timestamp) && self.task_id.eq(&other.task_id) } } impl Eq for TaskTimestamp {} impl PartialOrd for TaskTimestamp { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.timestamp .partial_cmp(&other.timestamp) .map(|it| it.reverse()) } fn lt(&self, other: &Self) -> bool { self.timestamp.gt(&other.timestamp) } fn le(&self, other: &Self) -> bool { self.timestamp.ge(&other.timestamp) } fn gt(&self, other: &Self) -> bool { self.timestamp.lt(&other.timestamp) } fn ge(&self, other: &Self) -> bool { self.timestamp.le(&other.timestamp) } } impl Ord for 
TaskTimestamp { fn cmp(&self, other: &Self) -> Ordering { self.timestamp.cmp(&other.timestamp).reverse() } fn max(self, other: Self) -> Self where Self: Sized, { max(self, other) } fn min(self, other: Self) -> Self where Self: Sized, { min(self, other) } fn clamp(self, min: Self, max: Self) -> Self where Self: Sized, { if self.timestamp < max.timestamp { max } else if self.timestamp > min.timestamp { min } else { self } } } #[derive(Default)] pub struct TaskExecutionQueue(BinaryHeap<TaskTimestamp>); impl TaskExecutionQueue { #[inline(always)] pub fn push(&mut self, task: TaskTimestamp) { self.0.push(task); } pub fn pop_ready(&mut self, timestamp: u64) -> Vec<TaskTimestamp> { let mut cur = self.0.peek(); if cur.is_none() { return Vec::new(); } let mut result = vec![]; while cur.unwrap().timestamp <= timestamp { result.push(self.0.pop().unwrap()); cur = self.0.peek(); if cur.is_none() { break; } } result } #[inline(always)] pub fn is_empty(&self) -> bool { self.0.is_empty() } #[inline(always)] pub fn len(&self) -> usize { self.0.len() } }
21.254144
78
0.561996
3
36381c475196667a23667334f77474e6cef5d926
1,552
rs
Rust
src/day5.rs
MitMaro/advent-of-code-2015
0f5b6e48b7215ae7e3f03eebf1ebc70560586c8a
[ "0BSD" ]
1
2020-12-04T01:26:24.000Z
2020-12-04T01:26:24.000Z
src/day5.rs
MitMaro/advent-of-code-2015
0f5b6e48b7215ae7e3f03eebf1ebc70560586c8a
[ "0BSD" ]
null
null
null
src/day5.rs
MitMaro/advent-of-code-2015
0f5b6e48b7215ae7e3f03eebf1ebc70560586c8a
[ "0BSD" ]
null
null
null
use aoc_runner_derive::{aoc, aoc_generator}; #[aoc_generator(day5)] pub fn input_generator(input: &str) -> Vec<String> { input.lines().map(String::from).collect::<Vec<String>>() } #[aoc(day5, part1)] pub fn part1(lines: &Vec<String>) -> i32 { let mut total = 0; 'l: for line in lines { let mut double_found = false; let mut vowels = 0; let mut last_char = '\0'; for c in line.chars() { match c { 'a' | 'e' | 'i' | 'o' | 'u' => { vowels += 1; }, _ => {}, } if last_char == c { double_found = true; } match last_char { 'a' if c == 'b' => continue 'l, 'c' if c == 'd' => continue 'l, 'p' if c == 'q' => continue 'l, 'x' if c == 'y' => continue 'l, _ => {}, } last_char = c; } if double_found && vowels >= 3 { total += 1; } } total } #[aoc(day5, part2)] pub fn part2(lines: &Vec<String>) -> i32 { let mut total = 0; for line in lines { let mut double_found = false; let mut repeat_found = false; let mut last_char = '\0'; let mut second_last_char = '\0'; for (index, c) in line.chars().enumerate() { let mut inner_last_character = '\0'; for d in line.chars().skip(index + 1) { if inner_last_character != '\0' && last_char != '\0' && d == c && inner_last_character == last_char { double_found = true; break; } inner_last_character = d; } if second_last_char == c { repeat_found = true; } second_last_char = last_char; last_char = c; } if double_found && repeat_found { total += 1; } } total }
20.155844
105
0.55799
3.140625
ccbc1563b2f10d5c1abd23e025d94dc13978e374
2,180
lua
Lua
assets/scripts/extensions/movements.lua
DiegoG1014/PAC-MATH
6edc60c4d038f3086311ee031cfa2e3ca3234def
[ "MIT" ]
null
null
null
assets/scripts/extensions/movements.lua
DiegoG1014/PAC-MATH
6edc60c4d038f3086311ee031cfa2e3ca3234def
[ "MIT" ]
null
null
null
assets/scripts/extensions/movements.lua
DiegoG1014/PAC-MATH
6edc60c4d038f3086311ee031cfa2e3ca3234def
[ "MIT" ]
null
null
null
mega = mega or {} mega.movements={} --Defines a new move, you can then store it in a variable function mega.movements.newMove(acceleration,deacceleration,maximumspeed,x1,y1,x2,y2) local t = { acc = acceleration, deacc = deacceleration, topspeed = maximumspeed, x=x1, y=y1, finished=false } if x2 ~=nil and y2~=nil then t.targetx=x2 t.targety=y2 local angle=math.atan2((y2 - y1), (x2 - x1)) t.cosangle=math.cos(angle) t.sinangle=math.sin(angle) end if t.acc > 0 then t.vel = 0 else t.vel = t.topspeed end function t:jump() self.x=self.targetx self.y=self.targety self.vel=0 self.finished=true end function t:advance(dt) if self.targetx == nil or self.targety == nil then return end local distance = math.sqrt((self.targetx-self.x)^2 + (self.targety-self.y)^2) if distance < self.vel * dt or self.finished then --we almost reached it, skip the rest self:jump() return end local deaccDistance --the distance we need to fully stop if self.deacc > 0 then deaccDistance = self.vel^2 / (2 * self.deacc) else deaccDistance = 0 end if distance>deaccDistance then self.vel=math.min(self.vel + self.acc * dt, self.topspeed) --we are still far, accelerate (if possible) else self.vel=math.max(self.vel - self.deacc * dt,0) --we should be stopping end if self.vel == 0 then self:jump() return end self.x=self.x + self.vel * self.cosangle * dt self.y=self.y + self.vel * self.sinangle * dt self.finished=false end function t:resetVelocity() if self.acc > 0 then self.vel=0 else self.vel=self.topspeed end end function t:setTarget(x,y) self.finished=false self.targetx=x self.targety=y if x == nil or y == nil then self.finished=true self.vel=0 return end local angle = math.atan2((y - self.y), (x - self.x)) self.cosangle=math.cos(angle) self.sinangle=math.sin(angle) end function t:getPosition() return self.x, self.y end function t:setPosition(x, y) self.x = x self.y = y end function t:isFinished() return self.finished end return t end
23.695652
108
0.649541
3.078125
9be3a2e20817e1a448069c2304cbf6424e442f9b
3,795
js
JavaScript
api-server.js
ajay1133/twitter-latest-user-tweets
fe59ef3a5e063fa7265cdb2ea202aa70c7b87ece
[ "MIT" ]
null
null
null
api-server.js
ajay1133/twitter-latest-user-tweets
fe59ef3a5e063fa7265cdb2ea202aa70c7b87ece
[ "MIT" ]
null
null
null
api-server.js
ajay1133/twitter-latest-user-tweets
fe59ef3a5e063fa7265cdb2ea202aa70c7b87ece
[ "MIT" ]
null
null
null
const express = require("express"); const cors = require("cors"); const morgan = require("morgan"); const helmet = require("helmet"); const jwt = require("express-jwt"); const jwksRsa = require("jwks-rsa"); const axios = require("axios"); const authConfig = require("./src/auth_config.json"); const config = require("./config/config.json"); const app = express(); const port = process.env.API_PORT || 3001; const appPort = process.env.SERVER_PORT || 3000; const appOrigin = authConfig.appOrigin || `http://localhost:${appPort}`; const connection = require("./server/utils/connection.js"); const processTweets = require("./server/controllers/processTweets.js"); const searchTweets = require("./server/controllers/searchTweets.js"); if ( !authConfig.domain || !authConfig.audience || authConfig.audience === "YOUR_API_IDENTIFIER" ) { console.log( "Exiting: Please make sure that auth_config.json is in place and populated with valid domain and audience values" ); process.exit(); } app.use(morgan("dev")); app.use(helmet()); app.use(cors({ origin: appOrigin })); // Set up db connection (async () => { const useDb = await new Promise((resolve, reject) => { connection.connect((err) => { if (err) { resolve({ 'error': true }); } return connection.query('USE ??', [config.db_database], (err, result) => { if (err) { return resolve({ 'error': true }); } return resolve(result); }); }); }); if (useDb.error) { return console.error('Unable to establish MySQL connection'); } else { console.log('MYSQL connection established successfully'); } })(); const checkJwt = jwt({ secret: jwksRsa.expressJwtSecret({ cache: true, rateLimit: true, jwksRequestsPerMinute: 5, jwksUri: `https://${authConfig.domain}/.well-known/jwks.json`, }), audience: authConfig.audience, issuer: `https://${authConfig.domain}/`, algorithms: ["RS256"], }); const authTwitter = async (req, res, next) => { if (!req.user) { throw new Error('Invalid User'); } const subList = req.user.sub && typeof req.user.sub === 'string' ? 
req.user.sub.split('|') : []; req.user.user_id = subList.length > 1 ? subList[1] : null; if (!req.user.user_id) { throw new Error('Invalid User Id'); } const url = `https://${authConfig.domain}/oauth/token`; try { const token = process.env.AUTH0_MANAGEMENT_API_TOKEN; const postData = { client_id: authConfig.auth0_management_clientId, client_secret: authConfig.auth0_management_clientSecret, audience: `https://${authConfig.domain}/api/v2/`, grant_type: "client_credentials" }; const config = { headers: { "Content-type": "application/json", "Authorization": `Bearer ${token}` } }; const result = await axios.post(url, postData, config); if (!(result && result.data && result.data.access_token)) { return; } req.user.access_token = result.data.access_token; next(); } catch (e) { console.log(e); } }; // Routes app.get("/api/test-server", (req, res) => { res.send({ msg: `API Server listening on port ${port}`, }); }); // Search Tweets app.get("/api/search-tweets", searchTweets); // Health check jwt app.get("/api/test-auth0-token", checkJwt, (req, res) => { res.send({ msg: "Your auth0 access token was successfully validated!", }); }); // Health check twitter middleware authentication app.get("/api/test-twitter-token", checkJwt, authTwitter, (req, res) => { res.send({ msg: "Your twitter access token was successfully validated!", }); }); // Process tweets app.get("/api/process-tweets", checkJwt, authTwitter, processTweets); app.listen(port, () => console.log(`API Server listening on port ${port}`));
29.648438
117
0.646377
3.15625
65004332cb733aa8aa9fc8e64faf35799f2ce289
2,158
py
Python
shepherd/blueprints/editor/__init__.py
Systemetric/shepherd
28473503130cddd2c40702240f3deaad3a21e52b
[ "BSD-2-Clause" ]
null
null
null
shepherd/blueprints/editor/__init__.py
Systemetric/shepherd
28473503130cddd2c40702240f3deaad3a21e52b
[ "BSD-2-Clause" ]
8
2017-12-13T15:27:52.000Z
2019-01-27T21:35:14.000Z
shepherd/blueprints/editor/__init__.py
Systemetric/shepherd
28473503130cddd2c40702240f3deaad3a21e52b
[ "BSD-2-Clause" ]
null
null
null
import json import os import os.path as path import re from flask import Blueprint, request blueprint = Blueprint("editor", __name__) robotsrc_path = path.join(os.getcwd(), "robotsrc") if not path.exists(robotsrc_path): os.mkdir(robotsrc_path) main_path = path.join(robotsrc_path, 'main.py') main_file = open(main_path, 'w') main_file.write('# DO NOT DELETE\n') main_file.close() blocks_path = path.join(robotsrc_path, 'blocks.json') @blueprint.route('/') def get_files(): project_paths = [f for f in os.listdir(robotsrc_path) if path.isfile(path.join(robotsrc_path, f)) and (f.endswith('.py') or f.endswith(".xml") or f == "blocks.json") and f != 'main.py'] def read_project(project_path): with open(path.join(robotsrc_path, project_path), 'r') as project_file: content = project_file.read() return { 'filename': project_path, 'content': content } blocks = {} if path.exists(blocks_path): with open(blocks_path, 'r') as blocks_file:# try: blocks = json.load(blocks_file) except ValueError: pass if "requires" not in blocks: blocks["requires"] = [] if "header" not in blocks: blocks["header"] = "" if "footer" not in blocks: blocks["footer"] = "" if "blocks" not in blocks: blocks["blocks"] = [] return json.dumps({ 'main': main_path, 'blocks': blocks, 'projects': list(map(read_project, project_paths)) }) @blueprint.route("/save/<string:filename>", methods=["POST"]) def save_file(filename): dots = len(re.findall("\.", filename)) if dots == 1: with open(path.join(robotsrc_path, filename), 'w') as f: f.write(request.data.decode('utf-8')) return "" @blueprint.route("/delete/<string:filename>", methods=["DELETE"]) def delete_file(filename): if filename == "blocks.json": return "" dots = len(re.findall("\.", filename)) if dots == 1: os.unlink(path.join(robotsrc_path, filename)) return ""
28.394737
88
0.596386
3.03125
7f4a1c69cd37f0351599371abc3dfbf991ae8c59
4,773
go
Go
pkg/volume/overlay/overlay.go
huiwq1990/mydocker
6bf6b3ea8f0a15efb8ab196d9af807e51c4d5390
[ "Apache-2.0" ]
null
null
null
pkg/volume/overlay/overlay.go
huiwq1990/mydocker
6bf6b3ea8f0a15efb8ab196d9af807e51c4d5390
[ "Apache-2.0" ]
null
null
null
pkg/volume/overlay/overlay.go
huiwq1990/mydocker
6bf6b3ea8f0a15efb8ab196d9af807e51c4d5390
[ "Apache-2.0" ]
null
null
null
package overlay import ( "errors" "github.com/huiwq1990/mydocker/pkg/image" "github.com/huiwq1990/mydocker/pkg/types" "github.com/huiwq1990/mydocker/pkg/util" log "github.com/sirupsen/logrus" "path" "os" "os/exec" "strings" ) func NewWorkSpace(volume, imageName, containerName string) (string,error) { err := createReadOnlyLayer(imageName) if err != nil { return "",err } workDir,err := doOver(imageName,containerName) if err != nil { return "",err } if volume != "" { volumeURLs := strings.Split(volume, ":") length := len(volumeURLs) if length == 2 && volumeURLs[0] != "" && volumeURLs[1] != "" { if err := MountVolume(volumeURLs[0],volumeURLs[1], containerName); err != nil { return "",err } } else { return "",errors.New("Volume parameter input is not correct.") } } //TODO 需要结合overlay的upper worker层看看怎么实现 return workDir,nil } func doOver(imageName,containerName string) (string,error){ containerRootDir := path.Join(types.WriteLayerUrl, containerName) log.Infof("create write layer. %s",containerRootDir) if err := os.MkdirAll(containerRootDir, 0777); err != nil { log.Errorf("Mkdir write layer dir %s error. %v", containerRootDir, err) return "", err } upperDir := path.Join(containerRootDir,"upper") log.Debugf("create write layer. %s",upperDir) if err := os.MkdirAll(upperDir, 0777); err != nil { log.Errorf("Mkdir write layer dir %s error. %v", upperDir, err) return "", err } workDir := path.Join(containerRootDir,"worker") log.Debugf("create write layer. %s",workDir) if err := os.MkdirAll(workDir, 0777); err != nil { log.Errorf("Mkdir write layer dir %s error. %v", workDir, err) return "", err } mergeDir := path.Join(containerRootDir, "merge") log.Infof("create write layer. %s",mergeDir) if err := os.MkdirAll(mergeDir, 0777); err != nil { log.Errorf("Mkdir write layer dir %s error. 
%v", mergeDir, err) return "", err } //mount -t overlay overlay -o lowerdir=/root/busybox,upperdir=upper,workdir=worker merge oDir := "lowerdir=/root/"+imageName+",upperdir="+ upperDir +",workdir="+workDir log.Debugf("mount -t overlay overlay -o %s %s",oDir,mergeDir) out, err := exec.Command("mount", "-t", "overlay","overlay", "-o", oDir, mergeDir).CombinedOutput() if err != nil { //'Special device overlay doesn't exist' 可能是目录不存在 log.Errorf("mount overlay failed, output:%s, err:%v", string(out), err) return "", err } return mergeDir, err } func MountVolume(from string, target string, containerName string) error { targetMountDir := path.Join(types.WriteLayerUrl, containerName,"merge",target) if err := os.MkdirAll(targetMountDir, 0777); err != nil { log.Infof("Mkdir parent dir %s error. %v", targetMountDir, err) } log.Debugf("mount --bind %s %s",from,targetMountDir) _, err := exec.Command("mount", "--bind", from, targetMountDir).CombinedOutput() if err != nil { log.Errorf("Mount volume failed. 
%v", err) return err } out, err := exec.Command("mount","-o","remount,rw,bind",targetMountDir).CombinedOutput() if err != nil { log.Errorf("Mount volume failed.%v %v",string(out), err) return err } return nil } func DeleteWorkSpace(volume, containerName string) error{ if volume != "" { volumeURLs := strings.Split(volume, ":") length := len(volumeURLs) if length == 2 && volumeURLs[0] != "" && volumeURLs[1] != "" { if _, err := exec.Command("umount", path.Join(types.WriteLayerUrl,containerName,"upper",volumeURLs[0])).CombinedOutput(); err != nil { return err } }else{ return errors.New("mount config error" + volume) } } return DeleteMountPoint(containerName) } func DeleteMountPoint(containerId string) error { mountDir := path.Join(types.WriteLayerUrl,containerId, "merge") _, err := exec.Command("umount", mountDir).CombinedOutput() if err != nil { log.Errorf("Unmount %s error %v", mountDir, err) return err } if err := os.RemoveAll(path.Join(types.WriteLayerUrl,containerId)); err != nil { return err } return nil } func createReadOnlyLayer(imageName string) error { untarFolderUrl := path.Join(types.ImageRepository, imageName) exist, err := util.PathExists(untarFolderUrl) if err != nil { log.Errorf("Fail to judge whether dir %s exists. %v", imageName, err) return err } if !exist { tarUrl, err := image.GetImageTar(imageName) if err != nil{ return err } log.Debugf("crate read only layer. image:%s, untarurl:%s",untarFolderUrl,tarUrl) if err := os.MkdirAll(untarFolderUrl, 0622); err != nil { log.Errorf("Mkdir %s error %v", untarFolderUrl, err) return err } if _, err := exec.Command("tar", "-xvf", tarUrl, "-C", untarFolderUrl).CombinedOutput(); err != nil { log.Errorf("Untar dir %s error %v", tarUrl, err) return err } } return nil }
29.462963
137
0.679656
3.09375
b2f7f4cc70879c961d4345ed522c0b9c510c8bf6
5,218
py
Python
scratch/movielens-mongodb.py
crcsmnky/movielens-data-exports
f316f1367abef80a1abce64d3adb3bd3effc6365
[ "Apache-2.0" ]
1
2022-02-01T19:44:36.000Z
2022-02-01T19:44:36.000Z
scratch/movielens-mongodb.py
crcsmnky/movielens-data-exports
f316f1367abef80a1abce64d3adb3bd3effc6365
[ "Apache-2.0" ]
null
null
null
scratch/movielens-mongodb.py
crcsmnky/movielens-data-exports
f316f1367abef80a1abce64d3adb3bd3effc6365
[ "Apache-2.0" ]
null
null
null
""" usage: python movielens-mongodb.py [movies] [ratings] [links] """ import sys import re import csv import os # import tmdbsimple as tmdb from pymongo import MongoClient from pymongo import ASCENDING, DESCENDING from datetime import datetime from time import sleep def import_movies(db, mfile): movies = [] mcsv = csv.DictReader(mfile) for row in mcsv: movie = { 'movieid': int(row['movieId']), 'title': row['title'].split(' (')[0], 'year': row['title'].split(' (')[-1][:-1], 'genres': row['genres'].split('|') } movies.append(movie) if (len(movies) % 1000) == 0: # print count, "movies inserted" db.command('insert', 'movies', documents=movies, ordered=False) movies = [] if len(movies) > 0: db.command('insert', 'movies', documents=movies, ordered=False) def import_ratings(db, rfile): count = 0 ratings, movies, users = [], [], [] rcsv = csv.DictReader(rfile) for row in rcsv: rating = { 'movieid': int(row['movieId']), 'userid': int(row['userId']), 'rating': float(row['rating']), 'ts': datetime.fromtimestamp(float(row['timestamp'])) } ratings.append(rating) movie_update = { 'q': { 'movieid': int(row['movieId']) }, 'u': { '$inc': { 'ratings' : 1, 'total_rating': float(row['rating']) } } } movies.append(movie_update) user_update = { 'q': { 'userid' : int(row['userId']) }, 'u': { '$inc': { 'ratings': 1 } }, 'upsert': True } users.append(user_update) count += 1 if (count % 1000) == 0: # print count, "ratings inserted, movies updated, users updated" db.command('insert', 'ratings', documents=ratings, ordered=False) db.command('update', 'movies', updates=movies, ordered=False) db.command('update', 'users', updates=users, ordered=False) ratings, movies, users = [], [], [] if count > 0: db.command('insert', 'ratings', documents=ratings, ordered=False) db.command('update', 'movies', updates=movies, ordered=False) db.command('update', 'users', updates=users, ordered=False) def import_links(db, lfile): count = 0 movies = [] lcsv = csv.DictReader(lfile) for row in lcsv: try: 
movies.append({ 'q': {'movieid': int(row['movieId'])}, 'u': { '$set': { 'imdb': row['imdbId'], 'tmdb': row['tmdbId'] }} }) count += 1 except: continue if (count % 1000) == 0: db.command('update', 'movies', updates=movies, ordered=False) movies = [] if count > 0: db.command('update', 'movies', updates=movies, ordered=False) def create_genres(db): docs = list(db.movies.aggregate([ {'$unwind' : '$genres'}, {'$group': { '_id': '$genres', 'count': {'$sum': 1} }}, ], cursor={})) genres = [ {'_id': idx, 'name': doc['_id'], 'count': doc['count']} for idx, doc in enumerate(docs) ] db.command('insert', 'genres', documents=genres, ordered=False) def update_avg_ratings(db): movies = db.movies.find() for m in movies: try: db.movies.update_one({'_id': m['_id']}, {'$set': {'avg_rating': float(m['total_rating'])/m['ratings']}}) except: continue def get_poster_links(db): tmdb.API_KEY='[YOUR API KEY HERE]' conf = tmdb.Configuration() imgurl = conf.info()['images']['base_url'] + 'w154' + '{path}' allmovies = db.movies.find() for i in xrange(0, allmovies.count(), 40): print i for j in xrange(i, i+40): try: movie = tmdb.Movies(int(allmovies[j]['tmdb'])).info() db.movies.update_one( {'_id': allmovies[j]['_id']}, {'$set': {'poster': imgurl.format(path=movie['poster_path'])}} ) except: continue sleep(10) def ensure_indexes(db): db.movies.ensure_index("movieid") db.movies.ensure_index("ratings") db.movies.ensure_index("genres") db.ratings.ensure_index([("userid", ASCENDING),("movieid", ASCENDING)]) db.users.ensure_index("userid") db.genres.ensure_index("name") def main(): host=os.environ.get('MONGODB_HOST', 'localhost') port=os.environ.get('MONGODB_PORT', 27017) database=os.environ.get('MONGODB_DB', 'movieweb') db = MongoClient(host, port)[database] with open(sys.argv[1]) as mfile: import_movies(db, mfile) with open(sys.argv[2]) as rfile: import_ratings(db, rfile) with open(sys.argv[3]) as lfile: import_links(db, lfile) create_genres(db) update_avg_ratings(db) get_poster_links(db) # 
ensure_indexes(db) if __name__ == '__main__': main()
26.622449
116
0.526639
3.03125
74e25c49306863ca568da2ea28775673566f1870
9,402
rs
Rust
languages/rust/oso/src/query.rs
srenatus/oso
f5ee73c195d0e381841bb1916ae9b223f6332f09
[ "Apache-2.0" ]
null
null
null
languages/rust/oso/src/query.rs
srenatus/oso
f5ee73c195d0e381841bb1916ae9b223f6332f09
[ "Apache-2.0" ]
null
null
null
languages/rust/oso/src/query.rs
srenatus/oso
f5ee73c195d0e381841bb1916ae9b223f6332f09
[ "Apache-2.0" ]
null
null
null
use std::collections::HashMap; use std::sync::{Arc, Mutex}; use crate::host::{Instance, PolarResultIter}; use crate::{FromPolar, ToPolar}; use polar_core::events::*; use polar_core::terms::*; impl Iterator for Query { type Item = crate::Result<ResultSet>; fn next(&mut self) -> Option<Self::Item> { Query::next_result(self) } } pub struct Query { inner: polar_core::polar::Query, calls: HashMap<u64, PolarResultIter>, host: Arc<Mutex<crate::host::Host>>, } impl Query { pub fn new(inner: polar_core::polar::Query, host: Arc<Mutex<crate::host::Host>>) -> Self { Self { calls: HashMap::new(), inner, host, } } pub fn next_result(&mut self) -> Option<crate::Result<ResultSet>> { loop { let event = self.inner.next()?; check_messages!(self.inner); if let Err(e) = event { return Some(Err(e.into())); } let event = event.unwrap(); tracing::debug!(event=?event); let result = match event { QueryEvent::None => Ok(()), QueryEvent::Done => return None, QueryEvent::Result { bindings, .. } => { return Some(Ok(ResultSet { bindings, host: self.host.clone(), })); } QueryEvent::MakeExternal { instance_id, constructor, } => self.handle_make_external(instance_id, constructor), QueryEvent::ExternalCall { call_id, instance, attribute, args, } => self.handle_external_call(call_id, instance, attribute, args), QueryEvent::ExternalOp { call_id, operator, args, } => self.handle_external_op(call_id, operator, args), QueryEvent::ExternalIsa { call_id, instance, class_tag, } => self.handle_external_isa(call_id, instance, class_tag), QueryEvent::ExternalUnify { call_id, left_instance_id, right_instance_id, } => self.handle_external_unify(call_id, left_instance_id, right_instance_id), QueryEvent::ExternalIsSubSpecializer { call_id, instance_id, left_class_tag, right_class_tag, } => self.handle_external_is_subspecializer( call_id, instance_id, left_class_tag, right_class_tag, ), QueryEvent::Debug { message } => self.handle_debug(message), }; if let Err(e) = result { // TODO (dhatch): These seem to be 
getting swallowed tracing::error!("application error {}", e); self.application_error(e); } } } fn question_result(&mut self, call_id: u64, result: bool) { self.inner.question_result(call_id, result); } fn call_result(&mut self, call_id: u64, result: Box<dyn ToPolar>) -> crate::Result<()> { let mut host = self.host.lock().unwrap(); let value = result.to_polar(&mut host); Ok(self.inner.call_result(call_id, Some(value))?) } fn call_result_none(&mut self, call_id: u64) -> crate::Result<()> { Ok(self.inner.call_result(call_id, None)?) } fn application_error(&mut self, error: crate::OsoError) { self.inner.application_error(error.to_string()) } fn handle_make_external(&mut self, instance_id: u64, constructor: Term) -> crate::Result<()> { let mut host = self.host.lock().unwrap(); match constructor.value() { Value::InstanceLiteral(InstanceLiteral { .. }) => todo!("instantiate from literal"), Value::Call(Call { name, args, .. }) => { let _instance = host.make_instance(name, args.clone(), instance_id); } _ => panic!("not valid"), } Ok(()) } fn register_call( &mut self, call_id: u64, instance: Instance, name: Symbol, args: Option<Vec<Term>>, ) -> crate::Result<()> { if self.calls.get(&call_id).is_none() { let (f, args) = if let Some(args) = args { if let Some(m) = instance.methods.get(&name) { (m, args) } else { return lazy_error!("instance method not found"); } } else if let Some(attr) = instance.attributes.get(&name) { (attr, vec![]) } else { return lazy_error!("attribute lookup not found"); }; tracing::trace!(call_id, name = %name, args = ?args, "register_call"); let host = &mut self.host.lock().unwrap(); let result = f.invoke(instance.instance.as_ref(), args, host)?; self.calls.insert(call_id, result.to_polar_results()); } Ok(()) } fn next_call_result( &mut self, call_id: u64, ) -> Option<Result<Box<dyn ToPolar>, crate::OsoError>> { self.calls.get_mut(&call_id).and_then(|c| c.next()) } fn handle_external_call( &mut self, call_id: u64, instance: Term, name: Symbol, args: 
Option<Vec<Term>>, ) -> crate::Result<()> { let instance = Instance::from_polar(&instance, &mut self.host.lock().unwrap()).unwrap(); if let Err(e) = self.register_call(call_id, instance, name, args) { self.application_error(e); return self.call_result_none(call_id); } if let Some(result) = self.next_call_result(call_id) { match result { Ok(r) => self.call_result(call_id, r), Err(e) => { self.application_error(e); self.call_result_none(call_id) } } } else { self.call_result_none(call_id) } } fn handle_external_op( &mut self, call_id: u64, operator: Operator, args: Vec<Term>, ) -> crate::Result<()> { assert_eq!(args.len(), 2); let res = { let mut host = self.host.lock().unwrap(); let args = [ Instance::from_polar(&args[0], &mut host).unwrap(), Instance::from_polar(&args[1], &mut host).unwrap(), ]; host.operator(operator, args)? }; self.question_result(call_id, res); Ok(()) } fn handle_external_isa( &mut self, call_id: u64, instance: Term, class_tag: Symbol, ) -> crate::Result<()> { tracing::debug!(instance = ?instance, class = %class_tag, "isa"); let res = self.host.lock().unwrap().isa(instance, &class_tag); self.question_result(call_id, res); Ok(()) } fn handle_external_unify( &mut self, call_id: u64, left_instance_id: u64, right_instance_id: u64, ) -> crate::Result<()> { let res = self .host .lock() .unwrap() .unify(left_instance_id, right_instance_id)?; self.question_result(call_id, res); Ok(()) } fn handle_external_is_subspecializer( &mut self, call_id: u64, instance_id: u64, left_class_tag: Symbol, right_class_tag: Symbol, ) -> crate::Result<()> { let res = self.host.lock().unwrap().is_subspecializer( instance_id, &left_class_tag, &right_class_tag, ); self.question_result(call_id, res); Ok(()) } fn handle_debug(&mut self, message: String) -> crate::Result<()> { eprintln!("TODO: {}", message); check_messages!(self.inner); Ok(()) } } #[derive(Clone)] pub struct ResultSet { pub bindings: polar_core::kb::Bindings, pub host: Arc<Mutex<crate::host::Host>>, } impl 
ResultSet { pub fn get(&self, name: &str) -> Option<crate::Value> { self.bindings .get(&Symbol(name.to_string())) .map(|t| t.value().clone()) } pub fn get_typed<T: crate::host::FromPolar>(&self, name: &str) -> crate::Result<T> { self.bindings .get(&Symbol(name.to_string())) .ok_or_else(|| crate::OsoError::FromPolar) .and_then(|term| T::from_polar(term, &mut self.host.lock().unwrap())) } } impl std::fmt::Debug for ResultSet { fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { write!(fmt, "{:#?}", self.bindings) } } impl<S: AsRef<str>, T: crate::host::FromPolar + PartialEq<T>> PartialEq<HashMap<S, T>> for ResultSet { fn eq(&self, other: &HashMap<S, T>) -> bool { other.iter().all(|(k, v)| { self.get_typed::<T>(k.as_ref()) .map(|binding| &binding == v) .unwrap_or(false) }) } }
32.088737
98
0.507871
3.03125
c38dfe84cc1e43b51eebeba0f3d8b5817fa43fc0
2,197
rs
Rust
day-03/src/main.rs
huzecong/advent-of-code-2018
c9c161e5aa45f25bace51dea68b909b2f3dd479a
[ "MIT" ]
1
2018-12-23T00:50:00.000Z
2018-12-23T00:50:00.000Z
day-03/src/main.rs
huzecong/advent-of-code-2018
c9c161e5aa45f25bace51dea68b909b2f3dd479a
[ "MIT" ]
null
null
null
day-03/src/main.rs
huzecong/advent-of-code-2018
c9c161e5aa45f25bace51dea68b909b2f3dd479a
[ "MIT" ]
null
null
null
extern crate utils; use std::fs; struct Rectange { id: i32, x: usize, y: usize, w: usize, h: usize, } fn parse_numeric<'a, T, I>(iter: I) -> Vec<T> where T: std::str::FromStr, I: Iterator<Item=&'a str> { iter.map(|x| x.parse::<T>().ok().unwrap()).collect() } fn parse(s: &str) -> Rectange { let parts = s.split_whitespace().collect::<Vec<&str>>(); let id = parts[0][1..].parse::<i32>().unwrap(); let x_y = parse_numeric::<usize, _>(parts[2][..(parts[2].len() - 1)].split(",")); let w_h = parse_numeric::<usize, _>(parts[3].split("x")); Rectange { id: id, x: x_y[0], y: x_y[1], w: w_h[0], h: w_h[1] } } fn main() { let input = fs::read_to_string("day-03/input.txt").ok().unwrap(); // temporary bound to the scope let rects: Vec<Rectange> = input.split_terminator("\n").map(parse).collect(); // Part 1 let width = rects.iter().map(|rect| rect.x + rect.w).max().unwrap() as usize; let height = rects.iter().map(|rect| rect.y + rect.h).max().unwrap() as usize; let mut counts = vec![vec![0; width + 1]; height + 1]; // prefix array for rect in rects.iter() { let right = rect.x + rect.w; let down = rect.y + rect.h; counts[rect.x][rect.y] += 1; counts[right][rect.y] -= 1; counts[rect.x][down] -= 1; counts[right][down] += 1; } utils::cumsum(&mut counts, width, height); let mut overlap = 0; for i in 0..width { for j in 1..height { if counts[i][j] > 1 { overlap += 1; } } } println!("{}", overlap); // Part 2 utils::cumsum(&mut counts, width, height); let get_counts = |x: usize, y: usize| -> i32 { if x > 0 && y > 0 { counts[x - 1][y - 1] } else { 0 } }; for rect in rects.iter() { let right = rect.x + rect.w; let down = rect.y + rect.h; let sum = get_counts(right, down) - get_counts(right, rect.y) - get_counts(rect.x, down) + get_counts(rect.x, rect.y); let area = (rect.w * rect.h) as i32; if sum == area { println!("{}", rect.id); break; } } }
30.513889
126
0.514793
3.40625
75c6bf8938fdca079301f46661f466f83dfed727
2,397
rs
Rust
src/font/mod.rs
BarePotato/figglebit
d0a6e279ff29cbef82df4b765ba07d4470e48146
[ "MIT" ]
null
null
null
src/font/mod.rs
BarePotato/figglebit
d0a6e279ff29cbef82df4b765ba07d4470e48146
[ "MIT" ]
null
null
null
src/font/mod.rs
BarePotato/figglebit
d0a6e279ff29cbef82df4b765ba07d4470e48146
[ "MIT" ]
1
2021-03-17T16:17:51.000Z
2021-03-17T16:17:51.000Z
// flf2a$ 6 5 20 15 3 0 143 229 NOTE: The first five characters in // | | | | | | | | | | the entire file must be "flf2a". // / / | | | | | | | \ // Signature / / | | | | | \ Codetag_Count // Hardblank / / | | | \ Full_Layout* // Height / | | \ Print_Direction // Baseline / \ Comment_Lines // Max_Length Old_Layout* pub(super) mod character; mod parser; use character::Char; #[derive(Debug)] pub enum ParseErr { MissingFlf2a, MissingHardBlank, IncompleteHeader, IncompleteFile, EmptyGlyph, EOLCharacterMissmatch, } type Result<T> = std::result::Result<T, ParseErr>; // ----------------------------------------------------------------------------- // - Font - // ----------------------------------------------------------------------------- #[derive(Debug)] pub struct Font { pub(crate) header: Header, chars: Vec<Char>, } impl Font { pub fn to_chars(&self, s: &str) -> Vec<&Char> { s.as_bytes() .iter() .map(|b| (b - 32) as usize) .map(|i| &self.chars[i]) .collect::<Vec<_>>() } } // ----------------------------------------------------------------------------- // - Header - // ----------------------------------------------------------------------------- // -1 full-width layout by default // 0 horizontal fitting (kerning) layout by default* // 1 apply horizontal smushing rule 1 by default // 2 apply horizontal smushing rule 2 by default // 4 apply horizontal smushing rule 3 by default // 8 apply horizontal smushing rule 4 by default // 16 apply horizontal smushing rule 5 by default // 32 apply horizontal smushing rule 6 by default bitflags::bitflags! 
{ pub struct OldLayout: i16 { const FULL_WIDTH = -1; const KERNING = 0; const HORZ_SMUSH_1 = 1; const HORZ_SMUSH_2 = 2; const HORZ_SMUSH_3 = 4; const HORZ_SMUSH_4 = 8; const HORZ_SMUSH_5 = 16; const HORZ_SMUSH_6 = 32; } } #[derive(Debug)] pub(crate) struct Header { pub hard_blank: char, pub(crate) height: i16, baseline: i16, max_len: i16, pub old_layout: OldLayout, comment_lines: usize, print_dir: i16, full_layout: Option<i16>, code_tag_count: Option<i16>, } pub use parser::parse;
28.535714
80
0.497705
3.046875
7b1584d39805283e8b170137d006b0f37fd6fa1a
1,259
rb
Ruby
support/generators/halcyon/templates/config/init.rb
mtodd/halcyon
c4cb414bf7b05176c844ff589a580aacf82d5748
[ "MIT" ]
9
2015-11-05T12:12:57.000Z
2021-09-05T21:45:51.000Z
support/generators/halcyon/templates/config/init.rb
albertobraschi/halcyon
c4cb414bf7b05176c844ff589a580aacf82d5748
[ "MIT" ]
null
null
null
support/generators/halcyon/templates/config/init.rb
albertobraschi/halcyon
c4cb414bf7b05176c844ff589a580aacf82d5748
[ "MIT" ]
4
2015-08-15T10:31:04.000Z
2021-07-13T19:16:02.000Z
Halcyon.config.use do |c| # = Framework # # <tt>allow_from</tt>: specifies what connections to accept; # * <tt>all</tt>: allow connections from all clients # * <tt>local</tt>: only allow connections from the same host (localhost et al) # * <tt>halcyon_clients</tt>: only Halcyon clients (tests the User-Agent only) c[:allow_from] = :all # = Environment # # Uncomment to manually specify the environment to run the application in. # Defaults to <tt>:development</tt>. # # c[:environment] = :production # = Logging # # Configures the logging client in the framework, including destination, # level filter, and what logger to use. # # <tt>type</tt>: the logger to use (defaults to Ruby's <tt>Logger</tt>) # * <tt>Logger</tt> # * <tt>Analogger</tt> # * <tt>Logging</tt> # * <tt>Log4r</tt> # <tt>file</tt>: the log file; leave unset for STDOUT # <tt>level</tt>: the message filter level (default to <tt>debug</tt>) # * specific to the client used, often is: debug, info, warn, error, fatal # = Logging c[:logging] = { :type => 'Logger', # :file => nil, # nil is STDOUT :level => 'debug' } # = Application # # Your application-specific configuration options here. end
29.27907
81
0.633042
3.171875
261194722df6d6289db3c7213e4d360ec7da2468
1,314
java
Java
src/main/java/io/github/suxil/rsql/util/StringUtils.java
suxil/rsql
ddcd275efdd2acf85673b20d7980ee9498816b5c
[ "MIT" ]
3
2021-06-29T14:40:39.000Z
2022-02-14T06:23:48.000Z
src/main/java/io/github/suxil/rsql/util/StringUtils.java
suxil/rsql
ddcd275efdd2acf85673b20d7980ee9498816b5c
[ "MIT" ]
null
null
null
src/main/java/io/github/suxil/rsql/util/StringUtils.java
suxil/rsql
ddcd275efdd2acf85673b20d7980ee9498816b5c
[ "MIT" ]
null
null
null
package io.github.suxil.rsql.util; import java.util.List; /** * 字符串工具类 * * @author lu_it * @since V1.0 */ public class StringUtils { public static boolean isBlank(String str) { int strLen; if (str != null && (strLen = str.length()) != 0) { for(int i = 0; i < strLen; ++i) { if (!Character.isWhitespace(str.charAt(i))) { return false; } } return true; } else { return true; } } public static boolean hasText(CharSequence str) { return (str != null && str.length() > 0 && containsText(str)); } private static boolean containsText(CharSequence str) { int strLen = str.length(); for (int i = 0; i < strLen; i++) { if (!Character.isWhitespace(str.charAt(i))) { return true; } } return false; } public static String join(List<?> list, String delim) { if (list == null) { return ""; } StringBuilder sb = new StringBuilder(); for (int i = 0; i < list.size(); i++) { if (i > 0) { sb.append(delim); } sb.append(list.get(i)); } return sb.toString(); } }
22.271186
70
0.468037
3
7169d494286e3a36ab98e29cd8aebf0b6f5ee5aa
3,892
ts
TypeScript
src/common/query.ts
rolandbernard/marvin
60be93f622b65652d07e659eaf81dd763cf8b3eb
[ "MIT" ]
5
2020-11-30T11:48:12.000Z
2022-02-21T15:48:49.000Z
src/common/query.ts
rolandbernard/marvin
60be93f622b65652d07e659eaf81dd763cf8b3eb
[ "MIT" ]
4
2020-10-06T19:17:18.000Z
2021-10-31T17:08:19.000Z
src/common/query.ts
rolandbernard/marvin
60be93f622b65652d07e659eaf81dd763cf8b3eb
[ "MIT" ]
null
null
null
const MAX_MATCH_LENGTH = 200; export class Query { readonly advanced: boolean; readonly raw: string; readonly text: string; readonly regex: RegExp; normalizeString(text: string): string { return text.normalize('NFKD') .replace(/[\u0300-\u036F]/g, '') .replace(/\s+/g, ' '); } escapeRegex(text: string, map?: (c: string) => string, join = '') { return this.normalizeString(text.substr(0, MAX_MATCH_LENGTH)) .split('').map((ch) => { // Escape special regex characters if ([ '\\', '.', '*', '+', '[', ']', '(', ')', '{', '}', '^', '$', '?', '|', ].includes(ch)) { return '\\' + ch; } else { return ch; } }).map(map ?? (ch => ch)).join(join); } constructor(raw: string, text: string, advanced: boolean) { this.raw = raw; this.advanced = advanced; this.text = text.trim(); if (this.advanced) { this.regex = new RegExp( `(?=(${this.escapeRegex(this.text, ch => `(${ch})`, '(.*?)')}))`, 'ig' ); } else { this.regex = new RegExp( `((${this.escapeRegex(this.text)}))`, 'i' ); } } withoutPrefix(prefix: string) { if (prefix.length > 0 && this.text.startsWith(prefix)) { return new Query(this.raw, this.text.replace(prefix, ''), this.advanced); } else { return this; } } bestMatch(text: string): string | undefined { text = text.substr(0, MAX_MATCH_LENGTH); let best: string | undefined; for (let match of text.matchAll(this.regex)) { if (!best || match[1].length < best.length) { best = match[1]; } } return best; } matchText(full_text: string): number { const text = full_text.substr(0, MAX_MATCH_LENGTH); if (text.length > 0 && this.text.length > 0) { const best_match = this.bestMatch(text); if (best_match) { const starts_with = text.toLowerCase().startsWith(best_match.toLowerCase()); if (this.text.length === best_match.length) { if (starts_with) { return 0.9 + 0.1 * (this.text.length / text.length); } else { return 0.8 + 0.1 * (this.text.length / text.length); } } else { if (starts_with) { return 0.2 + 0.5 * (this.text.length / best_match.length) + 0.1 * (this.text.length / text.length); } else { 
return 0.1 + 0.6 * (this.text.length / best_match.length) + 0.1 * (this.text.length / text.length); } } } else { return 0.0; } } else { return 0.0; } } matchGroups(text: string): string[] { if (text.length > 0 && this.text.length > 0) { const match = this.bestMatch(text); if (match && match.length !== 0) { const groups = this.regex.exec(match)!; return [ text.substr(0, text.indexOf(match)), ...groups.slice(2), text.substr(text.indexOf(match) + match.length), ]; } else { return [ text ]; } } else { return [ text ]; } } matchAny(texts: string[], primary?: string): number { return Math.max( ...texts.map(text => this.matchText(text) * (primary ? 0.5 : 1)), this.matchText(primary ?? '') ); } }
32.433333
123
0.443731
3.03125
70a73761380ff86a0493d7ed968308a04e1582c6
5,095
go
Go
master/ApiServer.go
sanjiOP/goodjob
c8bda3eeb059124493abe2d254c4613863ded0af
[ "Apache-2.0" ]
null
null
null
master/ApiServer.go
sanjiOP/goodjob
c8bda3eeb059124493abe2d254c4613863ded0af
[ "Apache-2.0" ]
null
null
null
master/ApiServer.go
sanjiOP/goodjob
c8bda3eeb059124493abe2d254c4613863ded0af
[ "Apache-2.0" ]
null
null
null
package master import ( "net/http" "net" "time" "strconv" "github.com/sanjiOP/goodjob/common" "encoding/json" "fmt" ) // type ApiServer struct { httpServer *http.Server } // route /job/save // param job = {"name":"echo","command":"echo hello","CronExpr":"* * * * * *"} func handleJobSave(w http.ResponseWriter,r *http.Request){ var ( err error postJob string job common.Job oldJob *common.Job responseContent []byte ) // 1:解析post表单 if err = r.ParseForm();err != nil{ goto ERR } // 2:将表单数据转成job结构体 postJob = r.PostForm.Get("job") if err = json.Unmarshal([]byte(postJob),&job);err != nil{ fmt.Println("错误1:",err) goto ERR } // 3:保存到etcd if oldJob,err = G_jobManage.SaveJob(&job);err != nil{ fmt.Println("错误2:",err) goto ERR } // 4:正常响应 if responseContent,err = common.SuccessResponse(oldJob);err == nil { w.Write(responseContent) } return ERR: // 错误响应 if responseContent,err = common.ErrorResponse(-1,err.Error());err == nil { w.Write(responseContent) } } // route /job/delete // param name = job1 func handleJobDelete(w http.ResponseWriter,r *http.Request){ var ( err error jobName string oldJob *common.Job responseContent []byte ) // 1 解析表单 if err = r.ParseForm(); err != nil{ goto ERR } // 2 根据任务名称删除任务 jobName = r.PostForm.Get("name") if oldJob,err = G_jobManage.DeleteJob(jobName);err != nil{ goto ERR } // 3 正常响应 if responseContent,err = common.SuccessResponse(oldJob);err == nil { w.Write(responseContent) } return ERR: // 错误响应 if responseContent,err = common.ErrorResponse(-1,err.Error());err == nil { w.Write(responseContent) } } // route /job/list // param func handleJobList(w http.ResponseWriter,r *http.Request){ var ( err error jobList []*common.Job responseContent []byte ) // 获取列表 if jobList,err = G_jobManage.ListJob();err != nil{ goto ERR } // 3 正常响应 if responseContent,err = common.SuccessResponse(jobList);err == nil { w.Write(responseContent) } return ERR: // 错误响应 if responseContent,err = common.ErrorResponse(-1,err.Error());err == nil { w.Write(responseContent) } } // route 
/job/kill // param name=job1 func handleJobKill(w http.ResponseWriter,r *http.Request){ var( err error jobName string responseContent []byte ) // 1 解析表单 if err = r.ParseForm(); err != nil{ goto ERR } // 2 根据任务名称删除任务 jobName = r.PostForm.Get("name") if err = G_jobManage.KillJob(jobName);err != nil{ goto ERR } // 3 正常响应 if responseContent,err = common.SuccessResponse(nil);err == nil { w.Write(responseContent) } return ERR: // 错误响应 if responseContent,err = common.ErrorResponse(-1,err.Error());err == nil { w.Write(responseContent) } } // route /job/log // param name=job1 func handleJobLog(w http.ResponseWriter,r *http.Request){ var( err error jobName string page int pageSize int responseContent []byte list []*common.JobLogRecord ) // 1 解析表单 if err = r.ParseForm(); err != nil{ goto ERR } jobName = r.Form.Get("jobName") if page,err = strconv.Atoi(r.Form.Get("page"));err != nil{ page = 0 } if pageSize,err = strconv.Atoi(r.Form.Get("page_size"));err != nil{ pageSize = 20 } // 2 获取日志内容 if list,err = G_logManage.lists(jobName,int64(page),int64(pageSize));err != nil{ goto ERR } // 3 正常响应 if responseContent,err = common.SuccessResponse(list);err == nil { w.Write(responseContent) } return ERR: // 错误响应 if responseContent,err = common.ErrorResponse(-1,err.Error());err == nil { w.Write(responseContent) } } // route /work/list func handleWorkList(w http.ResponseWriter,_ *http.Request){ var( err error responseContent []byte list []string ) // 1 获取节点列表 if list,err = G_workManage.list();err != nil{ goto ERR } // 2 正常响应 if responseContent,err = common.SuccessResponse(list);err == nil { w.Write(responseContent) } return ERR: // 错误响应 if responseContent,err = common.ErrorResponse(-1,err.Error());err == nil { w.Write(responseContent) } } // 单例 var( G_ApiServer * ApiServer ) // 初始化 func InitApiServer() (err error){ var( mux *http.ServeMux lister net.Listener httpServer *http.Server staticDir http.Dir staticHandler http.Handler ) // 动态接口路由 mux = http.NewServeMux() 
mux.HandleFunc("/job/save",handleJobSave) mux.HandleFunc("/job/delete",handleJobDelete) mux.HandleFunc("/job/list",handleJobList) mux.HandleFunc("/job/kill",handleJobKill) mux.HandleFunc("/job/log",handleJobLog) mux.HandleFunc("/work/list",handleWorkList) // 静态页面 staticDir = http.Dir(G_config.WebRoot) staticHandler = http.FileServer(staticDir) mux.Handle("/",http.StripPrefix("/",staticHandler)) // tcp监听 if lister,err = net.Listen("tcp",":"+strconv.Itoa(G_config.ApiPort));err != nil{ return err } // 创建http服务 httpServer = &http.Server{ ReadTimeout : time.Duration(G_config.ReadTimeOut) * time.Millisecond, WriteTimeout : time.Duration(G_config.WriteTimeOut) * time.Millisecond, Handler : mux, } // 赋值单例 G_ApiServer = &ApiServer{ httpServer:httpServer, } // 启动服务 go httpServer.Serve(lister) return }
16.92691
81
0.675368
3.0625
8e58081749f3b7a636391c0b88730b98f2cc81ad
1,466
swift
Swift
Sources/Private/Experimental/Animations/RectangleAnimation.swift
Kyle-Ye/lottie-ios
cb6bf4e49f6b55cf7da6e602e6d66fadc3137ed1
[ "Apache-2.0" ]
null
null
null
Sources/Private/Experimental/Animations/RectangleAnimation.swift
Kyle-Ye/lottie-ios
cb6bf4e49f6b55cf7da6e602e6d66fadc3137ed1
[ "Apache-2.0" ]
null
null
null
Sources/Private/Experimental/Animations/RectangleAnimation.swift
Kyle-Ye/lottie-ios
cb6bf4e49f6b55cf7da6e602e6d66fadc3137ed1
[ "Apache-2.0" ]
null
null
null
// Created by Cal Stephens on 12/21/21. // Copyright © 2021 Airbnb Inc. All rights reserved. import QuartzCore extension CALayer { /// Adds animations for the given `Rectangle` to this `CALayer` func addAnimations( for rectangle: Rectangle, context: LayerAnimationContext) { addAnimation( for: .path, keyframes: rectangle.size.keyframes, value: { sizeKeyframe in let size = sizeKeyframe.sizeValue // TODO: Is there a reasonable way to handle multiple sets // of keyframes that apply to the same value (`path`, in this case)? // - This seems somewhat unlikely -- if it turns out to be necessary, // this will probably have to be reworked to use more sublayers let position = rectangle.position.keyframes.first!.value.pointValue if rectangle.position.keyframes.count > 1 { fatalError("Rectangle position keyframes are currently unsupported") } let cornerRadius = min(min(rectangle.cornerRadius.keyframes.first!.value.cgFloatValue, size.width), size.height) if rectangle.cornerRadius.keyframes.count > 1 { fatalError("Rectangle corner cornerRadius keyframes are currently unsupported") } return BezierPath.rectangle( position: position, size: size, cornerRadius: cornerRadius, direction: rectangle.direction) .cgPath() }, context: context) } }
34.904762
120
0.662347
3.03125
c7e6cd72df6aa3fcfe1f751e78cea36c4a511c38
1,791
java
Java
src/test/java/data_access/DatabaseItemDaoTest.java
antarcticturtle/Lukuvinkkikirjasto
3df699d1275b0ce45f1a657d32fc01c8b7df2a72
[ "MIT" ]
null
null
null
src/test/java/data_access/DatabaseItemDaoTest.java
antarcticturtle/Lukuvinkkikirjasto
3df699d1275b0ce45f1a657d32fc01c8b7df2a72
[ "MIT" ]
6
2018-11-27T22:39:42.000Z
2018-12-10T12:30:34.000Z
src/test/java/data_access/DatabaseItemDaoTest.java
antarcticturtle/Lukuvinkkikirjasto
3df699d1275b0ce45f1a657d32fc01c8b7df2a72
[ "MIT" ]
2
2018-11-15T17:04:35.000Z
2018-12-08T23:18:30.000Z
package data_access; import io.Color; import io.IO; import item.*; import java.io.File; import org.junit.After; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.*; public class DatabaseItemDaoTest { File testDatabase; DatabaseItemDao dao; @Before public void setUp() throws Exception { testDatabase = new File("testdatabase.db"); Database database = new Database("jdbc:sqlite:testdatabase.db"); database.init(); this.dao = new DatabaseItemDao(database); dao.addItem(new Book(1, "Title", "Author", "url", "description")); dao.addItem(new Podcast(2, "Title2", "Author2", "url2", "description2")); } @Test public void allItemsAreFound() { assertEquals(2, dao.getItems("").size()); } @Test public void wrongItemIdReturnsNoItem() { assertEquals(null, dao.getItemById(3)); } @Test public void itemCanBeFoundById() { assertEquals("(id: " + Color.yellow("1") + ") Book: " + Color.cyan("Title") + " by " + Color.cyan("Author"), dao.getItemById(1).toString()); } @Test public void itemCanBeAdded() { dao.addItem(new Video(3, "Title3", "Author3", "url3", "description3")); assertEquals("(id: " + Color.yellow("3") + ") Video: " + Color.cyan("Title3") + " by " + Color.cyan("Author3"), dao.getItemById(3).toString()); } @Test public void itemCanBeDeleted() { dao.deleteItemById(2); assertEquals(null, dao.getItemById(2)); } @Test public void nothingIsDeletedIfWrongIdIsEntered() { assertEquals(null, dao.deleteItemById(3)); } @After public void tearDown() { testDatabase.delete(); } }
27.553846
151
0.603015
3.109375
39fef573fea5138b95db9af28d48870a72c13e28
1,188
java
Java
src/main/java/com/fuhu/java/iostream/ObjectOutputStreamDemo.java
hufuan/unit_test
180002967bf57eec87c199c16ed32b6d6e99dbe7
[ "MIT" ]
null
null
null
src/main/java/com/fuhu/java/iostream/ObjectOutputStreamDemo.java
hufuan/unit_test
180002967bf57eec87c199c16ed32b6d6e99dbe7
[ "MIT" ]
null
null
null
src/main/java/com/fuhu/java/iostream/ObjectOutputStreamDemo.java
hufuan/unit_test
180002967bf57eec87c199c16ed32b6d6e99dbe7
[ "MIT" ]
null
null
null
package com.fuhu.java.iostream; import java.io.*; import java.net.URL; public class ObjectOutputStreamDemo { public static void main(String[] args) { String s = "hello world"; int i = 897648764; String fileName = "test.txt"; try { URL url = Thread.currentThread().getContextClassLoader().getResource(""); String dir = url.toURI().getPath(); String fullFileName = dir + fileName; System.out.println("fullFileName = " + fullFileName); FileOutputStream out = new FileOutputStream( new File(fullFileName)); ObjectOutputStream oout = new ObjectOutputStream(out); oout.writeObject(s); oout.writeObject(i); //close the stream oout.close(); ObjectInputStream ois = new ObjectInputStream( new FileInputStream(new File(fullFileName)) ); System.out.println("#1: " + (String) ois.readObject()); System.out.println("#2: " + ois.readObject()); } catch ( Exception e) { e.printStackTrace(); } } }
33
86
0.549663
3.3125
0c9682abb64f3ba26f9ba369881899db7f3b759b
3,440
py
Python
tests/test_py4gh.py
iCAN-PCM/py4gh
192e62d531b5fd8c4c9a04a83c98bd63795578b8
[ "Apache-2.0" ]
null
null
null
tests/test_py4gh.py
iCAN-PCM/py4gh
192e62d531b5fd8c4c9a04a83c98bd63795578b8
[ "Apache-2.0" ]
null
null
null
tests/test_py4gh.py
iCAN-PCM/py4gh
192e62d531b5fd8c4c9a04a83c98bd63795578b8
[ "Apache-2.0" ]
null
null
null
import subprocess
from pathlib import Path

import pytest

# from py4gh import __version__
from py4gh.utility import decrypt_files, encrypt_files, get_files

# def test_version():
#     assert __version__ == "0.1.0"


@pytest.fixture(scope="session")
def keys(tmpdir_factory):
    """Generate two crypt4gh key pairs (unencrypted secret keys) once per session.

    Returns a list of (public_key_path, secret_key_path) tuples.
    """
    test_pub1 = tmpdir_factory.mktemp("data").join("test1.pub")
    test_sec1 = tmpdir_factory.mktemp("data").join("test1.sec")
    test_pub2 = tmpdir_factory.mktemp("data").join("test2.pub")
    test_sec2 = tmpdir_factory.mktemp("data").join("test2.sec")
    # communicate(input=...) feeds stdin and waits for exit; it also closes the
    # pipe, so no manual write()/close() juggling is needed (the original closed
    # stdin *after* communicate(), which is at best a no-op).
    p1 = subprocess.Popen(
        ["crypt4gh-keygen", "--sk", test_sec1, "--pk", test_pub1, "--nocrypt"],
        stdin=subprocess.PIPE,
    )
    p1.communicate(input=b"")
    subprocess.run(
        ["crypt4gh-keygen", "--sk", test_sec2, "--pk", test_pub2, "--nocrypt"],
        text=True,
        input="",
    )
    return [(test_pub1, test_sec1), (test_pub2, test_sec2)]


@pytest.fixture(scope="session")
def files(tmp_path_factory):
    """Create a sample plaintext file and return its path.

    The original used the function-scoped ``tmp_path`` fixture inside a
    session-scoped fixture (a pytest ScopeMismatch error) and never created
    the parent directory before writing; ``tmp_path_factory`` fixes both.
    """
    d = tmp_path_factory.mktemp("sub")
    p = d / "hello.txt"
    p.write_text("This is a secret message")
    return p


# def test_file(files):
#     with open(files, "r") as f:
#         print(f.read())
#     assert 1 == 3


def test_encryption(keys, tmpdir):
    """Encrypting a file for one recipient produces a non-empty .c4gh file."""
    d = tmpdir.mkdir("sub")
    f = d / "hello.txt"
    f.write("This is a secret message")
    files = get_files(d)
    err, res = encrypt_files(keys[0][1], [keys[0][0]], files)
    proc = subprocess.run(["ls", d], capture_output=True, text=True)
    output_list = proc.stdout.split("\n")
    assert output_list[1] == "hello.txt.c4gh"
    encrypted_file = Path(d / output_list[1])
    assert encrypted_file.stat().st_size != 0
    print(err)
    print(res)


def test_multiple_encryption(keys, tmpdir):
    """Encrypting for two recipients still produces a non-empty .c4gh file."""
    d = tmpdir.mkdir("sub")
    f = d / "hello.txt"
    message = "This is a secret message"
    f.write(message)
    files = get_files(d)
    encrypt_files(keys[0][1], [keys[0][0], keys[1][0]], files)
    proc = subprocess.run(["ls", d], capture_output=True, text=True)
    print(proc.stdout)
    output_list = proc.stdout.split("\n")
    assert output_list[1] == "hello.txt.c4gh"
    encrypted_file = Path(d / output_list[1])
    # print(encrypted_file.read_bytes()[0])
    # assert encrypted_file.read_text() != message
    assert encrypted_file.stat().st_size != 0


def test_muliple_encryption_decryption(keys, tmpdir):
    """A file encrypted for two recipients decrypts back to the original text
    with the *second* recipient's secret key."""
    d = tmpdir.mkdir("sub")
    f = d / "hello.txt"
    message = "This is a secret message"
    f.write(message)
    files = get_files(d)
    encrypt_files(keys[0][1], [keys[0][0], keys[1][0]], files)
    subprocess.run(["ls", d], capture_output=True, text=True)
    subprocess.run(["rm", f])
    proc2 = subprocess.run(["ls", d], capture_output=True, text=True)
    proc2_out = proc2.stdout.split("\n")
    assert len(proc2_out) == 2
    assert proc2_out[0] == "hello.txt.c4gh"
    assert proc2_out[1] == ""
    files2 = get_files(d)
    decrypt_files(keys[1][1], files2)
    proc = subprocess.run(["ls", d], capture_output=True, text=True)
    output_list = proc.stdout.split("\n")
    print(output_list)
    assert output_list[0] == "hello.txt"
    decrypted_file = Path(d / output_list[0])
    assert decrypted_file.read_text() == message
    assert decrypted_file.stat().st_size != 0
31.559633
79
0.636047
3.03125
b2a7bbf2b95210c04b5a4943b53d17bfd6bfd265
3,872
rs
Rust
ion-c-sys/src/result.rs
therapon/ion-rust-1
19683123b1c95c4655bbc5ece8d4410e8d04729b
[ "Apache-2.0" ]
1
2021-04-07T22:35:39.000Z
2021-04-07T22:35:39.000Z
ion-c-sys/src/result.rs
Infinite-Blue-1042/ion-rust
b9e43eb985b3f0de177e3b00e722a68b94651000
[ "Apache-2.0" ]
null
null
null
ion-c-sys/src/result.rs
Infinite-Blue-1042/ion-rust
b9e43eb985b3f0de177e3b00e722a68b94651000
[ "Apache-2.0" ]
null
null
null
// Copyright Amazon.com, Inc. or its affiliates.

//! Provides convenient integration with `Error` and `Result` for Ion C.

use crate::*;

use std::error::Error;
use std::ffi::CStr;
use std::fmt;
use std::num::TryFromIntError;

/// IonC Error code and its associated error message.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct IonCError {
    /// The raw `iERR` code returned by Ion C.
    pub code: i32,
    /// Human-readable message looked up from Ion C for `code`.
    pub message: &'static str,
    /// Extra caller-supplied context about where the error arose.
    pub additional: &'static str,
}

impl IonCError {
    /// Constructs an `IonCError` from an `iERR` error code.
    pub fn from(code: i32) -> Self {
        Self::with_additional(code, "iERR Result")
    }

    /// Constructs an `IonCError` from an `iERR` error code and its own message.
    ///
    /// Codes inside the known `iERR` range are translated via Ion C's
    /// `ion_error_to_str`; anything else gets a generic fallback message.
    pub fn with_additional(code: i32, additional: &'static str) -> Self {
        match code {
            ion_error_code_IERR_NOT_IMPL..=ion_error_code_IERR_INVALID_LOB_TERMINATOR => {
                unsafe {
                    // this gives us static storage pointer so it doesn't violate lifetime
                    let c_str = CStr::from_ptr(ion_error_to_str(code));
                    // the error codes are all ASCII so a panic here is a bug
                    let message = c_str.to_str().unwrap();
                    Self {
                        code,
                        message,
                        additional,
                    }
                }
            }
            _ => Self {
                code,
                message: "Unknown Ion C Error Code",
                additional,
            },
        }
    }
}

impl fmt::Display for IonCError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "Error {}: {} ({})",
            self.code, self.message, self.additional
        )
    }
}

impl Error for IonCError {}

impl From<TryFromIntError> for IonCError {
    /// Due to the way Ion C works with sizes as i32, it is convenient to be able to coerce
    /// a TryFromIntError to `IonCError`.
    fn from(_: TryFromIntError) -> Self {
        IonCError::from(ion_error_code_IERR_NUMERIC_OVERFLOW)
    }
}

impl From<Utf8Error> for IonCError {
    /// Due to the way Ion C works with raw UTF-8 byte sequences, it is convenient to be able
    /// to coerce a `Utf8Error` to `IonCError`.
    fn from(_: Utf8Error) -> Self {
        IonCError::from(ion_error_code_IERR_INVALID_UTF8)
    }
}

/// A type alias to results from Ion C API, the result value is generally `()` to signify
/// `ion_error_code_IERR_OK` since Ion C doesn't return results but generally takes
/// output parameters.
pub type IonCResult<T> = Result<T, IonCError>;

/// Macro to transform Ion C error code expressions into `Result<(), IonCError>`.
/// Higher-level facades over Ion C functions could map this to `Result<T, IonCError>`
/// or the like.
///
/// NB: `ionc!` implies `unsafe` code.
///
/// ## Usage
///
/// ```
/// # use std::ptr;
/// # use ion_c_sys::*;
/// # use ion_c_sys::result::*;
/// # fn main() -> IonCResult<()> {
/// let mut data = String::from("42");
/// let mut ion_reader: hREADER = ptr::null_mut();
/// let mut ion_type: ION_TYPE = ptr::null_mut();
/// ionc!(
///     ion_reader_open_buffer(
///         &mut ion_reader,
///         data.as_mut_ptr(),
///         data.len() as i32,
///         ptr::null_mut()
///     )
/// )?;
///
/// ionc!(ion_reader_next(ion_reader, &mut ion_type))?;
/// assert_eq!(ion_type as u32, tid_INT_INT);
///
/// let mut value = 0;
/// ionc!(ion_reader_read_int64(ion_reader, &mut value))?;
/// assert_eq!(value, 42);
///
/// ionc!(ion_reader_close(ion_reader))
/// # }
/// ```
#[macro_export]
macro_rules! ionc {
    ($e:expr) => {
        unsafe {
            let err: i32 = $e;
            match err {
                // iERR_OK maps to the unit success value; everything else is wrapped.
                $crate::ion_error_code_IERR_OK => Ok(()),
                code => Err($crate::result::IonCError::from(code)),
            }
        }
    };
}
29.557252
93
0.569215
3.078125
27befb4e8a9907df965064255ae59b61a4970611
2,155
asm
Assembly
libsrc/_DEVELOPMENT/string/z80/asm_strrspn.asm
UnivEngineer/z88dk
9047beba62595b1d88991bc934da75c0e2030d07
[ "ClArtistic" ]
1
2022-03-08T11:55:58.000Z
2022-03-08T11:55:58.000Z
libsrc/_DEVELOPMENT/string/z80/asm_strrspn.asm
UnivEngineer/z88dk
9047beba62595b1d88991bc934da75c0e2030d07
[ "ClArtistic" ]
2
2022-03-20T22:17:35.000Z
2022-03-24T16:10:00.000Z
libsrc/_DEVELOPMENT/string/z80/asm_strrspn.asm
jorgegv/z88dk
127130cf11f9ff268ba53e308138b12d2b9be90a
[ "ClArtistic" ]
null
null
null
; ===============================================================
; Dec 2013
; ===============================================================
;
; size_t strrspn(const char *str, const char *cset)
;
; The reverse of strspn()
;
; Returns the number of leading chars in the input string up to
; and including the last occurrence of a char not in cset.
;
; If all chars of str are in cset, returns 0.
;
; If cset is empty, returns strlen(str).
;
; Example:
;
; char *s = "abcdee";
; int pos;
;
; // search from the end of s for the first char not in "e"
; pos = strrspn(s, "e");  // returns 4 = num leading chars not in "e"
;
; // remove the last two Es from s by truncating s
; s[pos] = '\0';
;
; ===============================================================

SECTION code_clib
SECTION code_string

PUBLIC asm_strrspn

EXTERN __str_locate_nul, l_neg_bc, asm_strchr, error_znc

asm_strrspn:

   ; enter : de = char *cset = matching set
   ;         hl = char *str = string
   ;
   ; exit  : hl = position of last char in str not in cset
   ;         bc = char *str = string
   ;
   ;         carry reset if all of str contains chars only from cset
   ;
   ; uses  : af, bc, hl

   push hl                     ; save str
   call __str_locate_nul       ; hl points at terminating 0 in str
   call l_neg_bc               ; bc = strlen(str) + 1

   ld a,(de)                   ; first char of cset; zero means cset is empty
   or a
   jr Z,empty_cset

loop:

   dec bc                      ; position of next char in str
   ld a,b
   or c
   jr Z,all_in_cset            ; bc reached 0 => every char was in cset

   dec hl                      ; & next char in str to check

   push bc
   push hl

   ; see if current char from string is in cset

   ld c,(hl)
   ld hl,de                    ; hl = cset (z88dk extended register-pair load)
   call asm_strchr             ; carry reset if in cset

   pop hl
   pop bc

   jr NC,loop                  ; loop if char in cset

not_in_cset:

   ld hl,bc                    ; hl = char position
   pop bc                      ; bc = char *str
   ret

all_in_cset:

   pop bc                      ; bc = char *str
   jp error_znc                ; returns 0 with carry reset

empty_cset:

   ld hl,bc
   dec hl                      ; hl = strlen(str)
   pop bc                      ; bc = char *str
   ret
20.92233
69
0.49652
3.34375
b87ffb34d071673151d11a7dfa2486421773f0b4
1,587
kt
Kotlin
petal/src/main/kotlin/petal/common/value/NativeFunction.kt
NekoGoddessAlyx/Petal
bb1fee0549316ebbeae139745f29287f833e1d11
[ "Apache-2.0" ]
null
null
null
petal/src/main/kotlin/petal/common/value/NativeFunction.kt
NekoGoddessAlyx/Petal
bb1fee0549316ebbeae139745f29287f833e1d11
[ "Apache-2.0" ]
null
null
null
petal/src/main/kotlin/petal/common/value/NativeFunction.kt
NekoGoddessAlyx/Petal
bb1fee0549316ebbeae139745f29287f833e1d11
[ "Apache-2.0" ]
null
null
null
package petal.common.value

/**
 * A function that takes arguments from Petal code and returns a value back to Petal.
 *
 * The provided [Args] should be used only within the scope of execution.
 * Any use outside the scope will result in undefined behavior.
 */
public typealias NativeFunction = (Args) -> Value

/**
 * Represents arguments passed from Petal code to a [NativeFunction].
 *
 * The view represented by this object is only valid during execution of the function.
 * Attempting to access any element of this object outside the scope of execution will result in undefined behavior.
 */
public class Args internal constructor(
    // Backing runtime stack; arguments live at [start, start + _size).
    private var stack: Array<Value>,
    // Index of the first argument on the stack.
    private var start: Int,
    // Number of arguments in this view.
    private var _size: Int
) : Iterable<Value> {

    // Number of arguments passed to the function.
    public val size: Int
        get() = _size

    // The call receiver, stored on the stack immediately before the first argument.
    public val receiver: Value
        get() = stack[start - 1]

    /** Returns the argument at [index], or [Value.NULL] if the index is out of range. */
    public operator fun get(index: Int): Value {
        if (index !in 0 until _size) return Value.NULL
        return stack[start + index]
    }

    /** Copies the argument window out of the stack into a standalone array. */
    public fun toArray(): Array<Value> = stack.copyOfRange(start, start + _size)

    override fun iterator(): Iterator<Value> = object : Iterator<Value> {
        // NOTE(review): `index` snapshots `start` at iterator creation; an iterator
        // kept across close() would read from the reset state — confirm callers
        // never retain iterators past the call, per the class contract above.
        var index = start
        override fun hasNext(): Boolean = index < start + _size
        override fun next(): Value = stack[index++]
    }

    /**
     * Called after the [NativeFunction] has been invoked.
     * This ensures that stack changes don't leak.
     */
    internal fun close() {
        // Point at a shared 1-element NULL array: size becomes 0 and
        // receiver (index start - 1 = 0) resolves to Value.NULL.
        stack = NULL_ARRAY
        start = 1
        _size = 0
    }
}

// Shared sentinel backing array used by closed Args instances.
private val NULL_ARRAY: Array<Value> = arrayOf(Value.NULL)
30.519231
116
0.672338
3.453125
573cd93a5e3888d8cbe5ab4fbd2c0a43af9dc07c
3,084
c
C
src/graph.c
kkdwivedi/covid-sim
b8d1d42075b8998b02d7b09efac65b1eae205224
[ "MIT" ]
null
null
null
src/graph.c
kkdwivedi/covid-sim
b8d1d42075b8998b02d7b09efac65b1eae205224
[ "MIT" ]
null
null
null
src/graph.c
kkdwivedi/covid-sim
b8d1d42075b8998b02d7b09efac65b1eae205224
[ "MIT" ]
null
null
null
/*
 * SIR graph support: pool-backed intrusive lists of nodes and
 * adjacency management for the simulation graph.
 */
#include <assert.h>
#include <stdbool.h>
#include <stddef.h>
#include <limits.h>
#include <stdlib.h>
#include <stdio.h>

#include "graph.h"
#include "config.h"

/* One pool slot per possible edge endpoint plus one per node. */
#define POOL_SIZE (SAMPLE_SIZE * (NR_EDGES + 1))

extern List ListS;
extern List ListI;
extern List ListR;

/*
 * Append node n to list l, drawing the wrapper struct sir from a lazily
 * allocated static pool. Calling with (NULL, NULL) frees the pool and
 * resets the allocator. Returns false when the pool is exhausted or
 * allocation fails.
 */
bool sir_list_add_item(Node *n, List *l) {
	static size_t iterator = 0;
	static struct sir *pool = NULL;
	if (!l && !n) {
		/* reset convention: release the whole pool at once */
		free(pool);
		pool = NULL;
		iterator = 0;
		return true;
	}
	if (!pool) {
		pool = malloc(POOL_SIZE * sizeof *pool);
		if (!pool) return false;
	}
	struct sir *s = NULL;
	if (iterator < POOL_SIZE) s = pool + iterator++;
	if (!s) return false;
	s->list.next = NULL;
	list_append(l, &s->list);
	s->item = n;
	return true;
}

/* Append an existing wrapper s to list l. */
void sir_list_add_sir(struct sir *s, List *l) {
	assert(s);
	list_append(l, &s->list);
}

/*
 * Remove and return the wrapper for node n from list l, or NULL when n is
 * not present. The entry is unlinked but its memory stays in the pool.
 */
struct sir* sir_list_del_item(Node *n, List *l) {
	/* l is assumed to be anchor, not an object embedded in entry */
	struct sir *i;
	list_for_each_entry(i, l->next, struct sir, list) {
		if (i->item == n) {
			struct sir *f = container_of(&i->list, struct sir, list);
			/* updates l->next */
			list_delete(&l->next, &i->list);
			/* freeing individual elements is not possible in pool based implementation */
			// free(f);
			f->list.next = NULL;
			return f;
		}
	}
	return NULL;
}

/* Print the node ids of every entry in l to stderr. */
void sir_list_dump(List *l) {
	struct sir *i;
	list_for_each_entry(i, l->next, struct sir, list) fprintf(stderr, "%u ", i->item->id);
	log_info("");
}

/*
 * Unlink every entry of l and free() each wrapper.
 * NOTE(review): entries added via sir_list_add_item come from the static
 * pool, and free() on a pool-interior pointer is undefined behavior —
 * sir_list_del_item explicitly says individual frees are impossible.
 * Confirm this is only ever called on lists built from individually
 * malloc'd wrappers (sir_list_add_sir).
 */
void sir_list_del_rec(List *l) {
	if (!l->next) return;
	struct sir *i;
	/* we cannot use list_for_each_entry here, since we need to
	 * advance the iterator and only then free the memory */
	for (i = container_of(l->next, struct sir, list); i;) {
		struct sir *f = i;
		i = i->list.next ? container_of(i->list.next, struct sir, list) : NULL;
		//sir_list_dump(&f->list, true);
		free(f);
	}
	l->next = NULL;
}

/* Count entries in l (l itself is the anchor and is not counted). */
size_t sir_list_len(List *l) {
	/* begin counting from next, as l is anchor */
	size_t count = 0;
	while (l->next) {
		l = l->next;
		count++;
	}
	return count;
}

/*
 * Allocate an array of sz nodes with 1-based ids, all susceptible,
 * with empty adjacency lists. Returns NULL on allocation failure.
 */
Node* node_new(size_t sz) {
	Node *n = malloc(sz * sizeof(*n));
	if (!n) return NULL;
	for (size_t i = 0; i < sz; i++) {
		n[i].id = i + 1;
		n[i].state = SIR_SUSCEPTIBLE;
		n[i].neigh.next = NULL;
		n[i].tail = &n[i].neigh;
		n[i].initial = false;
	}
	return n;
}

/*
 * Create an undirected edge between a and b, appending each to the other's
 * adjacency list. Self-loops and duplicate edges are ignored; duplicates
 * are detected via a static SAMPLE_SIZE x SAMPLE_SIZE boolean matrix
 * (O(1) check instead of scanning the adjacency list).
 */
void node_connect(Node *a, Node *b) {
	assert(a);
	assert(b);
	static bool conn_cache[SAMPLE_SIZE][SAMPLE_SIZE] = {};
	struct sir *i; /* left over from the commented-out list scan below */
	if (a == b) return;
	//list_for_each_entry(i, a->neigh.next, struct sir, list)
	//	if (i->item == b) return;
	if (!conn_cache[a->id-1][b->id-1] || !conn_cache[b->id-1][a->id-1]) {
		/* the matrix is kept symmetric, so both cells must agree */
		assert(!conn_cache[a->id-1][b->id-1] && !conn_cache[b->id-1][a->id-1]);
		conn_cache[a->id-1][b->id-1] = true;
		conn_cache[b->id-1][a->id-1] = true;
	} else return;
	sir_list_add_item(a, b->tail);
	b->tail = b->tail->next;
	sir_list_add_item(b, a->tail);
	a->tail = a->tail->next;
	max_conn++;
}

/* Print node n's id and its adjacency list to stderr. */
void node_dump_adjacent_nodes(Node *n) {
	fprintf(stderr, "Node %u: ", n->id);
	sir_list_dump(&n->neigh);
}

/* Release node n's adjacency list (see the caveat on sir_list_del_rec). */
void node_delete(Node *n) {
	sir_list_del_rec(&n->neigh);
}
21.123288
73
0.626135
3.015625
75ffea1d22321b30218f7adfa15c140da9bd12dc
2,097
php
PHP
13. File System/Filesystem - fopen()l.php
munziru3/Mastering-PHP7
65c97c8f7b7916d48117f54467208b672b72cb76
[ "Unlicense" ]
60
2018-05-10T04:40:54.000Z
2022-02-12T06:04:26.000Z
13. File System/Filesystem - fopen()l.php
Rdx11/Mastering-PHP7
65c97c8f7b7916d48117f54467208b672b72cb76
[ "Unlicense" ]
null
null
null
13. File System/Filesystem - fopen()l.php
Rdx11/Mastering-PHP7
65c97c8f7b7916d48117f54467208b672b72cb76
[ "Unlicense" ]
47
2018-05-16T02:18:54.000Z
2021-10-04T00:56:05.000Z
<!DOCTYPE html> <html> <body> <?php $myfile = fopen("webdictionary.txt", "r") or die("Unable to open file!"); echo fread($myfile,filesize("webdictionary.txt")); fclose($myfile); ?> </body> </html> Tip: The fread() and the fclose() functions will be explained below. The file may be opened in one of the following modes: Modes Description r Open a file for read only. File pointer starts at the beginning of the file w Open a file for write only. Erases the contents of the file or creates a new file if it doesn't exist. File pointer starts at the beginning of the file a Open a file for write only. The existing data in file is preserved. File pointer starts at the end of the file. Creates a new file if the file doesn't exist x Creates a new file for write only. Returns FALSE and an error if file already exists r+ Open a file for read/write. File pointer starts at the beginning of the file w+ Open a file for read/write. Erases the contents of the file or creates a new file if it doesn't exist. File pointer starts at the beginning of the file a+ Open a file for read/write. The existing data in file is preserved. File pointer starts at the end of the file. Creates a new file if the file doesn't exist x+ Creates a new file for read/write. Returns FALSE and an error if file already exists PHP Read File - fread() The fread() function reads from an open file. The first parameter of fread() contains the name of the file to read from and the second parameter specifies the maximum number of bytes to read. The following PHP code reads the "webdictionary.txt" file to the end: fread($myfile,filesize("webdictionary.txt")); PHP Close File - fclose() The fclose() function is used to close an open file. It's a good programming practice to close all files after you have finished with them. You don't want an open file running around on your server taking up resources! 
The fclose() requires the file pointer returned by fopen() (or a variable that holds that file pointer) for the file we want to close: <?php $myfile = fopen("webdictionary.txt", "r"); // some code to be executed.... fclose($myfile); ?>
45.586957
165
0.75155
3.484375
7fd874a01ca9a73524521fafb5c03ba365046987
6,530
go
Go
server/game/runner.go
jacobpatterson1549/selene-bananas
6f8213ce8786c796f9272f403204c5869f9aa68b
[ "MIT" ]
1
2021-06-22T12:40:21.000Z
2021-06-22T12:40:21.000Z
server/game/runner.go
jacobpatterson1549/selene-bananas
6f8213ce8786c796f9272f403204c5869f9aa68b
[ "MIT" ]
1
2021-03-04T00:48:54.000Z
2021-03-17T20:08:55.000Z
server/game/runner.go
jacobpatterson1549/selene-bananas
6f8213ce8786c796f9272f403204c5869f9aa68b
[ "MIT" ]
null
null
null
package game

import (
	"context"
	"fmt"
	"sync"

	"github.com/jacobpatterson1549/selene-bananas/game"
	"github.com/jacobpatterson1549/selene-bananas/game/message"
	"github.com/jacobpatterson1549/selene-bananas/game/player"
	"github.com/jacobpatterson1549/selene-bananas/server/log"
)

type (
	// Runner runs games.
	Runner struct {
		// log is used to log errors and other information
		log log.Logger
		// games maps game ids to the channel each games listens to for incoming messages
		// OutChannels are stored here because the Runner writes to the game, which in turn reads from the Runner's channel as an InChannel
		games map[game.ID]chan<- message.Message
		// lastID is the ID of the most recently created game. The next new game should get a larger ID.
		lastID game.ID
		// WordValidator is used to validate players' words when they try to finish the game.
		WordValidator WordValidator
		// UserDao increments user points when a game is finished.
		userDao UserDao
		// RunnerConfig contains configuration properties of the Runner.
		RunnerConfig
	}

	// RunnerConfig is used to create a game Runner.
	RunnerConfig struct {
		// Debug is a flag that causes the game to log the types of messages that are read.
		Debug bool
		// The maximum number of games.
		MaxGames int
		// The config for creating new games.
		GameConfig Config
	}

	// WordValidator checks if words are valid.
	WordValidator interface {
		Validate(word string) bool
	}

	// UserDao makes changes to the stored state of users in the game
	UserDao interface {
		// UpdatePointsIncrement increments points for the specified usernames based on the userPointsIncrementFunc
		UpdatePointsIncrement(ctx context.Context, userPoints map[string]int) error
	}
)

// NewRunner creates a new game runner from the config.
func (cfg RunnerConfig) NewRunner(log log.Logger, WordValidator WordValidator, userDao UserDao) (*Runner, error) { if err := cfg.validate(log, WordValidator, userDao); err != nil { return nil, fmt.Errorf("creating game runner: validation: %w", err) } m := Runner{ log: log, games: make(map[game.ID]chan<- message.Message, cfg.MaxGames), RunnerConfig: cfg, WordValidator: WordValidator, userDao: userDao, } return &m, nil } // Run consumes messages from the "in" channel, processing them on a new goroutine until the "in" channel closes. // The results of messages are sent on the "out" channel to be read by the subscriber. func (r *Runner) Run(ctx context.Context, wg *sync.WaitGroup, in <-chan message.Message) <-chan message.Message { ctx, cancelFunc := context.WithCancel(ctx) out := make(chan message.Message) wg.Add(1) run := func() { defer wg.Done() defer r.log.Printf("game runner stopped") defer close(out) defer cancelFunc() for { // BLOCKING select { case <-ctx.Done(): return case m, ok := <-in: if !ok { return } r.handleMessage(ctx, wg, m, out) } } } go run() return out } // validate ensures the configuration has no errors. func (cfg RunnerConfig) validate(log log.Logger, WordValidator WordValidator, userDao UserDao) error { switch { case log == nil: return fmt.Errorf("log required") case WordValidator == nil: return fmt.Errorf("word validator required") case userDao == nil: return fmt.Errorf("user dao required") case cfg.MaxGames < 1: return fmt.Errorf("must be able to create at least one game") } return nil } // handleMessage takes appropriate actions for different message types. func (r *Runner) handleMessage(ctx context.Context, wg *sync.WaitGroup, m message.Message, out chan<- message.Message) { switch m.Type { case message.CreateGame: r.createGame(ctx, wg, m, out) case message.DeleteGame: r.deleteGame(ctx, m, out) default: r.handleGameMessage(ctx, m, out) } } // createGame allocates a new game, adding it to the open games. 
func (r *Runner) createGame(ctx context.Context, wg *sync.WaitGroup, m message.Message, out chan<- message.Message) {
	if err := r.validateCreateGame(m); err != nil {
		r.sendError(err, m.PlayerName, out)
		return
	}
	// IDs increase monotonically; lastID is only advanced after the game is built.
	id := r.lastID + 1
	gameCfg := r.GameConfig
	gameCfg.Config = *m.Game.Config
	g, err := gameCfg.NewGame(r.log, id, r.WordValidator, r.userDao)
	if err != nil {
		r.sendError(err, m.PlayerName, out)
		return
	}
	r.lastID = id
	gIn := make(chan message.Message)
	g.Run(ctx, wg, gIn, out) // all games publish to the same "out" channel
	r.games[id] = gIn
	// The creating player immediately joins the game they created.
	m.Type = message.JoinGame
	message.Send(m, gIn, r.Debug, r.log)
}

// validateCreateGame returns an err if the runner cannot create a new game or the message to create one is invalid.
func (r *Runner) validateCreateGame(m message.Message) error {
	switch {
	case len(r.games) >= r.MaxGames:
		return fmt.Errorf("the maximum number of games have already been created (%v)", r.MaxGames)
	case m.Game == nil, m.Game.Board == nil:
		return fmt.Errorf("board config required when creating game")
	case m.Game.Config == nil:
		return fmt.Errorf("missing config for game properties")
	}
	return nil
}

// deleteGame removes a game from the runner, notifying the game that it is being deleted so it can notify users.
func (r *Runner) deleteGame(ctx context.Context, m message.Message, out chan<- message.Message) {
	gIn, err := r.getGame(m)
	if err != nil {
		r.sendError(err, m.PlayerName, out)
		return
	}
	delete(r.games, m.Game.ID)
	message.Send(m, gIn, r.Debug, r.log)
}

// handleGameMessage forwards the message to the game it is addressed to,
// reporting an error to the player if that game does not exist.
func (r *Runner) handleGameMessage(ctx context.Context, m message.Message, out chan<- message.Message) {
	gIn, err := r.getGame(m)
	if err != nil {
		r.sendError(err, m.PlayerName, out)
		return
	}
	message.Send(m, gIn, r.Debug, r.log)
}

// getGame retrieves the game from the runner for the message, if the runner has a game for the message's game ID.
func (r *Runner) getGame(m message.Message) (chan<- message.Message, error) { if m.Game == nil { return nil, fmt.Errorf("no game for runner to handle in message: %v", m) } gIn, ok := r.games[m.Game.ID] if !ok { return nil, fmt.Errorf("no game ID for runner to handle in message: %v", m) } return gIn, nil } // sendError adds a message for the player on the channel func (r *Runner) sendError(err error, pn player.Name, out chan<- message.Message) { err = fmt.Errorf("player %v: %w", pn, err) r.log.Printf("game runner error: %v", err) m := message.Message{ Type: message.SocketError, Info: err.Error(), PlayerName: pn, } message.Send(m, out, r.Debug, r.log) }
32.326733
133
0.711026
3.40625
fa82de916adac298052379e4e7e81b6c55191986
6,814
sql
SQL
public/question4/Bookazon.sql
bryan-gilbert/assignments
ff5db009a8c324d73aedf07b6b3938a656f919be
[ "Unlicense" ]
1
2020-07-28T21:48:49.000Z
2020-07-28T21:48:49.000Z
public/question4/Bookazon.sql
bryan-gilbert/assignments
ff5db009a8c324d73aedf07b6b3938a656f919be
[ "Unlicense" ]
null
null
null
public/question4/Bookazon.sql
bryan-gilbert/assignments
ff5db009a8c324d73aedf07b6b3938a656f919be
[ "Unlicense" ]
null
null
null
-- MySQL Script generated by MySQL Workbench
-- Wed Aug 22 01:55:12 2018
-- Model: New Model    Version: 1.0
-- MySQL Workbench Forward Engineering

SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION';

-- -----------------------------------------------------
-- Schema bookazon
-- -----------------------------------------------------
-- source Bookazon.sql;
-- -----------------------------------------------------
-- Schema bookazon
-- -----------------------------------------------------
CREATE SCHEMA IF NOT EXISTS `bookazon` DEFAULT CHARACTER SET utf8 ;
USE `bookazon` ;

-- -----------------------------------------------------
-- Table `bookazon`.`Authors`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `bookazon`.`Authors` ;

CREATE TABLE IF NOT EXISTS `bookazon`.`Authors` (
  `id` INT NOT NULL AUTO_INCREMENT,
  `Name` VARCHAR(100) NULL,
  PRIMARY KEY (`id`),
  UNIQUE INDEX `id_UNIQUE` (`id` ASC)
) ENGINE = InnoDB;

LOAD DATA LOCAL INFILE 'Authors.csv'
INTO TABLE Authors
FIELDS TERMINATED BY ',' ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES;

-- -----------------------------------------------------
-- Table `bookazon`.`Books`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `bookazon`.`Books` ;

CREATE TABLE IF NOT EXISTS `bookazon`.`Books` (
  `id` INT NOT NULL AUTO_INCREMENT,
  `title` VARCHAR(200) NOT NULL,
  `format` VARCHAR(45) NOT NULL,
  `price` DOUBLE NULL default 0.0,
  `authorId` INT NULL,
  `category` INT NULL,
  PRIMARY KEY (`id`),
  UNIQUE INDEX `id_UNIQUE` (`id` ASC)
) ENGINE = InnoDB;

LOAD DATA LOCAL INFILE 'Books.csv'
INTO TABLE Books
FIELDS TERMINATED BY ',' ENCLOSED BY '"'
IGNORE 1 LINES;

-- -----------------------------------------------------
-- Table `bookazon`.`Categories`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `bookazon`.`Categories` ;

CREATE TABLE IF NOT EXISTS `bookazon`.`Categories` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `category` VARCHAR(200) NULL,
  PRIMARY KEY (`id`),
  UNIQUE INDEX `id_UNIQUE` (`id` ASC)
) ENGINE = InnoDB;

LOAD DATA LOCAL INFILE 'Categories.csv'
INTO TABLE Categories
FIELDS TERMINATED BY ',' ENCLOSED BY '"'
IGNORE 1 LINES;

-- -----------------------------------------------------
-- Table `bookazon`.`Customers`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `bookazon`.`Customers` ;

CREATE TABLE IF NOT EXISTS `bookazon`.`Customers` (
  `id` INT NOT NULL AUTO_INCREMENT,
  `Name` VARCHAR(100) NULL,
  `City` VARCHAR(45) NULL,
  `State` VARCHAR(45) NULL,
  PRIMARY KEY (`id`),
  UNIQUE INDEX `id_UNIQUE` (`id` ASC)
) ENGINE = InnoDB;

LOAD DATA LOCAL INFILE 'Customers.csv'
INTO TABLE Customers
FIELDS TERMINATED BY ',' ENCLOSED BY '"'
IGNORE 1 LINES;

-- -----------------------------------------------------
-- Table `bookazon`.`LineItems`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `bookazon`.`LineItems` ;

CREATE TABLE IF NOT EXISTS `bookazon`.`LineItems` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `bookId` INT NOT NULL,
  `quantity` INT NOT NULL,
  `cost` DOUBLE NULL DEFAULT 0.0,
  -- fixed: DEFAULT was the floating-point literal 0.0 on an INT column
  `orderId` INT NOT NULL DEFAULT 0,
  PRIMARY KEY (`id`),
  UNIQUE INDEX `id_UNIQUE` (`id` ASC)
) ENGINE = InnoDB;

LOAD DATA LOCAL INFILE 'LineItems.csv'
INTO TABLE LineItems
FIELDS TERMINATED BY ',' ENCLOSED BY '"'
IGNORE 1 LINES;

-- -----------------------------------------------------
-- Table `bookazon`.`Orders`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `bookazon`.`Orders` ;

CREATE TABLE IF NOT EXISTS `bookazon`.`Orders` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `date` DATETIME NOT NULL,
  `custId` INT NULL,
  PRIMARY KEY (`id`),
  UNIQUE INDEX `id_UNIQUE` (`id` ASC)
) ENGINE = InnoDB;

LOAD DATA LOCAL INFILE 'Orders.csv'
INTO TABLE Orders
FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES;

-- select * from salesByAuthor;
-- Per-author, per-year unit and revenue totals.
DROP VIEW IF EXISTS `bookazon`.`salesByAuthor` ;
CREATE VIEW `salesByAuthor` AS
SELECT authors.Name,
  SUM(bookSales.totalBookUnits) AS totalAuthorUnits,
  SUM(bookSales.totalBookSales) AS totalAuthorSales,
  bookSales.yr
FROM Books bookAuthor
INNER JOIN Authors authors ON authors.id = bookAuthor.authorId
INNER JOIN (
  SELECT book.id bookId,
    FORMAT(SUM(cmb.totalUnits), 2) AS totalBookUnits,
    FORMAT(SUM(cmb.totalSales), 2) AS totalBookSales,
    yr
  FROM Books book
  INNER JOIN (
    SELECT bookId, yr,
      SUM(quantity * cost) AS totalSales,
      SUM(quantity ) AS totalUnits
    FROM LineItems item
    INNER JOIN (
      SELECT id, custId, YEAR(STR_TO_DATE(date, '%Y-%m-%d')) AS yr
      FROM Orders
    ) AS orders ON orders.id = item.orderId
    GROUP BY bookId, yr
    ORDER BY SUM(quantity * cost), SUM(quantity)
  ) AS cmb ON cmb.bookId = book.id
  GROUP BY book.id, yr
  ORDER BY SUM(book.price * cmb.totalUnits) DESC
) AS bookSales ON bookSales.bookId = bookAuthor.id
GROUP BY bookSales.yr, bookAuthor.authorId
ORDER BY SUM(bookSales.totalBookUnits) DESC ;

-- select * from booksSoldByYear;
-- Per-book, per-year unit and revenue totals.
DROP VIEW IF EXISTS `bookazon`.`booksSoldByYear` ;
CREATE VIEW `booksSoldByYear` AS
SELECT bookId, yr,
  SUM(quantity * cost) AS totalSales,
  SUM(quantity ) AS totalUnits
FROM LineItems items
INNER JOIN (SELECT id, custId, YEAR(STR_TO_DATE(date, '%Y-%m-%d')) AS yr FROM Orders) orders
  ON orders.id = items.orderId
GROUP BY bookId, yr
ORDER BY SUM(quantity * cost), SUM(quantity) ;

-- select * from booksSold;
-- Book sales joined with category and author names.
DROP VIEW IF EXISTS `bookazon`.`booksSold` ;
CREATE VIEW `booksSold` AS
select title as bookTitle, cat.category, auth.Name as Author, format, b2.yr, b2.totalSales, b2.totalUnits
from books AS b1
INNER JOIN ( SELECT c.id, c.category from Categories c) as cat ON b1.category = cat.id
INNER JOIN ( SELECT a.id, a.Name from Authors a ) as auth ON auth.id = b1.authorId
INNER JOIN ( SELECT bookId, yr, totalSales, totalUnits from booksSoldByYear b2 ) as b2 ON b1.id = b2.bookId ;

-- Per-customer order line detail with computed line totals.
DROP VIEW IF EXISTS `bookazon`.`customersOrders` ;
CREATE VIEW `customersOrders` AS
select Name, City, State, o.yr, li.bookId, li.quantity, li.cost, li.total
FROM Customers AS c
INNER JOIN (SELECT o.id, custId, YEAR(STR_TO_DATE(date, '%Y-%m-%d')) AS yr from Orders as o) as o ON o.custId = c.id
INNER JOIN (SELECT bookId, quantity, cost, (quantity*cost ) total, orderId FROM LineItems li) as li ON li.orderId = o.id ;

SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
39.387283
159
0.623276
3.0625
aa0f2b4b610691e2b60c7d028cff936bf3f46255
884
asm
Assembly
boot/disk.asm
a-quelle/OSproject
8afaaf87eef9a13fbca7819b751f8d4a7c13099f
[ "MIT" ]
null
null
null
boot/disk.asm
a-quelle/OSproject
8afaaf87eef9a13fbca7819b751f8d4a7c13099f
[ "MIT" ]
null
null
null
boot/disk.asm
a-quelle/OSproject
8afaaf87eef9a13fbca7819b751f8d4a7c13099f
[ "MIT" ]
null
null
null
;params: bx is read buffer address
;        dl is drive number to read from
;        dh is number of sectors to read
;reads dh sectors, starting at sector 2 of cylinder 0 of drive dl
;on success all registers are restored; on any error it prints a
;message and halts in an infinite loop (never returns)
disk_load:
    push ax
    push bx
    push cx
    push dx
    mov ah, 0x02        ;disk read with int 0x13
    mov al, dh          ;al is number of sectors to read
    mov cl, 0x02        ;cl is start sector
    mov ch, 0x00        ;ch is cylinder to read
    mov dh, 0x00        ;dh is head number to use
    int 0x13            ;BIOS sets carry on failure, al = sectors actually read
    jc disk_error
    pop dx              ;restore dh = sectors requested
    cmp al, dh          ;al contains number of sector read
    jne sectors_error
    pop cx
    pop bx
    pop ax
    ret

disk_error:
    mov bx, DISK_ERROR
    call print
    call print_nl
    mov dh, ah          ;ah contains error code
    call print_hex      ;will print error code and drive number
    jmp disk_loop

sectors_error:
    mov bx, SECTORS_ERROR
    call print

disk_loop:              ;fatal: hang forever so the boot process stops here
    jmp $

DISK_ERROR: db "Disk read error", 0
SECTORS_ERROR: db "Incorrect number of sectors read", 0
20.55814
65
0.728507
3.25
0ba7c2583722cded680577d9143e475dac69688c
2,258
js
JavaScript
src/controllers/userController.js
AntonioCopete/node-develop-your-mvc-project
680a52b22062f4e5c93e1549604a234dc19a1682
[ "MIT" ]
2
2021-12-22T16:43:08.000Z
2021-12-26T11:31:21.000Z
src/controllers/userController.js
AntonioCopete/mern-back
e0f83eeba752095aad05c379ff68454e10283c5a
[ "MIT" ]
null
null
null
src/controllers/userController.js
AntonioCopete/mern-back
e0f83eeba752095aad05c379ff68454e10283c5a
[ "MIT" ]
null
null
null
const { errorMiddleware } = require('../middleware'); const db = require('../models'); async function login(req, res, next) { const { uid, email } = req.user; try { const User = await db.User.findOne({ email: email }).select().lean().exec(); console.log(User); if (User) { res.status(200).send({ message: 'Logged successfully', user: User, }); } } catch (err) { next(err); } } async function createUser(req, res, next) { console.log(req.body); const { email, password, fullName, userLink } = req.body; try { const user = await db.User.create({ fullName: fullName, email: email, password: password, userLink: userLink, }); res.status(201).send({ message: 'User created succeessfully', data: user, }); } catch (err) { console.log('hello kitty'); next(err); } } async function getUsers(req, res, next) { try { const users = await db.User.find().lean().exec(); res.status(200).send({ data: users, }); } catch (err) { next(err); } } async function updateUser(req, res, next) { const { userId } = req.params; try { const updateUser = await db.User.findByIdAndUpdate(userId, req.body, { new: true, }); res.status(200).send({ message: 'User updated successfully', data: updateUser, }); } catch (err) { next(err); } } async function deleteUser(req, res, next) { const { userId } = req.params; try { const deleteUser = await db.User.deleteOne({ _id: userId }); if (deleteUser.deletedCount === 1) { res.status(200).send({ message: 'User successfully deleted', }); } else { res.status(500).send({ message: 'User not removed', }); } } catch (err) { next(err); } } async function getSingleUser(req, res, next) { try { const { userId } = req.params; const user = await db.User.findById({ _id: userId }).lean().exec(); res.status(200).send({ data: user, }); } catch (err) { next(err); } } module.exports = { createUser: createUser, login: login, getUsers: getUsers, updateUser: updateUser, deleteUser: deleteUser, getSingleUser: getSingleUser, };
20.160714
80
0.578831
3.03125
0ad2a13bdbcf121ef1a736fdac3c09a1f15a1613
1,715
kt
Kotlin
src/commonTest/kotlin/examples/qep/samples.kt
GameModsBR/quick-expression-parser
91c286ad40e9ca345846a0f575edda5fcd3af40e
[ "Apache-2.0" ]
null
null
null
src/commonTest/kotlin/examples/qep/samples.kt
GameModsBR/quick-expression-parser
91c286ad40e9ca345846a0f575edda5fcd3af40e
[ "Apache-2.0" ]
null
null
null
src/commonTest/kotlin/examples/qep/samples.kt
GameModsBR/quick-expression-parser
91c286ad40e9ca345846a0f575edda5fcd3af40e
[ "Apache-2.0" ]
null
null
null
@file:Suppress("UndocumentedPublicClass", "unused")

package examples.qep

import br.com.gamemods.qep.ParameterProvider
import br.com.gamemods.qep.parseExpression

/** Sample: resolve expression parameters from a (possibly nested) Map. */
private fun parseExpressionMapSample() =
    "Hello #{user.name}, today is #dayOfWeek. Will you go #{user.isAdult? 'work' : 'to school'} today?"
        .parseExpression(
            mapOf(
                "dayOfWeek" to "monday",
                "user" to mapOf(
                    "name" to "Michael",
                    "isAdult" to true
                )
            )
        )

/** Sample: resolve expression parameters passed as vararg pairs. */
private fun parseExpressionVarargSample() =
    "Hello #{user.name}, today is #dayOfWeek. Will you go #{user.isAdult? 'work' : 'to school'} today?"
        .parseExpression(
            "dayOfWeek" to "monday",
            "user" to mapOf(
                "name" to "Michael",
                "isAdult" to true
            )
        )

/** Sample: resolve expression parameters from custom [ParameterProvider] beans. */
private fun parseExpressionParamProviderSample(): String {
    data class UserBean(val name: String, val age: Int) : ParameterProvider {
        // Unknown identifiers fall through to null via the map lookup.
        override fun getParameter(identifier: String): Any? =
            mapOf("name" to name, "isAdult" to (age >= 18))[identifier]
    }

    data class ExampleBean(val dayOfWeek: String, val user: UserBean) : ParameterProvider {
        override fun getParameter(identifier: String): Any? =
            mapOf("dayOfWeek" to dayOfWeek, "user" to user)[identifier]
    }

    val sampleBean = ExampleBean("monday", UserBean("Michael", 12))
    return "Hello #{user.name}, today is #dayOfWeek. Will you go #{user.isAdult? 'work' : 'to school'} today?"
        .parseExpression(sampleBean)
}
35
110
0.561516
3.15625
2a02c560298b28e3615935fc514fccffd58fa8f5
4,198
java
Java
dap4/d4core/src/main/java/dap4/core/util/DapDump.java
joansmith3/thredds
ac321ce2a15f020f0cdef1ff9a2cf82261d8297c
[ "NetCDF" ]
1
2018-04-24T13:53:46.000Z
2018-04-24T13:53:46.000Z
dap4/d4core/src/main/java/dap4/core/util/DapDump.java
joansmith3/thredds
ac321ce2a15f020f0cdef1ff9a2cf82261d8297c
[ "NetCDF" ]
16
2016-04-11T06:42:41.000Z
2019-05-03T04:04:50.000Z
dap4/d4core/src/main/java/dap4/core/util/DapDump.java
joansmith3/thredds
ac321ce2a15f020f0cdef1ff9a2cf82261d8297c
[ "NetCDF" ]
1
2019-07-22T19:57:26.000Z
2019-07-22T19:57:26.000Z
/* Copyright 2012, UCAR/Unidata.
   See the LICENSE file for more information. */

package dap4.core.util;

import java.io.*;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;

/**
 * Debug utility: dump binary data (ByteBuffer / byte[] / ByteArrayOutputStream)
 * to System.err, showing each byte alongside int/short reinterpretations of the
 * bytes at the same offset. All methods are static; the class is never instantiated.
 */
abstract public class DapDump
{
    //////////////////////////////////////////////////
    // Provide a simple dump of binary data
    // (Static method)

    //////////////////////////////////////////////////
    // Constants

    // Cap on how many bytes a single dump will show.
    static int MAXLIMIT = 20000;

    //////////////////////////////////////////////////
    // Provide a simple dump of binary data

    /**
     * Dump buf0's contents, optionally skipping a leading DMR section.
     * When skipdmr is true, the first 4 bytes are read as an int whose low
     * 24 bits give the DMR byte count; that many bytes (plus the count word
     * itself) are skipped before dumping. buf0's position is restored.
     *
     * @param buf0    buffer to dump (from its current position up to its limit,
     *                capped at MAXLIMIT)
     * @param skipdmr if true, skip the leading DMR count + payload
     */
    static public void dumpbytes(ByteBuffer buf0, boolean skipdmr)
    {
        int savepos = buf0.position();
        int limit0 = buf0.limit();
        int skipcount = 0;
        if(limit0 > MAXLIMIT) limit0 = MAXLIMIT;
        if(limit0 >= buf0.limit()) limit0 = buf0.limit();
        if(skipdmr) {
            skipcount = buf0.getInt(); //dmr count
            skipcount &= 0xFFFFFF; // mask off the flags to get true count
            skipcount += 4; // skip the count also
        }
        // +8 bytes of zero padding so the int/long reinterpretation reads
        // below never run off the end of the array.
        byte[] bytes = new byte[(limit0 + 8) - skipcount];
        Arrays.fill(bytes, (byte) 0);
        buf0.position(savepos + skipcount);
        buf0.get(bytes, 0, limit0 - skipcount);
        buf0.position(savepos); // leave buf0 exactly as we found it
        ByteBuffer buf = ByteBuffer.wrap(bytes).order(buf0.order());
        dumpbytes(buf);
    }

    /**
     * Dump the contents of a buffer from 0 to position
     *
     * @param buf0 byte buffer to dump
     */
    static public void dumpbytes(ByteBuffer buf0)
    {
        int stop = buf0.limit();
        int size = stop + 8; // zero padding so multi-byte reads past 'stop' are safe
        ByteBuffer buf = ByteBuffer.allocate(size).order(buf0.order());
        Arrays.fill(buf.array(), (byte) 0);
        buf.put(buf0.array());
        buf.position(0);
        buf.limit(size);
        int i = 0;
        try {
            // At each byte offset, reinterpret the bytes there as int, long,
            // short, and byte (position is rewound between reads so only the
            // final one-byte get() advances the cursor).
            for(i = 0; buf.position() < stop; i++) {
                int savepos = buf.position();
                int iv = buf.getInt();
                buf.position(savepos);
                long lv = buf.getLong();
                buf.position(savepos);
                short sv = buf.getShort();
                buf.position(savepos);
                byte b = buf.get();
                long uiv = ((long) iv) & 0xFFFFFFFFL; // unsigned view of the int
                int usv = ((int) sv) & 0xFFFF;        // unsigned view of the short
                int ib = (int) b;
                // NOTE(review): this is the low byte of the 4-byte int view, not
                // of b itself; the two coincide only for little-endian order.
                int ub = (iv & 0xFF);
                char c = (char) ub;
                String s = Character.toString(c);
                if(c == '\r') s = "\\r";
                else if(c == '\n') s = "\\n";
                else if(c < ' ') s = "?"; // other control chars are unprintable
                System.err.printf("[%03d] %02x %03d %4d '%s'", i, ub, ub, ib, s);
                System.err.printf("\t%12d 0x%08x", iv, uiv);
                System.err.printf("\t%5d\t0x%04x", sv, usv);
                System.err.println();
                System.err.flush();
            }
        } catch (Exception e) {
            // Best-effort debug dump: report and stop rather than propagate.
            System.err.println("failure:" + e);
        } finally {
            System.err.flush();
            //new Exception().printStackTrace(System.err);
            System.err.flush();
        }
    }

    /**
     * Dump the accumulated contents of a stream between "+++" marker lines.
     * Silently does nothing unless the stream is a ByteArrayOutputStream.
     *
     * @param stream stream whose bytes are dumped
     * @param order  byte order used to reinterpret the bytes
     * @param tag    label printed in the marker lines
     */
    static public void dumpbytestream(OutputStream stream, ByteOrder order, String tag)
    {
        if(stream instanceof ByteArrayOutputStream) {
            byte[] content = ((ByteArrayOutputStream) stream).toByteArray();
            dumpbytestream(content, order, tag);
        }
    }

    /** Dump buf's backing array from 0 to its current position. */
    static public void dumpbytestream(ByteBuffer buf, ByteOrder order, String tag)
    {
        dumpbytestream(buf.array(), 0, buf.position(), order, tag);
    }

    /** Dump the whole byte array. */
    static public void dumpbytestream(byte[] content, ByteOrder order, String tag)
    {
        dumpbytestream(content, 0, content.length, order, tag);
    }

    /**
     * Dump content[start..len) between "+++ tag +++" marker lines.
     *
     * @param content bytes to dump
     * @param start   starting offset
     * @param len     limit set on the wrapping buffer (end offset)
     * @param order   byte order used to reinterpret the bytes
     * @param tag     label printed in the marker lines
     */
    static public void dumpbytestream(byte[] content, int start, int len, ByteOrder order, String tag)
    {
        System.err.println("++++++++++ " + tag + " ++++++++++ ");
        ByteBuffer tmp = ByteBuffer.wrap(content).order(order);
        tmp.position(start);
        tmp.limit(len);
        DapDump.dumpbytes(tmp);
        System.err.println("++++++++++ " + tag + " ++++++++++ ");
        System.err.flush();
    }
}
31.328358
83
0.50667
3.015625
f07e0ced31d9f3b5a75c59dd3ef793ba14212ab0
2,831
py
Python
tests/base.py
octue/octue-sdk-python
31c6e9358d3401ca708f5b3da702bfe3be3e52ce
[ "MIT" ]
5
2020-10-01T12:43:10.000Z
2022-03-14T17:26:25.000Z
tests/base.py
octue/octue-sdk-python
31c6e9358d3401ca708f5b3da702bfe3be3e52ce
[ "MIT" ]
322
2020-06-24T15:55:22.000Z
2022-03-30T11:49:28.000Z
tests/base.py
octue/octue-sdk-python
31c6e9358d3401ca708f5b3da702bfe3be3e52ce
[ "MIT" ]
null
null
null
import os
import subprocess
import unittest
import uuid
import warnings
from tempfile import TemporaryDirectory, gettempdir

from octue.cloud.emulators import GoogleCloudStorageEmulatorTestResultModifier
from octue.mixins import MixinBase, Pathable
from octue.resources import Datafile, Dataset, Manifest
from tests import TEST_BUCKET_NAME


class MyPathable(Pathable, MixinBase):
    pass


class BaseTestCase(unittest.TestCase):
    """Base test case for twined:
    - sets a path to the test data directory
    """

    # Hook the storage emulator into every test run.
    test_result_modifier = GoogleCloudStorageEmulatorTestResultModifier(default_bucket_name=TEST_BUCKET_NAME)
    setattr(unittest.TestResult, "startTestRun", test_result_modifier.startTestRun)
    setattr(unittest.TestResult, "stopTestRun", test_result_modifier.stopTestRun)

    def setUp(self):
        """Set paths to the test data and app templates directories and silence
        excess ResourceWarnings for clearer console output.
        """
        # Set up paths to the test data directory and to the app templates directory
        root_dir = os.path.dirname(os.path.abspath(__file__))
        self.data_path = os.path.join(root_dir, "data")
        self.templates_path = os.path.join(os.path.dirname(root_dir), "octue", "templates")

        # Make unittest ignore excess ResourceWarnings so tests' console outputs are clearer. This has to be done even
        # if these warnings are ignored elsewhere as unittest forces warnings to be displayed by default.
        warnings.simplefilter("ignore", category=ResourceWarning)
        super().setUp()

    def callCli(self, args):
        """Utility to call the octue CLI (eg for a templated example) in a separate subprocess

        Enables testing that multiple processes aren't using the same memory space, or for running multiple apps in
        parallel to ensure they don't conflict

        :param args: argument list passed straight to subprocess.call
        """
        call_id = str(uuid.uuid4())
        parent_dir = os.path.join(gettempdir(), "octue-sdk-python", f"test-{call_id}")

        # Fix: TemporaryDirectory(dir=...) requires the parent directory to exist;
        # previously it was never created, so this raised FileNotFoundError.
        os.makedirs(parent_dir, exist_ok=True)

        # Fix: run the subprocess inside the temporary directory that was actually
        # created (previously cwd pointed at the nonexistent parent path).
        with TemporaryDirectory(dir=parent_dir) as temporary_directory:
            subprocess.call(args, cwd=temporary_directory)

    def create_valid_dataset(self):
        """Create a valid dataset with two valid datafiles (they're the same file in this case)."""
        path_from = MyPathable(path=os.path.join(self.data_path, "basic_files", "configuration", "test-dataset"))
        path = os.path.join("path-within-dataset", "a_test_file.csv")

        files = [
            Datafile(path_from=path_from, path=path, skip_checks=False),
            Datafile(path_from=path_from, path=path, skip_checks=False),
        ]

        return Dataset(files=files)

    def create_valid_manifest(self):
        """Create a valid manifest with two valid datasets (they're the same dataset in this case)."""
        datasets = [self.create_valid_dataset(), self.create_valid_dataset()]
        manifest = Manifest(datasets=datasets, keys={"my_dataset": 0, "another_dataset": 1})
        return manifest
42.253731
118
0.716001
3.265625
39c20b5333b6e43bd4fb9b976104f021d0684e57
157,167
js
JavaScript
example/main.js
LeeeeeeM/mobx
67540a4e5a6cae3a65e12f74a5a0bf7258cb1f83
[ "MIT" ]
null
null
null
example/main.js
LeeeeeeM/mobx
67540a4e5a6cae3a65e12f74a5a0bf7258cb1f83
[ "MIT" ]
null
null
null
example/main.js
LeeeeeeM/mobx
67540a4e5a6cae3a65e12f74a5a0bf7258cb1f83
[ "MIT" ]
null
null
null
!(function(e) { var t = {} function n(r) { if (t[r]) return t[r].exports var i = (t[r] = { i: r, l: !1, exports: {} }) return e[r].call(i.exports, i, i.exports, n), (i.l = !0), i.exports } ;(n.m = e), (n.c = t), (n.d = function(e, t, r) { n.o(e, t) || Object.defineProperty(e, t, { enumerable: !0, get: r }) }), (n.r = function(e) { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(e, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(e, "__esModule", { value: !0 }) }), (n.t = function(e, t) { if ((1 & t && (e = n(e)), 8 & t)) return e if (4 & t && "object" == typeof e && e && e.__esModule) return e var r = Object.create(null) if ( (n.r(r), Object.defineProperty(r, "default", { enumerable: !0, value: e }), 2 & t && "string" != typeof e) ) for (var i in e) n.d( r, i, function(t) { return e[t] }.bind(null, i) ) return r }), (n.n = function(e) { var t = e && e.__esModule ? function() { return e.default } : function() { return e } return n.d(t, "a", t), t }), (n.o = function(e, t) { return Object.prototype.hasOwnProperty.call(e, t) }), (n.p = ""), n((n.s = 6)) })([ function(e, t, n) { "use strict" function r(e, t) { return ( (function(e) { if (Array.isArray(e)) return e })(e) || (function(e, t) { var n = [], r = !0, i = !1, o = void 0 try { for ( var a, u = e[Symbol.iterator](); !(r = (a = u.next()).done) && (n.push(a.value), !t || n.length !== t); r = !0 ); } catch (e) { ;(i = !0), (o = e) } finally { try { r || null == u.return || u.return() } finally { if (i) throw o } } return n })(e, t) || (function() { throw new TypeError("Invalid attempt to destructure non-iterable instance") })() ) } function i(e) { return (i = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(e) { return typeof e } : function(e) { return e && "function" == typeof Symbol && e.constructor === Symbol && e !== Symbol.prototype ? 
"symbol" : typeof e })(e) } var o = "An invariant failed, however the error is obfuscated because this is an production build.", a = [] Object.freeze(a) var u = {} function c() { return ++Pe.a.mobxGuid } function s(e) { throw (l(!1, e), "X") } function l(e, t) { if (!e) throw new Error("[mobx] " + (t || o)) } Object.freeze(u) function f(e, t) { return !1 } function h(e) { var t = !1 return function() { if (!t) return (t = !0), e.apply(this, arguments) } } var d = function() {} function v(e) { var t = [] return ( e.forEach(function(e) { ;-1 === t.indexOf(e) && t.push(e) }), t ) } function y(e) { return null !== e && "object" === i(e) } function p(e) { if (null === e || "object" !== i(e)) return !1 var t = Object.getPrototypeOf(e) return t === Object.prototype || null === t } function b(e, t, n) { Object.defineProperty(e, t, { enumerable: !1, writable: !0, configurable: !0, value: n }) } function m(e, t, n) { Object.defineProperty(e, t, { enumerable: !1, writable: !1, configurable: !0, value: n }) } function g(e, t) { var n = Object.getOwnPropertyDescriptor(e, t) return !n || (!1 !== n.configurable && !1 !== n.writable) } function w(e, t) { 0 } function O(e, t) { var n = "isMobX" + e return ( (t.prototype[n] = !0), function(e) { return y(e) && !0 === e[n] } ) } function S(e) { return e instanceof Map } function k(e) { return e instanceof Set } function _(e) { var t = new Set() for (var n in e) t.add(n) return ( Object.getOwnPropertySymbols(e).forEach(function(n) { Object.getOwnPropertyDescriptor(e, n).enumerable && t.add(n) }), Array.from(t) ) } function A(e) { return e && e.toString ? e.toString() : new String(e).toString() } function E(e) { return p(e) ? Object.keys(e) : Array.isArray(e) ? e.map(function(e) { return r(e, 1)[0] }) : S(e) || $t(e) ? Array.from(e.keys()) : s("Cannot get keys from '".concat(e, "'")) } function j(e) { return null === e ? null : "object" === i(e) ? 
"" + e : e } function T(e, t) { for (var n = 0; n < t.length; n++) { var r = t[n] ;(r.enumerable = r.enumerable || !1), (r.configurable = !0), "value" in r && (r.writable = !0), Object.defineProperty(e, r.key, r) } } var x = Symbol("mobx administration"), C = (function() { function e() { var t = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : "Atom@" + c() !(function(e, t) { if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") })(this, e), (this.name = t), (this.isPendingUnobservation = !1), (this.isBeingObserved = !1), (this.observers = new Set()), (this.diffValue = 0), (this.lastAccessedBy = 0), (this.lowestObserverState = be.NOT_TRACKING) } var t, n, r return ( (t = e), (n = [ { key: "onBecomeObserved", value: function() { this.onBecomeObservedListeners && this.onBecomeObservedListeners.forEach(function(e) { return e() }) } }, { key: "onBecomeUnobserved", value: function() { this.onBecomeUnobservedListeners && this.onBecomeUnobservedListeners.forEach(function(e) { return e() }) } }, { key: "reportObserved", value: function() { return Me(this) } }, { key: "reportChanged", value: function() { Re(), Ue(this), Ie() } }, { key: "toString", value: function() { return this.name } } ]) && T(t.prototype, n), r && T(t, r), e ) })(), P = O("Atom", C) function V(e) { var t = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : d, n = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : d, r = new C(e) return t !== d && ht(r, t), n !== d && dt(r, n), r } var D = { identity: function(e, t) { return e === t }, structural: function(e, t) { return On(e, t) }, default: function(e, t) { return Object.is(e, t) } }, N = Symbol("mobx did run lazy initializers"), L = Symbol("mobx pending decorators"), B = {}, R = {} function I(e, t) { var n = t ? 
B : R return ( n[e] || (n[e] = { configurable: !0, enumerable: t, get: function() { return M(this), this[e] }, set: function(t) { M(this), (this[e] = t) } }) ) } function M(e) { if (!0 !== e[N]) { var t = e[L] if (t) for (var n in (b(e, N, !0), t)) { var r = t[n] r.propertyCreator( e, r.prop, r.descriptor, r.decoratorTarget, r.decoratorArguments ) } } } function U(e, t) { return function() { var n, r = function(r, i, o, a) { if (!0 === a) return t(r, i, o, r, n), null if (!Object.prototype.hasOwnProperty.call(r, L)) { var u = r[L] b(r, L, Object.assign({}, u)) } return ( (r[L][i] = { prop: i, propertyCreator: t, descriptor: o, decoratorTarget: r, decoratorArguments: n }), I(i, e) ) } return G(arguments) ? ((n = a), r.apply(null, arguments)) : ((n = Array.prototype.slice.call(arguments)), r) } } function G(e) { return ( ((2 === e.length || 3 === e.length) && "string" == typeof e[1]) || (4 === e.length && !0 === e[3]) ) } function K(e, t, n) { return Ot(e) ? e : Array.isArray(e) ? te.array(e, { name: n }) : p(e) ? te.object(e, void 0, { name: n }) : S(e) ? te.map(e, { name: n }) : k(e) ? te.set(e, { name: n }) : e } function z(e, t, n) { return null == e ? e : vn(e) || qt(e) || $t(e) || rn(e) ? e : Array.isArray(e) ? te.array(e, { name: n, deep: !1 }) : p(e) ? te.object(e, void 0, { name: n, deep: !1 }) : S(e) ? te.map(e, { name: n, deep: !1 }) : k(e) ? te.set(e, { name: n, deep: !1 }) : s(!1) } function H(e) { return e } function J(e, t, n) { return On(e, t) ? t : e } var q = n(4) var W = { deep: !0, name: void 0, defaultDecorator: void 0, proxy: !0 } function X(e) { return null == e ? W : "string" == typeof e ? { name: e, deep: !0, proxy: !0 } : e } Object.freeze(W) var Y = Object(q.a)(K), F = Object(q.a)(z), Q = Object(q.a)(H), Z = Object(q.a)(J) function $(e) { return e.defaultDecorator ? e.defaultDecorator.enhancer : !1 === e.deep ? 
H : K } var ee = { box: function(e, t) { arguments.length > 2 && ne("box") var n = X(t) return new ve(e, $(n), n.name, !0, n.equals) }, array: function(e, t) { arguments.length > 2 && ne("array") var n = X(t) return Gt(e, $(n), n.name) }, map: function(e, t) { arguments.length > 2 && ne("map") var n = X(t) return new Zt(e, $(n), n.name) }, set: function(e, t) { arguments.length > 2 && ne("set") var n = X(t) return new nn(e, $(n), n.name) }, object: function(e, t, n) { "string" == typeof arguments[1] && ne("object") var r = X(n) if (!1 === r.proxy) return yt({}, e, t, r) var i = pt(r), o = yt({}, void 0, void 0, r), a = Ct(o) return bt(a, e, t, i), a }, ref: Q, shallow: F, deep: Y, struct: Z }, te = function(e, t, n) { if ("string" == typeof arguments[1]) return Y.apply(null, arguments) if (Ot(e)) return e var r = p(e) ? te.object(e, t, n) : Array.isArray(e) ? te.array(e, t) : S(e) ? te.map(e, t) : k(e) ? te.set(e, t) : e if (r !== e) return r s(!1) } function ne(e) { s( "Expected one or two arguments to observable." .concat(e, ". Did you accidentally try to use observable.") .concat(e, " as decorator?") ) } Object.keys(ee).forEach(function(e) { return (te[e] = ee[e]) }) var re = U(!1, function(e, t, n, r, i) { var o = n.get, a = n.set, u = i[0] || {} cn(e).addComputedProp(e, t, Object.assign({ get: o, set: a, context: e }, u)) }) re({ equals: D.structural }) function ie(e, t, n) { var r = function() { return oe(e, t, n || this, arguments) } return (r.isMobxAction = !0), r } function oe(e, t, n, r) { var i = (function(e, t, n, r) { var i = 0 var o = Te() Re() var a = ue(!0) return { prevDerivation: o, prevAllowStateChanges: a, notifySpy: !1, startTime: i } })(), o = !0 try { var a = t.apply(n, r) return (o = !1), a } finally { o ? 
((Pe.a.suppressReactionErrors = o), ae(i), (Pe.a.suppressReactionErrors = !1)) : ae(i) } } function ae(e) { ce(e.prevAllowStateChanges), Ie(), xe(e.prevDerivation), e.notifySpy } function ue(e) { var t = Pe.a.allowStateChanges return (Pe.a.allowStateChanges = e), t } function ce(e) { Pe.a.allowStateChanges = e } function se(e) { return (se = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(e) { return typeof e } : function(e) { return e && "function" == typeof Symbol && e.constructor === Symbol && e !== Symbol.prototype ? "symbol" : typeof e })(e) } function le(e, t) { for (var n = 0; n < t.length; n++) { var r = t[n] ;(r.enumerable = r.enumerable || !1), (r.configurable = !0), "value" in r && (r.writable = !0), Object.defineProperty(e, r.key, r) } } function fe(e, t) { return !t || ("object" !== se(t) && "function" != typeof t) ? (function(e) { if (void 0 === e) throw new ReferenceError( "this hasn't been initialised - super() hasn't been called" ) return e })(e) : t } function he(e) { return (he = Object.setPrototypeOf ? Object.getPrototypeOf : function(e) { return e.__proto__ || Object.getPrototypeOf(e) })(e) } function de(e, t) { return (de = Object.setPrototypeOf || function(e, t) { return (e.__proto__ = t), e })(e, t) } var ve = (function(e) { function t(e, n) { var r, i = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : "ObservableValue@" + c(), o = (!(arguments.length > 3 && void 0 !== arguments[3]) || arguments[3], arguments.length > 4 && void 0 !== arguments[4] ? 
arguments[4] : D.default) return ( (function(e, t) { if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") })(this, t), ((r = fe(this, he(t).call(this, i))).enhancer = n), (r.name = i), (r.equals = o), (r.hasUnreportedChange = !1), (r.value = n(e, void 0, i)), r ) } var n, r, i return ( (function(e, t) { if ("function" != typeof t && null !== t) throw new TypeError( "Super expression must either be null or a function" ) ;(e.prototype = Object.create(t && t.prototype, { constructor: { value: e, writable: !0, configurable: !0 } })), t && de(e, t) })(t, C), (n = t), (r = [ { key: "dehanceValue", value: function(e) { return void 0 !== this.dehancer ? this.dehancer(e) : e } }, { key: "set", value: function(e) { this.value, (e = this.prepareNewValue(e)) !== Pe.a.UNCHANGED && this.setNewValue(e) } }, { key: "prepareNewValue", value: function(e) { if ((_e(this), Pt(this))) { var t = Dt(this, { object: this, type: "update", newValue: e }) if (!t) return Pe.a.UNCHANGED e = t.newValue } return ( (e = this.enhancer(e, this.value, this.name)), this.equals(this.value, e) ? 
Pe.a.UNCHANGED : e ) } }, { key: "setNewValue", value: function(e) { var t = this.value ;(this.value = e), this.reportChanged(), Nt(this) && Bt(this, { type: "update", object: this, newValue: e, oldValue: t }) } }, { key: "get", value: function() { return this.reportObserved(), this.dehanceValue(this.value) } }, { key: "intercept", value: function(e) { return Vt(this, e) } }, { key: "observe", value: function(e, t) { return ( t && e({ object: this, type: "update", newValue: this.value, oldValue: void 0 }), Lt(this, e) ) } }, { key: "toJSON", value: function() { return this.get() } }, { key: "toString", value: function() { return "".concat(this.name, "[").concat(this.value, "]") } }, { key: "valueOf", value: function() { return j(this.get()) } }, { key: Symbol.toPrimitive, value: function() { return this.valueOf() } } ]) && le(n.prototype, r), i && le(n, i), t ) })(), ye = O("ObservableValue", ve) function pe(e, t) { for (var n = 0; n < t.length; n++) { var r = t[n] ;(r.enumerable = r.enumerable || !1), (r.configurable = !0), "value" in r && (r.writable = !0), Object.defineProperty(e, r.key, r) } } var be, me, ge = (function() { function e(t) { !(function(e, t) { if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") })(this, e), (this.dependenciesState = be.NOT_TRACKING), (this.observing = []), (this.newObserving = null), (this.isBeingObserved = !1), (this.isPendingUnobservation = !1), (this.observers = new Set()), (this.diffValue = 0), (this.runId = 0), (this.lastAccessedBy = 0), (this.lowestObserverState = be.UP_TO_DATE), (this.unboundDepsCount = 0), (this.__mapid = "#" + c()), (this.value = new Oe(null)), (this.isComputing = !1), (this.isRunningSetter = !1), (this.isTracing = me.NONE), (this.derivation = t.get), (this.name = t.name || "ComputedValue@" + c()), t.set && (this.setter = ie(this.name + "-setter", t.set)), (this.equals = t.equals || (t.compareStructural || t.struct ? 
D.structural : D.default)), (this.scope = t.context), (this.requiresReaction = !!t.requiresReaction), (this.keepAlive = !!t.keepAlive) } var t, n, r return ( (t = e), (n = [ { key: "onBecomeStale", value: function() { Ke(this) } }, { key: "onBecomeObserved", value: function() { this.onBecomeObservedListeners && this.onBecomeObservedListeners.forEach(function(e) { return e() }) } }, { key: "onBecomeUnobserved", value: function() { this.onBecomeUnobservedListeners && this.onBecomeUnobservedListeners.forEach(function(e) { return e() }) } }, { key: "get", value: function() { this.isComputing && s( "Cycle detected in computation " .concat(this.name, ": ") .concat(this.derivation) ), 0 !== Pe.a.inBatch || 0 !== this.observers.size || this.keepAlive ? (Me(this), ke(this) && this.trackAndCompute() && Ge(this)) : ke(this) && (this.warnAboutUntrackedRead(), Re(), (this.value = this.computeValue(!1)), Ie()) var e = this.value if (Se(e)) throw e.cause return e } }, { key: "peek", value: function() { var e = this.computeValue(!1) if (Se(e)) throw e.cause return e } }, { key: "set", value: function(e) { if (this.setter) { l( !this.isRunningSetter, "The setter of computed value '".concat( this.name, "' is trying to update itself. Did you intend to update an _observable_ value, instead of the computed property?" 
) ), (this.isRunningSetter = !0) try { this.setter.call(this.scope, e) } finally { this.isRunningSetter = !1 } } else l(!1, !1) } }, { key: "trackAndCompute", value: function() { var e = this.value, t = this.dependenciesState === be.NOT_TRACKING, n = this.computeValue(!0), r = t || Se(e) || Se(n) || !this.equals(e, n) return r && (this.value = n), r } }, { key: "computeValue", value: function(e) { var t if (((this.isComputing = !0), Pe.a.computationDepth++, e)) t = Ae(this, this.derivation, this.scope) else if (!0 === Pe.a.disableErrorBoundaries) t = this.derivation.call(this.scope) else try { t = this.derivation.call(this.scope) } catch (e) { t = new Oe(e) } return Pe.a.computationDepth--, (this.isComputing = !1), t } }, { key: "suspend", value: function() { this.keepAlive || (Ee(this), (this.value = void 0)) } }, { key: "observe", value: function(e, t) { var n = this, r = !0, i = void 0 return st(function() { var o = n.get() if (!r || t) { var a = Te() e({ type: "update", object: n, newValue: o, oldValue: i }), xe(a) } ;(r = !1), (i = o) }) } }, { key: "warnAboutUntrackedRead", value: function() {} }, { key: "toJSON", value: function() { return this.get() } }, { key: "toString", value: function() { return "" .concat(this.name, "[") .concat(this.derivation.toString(), "]") } }, { key: "valueOf", value: function() { return j(this.get()) } }, { key: Symbol.toPrimitive, value: function() { return this.valueOf() } } ]) && pe(t.prototype, n), r && pe(t, r), e ) })(), we = O("ComputedValue", ge) !(function(e) { ;(e[(e.NOT_TRACKING = -1)] = "NOT_TRACKING"), (e[(e.UP_TO_DATE = 0)] = "UP_TO_DATE"), (e[(e.POSSIBLY_STALE = 1)] = "POSSIBLY_STALE"), (e[(e.STALE = 2)] = "STALE") })(be || (be = {})), (function(e) { ;(e[(e.NONE = 0)] = "NONE"), (e[(e.LOG = 1)] = "LOG"), (e[(e.BREAK = 2)] = "BREAK") })(me || (me = {})) var Oe = function e(t) { !(function(e, t) { if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") })(this, e), (this.cause = t) } function 
Se(e) { return e instanceof Oe } function ke(e) { switch (e.dependenciesState) { case be.UP_TO_DATE: return !1 case be.NOT_TRACKING: case be.STALE: return !0 case be.POSSIBLY_STALE: for (var t = Te(), n = e.observing, r = n.length, i = 0; i < r; i++) { var o = n[i] if (we(o)) { if (Pe.a.disableErrorBoundaries) o.get() else try { o.get() } catch (e) { return xe(t), !0 } if (e.dependenciesState === be.STALE) return xe(t), !0 } } return Ce(e), xe(t), !1 } } function _e(e) { var t = e.observers.size > 0 Pe.a.computationDepth > 0 && t && s(!1), Pe.a.allowStateChanges || (!t && "strict" !== Pe.a.enforceActions) || s(!1) } function Ae(e, t, n) { Ce(e), (e.newObserving = new Array(e.observing.length + 100)), (e.unboundDepsCount = 0), (e.runId = ++Pe.a.runId) var r, i = Pe.a.trackingDerivation if (((Pe.a.trackingDerivation = e), !0 === Pe.a.disableErrorBoundaries)) r = t.call(n) else try { r = t.call(n) } catch (e) { r = new Oe(e) } return ( (Pe.a.trackingDerivation = i), (function(e) { for ( var t = e.observing, n = (e.observing = e.newObserving), r = be.UP_TO_DATE, i = 0, o = e.unboundDepsCount, a = 0; a < o; a++ ) { var u = n[a] 0 === u.diffValue && ((u.diffValue = 1), i !== a && (n[i] = u), i++), u.dependenciesState > r && (r = u.dependenciesState) } ;(n.length = i), (e.newObserving = null), (o = t.length) for (; o--; ) { var c = t[o] 0 === c.diffValue && Le(c, e), (c.diffValue = 0) } for (; i--; ) { var s = n[i] 1 === s.diffValue && ((s.diffValue = 0), Ne(s, e)) } r !== be.UP_TO_DATE && ((e.dependenciesState = r), e.onBecomeStale()) })(e), r ) } function Ee(e) { var t = e.observing e.observing = [] for (var n = t.length; n--; ) Le(t[n], e) e.dependenciesState = be.NOT_TRACKING } function je(e) { var t = Te() try { return e() } finally { xe(t) } } function Te() { var e = Pe.a.trackingDerivation return (Pe.a.trackingDerivation = null), e } function xe(e) { Pe.a.trackingDerivation = e } function Ce(e) { if (e.dependenciesState !== be.UP_TO_DATE) { e.dependenciesState = 
be.UP_TO_DATE for (var t = e.observing, n = t.length; n--; ) t[n].lowestObserverState = be.UP_TO_DATE } } var Pe = n(5) function Ve(e) { return e.observers && e.observers.size > 0 } function De(e) { return e.observers } function Ne(e, t) { e.observers.add(t), e.lowestObserverState > t.dependenciesState && (e.lowestObserverState = t.dependenciesState) } function Le(e, t) { e.observers.delete(t), 0 === e.observers.size && Be(e) } function Be(e) { !1 === e.isPendingUnobservation && ((e.isPendingUnobservation = !0), Pe.a.pendingUnobservations.push(e)) } function Re() { Pe.a.inBatch++ } function Ie() { if (0 == --Pe.a.inBatch) { Xe() for (var e = Pe.a.pendingUnobservations, t = 0; t < e.length; t++) { var n = e[t] ;(n.isPendingUnobservation = !1), 0 === n.observers.size && (n.isBeingObserved && ((n.isBeingObserved = !1), n.onBecomeUnobserved()), n instanceof ge && n.suspend()) } Pe.a.pendingUnobservations = [] } } function Me(e) { var t = Pe.a.trackingDerivation return null !== t ? (t.runId !== e.lastAccessedBy && ((e.lastAccessedBy = t.runId), (t.newObserving[t.unboundDepsCount++] = e), e.isBeingObserved || ((e.isBeingObserved = !0), e.onBecomeObserved())), !0) : (0 === e.observers.size && Pe.a.inBatch > 0 && Be(e), !1) } function Ue(e) { e.lowestObserverState !== be.STALE && ((e.lowestObserverState = be.STALE), e.observers.forEach(function(t) { t.dependenciesState === be.UP_TO_DATE && (t.isTracing !== me.NONE && ze(t, e), t.onBecomeStale()), (t.dependenciesState = be.STALE) })) } function Ge(e) { e.lowestObserverState !== be.STALE && ((e.lowestObserverState = be.STALE), e.observers.forEach(function(t) { t.dependenciesState === be.POSSIBLY_STALE ? 
(t.dependenciesState = be.STALE) : t.dependenciesState === be.UP_TO_DATE && (e.lowestObserverState = be.UP_TO_DATE) })) } function Ke(e) { e.lowestObserverState === be.UP_TO_DATE && ((e.lowestObserverState = be.POSSIBLY_STALE), e.observers.forEach(function(t) { t.dependenciesState === be.UP_TO_DATE && ((t.dependenciesState = be.POSSIBLY_STALE), t.isTracing !== me.NONE && ze(t, e), t.onBecomeStale()) })) } function ze(e, t) { if ( (console.log( "[mobx.trace] '" .concat(e.name, "' is invalidated due to a change in: '") .concat(t.name, "'") ), e.isTracing === me.BREAK) ) { var n = [] !(function e(t, n, r) { if (n.length >= 1e3) return void n.push("(and many more)") n.push("".concat(new Array(r).join("\t")).concat(t.name)) t.dependencies && t.dependencies.forEach(function(t) { return e(t, n, r + 1) }) })(mt(e), n, 1), new Function( "debugger;\n/*\nTracing '" .concat( e.name, "'\n\nYou are entering this break point because derivation '" ) .concat(e.name, "' is being traced and '") .concat( t.name, "' is now forcing it to update.\nJust follow the stacktrace you should now see in the devtools to see precisely what piece of your code is causing this update\nThe stackframe you are looking for is at least ~6-8 stack-frames up.\n\n" ) .concat( e instanceof ge ? e.derivation.toString().replace(/[*]\//g, "/") : "", "\n\nThe dependencies for this derivation are:\n\n" ) .concat(n.join("\n"), "\n*/\n ") )() } } function He(e, t) { for (var n = 0; n < t.length; n++) { var r = t[n] ;(r.enumerable = r.enumerable || !1), (r.configurable = !0), "value" in r && (r.writable = !0), Object.defineProperty(e, r.key, r) } } var Je = (function() { function e() { var t = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : "Reaction@" + c(), n = arguments.length > 1 ? arguments[1] : void 0, r = arguments.length > 2 ? 
arguments[2] : void 0 !(function(e, t) { if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") })(this, e), (this.name = t), (this.onInvalidate = n), (this.errorHandler = r), (this.observing = []), (this.newObserving = []), (this.dependenciesState = be.NOT_TRACKING), (this.diffValue = 0), (this.runId = 0), (this.unboundDepsCount = 0), (this.__mapid = "#" + c()), (this.isDisposed = !1), (this._isScheduled = !1), (this._isTrackPending = !1), (this._isRunning = !1), (this.isTracing = me.NONE) } var t, n, r return ( (t = e), (n = [ { key: "onBecomeStale", value: function() { this.schedule() } }, { key: "schedule", value: function() { this._isScheduled || ((this._isScheduled = !0), Pe.a.pendingReactions.push(this), Xe()) } }, { key: "isScheduled", value: function() { return this._isScheduled } }, { key: "runReaction", value: function() { if (!this.isDisposed) { if ((Re(), (this._isScheduled = !1), ke(this))) { this._isTrackPending = !0 try { this.onInvalidate(), this._isTrackPending } catch (e) { this.reportExceptionInDerivation(e) } } Ie() } } }, { key: "track", value: function(e) { if (!this.isDisposed) { Re(), (this._isRunning = !0) var t = Ae(this, e, void 0) ;(this._isRunning = !1), (this._isTrackPending = !1), this.isDisposed && Ee(this), Se(t) && this.reportExceptionInDerivation(t.cause), Ie() } } }, { key: "reportExceptionInDerivation", value: function(e) { var t = this if (this.errorHandler) this.errorHandler(e, this) else { if (Pe.a.disableErrorBoundaries) throw e var n = "[mobx] Encountered an uncaught exception that was thrown by a reaction or observer component, in: '".concat( this, "'" ) Pe.a.suppressReactionErrors ? 
console.warn( "[mobx] (error in reaction '".concat( this.name, "' suppressed, fix error of causing action below)" ) ) : console.error(n, e), Pe.a.globalReactionErrorHandlers.forEach(function(n) { return n(e, t) }) } } }, { key: "dispose", value: function() { this.isDisposed || ((this.isDisposed = !0), this._isRunning || (Re(), Ee(this), Ie())) } }, { key: "getDisposer", value: function() { var e = this.dispose.bind(this) return (e[x] = this), e } }, { key: "toString", value: function() { return "Reaction[".concat(this.name, "]") } }, { key: "trace", value: function() { var e = arguments.length > 0 && void 0 !== arguments[0] && arguments[0] _t(this, e) } } ]) && He(t.prototype, n), r && He(t, r), e ) })() var qe = 100, We = function(e) { return e() } function Xe() { Pe.a.inBatch > 0 || Pe.a.isRunningReactions || We(Ye) } function Ye() { Pe.a.isRunningReactions = !0 for (var e = Pe.a.pendingReactions, t = 0; e.length > 0; ) { ++t === qe && (console.error( "Reaction doesn't converge to a stable state after ".concat( qe, " iterations." 
) + " Probably there is a cycle in the reactive function: ".concat(e[0]) ), e.splice(0)) for (var n = e.splice(0), r = 0, i = n.length; r < i; r++) n[r].runReaction() } Pe.a.isRunningReactions = !1 } var Fe = O("Reaction", Je) function Qe(e) { var t = We We = function(n) { return e(function() { return t(n) }) } } function Ze() { return !1 } function $e(e) {} function et(e) {} function tt(e) {} function nt(e) { return console.warn("[mobx.spy] Is a no-op in production builds"), function() {} } function rt() { s(!1) } function it(e) { return function(t, n, r) { if (r) { if (r.value) return { value: ie(e, r.value), enumerable: !1, configurable: !0, writable: !0 } var i = r.initializer return { enumerable: !1, configurable: !0, writable: !0, initializer: function() { return ie(e, i.call(this)) } } } return ot(e).apply(this, arguments) } } function ot(e) { return function(t, n, r) { Object.defineProperty(t, n, { configurable: !0, enumerable: !1, get: function() {}, set: function(t) { b(this, n, ut(e, t)) } }) } } function at(e, t, n, r) { return !0 === r ? (ct(e, t, n.value), null) : n ? { configurable: !0, enumerable: !1, get: function() { return ct(this, t, n.value || n.initializer.call(this)), this[t] }, set: rt } : { enumerable: !1, configurable: !0, set: function(e) { ct(this, t, e) }, get: function() {} } } var ut = function(e, t, n, r) { return 1 === arguments.length && "function" == typeof e ? ie(e.name || "<unnamed action>", e) : 2 === arguments.length && "function" == typeof t ? ie(e, t) : 1 === arguments.length && "string" == typeof e ? it(e) : !0 !== r ? it(t).apply(null, arguments) : void b(e, t, ie(e.name || t, n.value, this)) } function ct(e, t, n) { b(e, t, ie(t, n.bind(e))) } function st(e) { var t = arguments.length > 1 && void 0 !== arguments[1] ? 
arguments[1] : u var n, r = (t && t.name) || e.name || "Autorun@" + c() if (!t.scheduler && !t.delay) n = new Je( r, function() { this.track(a) }, t.onError ) else { var i = ft(t), o = !1 n = new Je( r, function() { o || ((o = !0), i(function() { ;(o = !1), n.isDisposed || n.track(a) })) }, t.onError ) } function a() { e(n) } return n.schedule(), n.getDisposer() } ut.bound = at var lt = function(e) { return e() } function ft(e) { return e.scheduler ? e.scheduler : e.delay ? function(t) { return setTimeout(t, e.delay) } : lt } function ht(e, t, n) { return vt("onBecomeObserved", e, t, n) } function dt(e, t, n) { return vt("onBecomeUnobserved", e, t, n) } function vt(e, t, n, r) { var i = "function" == typeof r ? pn(t, n) : pn(t), o = "function" == typeof r ? r : n, a = "".concat(e, "Listeners") return ( i[a] ? i[a].add(o) : (i[a] = new Set([o])), "function" != typeof i[e] ? s(!1) : function() { var e = i[a] e && (e.delete(o), 0 === e.size && delete i[a]) } ) } function yt(e, t, n, r) { var i = pt((r = X(r))) return M(e), cn(e, r.name, i.enhancer), t && bt(e, t, n, i), e } function pt(e) { return e.defaultDecorator || (!1 === e.deep ? Q : Y) } function bt(e, t, n, r) { Re() try { var i = _(t), o = !0, a = !1, u = void 0 try { for (var c, s = i[Symbol.iterator](); !(o = (c = s.next()).done); o = !0) { var l = c.value, f = Object.getOwnPropertyDescriptor(t, l) 0 var h = (n && l in n ? n[l] : f.get ? re : r)(e, l, f, !0) h && Object.defineProperty(e, l, h) } } catch (e) { ;(a = !0), (u = e) } finally { try { o || null == s.return || s.return() } finally { if (a) throw u } } } finally { Ie() } } function mt(e, t) { return gt(pn(e, t)) } function gt(e) { var t = { name: e.name } return ( e.observing && e.observing.length > 0 && (t.dependencies = v(e.observing).map(gt)), t ) } function wt(e, t) { return ( null != e && (void 0 !== t ? 
!!vn(e) && e[x].values.has(t) : vn(e) || !!e[x] || P(e) || Fe(e) || we(e)) ) } function Ot(e) { return 1 !== arguments.length && s(!1), wt(e) } function St(e) { return vn(e) ? e[x].getKeys() : $t(e) ? Array.from(e.keys()) : rn(e) ? Array.from(e.keys()) : qt(e) ? e.map(function(e, t) { return t }) : s(!1) } function kt(e, t, n) { if (2 !== arguments.length || rn(e)) if (vn(e)) { var r = e[x] r.values.get(t) ? r.write(t, n) : r.addObservableProp(t, n, r.defaultEnhancer) } else if ($t(e)) e.set(t, n) else if (rn(e)) e.add(t) else { if (!qt(e)) return s(!1) "number" != typeof t && (t = parseInt(t, 10)), l(t >= 0, "Not a valid index: '".concat(t, "'")), Re(), t >= e.length && (e.length = t + 1), (e[t] = n), Ie() } else { Re() var i = t try { for (var o in i) kt(e, o, i[o]) } finally { Ie() } } } function _t() { for (var e = !1, t = arguments.length, n = new Array(t), r = 0; r < t; r++) n[r] = arguments[r] "boolean" == typeof n[n.length - 1] && (e = n.pop()) var i = (function(e) { switch (e.length) { case 0: return Pe.a.trackingDerivation case 1: return pn(e[0]) case 2: return pn(e[0], e[1]) } })(n) if (!i) return s(!1) i.isTracing === me.NONE && console.log("[mobx.trace] '".concat(i.name, "' tracing enabled")), (i.isTracing = e ? me.BREAK : me.LOG) } function At(e) { var t = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : void 0 Re() try { return e.apply(t) } finally { Ie() } } function Et(e) { return (Et = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(e) { return typeof e } : function(e) { return e && "function" == typeof Symbol && e.constructor === Symbol && e !== Symbol.prototype ? "symbol" : typeof e })(e) } function jt(e) { return e[x] } function Tt(e) { return "string" == typeof e || "number" == typeof e || "symbol" === Et(e) } var xt = { has: function(e, t) { if (t === x || "constructor" === t || t === N) return !0 var n = jt(e) return Tt(t) ? 
n.has(t) : t in e }, get: function(e, t) { if (t === x || "constructor" === t || t === N) return e[t] var n = jt(e), r = n.values.get(t) if (r instanceof C) { var i = r.get() return void 0 === i && n.has(t), i } return Tt(t) && n.has(t), e[t] }, set: function(e, t, n) { return !!Tt(t) && (kt(e, t, n), !0) }, deleteProperty: function(e, t) { return !!Tt(t) && (jt(e).remove(t), !0) }, ownKeys: function(e) { return jt(e).keysAtom.reportObserved(), Reflect.ownKeys(e) }, preventExtensions: function(e) { return s("Dynamic observable objects cannot be frozen"), !1 } } function Ct(e) { var t = new Proxy(e, xt) return (e[x].proxy = t), t } function Pt(e) { return void 0 !== e.interceptors && e.interceptors.length > 0 } function Vt(e, t) { var n = e.interceptors || (e.interceptors = []) return ( n.push(t), h(function() { var e = n.indexOf(t) ;-1 !== e && n.splice(e, 1) }) ) } function Dt(e, t) { var n = Te() try { var r = e.interceptors if (r) for ( var i = 0, o = r.length; i < o && (l( !(t = r[i](t)) || t.type, "Intercept handlers should return nothing or a change object" ), t); i++ ); return t } finally { xe(n) } } function Nt(e) { return void 0 !== e.changeListeners && e.changeListeners.length > 0 } function Lt(e, t) { var n = e.changeListeners || (e.changeListeners = []) return ( n.push(t), h(function() { var e = n.indexOf(t) ;-1 !== e && n.splice(e, 1) }) ) } function Bt(e, t) { var n = Te(), r = e.changeListeners if (r) { for (var i = 0, o = (r = r.slice()).length; i < o; i++) r[i](t) xe(n) } } function Rt(e) { return ( (function(e) { if (Array.isArray(e)) { for (var t = 0, n = new Array(e.length); t < e.length; t++) n[t] = e[t] return n } })(e) || (function(e) { if ( Symbol.iterator in Object(e) || "[object Arguments]" === Object.prototype.toString.call(e) ) return Array.from(e) })(e) || (function() { throw new TypeError("Invalid attempt to spread non-iterable instance") })() ) } function It(e, t) { for (var n = 0; n < t.length; n++) { var r = t[n] ;(r.enumerable = 
r.enumerable || !1), (r.configurable = !0), "value" in r && (r.writable = !0), Object.defineProperty(e, r.key, r) } } function Mt(e) { return (Mt = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(e) { return typeof e } : function(e) { return e && "function" == typeof Symbol && e.constructor === Symbol && e !== Symbol.prototype ? "symbol" : typeof e })(e) } var Ut = { get: function(e, t) { return t === x ? e[x] : "length" === t ? e[x].getArrayLength() : "number" == typeof t ? zt.get.call(e, t) : "string" != typeof t || isNaN(t) ? zt.hasOwnProperty(t) ? zt[t] : e[t] : zt.get.call(e, parseInt(t)) }, set: function(e, t, n) { return ( "length" === t && e[x].setArrayLength(n), "number" == typeof t && zt.set.call(e, t, n), "symbol" === Mt(t) || isNaN(t) ? (e[t] = n) : zt.set.call(e, parseInt(t), n), !0 ) }, preventExtensions: function(e) { return s("Observable arrays cannot be frozen"), !1 } } function Gt(e, t) { var n = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : "ObservableArray@" + c(), r = arguments.length > 3 && void 0 !== arguments[3] && arguments[3], i = new Kt(n, t, r) m(i.values, x, i) var o = new Proxy(i.values, Ut) if (((i.proxy = o), e && e.length)) { var a = ue(!0) i.spliceWithArray(0, 0, e), ce(a) } return o } var Kt = (function() { function e(t, n, r) { !(function(e, t) { if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") })(this, e), (this.owned = r), (this.values = []), (this.proxy = void 0), (this.lastKnownLength = 0), (this.atom = new C(t || "ObservableArray@" + c())), (this.enhancer = function(e, r) { return n(e, r, t + "[..]") }) } var t, n, r return ( (t = e), (n = [ { key: "dehanceValue", value: function(e) { return void 0 !== this.dehancer ? this.dehancer(e) : e } }, { key: "dehanceValues", value: function(e) { return void 0 !== this.dehancer && e.length > 0 ? 
e.map(this.dehancer) : e } }, { key: "intercept", value: function(e) { return Vt(this, e) } }, { key: "observe", value: function(e) { var t = arguments.length > 1 && void 0 !== arguments[1] && arguments[1] return ( t && e({ object: this.proxy, type: "splice", index: 0, added: this.values.slice(), addedCount: this.values.length, removed: [], removedCount: 0 }), Lt(this, e) ) } }, { key: "getArrayLength", value: function() { return this.atom.reportObserved(), this.values.length } }, { key: "setArrayLength", value: function(e) { if ("number" != typeof e || e < 0) throw new Error("[mobx.array] Out of range: " + e) var t = this.values.length if (e !== t) if (e > t) { for (var n = new Array(e - t), r = 0; r < e - t; r++) n[r] = void 0 this.spliceWithArray(t, 0, n) } else this.spliceWithArray(e, t - e) } }, { key: "updateArrayLength", value: function(e, t) { if (e !== this.lastKnownLength) throw new Error( "[mobx] Modification exception: the internal structure of an observable array was changed." ) this.lastKnownLength += t } }, { key: "spliceWithArray", value: function(e, t, n) { var r = this _e(this.atom) var i = this.values.length if ( (void 0 === e ? (e = 0) : e > i ? (e = i) : e < 0 && (e = Math.max(0, i + e)), (t = 1 === arguments.length ? i - e : null == t ? 0 : Math.max(0, Math.min(t, i - e))), void 0 === n && (n = a), Pt(this)) ) { var o = Dt(this, { object: this.proxy, type: "splice", index: e, removedCount: t, added: n }) if (!o) return a ;(t = o.removedCount), (n = o.added) } n = 0 === n.length ? 
n : n.map(function(e) { return r.enhancer(e, void 0) }) var u = this.spliceItemsIntoValues(e, t, n) return ( (0 === t && 0 === n.length) || this.notifyArraySplice(e, n, u), this.dehanceValues(u) ) } }, { key: "spliceItemsIntoValues", value: function(e, t, n) { var r if (n.length < 1e4) return (r = this.values).splice.apply(r, [e, t].concat(Rt(n))) var i = this.values.slice(e, e + t) return ( (this.values = this.values .slice(0, e) .concat(n, this.values.slice(e + t))), i ) } }, { key: "notifyArrayChildUpdate", value: function(e, t, n) { var r = !this.owned && !1, i = Nt(this), o = i || r ? { object: this.proxy, type: "update", index: e, newValue: t, oldValue: n } : null this.atom.reportChanged(), i && Bt(this, o) } }, { key: "notifyArraySplice", value: function(e, t, n) { var r = !this.owned && !1, i = Nt(this), o = i || r ? { object: this.proxy, type: "splice", index: e, removed: n, added: t, removedCount: n.length, addedCount: t.length } : null this.atom.reportChanged(), i && Bt(this, o) } } ]) && It(t.prototype, n), r && It(t, r), e ) })(), zt = { intercept: function(e) { return this[x].intercept(e) }, observe: function(e) { var t = arguments.length > 1 && void 0 !== arguments[1] && arguments[1], n = this[x] return n.observe(e, t) }, clear: function() { return this.splice(0) }, replace: function(e) { var t = this[x] return t.spliceWithArray(0, t.values.length, e) }, toJS: function() { return this.slice() }, toJSON: function() { return this.toJS() }, splice: function(e, t) { for ( var n = arguments.length, r = new Array(n > 2 ? 
n - 2 : 0), i = 2; i < n; i++ ) r[i - 2] = arguments[i] var o = this[x] switch (arguments.length) { case 0: return [] case 1: return o.spliceWithArray(e) case 2: return o.spliceWithArray(e, t) } return o.spliceWithArray(e, t, r) }, spliceWithArray: function(e, t, n) { return this[x].spliceWithArray(e, t, n) }, push: function() { for (var e = this[x], t = arguments.length, n = new Array(t), r = 0; r < t; r++) n[r] = arguments[r] return e.spliceWithArray(e.values.length, 0, n), e.values.length }, pop: function() { return this.splice(Math.max(this[x].values.length - 1, 0), 1)[0] }, shift: function() { return this.splice(0, 1)[0] }, unshift: function() { for (var e = this[x], t = arguments.length, n = new Array(t), r = 0; r < t; r++) n[r] = arguments[r] return e.spliceWithArray(0, 0, n), e.values.length }, reverse: function() { var e = this.slice() return e.reverse.apply(e, arguments) }, sort: function(e) { var t = this.slice() return t.sort.apply(t, arguments) }, remove: function(e) { var t = this[x], n = t.dehanceValues(t.values).indexOf(e) return n > -1 && (this.splice(n, 1), !0) }, get: function(e) { var t = this[x] if (t) { if (e < t.values.length) return t.atom.reportObserved(), t.dehanceValue(t.values[e]) console.warn( "[mobx.array] Attempt to read an array index (" .concat(e, ") that is out of bounds (") .concat( t.values.length, "). Please check length first. 
Out of bound indices will not be tracked by MobX" ) ) } }, set: function(e, t) { var n = this[x], r = n.values if (e < r.length) { _e(n.atom) var i = r[e] if (Pt(n)) { var o = Dt(n, { type: "update", object: n.proxy, index: e, newValue: t }) if (!o) return t = o.newValue } ;(t = n.enhancer(t, i)) !== i && ((r[e] = t), n.notifyArrayChildUpdate(e, t, i)) } else { if (e !== r.length) throw new Error( "[mobx.array] Index out of bounds, " .concat(e, " is larger than ") .concat(r.length) ) n.spliceWithArray(e, 0, [t]) } } } ;[ "concat", "every", "filter", "forEach", "indexOf", "join", "lastIndexOf", "map", "reduce", "reduceRight", "slice", "some", "toString", "toLocaleString" ].forEach(function(e) { zt[e] = function() { var t = this[x] t.atom.reportObserved() var n = t.dehanceValues(t.values) return n[e].apply(n, arguments) } }) var Ht, Jt = O("ObservableArrayAdministration", Kt) function qt(e) { return y(e) && Jt(e[x]) } function Wt(e) { return (Wt = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(e) { return typeof e } : function(e) { return e && "function" == typeof Symbol && e.constructor === Symbol && e !== Symbol.prototype ? 
"symbol" : typeof e })(e) } function Xt(e, t) { return ( (function(e) { if (Array.isArray(e)) return e })(e) || (function(e, t) { var n = [], r = !0, i = !1, o = void 0 try { for ( var a, u = e[Symbol.iterator](); !(r = (a = u.next()).done) && (n.push(a.value), !t || n.length !== t); r = !0 ); } catch (e) { ;(i = !0), (o = e) } finally { try { r || null == u.return || u.return() } finally { if (i) throw o } } return n })(e, t) || (function() { throw new TypeError("Invalid attempt to destructure non-iterable instance") })() ) } function Yt(e, t) { for (var n = 0; n < t.length; n++) { var r = t[n] ;(r.enumerable = r.enumerable || !1), (r.configurable = !0), "value" in r && (r.writable = !0), Object.defineProperty(e, r.key, r) } } var Ft, Qt = {}, Zt = (function() { function e(t) { var n = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : K, r = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : "ObservableMap@" + c() if ( ((function(e, t) { if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") })(this, e), (this.enhancer = n), (this.name = r), (this[Ht] = Qt), (this._keysAtom = V("".concat(this.name, ".keys()"))), (this[Symbol.toStringTag] = "Map"), "function" != typeof Map) ) throw new Error( "mobx.map requires Map polyfill for the current browser. Check babel-polyfill or core-js/es6/map.js" ) ;(this._data = new Map()), (this._hasMap = new Map()), this.merge(t) } var t, n, r return ( (t = e), (n = [ { key: "_has", value: function(e) { return this._data.has(e) } }, { key: "has", value: function(e) { var t = this if (!Pe.a.trackingDerivation) return this._has(e) var n = this._hasMap.get(e) if (!n) { var r = (n = new ve( this._has(e), H, "".concat(this.name, ".").concat(A(e), "?"), !1 )) this._hasMap.set(e, r), dt(r, function() { return t._hasMap.delete(e) }) } return n.get() } }, { key: "set", value: function(e, t) { var n = this._has(e) if (Pt(this)) { var r = Dt(this, { type: n ? 
"update" : "add", object: this, newValue: t, name: e }) if (!r) return this t = r.newValue } return n ? this._updateValue(e, t) : this._addValue(e, t), this } }, { key: "delete", value: function(e) { var t = this if ( Pt(this) && !Dt(this, { type: "delete", object: this, name: e }) ) return !1 if (this._has(e)) { var n = Nt(this), r = n ? { type: "delete", object: this, oldValue: this._data.get(e).value, name: e } : null return ( At(function() { t._keysAtom.reportChanged(), t._updateHasMapEntry(e, !1), t._data.get(e).setNewValue(void 0), t._data.delete(e) }), n && Bt(this, r), !0 ) } return !1 } }, { key: "_updateHasMapEntry", value: function(e, t) { var n = this._hasMap.get(e) n && n.setNewValue(t) } }, { key: "_updateValue", value: function(e, t) { var n = this._data.get(e) if ((t = n.prepareNewValue(t)) !== Pe.a.UNCHANGED) { var r = Nt(this), i = r ? { type: "update", object: this, oldValue: n.value, name: e, newValue: t } : null n.setNewValue(t), r && Bt(this, i) } } }, { key: "_addValue", value: function(e, t) { var n = this _e(this._keysAtom), At(function() { var r = new ve( t, n.enhancer, "".concat(n.name, ".").concat(A(e)), !1 ) n._data.set(e, r), (t = r.value), n._updateHasMapEntry(e, !0), n._keysAtom.reportChanged() }) var r = Nt(this), i = r ? { type: "add", object: this, name: e, newValue: t } : null r && Bt(this, i) } }, { key: "get", value: function(e) { return this.has(e) ? this.dehanceValue(this._data.get(e).get()) : this.dehanceValue(void 0) } }, { key: "dehanceValue", value: function(e) { return void 0 !== this.dehancer ? this.dehancer(e) : e } }, { key: "keys", value: function() { return this._keysAtom.reportObserved(), this._data.keys() } }, { key: "values", value: function() { var e = this, t = 0, n = Array.from(this.keys()) return An({ next: function() { return t < n.length ? 
{ value: e.get(n[t++]), done: !1 } : { done: !0 } } }) } }, { key: "entries", value: function() { var e = this, t = 0, n = Array.from(this.keys()) return An({ next: function() { if (t < n.length) { var r = n[t++] return { value: [r, e.get(r)], done: !1 } } return { done: !0 } } }) } }, { key: ((Ht = x), Symbol.iterator), value: function() { return this.entries() } }, { key: "forEach", value: function(e, t) { var n = !0, r = !1, i = void 0 try { for ( var o, a = this[Symbol.iterator](); !(n = (o = a.next()).done); n = !0 ) { var u = Xt(o.value, 2), c = u[0], s = u[1] e.call(t, s, c, this) } } catch (e) { ;(r = !0), (i = e) } finally { try { n || null == a.return || a.return() } finally { if (r) throw i } } } }, { key: "merge", value: function(e) { var t = this return ( $t(e) && (e = e.toJS()), At(function() { p(e) ? _(e).forEach(function(n) { return t.set(n, e[n]) }) : Array.isArray(e) ? e.forEach(function(e) { var n = Xt(e, 2), r = n[0], i = n[1] return t.set(r, i) }) : S(e) ? (e.constructor !== Map && s( "Cannot initialize from classes that inherit from Map: " + e.constructor.name ), e.forEach(function(e, n) { return t.set(n, e) })) : null != e && s("Cannot initialize map from " + e) }), this ) } }, { key: "clear", value: function() { var e = this At(function() { je(function() { var t = !0, n = !1, r = void 0 try { for ( var i, o = e.keys()[Symbol.iterator](); !(t = (i = o.next()).done); t = !0 ) { var a = i.value e.delete(a) } } catch (e) { ;(n = !0), (r = e) } finally { try { t || null == o.return || o.return() } finally { if (n) throw r } } }) }) } }, { key: "replace", value: function(e) { var t = this return ( At(function() { var n = E(e) Array.from(t.keys()) .filter(function(e) { return -1 === n.indexOf(e) }) .forEach(function(e) { return t.delete(e) }), t.merge(e) }), this ) } }, { key: "toPOJO", value: function() { var e = {}, t = !0, n = !1, r = void 0 try { for ( var i, o = this[Symbol.iterator](); !(t = (i = o.next()).done); t = !0 ) { var a = 
Xt(i.value, 2), u = a[0], c = a[1] e["symbol" === Wt(u) ? u : A(u)] = c } } catch (e) { ;(n = !0), (r = e) } finally { try { t || null == o.return || o.return() } finally { if (n) throw r } } return e } }, { key: "toJS", value: function() { return new Map(this) } }, { key: "toJSON", value: function() { return this.toPOJO() } }, { key: "toString", value: function() { var e = this return ( this.name + "[{ " + Array.from(this.keys()) .map(function(t) { return "".concat(A(t), ": ").concat("" + e.get(t)) }) .join(", ") + " }]" ) } }, { key: "observe", value: function(e, t) { return Lt(this, e) } }, { key: "intercept", value: function(e) { return Vt(this, e) } }, { key: "size", get: function() { return this._keysAtom.reportObserved(), this._data.size } } ]) && Yt(t.prototype, n), r && Yt(t, r), e ) })(), $t = O("ObservableMap", Zt) function en(e, t) { for (var n = 0; n < t.length; n++) { var r = t[n] ;(r.enumerable = r.enumerable || !1), (r.configurable = !0), "value" in r && (r.writable = !0), Object.defineProperty(e, r.key, r) } } var tn = {}, nn = (function() { function e(t) { var n = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : K, r = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : "ObservableSet@" + c() if ( ((function(e, t) { if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") })(this, e), (this.name = r), (this[Ft] = tn), (this._data = new Set()), (this._atom = V(this.name)), (this[Symbol.toStringTag] = "Set"), "function" != typeof Set) ) throw new Error( "mobx.set requires Set polyfill for the current browser. Check babel-polyfill or core-js/es6/set.js" ) ;(this.enhancer = function(e, t) { return n(e, t, r) }), t && this.replace(t) } var t, n, r return ( (t = e), (n = [ { key: "dehanceValue", value: function(e) { return void 0 !== this.dehancer ? 
this.dehancer(e) : e } }, { key: "clear", value: function() { var e = this At(function() { je(function() { var t = !0, n = !1, r = void 0 try { for ( var i, o = e._data.values()[Symbol.iterator](); !(t = (i = o.next()).done); t = !0 ) { var a = i.value e.delete(a) } } catch (e) { ;(n = !0), (r = e) } finally { try { t || null == o.return || o.return() } finally { if (n) throw r } } }) }) } }, { key: "forEach", value: function(e, t) { var n = !0, r = !1, i = void 0 try { for ( var o, a = this[Symbol.iterator](); !(n = (o = a.next()).done); n = !0 ) { var u = o.value e.call(t, u, u, this) } } catch (e) { ;(r = !0), (i = e) } finally { try { n || null == a.return || a.return() } finally { if (r) throw i } } } }, { key: "add", value: function(e) { var t = this if ( (_e(this._atom), Pt(this) && !Dt(this, { type: "add", object: this, newValue: e })) ) return this if (!this.has(e)) { At(function() { t._data.add(t.enhancer(e, void 0)), t._atom.reportChanged() }) var n = Nt(this), r = n ? { type: "add", object: this, newValue: e } : null n && Bt(this, r) } return this } }, { key: "delete", value: function(e) { var t = this if ( Pt(this) && !Dt(this, { type: "delete", object: this, oldValue: e }) ) return !1 if (this.has(e)) { var n = Nt(this), r = n ? { type: "delete", object: this, oldValue: e } : null return ( At(function() { t._atom.reportChanged(), t._data.delete(e) }), n && Bt(this, r), !0 ) } return !1 } }, { key: "has", value: function(e) { return ( this._atom.reportObserved(), this._data.has(this.dehanceValue(e)) ) } }, { key: "entries", value: function() { var e = 0, t = Array.from(this.keys()), n = Array.from(this.values()) return An({ next: function() { var r = e return ( (e += 1), r < n.length ? 
{ value: [t[r], n[r]], done: !1 } : { done: !0 } ) } }) } }, { key: "keys", value: function() { return this.values() } }, { key: "values", value: function() { this._atom.reportObserved() var e = this, t = 0, n = Array.from(this._data.values()) return An({ next: function() { return t < n.length ? { value: e.dehanceValue(n[t++]), done: !1 } : { done: !0 } } }) } }, { key: "replace", value: function(e) { var t = this return ( rn(e) && (e = e.toJS()), At(function() { Array.isArray(e) ? (t.clear(), e.forEach(function(e) { return t.add(e) })) : k(e) ? (t.clear(), e.forEach(function(e) { return t.add(e) })) : null != e && s("Cannot initialize set from " + e) }), this ) } }, { key: "observe", value: function(e, t) { return Lt(this, e) } }, { key: "intercept", value: function(e) { return Vt(this, e) } }, { key: "toJS", value: function() { return new Set(this) } }, { key: "toString", value: function() { return this.name + "[ " + Array.from(this).join(", ") + " ]" } }, { key: ((Ft = x), Symbol.iterator), value: function() { return this.values() } }, { key: "size", get: function() { return this._atom.reportObserved(), this._data.size } } ]) && en(t.prototype, n), r && en(t, r), e ) })(), rn = O("ObservableSet", nn) function on(e, t) { return ( (function(e) { if (Array.isArray(e)) return e })(e) || (function(e, t) { var n = [], r = !0, i = !1, o = void 0 try { for ( var a, u = e[Symbol.iterator](); !(r = (a = u.next()).done) && (n.push(a.value), !t || n.length !== t); r = !0 ); } catch (e) { ;(i = !0), (o = e) } finally { try { r || null == u.return || u.return() } finally { if (i) throw o } } return n })(e, t) || (function() { throw new TypeError("Invalid attempt to destructure non-iterable instance") })() ) } function an(e, t) { for (var n = 0; n < t.length; n++) { var r = t[n] ;(r.enumerable = r.enumerable || !1), (r.configurable = !0), "value" in r && (r.writable = !0), Object.defineProperty(e, r.key, r) } } var un = (function() { function e(t) { var n = arguments.length > 
1 && void 0 !== arguments[1] ? arguments[1] : new Map(), r = arguments.length > 2 ? arguments[2] : void 0, i = arguments.length > 3 ? arguments[3] : void 0 !(function(e, t) { if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") })(this, e), (this.target = t), (this.values = n), (this.name = r), (this.defaultEnhancer = i), (this.keysAtom = new C(r + ".keys")) } var t, n, r return ( (t = e), (n = [ { key: "read", value: function(e) { return this.values.get(e).get() } }, { key: "write", value: function(e, t) { var n = this.target, r = this.values.get(e) if (r instanceof ge) r.set(t) else { if (Pt(this)) { var i = Dt(this, { type: "update", object: this.proxy || n, name: e, newValue: t }) if (!i) return t = i.newValue } if ((t = r.prepareNewValue(t)) !== Pe.a.UNCHANGED) { var o = Nt(this), a = o ? { type: "update", object: this.proxy || n, oldValue: r.value, name: e, newValue: t } : null r.setNewValue(t), o && Bt(this, a) } } } }, { key: "has", value: function(e) { var t = this.pendingKeys || (this.pendingKeys = new Map()), n = t.get(e) if (n) return n.get() var r = !!this.values.get(e) return ( (n = new ve(r, H, "".concat(this.name, ".").concat(A(e), "?"), !1)), t.set(e, n), n.get() ) } }, { key: "addObservableProp", value: function(e, t) { var n = arguments.length > 2 && void 0 !== arguments[2] ? 
arguments[2] : this.defaultEnhancer, r = this.target if ((w(), Pt(this))) { var i = Dt(this, { object: this.proxy || r, name: e, type: "add", newValue: t }) if (!i) return t = i.newValue } var o = new ve(t, n, "".concat(this.name, ".").concat(A(e)), !1) this.values.set(e, o), (t = o.value), Object.defineProperty(r, e, fn(e)), this.notifyPropertyAddition(e, t) } }, { key: "addComputedProp", value: function(e, t, n) { var r = this.target ;(n.name = n.name || "".concat(this.name, ".").concat(A(t))), this.values.set(t, new ge(n)), (e === r || g(e, t)) && Object.defineProperty( e, t, (function(e) { return ( ln[e] || (ln[e] = { configurable: Pe.a.computedConfigurable, enumerable: !1, get: function() { return hn(this).read(e) }, set: function(t) { hn(this).write(e, t) } }) ) })(t) ) } }, { key: "remove", value: function(e) { if (this.values.has(e)) { var t = this.target if ( Pt(this) && !Dt(this, { object: this.proxy || t, name: e, type: "remove" }) ) return try { Re() var n = Nt(this), r = this.values.get(e), i = r && r.get() if ( (r && r.set(void 0), this.keysAtom.reportChanged(), this.values.delete(e), this.pendingKeys) ) { var o = this.pendingKeys.get(e) o && o.set(!1) } delete this.target[e] var a = n ? { type: "remove", object: this.proxy || t, oldValue: i, name: e } : null n && Bt(this, a) } finally { Ie() } } } }, { key: "illegalAccess", value: function(e, t) { console.warn( "Property '" .concat(t, "' of '") .concat( e, "' was accessed through the prototype chain. Use 'decorate' instead to declare the prop or access it statically through it's owner" ) ) } }, { key: "observe", value: function(e, t) { return Lt(this, e) } }, { key: "intercept", value: function(e) { return Vt(this, e) } }, { key: "notifyPropertyAddition", value: function(e, t) { var n = Nt(this), r = n ? 
{ type: "add", object: this.proxy || this.target, name: e, newValue: t } : null if ((n && Bt(this, r), this.pendingKeys)) { var i = this.pendingKeys.get(e) i && i.set(!0) } this.keysAtom.reportChanged() } }, { key: "getKeys", value: function() { this.keysAtom.reportObserved() var e = [], t = !0, n = !1, r = void 0 try { for ( var i, o = this.values[Symbol.iterator](); !(t = (i = o.next()).done); t = !0 ) { var a = on(i.value, 2), u = a[0] a[1] instanceof ve && e.push(u) } } catch (e) { ;(n = !0), (r = e) } finally { try { t || null == o.return || o.return() } finally { if (n) throw r } } return e } } ]) && an(t.prototype, n), r && an(t, r), e ) })() function cn(e) { var t = arguments.length > 1 && void 0 !== arguments[1] ? arguments[1] : "", n = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : K if (Object.prototype.hasOwnProperty.call(e, x)) return e[x] p(e) || (t = (e.constructor.name || "ObservableObject") + "@" + c()), t || (t = "ObservableObject@" + c()) var r = new un(e, new Map(), A(t), n) return b(e, x, r), r } var sn = Object.create(null), ln = Object.create(null) function fn(e) { return ( sn[e] || (sn[e] = { configurable: !0, enumerable: !0, get: function() { return this[x].read(e) }, set: function(t) { this[x].write(e, t) } }) ) } function hn(e) { var t = e[x] return t || (M(e), e[x]) } var dn = O("ObservableObjectAdministration", un) function vn(e) { return !!y(e) && (M(e), dn(e[x])) } function yn(e) { return (yn = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(e) { return typeof e } : function(e) { return e && "function" == typeof Symbol && e.constructor === Symbol && e !== Symbol.prototype ? 
"symbol" : typeof e })(e) } function pn(e, t) { if ("object" === yn(e) && null !== e) { if (qt(e)) return void 0 !== t && s(!1), e[x].atom if (rn(e)) return e[x] if ($t(e)) { var n = e if (void 0 === t) return n._keysAtom var r = n._data.get(t) || n._hasMap.get(t) return r || s(!1), r } if ((M(e), t && !e[x] && e[t], vn(e))) { if (!t) return s(!1) var i = e[x].values.get(t) return i || s(!1), i } if (P(e) || we(e) || Fe(e)) return e } else if ("function" == typeof e && Fe(e[x])) return e[x] return s(!1) } function bn(e, t) { return ( e || s("Expecting some object"), void 0 !== t ? bn(pn(e, t)) : P(e) || we(e) || Fe(e) ? e : $t(e) || rn(e) ? e : (M(e), e[x] ? e[x] : void s(!1)) ) } function mn(e, t) { return (void 0 !== t ? pn(e, t) : vn(e) || $t(e) || rn(e) ? bn(e) : pn(e)).name } function gn(e) { return (gn = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(e) { return typeof e } : function(e) { return e && "function" == typeof Symbol && e.constructor === Symbol && e !== Symbol.prototype ? "symbol" : typeof e })(e) } var wn = Object.prototype.toString function On(e, t) { return Sn(e, t) } function Sn(e, t, n, r) { if (e === t) return 0 !== e || 1 / e == 1 / t if (null == e || null == t) return !1 if (e != e) return t != t var i = gn(e) return ( ("function" === i || "object" === i || "object" == gn(t)) && (function(e, t, n, r) { ;(e = kn(e)), (t = kn(t)) var i = wn.call(e) if (i !== wn.call(t)) return !1 switch (i) { case "[object RegExp]": case "[object String]": return "" + e == "" + t case "[object Number]": return +e != +e ? +t != +t : 0 == +e ? 
1 / +e == 1 / t : +e == +t case "[object Date]": case "[object Boolean]": return +e == +t case "[object Symbol]": return ( "undefined" != typeof Symbol && Symbol.valueOf.call(e) === Symbol.valueOf.call(t) ) } var o = "[object Array]" === i if (!o) { if ("object" != gn(e) || "object" != gn(t)) return !1 var a = e.constructor, u = t.constructor if ( a !== u && !( "function" == typeof a && a instanceof a && "function" == typeof u && u instanceof u ) && "constructor" in e && "constructor" in t ) return !1 } r = r || [] var c = (n = n || []).length for (; c--; ) if (n[c] === e) return r[c] === t if ((n.push(e), r.push(t), o)) { if ((c = e.length) !== t.length) return !1 for (; c--; ) if (!Sn(e[c], t[c], n, r)) return !1 } else { var s, l = Object.keys(e) if (((c = l.length), Object.keys(t).length !== c)) return !1 for (; c--; ) if (((s = l[c]), !_n(t, s) || !Sn(e[s], t[s], n, r))) return !1 } return n.pop(), r.pop(), !0 })(e, t, n, r) ) } function kn(e) { return qt(e) ? e.slice() : S(e) || $t(e) ? Array.from(e.entries()) : k(e) || rn(e) ? 
Array.from(e.entries()) : e } function _n(e, t) { return Object.prototype.hasOwnProperty.call(e, t) } function An(e) { return (e[Symbol.iterator] = En), e } function En() { return this } n.d(t, "e", function() { return a }), n.d(t, "f", function() { return u }), n.d(t, "Y", function() { return c }), n.d(t, "R", function() { return s }), n.d(t, "hb", function() { return l }), n.d(t, "M", function() { return f }), n.d(t, "Ib", function() { return h }), n.d(t, "Db", function() { return d }), n.d(t, "ic", function() { return v }), n.d(t, "nb", function() { return y }), n.d(t, "ub", function() { return p }), n.d(t, "o", function() { return b }), n.d(t, "n", function() { return m }), n.d(t, "vb", function() { return g }), n.d(t, "u", function() { return w }), n.d(t, "F", function() { return O }), n.d(t, "lb", function() { return S }), n.d(t, "mb", function() { return k }), n.d(t, "ab", function() { return _ }), n.d(t, "dc", function() { return A }), n.d(t, "X", function() { return E }), n.d(t, "ec", function() { return j }), n.d(t, "a", function() { return x }), n.d(t, "b", function() { return C }), n.d(t, "ib", function() { return P }), n.d(t, "C", function() { return V }), n.d(t, "z", function() { return D }), n.d(t, "Bb", function() { return N }), n.d(t, "fb", function() { return M }), n.d(t, "H", function() { return U }), n.d(t, "J", function() { return K }), n.d(t, "Wb", function() { return z }), n.d(t, "Ob", function() { return H }), n.d(t, "Nb", function() { return J }), n.d(t, "D", function() { return q.a }), n.d(t, "s", function() { return X }), n.d(t, "I", function() { return Y }), n.d(t, "Mb", function() { return Q }), n.d(t, "Fb", function() { return te }), n.d(t, "A", function() { return re }), n.d(t, "B", function() { return ie }), n.d(t, "O", function() { return oe }), n.d(t, "r", function() { return ue }), n.d(t, "q", function() { return ce }), n.d(t, "j", function() { return ve }), n.d(t, "tb", function() { return ye }), n.d(t, "d", function() { return 
ge }), n.d(t, "kb", function() { return we }), n.d(t, "g", function() { return be }), n.d(t, "l", function() { return me }), n.d(t, "c", function() { return Oe }), n.d(t, "jb", function() { return Se }), n.d(t, "Xb", function() { return ke }), n.d(t, "x", function() { return _e }), n.d(t, "gc", function() { return Ae }), n.d(t, "y", function() { return Ee }), n.d(t, "jc", function() { return je }), n.d(t, "lc", function() { return Te }), n.d(t, "kc", function() { return xe }), n.d(t, "bb", function() { return Pe.a }), n.d(t, "yb", function() { return Pe.b }), n.d(t, "eb", function() { return Ve }), n.d(t, "Z", function() { return De }), n.d(t, "p", function() { return Ne }), n.d(t, "Rb", function() { return Le }), n.d(t, "cc", function() { return Re }), n.d(t, "N", function() { return Ie }), n.d(t, "Sb", function() { return Me }), n.d(t, "Kb", function() { return Ue }), n.d(t, "Jb", function() { return Ge }), n.d(t, "Lb", function() { return Ke }), n.d(t, "k", function() { return Je }), n.d(t, "Tb", function() { return Xe }), n.d(t, "wb", function() { return Fe }), n.d(t, "Vb", function() { return Qe }), n.d(t, "xb", function() { return Ze }), n.d(t, "Zb", function() { return $e }), n.d(t, "bc", function() { return et }), n.d(t, "ac", function() { return tt }), n.d(t, "Yb", function() { return nt }), n.d(t, "Cb", function() { return it }), n.d(t, "w", function() { return at }), n.d(t, "m", function() { return ut }), n.d(t, "L", function() { return ct }), n.d(t, "v", function() { return st }), n.d(t, "Gb", function() { return ht }), n.d(t, "Hb", function() { return dt }), n.d(t, "P", function() { return yt }), n.d(t, "V", function() { return pt }), n.d(t, "Q", function() { return bt }), n.d(t, "W", function() { return mt }), n.d(t, "ob", function() { return Ot }), n.d(t, "zb", function() { return St }), n.d(t, "Ub", function() { return kt }), n.d(t, "fc", function() { return _t }), n.d(t, "hc", function() { return At }), n.d(t, "E", function() { return Ct }), n.d(t, 
"cb", function() { return Pt }), n.d(t, "Pb", function() { return Vt }), n.d(t, "gb", function() { return Dt }), n.d(t, "db", function() { return Nt }), n.d(t, "Qb", function() { return Lt }), n.d(t, "Eb", function() { return Bt }), n.d(t, "G", function() { return Gt }), n.d(t, "pb", function() { return qt }), n.d(t, "h", function() { return Zt }), n.d(t, "qb", function() { return $t }), n.d(t, "i", function() { return nn }), n.d(t, "sb", function() { return rn }), n.d(t, "t", function() { return cn }), n.d(t, "rb", function() { return vn }), n.d(t, "T", function() { return pn }), n.d(t, "S", function() { return bn }), n.d(t, "U", function() { return mn }), n.d(t, "K", function() { return On }), n.d(t, "Ab", function() { return An }) }, function(e, t, n) { "use strict" ;(function(e, t) { var r = n(0) function i(e) { return (i = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(e) { return typeof e } : function(e) { return e && "function" == typeof Symbol && e.constructor === Symbol && e !== Symbol.prototype ? "symbol" : typeof e })(e) } if ("undefined" == typeof Proxy || "undefined" == typeof Symbol) throw new Error( "[mobx] MobX 5+ requires Proxy and Symbol objects. If your environment doesn't support Symbol or Proxy objects, please downgrade to MobX 4. For React Native Android, consider upgrading JSCore." ) "object" === ("undefined" == typeof __MOBX_DEVTOOLS_GLOBAL_HOOK__ ? 
"undefined" : i(__MOBX_DEVTOOLS_GLOBAL_HOOK__)) && __MOBX_DEVTOOLS_GLOBAL_HOOK__.injectMobx({ spy: r.Yb, extras: { getDebugName: r.U }, $mobx: r.a }) }.call(this, n(2), n(3))) }, function(e, t) { var n n = (function() { return this })() try { n = n || new Function("return this")() } catch (e) { "object" == typeof window && (n = window) } e.exports = n }, function(e, t) { var n, r, i = (e.exports = {}) function o() { throw new Error("setTimeout has not been defined") } function a() { throw new Error("clearTimeout has not been defined") } function u(e) { if (n === setTimeout) return setTimeout(e, 0) if ((n === o || !n) && setTimeout) return (n = setTimeout), setTimeout(e, 0) try { return n(e, 0) } catch (t) { try { return n.call(null, e, 0) } catch (t) { return n.call(this, e, 0) } } } !(function() { try { n = "function" == typeof setTimeout ? setTimeout : o } catch (e) { n = o } try { r = "function" == typeof clearTimeout ? clearTimeout : a } catch (e) { r = a } })() var c, s = [], l = !1, f = -1 function h() { l && c && ((l = !1), c.length ? 
(s = c.concat(s)) : (f = -1), s.length && d()) } function d() { if (!l) { var e = u(h) l = !0 for (var t = s.length; t; ) { for (c = s, s = []; ++f < t; ) c && c[f].run() ;(f = -1), (t = s.length) } ;(c = null), (l = !1), (function(e) { if (r === clearTimeout) return clearTimeout(e) if ((r === a || !r) && clearTimeout) return (r = clearTimeout), clearTimeout(e) try { r(e) } catch (t) { try { return r.call(null, e) } catch (t) { return r.call(this, e) } } })(e) } } function v(e, t) { ;(this.fun = e), (this.array = t) } function y() {} ;(i.nextTick = function(e) { var t = new Array(arguments.length - 1) if (arguments.length > 1) for (var n = 1; n < arguments.length; n++) t[n - 1] = arguments[n] s.push(new v(e, t)), 1 !== s.length || l || u(d) }), (v.prototype.run = function() { this.fun.apply(null, this.array) }), (i.title = "browser"), (i.browser = !0), (i.env = {}), (i.argv = []), (i.version = ""), (i.versions = {}), (i.on = y), (i.addListener = y), (i.once = y), (i.off = y), (i.removeListener = y), (i.removeAllListeners = y), (i.emit = y), (i.prependListener = y), (i.prependOnceListener = y), (i.listeners = function(e) { return [] }), (i.binding = function(e) { throw new Error("process.binding is not supported") }), (i.cwd = function() { return "/" }), (i.chdir = function(e) { throw new Error("process.chdir is not supported") }), (i.umask = function() { return 0 }) }, function(e, t, n) { "use strict" ;(function(e) { n.d(t, "a", function() { return i }) var r = n(0) function i(t) { Object(r.hb)(t) var n = Object(r.H)(!0, function(e, n, i, o, a) { var u = i ? (i.initializer ? 
i.initializer.call(e) : i.value) : void 0 Object(r.t)(e).addObservableProp(n, u, t) }), i = (void 0 !== e && e.env, n) return (i.enhancer = t), i } }.call(this, n(3))) }, function(e, t, n) { "use strict" ;(function(e) { n.d(t, "a", function() { return u }), n.d(t, "b", function() { return c }) var r = n(0) var i = function e() { !(function(e, t) { if (!(e instanceof t)) throw new TypeError("Cannot call a class as a function") })(this, e), (this.version = 5), (this.UNCHANGED = {}), (this.trackingDerivation = null), (this.computationDepth = 0), (this.runId = 0), (this.mobxGuid = 0), (this.inBatch = 0), (this.pendingUnobservations = []), (this.pendingReactions = []), (this.isRunningReactions = !1), (this.allowStateChanges = !0), (this.enforceActions = !1), (this.spyListeners = []), (this.globalReactionErrorHandlers = []), (this.computedRequiresReaction = !1), (this.computedConfigurable = !1), (this.disableErrorBoundaries = !1), (this.suppressReactionErrors = !1) }, o = !0, a = !1, u = (function() { var e = l() return ( e.__mobxInstanceCount > 0 && !e.__mobxGlobals && (o = !1), e.__mobxGlobals && e.__mobxGlobals.version !== new i().version && (o = !1), o ? e.__mobxGlobals ? ((e.__mobxInstanceCount += 1), e.__mobxGlobals.UNCHANGED || (e.__mobxGlobals.UNCHANGED = {}), e.__mobxGlobals) : ((e.__mobxInstanceCount = 1), (e.__mobxGlobals = new i())) : (setTimeout(function() { a || Object(r.R)( "There are multiple, different versions of MobX active. Make sure MobX is loaded only once or use `configure({ isolateGlobalState: true })`" ) }, 1), new i()) ) })() function c() { ;(u.pendingReactions.length || u.inBatch || u.isRunningReactions) && Object(r.R)( "isolateGlobalState should be called before MobX is running any reactions" ), (a = !0), o && (0 == --l().__mobxInstanceCount && (l().__mobxGlobals = void 0), (u = new i())) } var s = {} function l() { return "undefined" != typeof window ? window : void 0 !== e ? 
e : s } }.call(this, n(2))) }, function(e, t, n) { "use strict" n.r(t) var r = n(1), i = document.getElementById("add"), o = document.getElementById("minus"), a = document.getElementById("display"), u = r.default.observable({ name: "Ivan Fan", income: 3, debit: 2 }) r.default.autorun(function() { a.innerHTML = "i'm the content ".concat(u.income) }), i.addEventListener("click", function() { u.income++ }), o.addEventListener("click", function() { u.income-- }) } ])
39.961098
265
0.261467
3.046875
0bb20a1cdedfbffd4e7982cebb075a87cca615b2
3,731
js
JavaScript
src/js/service/keyboard/ShortcutService.js
stayqrious/piskel
a7d43f63b4dc263550e4be2add920f52b11913ee
[ "Apache-2.0" ]
2,039
2015-01-01T16:28:53.000Z
2022-01-14T01:03:19.000Z
src/js/service/keyboard/ShortcutService.js
stayqrious/piskel
a7d43f63b4dc263550e4be2add920f52b11913ee
[ "Apache-2.0" ]
319
2015-01-04T17:01:48.000Z
2017-05-13T00:37:29.000Z
src/js/service/keyboard/ShortcutService.js
stayqrious/piskel
a7d43f63b4dc263550e4be2add920f52b11913ee
[ "Apache-2.0" ]
209
2015-01-17T01:37:33.000Z
2021-09-17T00:39:33.000Z
(function () {
  var ns = $.namespace('pskl.service.keyboard');

  /**
   * Registry that maps keyboard shortcuts to callbacks and dispatches
   * document-level keydown events to the matching handlers.
   */
  ns.ShortcutService = function () {
    this.shortcuts_ = [];
  };

  /**
   * @public
   */
  ns.ShortcutService.prototype.init = function () {
    $(document.body).keydown($.proxy(this.onKeyDown_, this));
  };

  /**
   * Add a keyboard shortcut
   * @param {pskl.service.keyboard.Shortcut} shortcut
   * @param {Function} callback should return true to let the original event perform its default action
   */
  ns.ShortcutService.prototype.registerShortcut = function (shortcut, callback) {
    if (!(shortcut instanceof ns.Shortcut)) {
      throw 'Invalid shortcut argument, please use instances of pskl.service.keyboard.Shortcut';
    }
    if (typeof callback != 'function') {
      throw 'Invalid callback argument, please provide a function';
    }
    this.shortcuts_.push({
      shortcut : callback && shortcut,
      callback : callback
    });
  };

  /**
   * Remove a previously registered shortcut. When the same shortcut was
   * registered several times, the most recent registration is removed.
   * @param {pskl.service.keyboard.Shortcut} shortcut
   */
  ns.ShortcutService.prototype.unregisterShortcut = function (shortcut) {
    var matchIndex = -1;
    for (var i = 0 ; i < this.shortcuts_.length ; i++) {
      if (this.shortcuts_[i].shortcut === shortcut) {
        matchIndex = i;
      }
    }
    if (matchIndex != -1) {
      this.shortcuts_.splice(matchIndex, 1);
    }
  };

  /**
   * Dispatch a keydown event to every shortcut bound to the pressed key.
   * @private
   */
  ns.ShortcutService.prototype.onKeyDown_ = function (evt) {
    var eventKey = ns.KeyUtils.createKeyFromEvent(evt);
    if (this.isInInput_(evt) || !eventKey) {
      return;
    }

    for (var i = 0 ; i < this.shortcuts_.length ; i++) {
      var shortcutInfo = this.shortcuts_[i];
      var shortcutKeys = shortcutInfo.shortcut.getKeys();
      for (var j = 0 ; j < shortcutKeys.length ; j++) {
        if (!ns.KeyUtils.equals(shortcutKeys[j], eventKey)) {
          continue;
        }
        // A callback returning true lets the browser perform its default action.
        var bubble = shortcutInfo.callback(eventKey.key);
        if (bubble !== true) {
          evt.preventDefault();
        }
        $.publish(Events.KEYBOARD_EVENT, [evt]);
      }
    }
  };

  /**
   * Shortcuts are ignored while an input element has the focus.
   * @private
   */
  ns.ShortcutService.prototype.isInInput_ = function (evt) {
    var targetTagName = evt.target.nodeName.toUpperCase();
    return targetTagName === 'INPUT' || targetTagName === 'TEXTAREA';
  };

  ns.ShortcutService.prototype.getShortcutById = function (id) {
    return pskl.utils.Array.find(this.getShortcuts(), function (candidate) {
      return candidate.getId() === id;
    });
  };

  /**
   * Collect every shortcut declared in ns.Shortcuts, across all categories.
   * @return {Array} flat array of shortcuts
   */
  ns.ShortcutService.prototype.getShortcuts = function () {
    var allShortcuts = [];
    for (var c = 0 ; c < ns.Shortcuts.CATEGORIES.length ; c++) {
      var shortcutMap = ns.Shortcuts[ns.Shortcuts.CATEGORIES[c]];
      var shortcutIds = Object.keys(shortcutMap);
      for (var k = 0 ; k < shortcutIds.length ; k++) {
        allShortcuts.push(shortcutMap[shortcutIds[k]]);
      }
    }
    return allShortcuts;
  };

  /**
   * Remap a shortcut to a new key, unless the key is forbidden. The key is
   * first removed from every other shortcut so a key maps to one action only.
   * @param {pskl.service.keyboard.Shortcut} shortcut
   * @param {String} keyAsString the new key, whitespace is stripped
   */
  ns.ShortcutService.prototype.updateShortcut = function (shortcut, keyAsString) {
    var key = keyAsString.replace(/\s/g, '');
    if (ns.Shortcuts.FORBIDDEN_KEYS.indexOf(key) != -1) {
      $.publish(Events.SHOW_NOTIFICATION, [{
        'content': 'Key cannot be remapped (' + keyAsString + ')',
        'hideDelay' : 5000
      }]);
      return;
    }

    this.removeKeyFromAllShortcuts_(key);
    shortcut.updateKeys([key]);
    $.publish(Events.SHORTCUTS_CHANGED);
  };

  /**
   * Strip the given key from every shortcut currently using it, notifying
   * the user for each shortcut that lost a key.
   * @private
   */
  ns.ShortcutService.prototype.removeKeyFromAllShortcuts_ = function (key) {
    var shortcuts = this.getShortcuts();
    for (var i = 0 ; i < shortcuts.length ; i++) {
      var s = shortcuts[i];
      if (s.removeKeys([key])) {
        $.publish(Events.SHOW_NOTIFICATION, [{
          'content': 'Shortcut key removed for ' + s.getId(),
          'hideDelay' : 5000
        }]);
      }
    }
  };

  /**
   * Restore the default piskel key for all shortcuts
   */
  ns.ShortcutService.prototype.restoreDefaultShortcuts = function () {
    var shortcuts = this.getShortcuts();
    for (var i = 0 ; i < shortcuts.length ; i++) {
      shortcuts[i].restoreDefault();
    }
    $.publish(Events.SHORTCUTS_CHANGED);
  };
})();
28.480916
103
0.628518
3.125
d97c4eed83940ece811d2fc4ce1151eeaee5d519
2,730
rs
Rust
src/columns/write_bytes.rs
sorairolake/procs
01eae490b37037a9c059c2ded04ca0a64a5841a7
[ "MIT" ]
null
null
null
src/columns/write_bytes.rs
sorairolake/procs
01eae490b37037a9c059c2ded04ca0a64a5841a7
[ "MIT" ]
24
2022-01-25T20:30:33.000Z
2022-03-31T20:40:52.000Z
src/columns/write_bytes.rs
doytsujin/procs
65fb32c1e879727ba2561bbb1d95617945733517
[ "MIT" ]
null
null
null
use crate::process::ProcessInfo;
use crate::util::bytify;
use crate::{column_default, Column};
use std::cmp;
use std::collections::HashMap;

/// Column showing the per-process disk write rate in bytes per second.
pub struct WriteBytes {
    header: String,
    unit: String,
    fmt_contents: HashMap<i32, String>,
    raw_contents: HashMap<i32, u64>,
    width: usize,
}

impl WriteBytes {
    /// Create the column, using `header` when given, "Write" otherwise.
    pub fn new(header: Option<String>) -> Self {
        let header = header.unwrap_or_else(|| String::from("Write"));
        let unit = String::from("[B/s]");
        WriteBytes {
            fmt_contents: HashMap::new(),
            raw_contents: HashMap::new(),
            width: 0,
            header,
            unit,
        }
    }

    /// Measurement interval in whole milliseconds.
    ///
    /// The previous code added seconds and milliseconds directly
    /// (`as_secs() + subsec_millis()`), which is wrong for any interval of
    /// one second or more. The result is clamped to at least 1 ms to avoid
    /// a division by zero for sub-millisecond intervals.
    fn interval_ms(proc: &ProcessInfo) -> u64 {
        (proc.interval.as_secs() * 1000 + u64::from(proc.interval.subsec_millis())).max(1)
    }
}

#[cfg(any(target_os = "linux", target_os = "android"))]
impl Column for WriteBytes {
    fn add(&mut self, proc: &ProcessInfo) {
        let (fmt_content, raw_content) = if proc.curr_io.is_some() && proc.prev_io.is_some() {
            let interval_ms = Self::interval_ms(proc);
            // saturating_sub: kernel I/O counters can go backwards (e.g. on
            // re-exec), which would otherwise panic in debug builds.
            let io = proc
                .curr_io
                .as_ref()
                .unwrap()
                .write_bytes
                .saturating_sub(proc.prev_io.as_ref().unwrap().write_bytes)
                * 1000
                / interval_ms;
            (bytify(io), io)
        } else {
            // No sample pair yet: show an empty cell.
            (String::from(""), 0)
        };
        self.fmt_contents.insert(proc.pid, fmt_content);
        self.raw_contents.insert(proc.pid, raw_content);
    }

    column_default!(u64);
}

#[cfg_attr(tarpaulin, skip)]
#[cfg(target_os = "macos")]
impl Column for WriteBytes {
    fn add(&mut self, proc: &ProcessInfo) {
        let (fmt_content, raw_content) = if proc.curr_res.is_some() && proc.prev_res.is_some() {
            let interval_ms = Self::interval_ms(proc);
            // saturating_sub guards against counter resets between samples.
            let io = proc
                .curr_res
                .as_ref()
                .unwrap()
                .ri_diskio_byteswritten
                .saturating_sub(proc.prev_res.as_ref().unwrap().ri_diskio_byteswritten)
                * 1000
                / interval_ms;
            (bytify(io), io)
        } else {
            // No sample pair yet: show an empty cell.
            (String::from(""), 0)
        };
        self.fmt_contents.insert(proc.pid, fmt_content);
        self.raw_contents.insert(proc.pid, raw_content);
    }

    column_default!(u64);
}

#[cfg_attr(tarpaulin, skip)]
#[cfg(target_os = "windows")]
impl Column for WriteBytes {
    fn add(&mut self, proc: &ProcessInfo) {
        let interval_ms = Self::interval_ms(proc);
        // saturating_sub guards against counter resets between samples.
        let raw_content = proc
            .disk_info
            .curr_write
            .saturating_sub(proc.disk_info.prev_write)
            * 1000
            / interval_ms;
        let fmt_content = bytify(raw_content);
        self.fmt_contents.insert(proc.pid, fmt_content);
        self.raw_contents.insert(proc.pid, raw_content);
    }

    column_default!(u64);
}
31.022727
97
0.595971
3.125
7fce45fb82afbd057f10d7053606a05de0d7902f
5,494
go
Go
galley/pkg/runtime/publish/strategy.go
pbohman/istio
6bade8133aadc2b32382256fc5f60d30f99379f5
[ "Apache-2.0" ]
2
2021-01-15T09:23:29.000Z
2021-12-04T13:35:18.000Z
galley/pkg/runtime/publish/strategy.go
pbohman/istio
6bade8133aadc2b32382256fc5f60d30f99379f5
[ "Apache-2.0" ]
7
2020-04-08T00:11:35.000Z
2021-09-21T01:49:26.000Z
galley/pkg/runtime/publish/strategy.go
pbohman/istio
6bade8133aadc2b32382256fc5f60d30f99379f5
[ "Apache-2.0" ]
5
2018-01-16T00:38:11.000Z
2019-07-10T19:04:40.000Z
// Copyright 2018 Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package publish

import (
	"context"
	"sync"
	"time"

	"istio.io/istio/galley/pkg/runtime/log"
	"istio.io/istio/galley/pkg/runtime/monitoring"
	"istio.io/istio/galley/pkg/util"
)

const (
	// Maximum wait time before deciding to publish the events.
	defaultMaxWaitDuration = time.Second

	// Minimum time distance between two events for deciding on the quiesce point. If the time delay
	// between two events is larger than this, then we can deduce that we hit a quiesce point.
	defaultQuiesceDuration = time.Second

	// The frequency for firing the timer events.
	defaultTimerFrequency = 500 * time.Millisecond
)

// Strategy is a heuristic model for deciding when to publish snapshots. It tries to detect
// quiesce points for events with a total bounded wait time.
type Strategy struct {
	maxWaitDuration time.Duration
	quiesceDuration time.Duration
	timerFrequency  time.Duration

	// stateLock protects the internal state of the publishing strategy.
	stateLock sync.Mutex

	// Publish channel is used to trigger the publication of snapshots.
	// Buffered with capacity 1; onTimer sends non-blocking, so consumers
	// must drain it for publishes to keep firing.
	Publish chan struct{}

	// the time of first event that is received.
	firstEvent time.Time

	// the time of the latest event that is received.
	latestEvent time.Time

	// timer that is used for periodically checking for the quiesce point.
	timer *time.Timer

	// nowFn is a testing hook for overriding time.Now()
	nowFn func() time.Time

	// startTimerFn is a testing hook for overriding the starting of the timer.
	startTimerFn func()

	// worker manages the lifecycle of the timer worker thread.
	worker *util.Worker

	// resetChan is used to issue a reset to the timer.
	resetChan chan struct{}

	// pendingChanges indicates that there are unpublished changes.
	pendingChanges bool
}

// NewStrategyWithDefaults creates a new strategy with default values.
func NewStrategyWithDefaults() *Strategy {
	return NewStrategy(defaultMaxWaitDuration, defaultQuiesceDuration, defaultTimerFrequency)
}

// NewStrategy creates a new strategy with the given values.
func NewStrategy(
	maxWaitDuration time.Duration,
	quiesceDuration time.Duration,
	timerFrequency time.Duration) *Strategy {

	s := &Strategy{
		maxWaitDuration: maxWaitDuration,
		quiesceDuration: quiesceDuration,
		timerFrequency:  timerFrequency,
		Publish:         make(chan struct{}, 1),
		nowFn:           time.Now,
		worker:          util.NewWorker("runtime publishing strategy", log.Scope),
		resetChan:       make(chan struct{}, 1),
	}
	s.startTimerFn = s.startTimer
	return s
}

// OnChange records an incoming change event: it stamps the event times and
// starts the periodic timer (or resets it, if it is already running) so that
// onTimer can later decide when to publish.
func (s *Strategy) OnChange() {
	s.stateLock.Lock()
	monitoring.RecordStrategyOnChange()

	// Capture the latest event time.
	s.latestEvent = s.nowFn()

	if !s.pendingChanges {
		// This is the first event after a quiesce, start a timer to periodically check event
		// frequency and fire the publish event.
		s.pendingChanges = true
		s.firstEvent = s.latestEvent

		// Start or reset the timer.
		if s.timer != nil {
			// Timer has already been started, just reset it now.
			// NOTE: Unlocking the state lock first, to avoid a potential race with
			// the timer thread waiting to enter onTimer.
			s.stateLock.Unlock()
			s.resetChan <- struct{}{}
			return
		}
		s.startTimerFn()
	}
	s.stateLock.Unlock()
}

// startTimer performs a start or reset on the timer. Called with lock on stateLock.
func (s *Strategy) startTimer() {
	s.timer = time.NewTimer(s.timerFrequency)

	eventLoop := func(ctx context.Context) {
		for {
			select {
			case <-s.timer.C:
				if !s.onTimer() {
					// We did not publish. Reset the timer and try again later.
					s.timer.Reset(s.timerFrequency)
				}
			case <-s.resetChan:
				s.timer.Reset(s.timerFrequency)
			case <-ctx.Done():
				// User requested to stop the timer.
				s.timer.Stop()
				return
			}
		}
	}

	// Start a go routine to listen to the timer.
	_ = s.worker.Start(nil, eventLoop)
}

// onTimer is invoked on every timer tick. It fires a publish when either the
// bounded max-wait has elapsed since the first pending event, or a quiesce
// period has elapsed since the latest event. It reports whether a publish
// was actually sent; the caller resets the timer when it returns false.
func (s *Strategy) onTimer() bool {
	s.stateLock.Lock()
	defer s.stateLock.Unlock()

	now := s.nowFn()
	// If there has been a long time since the first event, or if there was a quiesce since last event,
	// then fire publish to create new snapshots.
	// Otherwise, reset the timer and get a call again.
	maxTimeReached := now.After(s.firstEvent.Add(s.maxWaitDuration))
	quiesceTimeReached := now.After(s.latestEvent.Add(s.quiesceDuration))
	published := false
	if maxTimeReached || quiesceTimeReached {
		// Try to send to the channel
		select {
		case s.Publish <- struct{}{}:
			s.pendingChanges = false
			published = true
		default:
			// If the calling code is not draining the publish channel, then we can potentially cause
			// a deadlock here. Avoid the deadlock by going through the timer loop again.
			log.Scope.Warnf("Unable to publish to the channel, resetting the timer again to avoid deadlock")
		}
	}
	monitoring.RecordOnTimer(maxTimeReached, quiesceTimeReached, !published)

	return published
}

// Close stops the timer worker thread.
func (s *Strategy) Close() {
	s.worker.Stop()
}
28.915789
100
0.729341
3.15625
f7565039202ff3dc92fbf144641c15f512eaeda9
1,906
h
C
inc/ftsq/fast_queue.h
after5cst/fast-thread-safe-queue
1abc40d620afe7b476803577b86a86d70f61bc32
[ "MIT" ]
1
2016-01-19T19:17:17.000Z
2016-01-19T19:17:17.000Z
inc/ftsq/fast_queue.h
after5cst/fast-thread-safe-queue
1abc40d620afe7b476803577b86a86d70f61bc32
[ "MIT" ]
null
null
null
inc/ftsq/fast_queue.h
after5cst/fast-thread-safe-queue
1abc40d620afe7b476803577b86a86d70f61bc32
[ "MIT" ]
null
null
null
#ifndef FTSQ_MUTEX_H #define FTSQ_MUTEX_H #include "ftsq/mutex.h" #include <vector> #include <deque> namespace ftsq { template <typename T, typename mutex_type=ftsq::mutex> class queue_pop_one { public: typedef std::deque<T> queue_type; typedef typename queue_type::size_type size_type; size_type push(T item) { std::lock_guard<mutex_type> guard(m_mutex); m_queue.push_back(std::move(item)); return m_queue.size(); } bool pop(T& item) { std::lock_guard<mutex_type> guard(m_mutex); if(m_queue.empty()) { return false; } item = std::move(m_queue.front()); m_queue.pop_front(); return true; } queue_pop_one() {} // disable object copy queue_pop_one(const queue_pop_one&) = delete; void operator=(const queue_pop_one&) = delete; private: mutex_type m_mutex; queue_type m_queue; }; //class queue_pop_one template <typename T, typename mutex_type=ftsq::mutex> class queue_pop_all { public: typedef std::vector<T> queue_type; typedef typename queue_type::size_type size_type; size_type push(T item) { std::lock_guard<mutex_type> guard(m_mutex); m_queue.push_back(std::move(item)); return m_queue.size(); } queue_type pop_all() { std::lock_guard<mutex_type> guard(m_mutex); return std::move(m_queue); } queue_pop_all() {} // disable object copy queue_pop_all(const queue_pop_all&) = delete; void operator=(const queue_pop_all&) = delete; private: mutex_type m_mutex; queue_type m_queue; }; //class queue_pop_all } #endif // FTSQ_MUTEX_H
25.413333
58
0.573977
3.015625
3f6238e86b0863465a6220c6a98c119e4b3ef3d0
4,218
swift
Swift
Sources/Constraints/Standard/OptionalConstraint.swift
alexcristea/brick-validator
9696dfe2d2095c8e4be80eafb582b54348dcb36e
[ "MIT" ]
32
2017-02-26T19:09:43.000Z
2020-12-07T11:05:53.000Z
Sources/Constraints/Standard/OptionalConstraint.swift
alexcristea/validation-kit
9696dfe2d2095c8e4be80eafb582b54348dcb36e
[ "MIT" ]
22
2017-02-26T23:22:29.000Z
2021-03-06T13:12:08.000Z
Sources/Constraints/Standard/OptionalConstraint.swift
alexcristea/validation-kit
9696dfe2d2095c8e4be80eafb582b54348dcb36e
[ "MIT" ]
15
2017-02-26T19:09:44.000Z
2021-02-10T08:37:00.000Z
import Foundation /** A `Constraint` that accepts an optional input and passes the unwrapped value to an underlying `Constraint`. ```swift enum Failure: Error { case required case invalidEmail } ``` ```swift let email: String? = "[email protected]" let constraint = OptionalConstraint<String, Failure>(required: .required) { PredicateConstraint(.email, error: .invalidEmail) } let result = constraint.evaluate(with: email) ``` */ public struct OptionalConstraint<T, E: Error>: Constraint { public typealias InputType = T? public typealias ErrorType = E private let constraint: AnyConstraint<T, E> private let requiredError: E? /** Returns a new `OptionalConstraint` instance. ```swift enum Failure: Error { case required case invalidEmail } ``` ```swift let email: String? = "[email protected]" let emailConstraint = PredicateConstraint(.email, error: .invalidEmail) let constraint = OptionalConstraint<String, Failure>(required: .required, constraint: emailConstraint) let result = constraint.evaluate(with: email) - parameter required: An optional `Error` that marks the optional as mandatory. - parameter constraint: A `Constraint` to describes the evaluation rule for the unwrapped value of the input. */ public init<C: Constraint>(required requiredError: E? = nil, constraint: C) where C.InputType == T, C.ErrorType == E { self.constraint = constraint.erase() self.requiredError = requiredError } /** Returns a new `OptionalConstraint` instance. ```swift enum Failure: Error { case required case invalidEmail } ``` ```swift let email: String? = "[email protected]" let constraint = OptionalConstraint<String, Failure>(required: .required) { PredicateConstraint(.email, error: .invalidEmail) } let result = constraint.evaluate(with: email) - parameter required: An optional `Error` that marks the optional as mandatory. - parameter constraint: A closure that dynamically builds a `Constraint` to describes the evaluation rule for the unwrapped value of the input. 
*/ public init<C: Constraint>(required requiredError: E? = nil, constraintBuilder: () -> C) where C.InputType == T, C.ErrorType == E { self.init(required: requiredError, constraint: constraintBuilder()) } /** Evaluates the unwrapped input on the underlying constraint. - parameter input: The optional input to be validated. - returns: `.failure` with a `Summary` containing the required error when the optional is marked as required and the input is `nil`, `success` when the optional is not marked as required and the input is `nil`, the evaluation result from the underlying constraint otherwise. */ public func evaluate(with input: T?) -> Result<Void, Summary<E>> { if let input = input { return constraint.evaluate(with: input) } if let requiredError = requiredError { return .failure(Summary(errors: [requiredError])) } return .success(()) } } // MARK: - Constraint modifiers extension Constraint { /** Returns a new `OptionalConstraint` instance. ```swift enum Failure: Error { case required case invalidEmail } ``` ```swift let email: String? = "[email protected]" let emailConstraint = PredicateConstraint(.email, error: .invalidEmail) let constraint = emailConstraint.optional(required: .required) let result = constraint.evaluate(with: email) - parameter required: An optional `Error` that marks the optional as mandatory. - parameter constraint: A `Constraint` to describes the evaluation rule for the unwrapped value of the input. */ public func `optional`<T, E>(required requiredError: E? = nil) -> OptionalConstraint<T, E> where Self.ErrorType == E, Self.InputType == T{ OptionalConstraint(required: requiredError, constraint: self) } }
32.697674
279
0.652916
3.25
5a51656f89a2ad7d0edf1573643150e47ab98e91
13,067
rs
Rust
rosomaxa/src/example.rs
PeakBI/ds-reinterpretcat-vrp
62428fdd5438812ddcf37583a14b9a26bdb43225
[ "Apache-2.0" ]
null
null
null
rosomaxa/src/example.rs
PeakBI/ds-reinterpretcat-vrp
62428fdd5438812ddcf37583a14b9a26bdb43225
[ "Apache-2.0" ]
null
null
null
rosomaxa/src/example.rs
PeakBI/ds-reinterpretcat-vrp
62428fdd5438812ddcf37583a14b9a26bdb43225
[ "Apache-2.0" ]
null
null
null
//! This module contains example models and logic to demonstrate practical usage of rosomaxa crate. #[cfg(test)] #[path = "../tests/unit/example_test.rs"] mod example_test; use crate::evolution::*; use crate::get_default_population; use crate::hyper::*; use crate::population::{DominanceOrder, DominanceOrdered, RosomaxaWeighted, Shuffled}; use crate::prelude::*; use crate::utils::Noise; use hashbrown::{HashMap, HashSet}; use std::any::Any; use std::ops::Deref; use std::sync::Arc; /// An example objective function. pub type VectorFunction = Arc<dyn Fn(&[f64]) -> f64 + Send + Sync>; /// An example heuristic context. pub struct VectorContext { objective: Arc<VectorObjective>, population: Box<dyn HeuristicPopulation<Objective = VectorObjective, Individual = VectorSolution>>, statistics: HeuristicStatistics, environment: Arc<Environment>, state: HashMap<i32, Box<dyn Any + Send + Sync>>, } /// An example heuristic objective. pub struct VectorObjective { func: VectorFunction, } /// An example heuristic solution. pub struct VectorSolution { /// Solution payload. pub data: Vec<f64>, objective: Arc<VectorObjective>, order: DominanceOrder, } impl VectorContext { /// Creates a new instance of `VectorContext`. 
pub fn new( objective: Arc<VectorObjective>, population: Box<dyn HeuristicPopulation<Objective = VectorObjective, Individual = VectorSolution>>, environment: Arc<Environment>, ) -> Self { Self { objective, population, statistics: Default::default(), environment, state: Default::default() } } } impl HeuristicContext for VectorContext { type Objective = VectorObjective; type Solution = VectorSolution; fn objective(&self) -> &Self::Objective { &self.objective } fn population(&self) -> &dyn HeuristicPopulation<Objective = Self::Objective, Individual = Self::Solution> { self.population.as_ref() } fn population_mut( &mut self, ) -> &mut dyn HeuristicPopulation<Objective = Self::Objective, Individual = Self::Solution> { self.population.as_mut() } fn statistics(&self) -> &HeuristicStatistics { &self.statistics } fn statistics_mut(&mut self) -> &mut HeuristicStatistics { &mut self.statistics } fn environment(&self) -> &Environment { self.environment.as_ref() } } impl Stateful for VectorContext { type Key = i32; fn set_state<T: 'static + Send + Sync>(&mut self, key: Self::Key, state: T) { self.state.insert(key, Box::new(state)); } fn get_state<T: 'static + Send + Sync>(&self, key: &Self::Key) -> Option<&T> { self.state.get(key).and_then(|v| v.downcast_ref::<T>()) } fn state_mut<T: 'static + Send + Sync, F: Fn() -> T>(&mut self, key: Self::Key, inserter: F) -> &mut T { self.state.entry(key).or_insert_with(|| Box::new(inserter())).downcast_mut::<T>().unwrap() } } impl VectorObjective { /// Creates a new instance `VectorObjective`. 
pub fn new(func: VectorFunction) -> Self { Self { func } } } impl HeuristicObjective for VectorObjective {} impl Objective for VectorObjective { type Solution = VectorSolution; fn fitness(&self, solution: &Self::Solution) -> f64 { self.func.deref()(solution.data.as_slice()) } } impl MultiObjective for VectorObjective { fn objectives<'a>( &'a self, ) -> Box<dyn Iterator<Item = &'a (dyn Objective<Solution = Self::Solution> + Send + Sync)> + 'a> { let objective: &(dyn Objective<Solution = Self::Solution> + Send + Sync) = self; Box::new(std::iter::once(objective)) } } impl Shuffled for VectorObjective { fn get_shuffled(&self, _: &(dyn Random + Send + Sync)) -> Self { Self::new(self.func.clone()) } } impl HeuristicSolution for VectorSolution { fn get_fitness<'a>(&'a self) -> Box<dyn Iterator<Item = f64> + 'a> { Box::new(self.objective.objectives().map(move |objective| objective.fitness(self))) } fn deep_copy(&self) -> Self { Self::new(self.data.clone(), self.objective.clone()) } } impl DominanceOrdered for VectorSolution { fn get_order(&self) -> &DominanceOrder { &self.order } fn set_order(&mut self, order: DominanceOrder) { self.order = order } } impl RosomaxaWeighted for VectorSolution { fn weights(&self) -> Vec<f64> { // TODO: // for the sake of experimentation, consider to provide some configuration here to allow // usage of some noise, smoothing or optional weights, but not only direct mapping of data. self.data.clone() } } impl VectorSolution { /// Creates a new instance of `VectorSolution`. pub fn new(data: Vec<f64>, objective: Arc<VectorObjective>) -> Self { Self { data, objective, order: DominanceOrder::default() } } } /// An example initial operator pub struct VectorInitialOperator { data: Vec<f64>, } impl VectorInitialOperator { /// Creates a new instance of `VectorInitialOperator`. 
pub fn new(data: Vec<f64>) -> Self { Self { data } } } impl InitialOperator for VectorInitialOperator { type Context = VectorContext; type Objective = VectorObjective; type Solution = VectorSolution; fn create(&self, context: &Self::Context) -> Self::Solution { Self::Solution::new(self.data.clone(), context.objective.clone()) } } /// Specifies mode of heuristic operator. pub enum VectorHeuristicOperatorMode { /// Adds some noice to all dimensions. JustNoise(Noise), /// Adds some noice to specific dimensions. DimensionNoise(Noise, HashSet<usize>), } /// A naive implementation of heuristic search operator in vector space. struct VectorHeuristicOperator { mode: VectorHeuristicOperatorMode, } impl HeuristicOperator for VectorHeuristicOperator { type Context = VectorContext; type Objective = VectorObjective; type Solution = VectorSolution; fn search(&self, context: &Self::Context, solution: &Self::Solution) -> Self::Solution { Self::Solution::new( match &self.mode { VectorHeuristicOperatorMode::JustNoise(noise) => { solution.data.iter().map(|d| *d + noise.add(*d)).collect() } VectorHeuristicOperatorMode::DimensionNoise(noise, dimens) => solution .data .iter() .enumerate() .map(|(idx, d)| if dimens.contains(&idx) { *d + noise.add(*d) } else { *d }) .collect(), }, context.objective.clone(), ) } } type TargetInitialOperator = Box< dyn InitialOperator<Context = VectorContext, Objective = VectorObjective, Solution = VectorSolution> + Send + Sync, >; type TargetHeuristicOperator = Arc< dyn HeuristicOperator<Context = VectorContext, Objective = VectorObjective, Solution = VectorSolution> + Send + Sync, >; /// Specifies solver solutions. pub type SolverSolutions = Vec<(Vec<f64>, f64)>; /// An example of the optimization solver to solve trivial problems. 
pub struct Solver { initial_solutions: Vec<Vec<f64>>, initial_params: (usize, f64), objective_func: Option<VectorFunction>, max_time: Option<usize>, max_generations: Option<usize>, min_cv: Option<(String, usize, f64, bool)>, target_proximity: Option<(Vec<f64>, f64)>, operators: Vec<(TargetHeuristicOperator, String, f64)>, } impl Default for Solver { fn default() -> Self { Self { initial_solutions: vec![], initial_params: (4, 0.05), objective_func: None, max_time: Some(10), max_generations: Some(100), min_cv: None, target_proximity: None, operators: vec![], } } } impl Solver { /// Sets initial parameters. pub fn with_init_params(mut self, max_size: usize, quota: f64) -> Self { self.initial_params = (max_size, quota); self } /// Sets initial solutions. pub fn with_init_solutions(mut self, init_solutions: Vec<Vec<f64>>) -> Self { self.initial_solutions = init_solutions; self } // TODO add termination to stop when solution close to some target /// Sets termination parameters. pub fn with_termination( mut self, max_time: Option<usize>, max_generations: Option<usize>, min_cv: Option<(String, usize, f64, bool)>, target_proximity: Option<(Vec<f64>, f64)>, ) -> Self { self.max_time = max_time; self.max_generations = max_generations; self.min_cv = min_cv; self.target_proximity = target_proximity; self } /// Sets search operator. pub fn with_operator(mut self, mode: VectorHeuristicOperatorMode, name: &str, probability: f64) -> Self { self.operators.push((Arc::new(VectorHeuristicOperator { mode }), name.to_string(), probability)); self } /// Sets objective function. pub fn with_objective_fun(mut self, objective_func: VectorFunction) -> Self { self.objective_func = Some(objective_func); self } /// Runs the solver using configuration provided through fluent interface methods. 
pub fn solve(self) -> Result<(SolverSolutions, Option<TelemetryMetrics>), String> { let environment = Arc::new(Environment::new_with_time_quota(self.max_time)); // build instances of implementation types from submitted data let func = self.objective_func.ok_or_else(|| "objective function must be set".to_string())?; let objective = Arc::new(VectorObjective::new(func)); let heuristic = Box::new(MultiSelective::new( Box::new(DynamicSelective::new( self.operators.iter().map(|(op, name, _)| (op.clone(), name.clone())).collect(), environment.random.clone(), )), Box::new(StaticSelective::new( self.operators .iter() .map(|(op, _, probability)| { let random = environment.random.clone(); let probability = *probability; let probability_func: HeuristicProbability<VectorContext, VectorObjective, VectorSolution> = (Box::new(move |_, _| random.is_hit(probability)), Default::default()); (op.clone(), probability_func) }) .collect(), )), )); let initial_operators = self .initial_solutions .into_iter() .map(VectorInitialOperator::new) .map::<(TargetInitialOperator, _), _>(|o| (Box::new(o), 1)) .collect(); // create a heuristic context let context = VectorContext::new( objective.clone(), get_default_population::<VectorContext, _, _>(objective.clone(), environment.clone()), environment.clone(), ); // create a telemetry which will log population let telemetry = Telemetry::new(TelemetryMode::OnlyLogging { logger: environment.logger.clone(), log_best: 100, log_population: 500, dump_population: false, }); // build evolution config using fluent interface let config = EvolutionConfigBuilder::default() .with_heuristic(heuristic) .with_objective(objective) .with_context(context) .with_min_cv(self.min_cv, 1) .with_max_time(self.max_time) .with_max_generations(self.max_generations) .with_target_proximity(self.target_proximity) .with_initial(self.initial_params.0, self.initial_params.1, initial_operators) .with_telemetry(telemetry) .build()?; // solve the problem let (solutions, metrics) = 
EvolutionSimulator::new(config)?.run()?; let solutions = solutions .into_iter() .map(|s| { let fitness = s.get_fitness().next().expect("empty fitness"); (s.data, fitness) }) .collect(); Ok((solutions, metrics)) } } /// Creates multidimensional Rosenbrock function, also referred to as the Valley or Banana function. /// The function is usually evaluated on the hypercube xi ∈ [-5, 10], for all i = 1, …, d, although /// it may be restricted to the hypercube xi ∈ [-2.048, 2.048], for all i = 1, …, d. pub fn create_rosenbrock_function() -> VectorFunction { Arc::new(|input| { assert!(input.len() > 1); input.windows(2).fold(0., |acc, pair| { let (x1, x2) = match pair { [x1, x2] => (*x1, *x2), _ => unreachable!(), }; acc + 100. * (x2 - x1.powi(2)).powi(2) + (x1 - 1.).powi(2) }) }) }
32.424318
119
0.614296
3.234375
6533cb2d911f06e68a721247deb37def17dac93b
5,448
py
Python
kedro/extras/datasets/pandas/appendable_excel_dataset.py
hfwittmann/kedro
b0d4fcd8f19b49a7916d78fd09daeb6209a7b6c6
[ "Apache-2.0" ]
1
2021-11-25T12:33:13.000Z
2021-11-25T12:33:13.000Z
kedro/extras/datasets/pandas/appendable_excel_dataset.py
MerelTheisenQB/kedro
1eaa2e0fa5d80f96e18ea60b9f3d6e6efc161827
[ "Apache-2.0" ]
null
null
null
kedro/extras/datasets/pandas/appendable_excel_dataset.py
MerelTheisenQB/kedro
1eaa2e0fa5d80f96e18ea60b9f3d6e6efc161827
[ "Apache-2.0" ]
null
null
null
"""``AppendableExcelDataSet`` loads/saves data from/to a local Excel file opened in append mode. It uses pandas to handle the Excel file. """ from copy import deepcopy from pathlib import Path, PurePosixPath from typing import Any, Dict import pandas as pd from kedro.io.core import AbstractDataSet, DataSetError class AppendableExcelDataSet(AbstractDataSet): """``AppendableExcelDataSet`` loads/saves data from/to a local Excel file opened in append mode. It uses pandas to handle the Excel file. Example adding a catalog entry with `YAML API <https://kedro.readthedocs.io/en/stable/05_data/\ 01_data_catalog.html#using-the-data-catalog-with-the-yaml-api>`_: .. code-block:: yaml >>> # AppendableExcelDataSet creates a new sheet for every dataset >>> # ExcelDataSet restricts one dataset per file as it is overwritten >>> >>> preprocessed_companies: >>> type: pandas.AppendableExcelDataSet >>> filepath: data/02_intermediate/preprocessed.xlsx # assumes file already exists >>> save_args: >>> sheet_name: preprocessed_companies >>> load_args: >>> sheet_name: preprocessed_companies >>> >>> preprocessed_shuttles: >>> type: pandas.AppendableExcelDataSet >>> filepath: data/02_intermediate/preprocessed.xlsx >>> save_args: >>> sheet_name: preprocessed_shuttles >>> load_args: >>> sheet_name: preprocessed_shuttles Example using Python API: :: >>> from kedro.extras.datasets.pandas import AppendableExcelDataSet >>> from kedro.extras.datasets.pandas import ExcelDataSet >>> import pandas as pd >>> >>> data_1 = pd.DataFrame({'col1': [1, 2], 'col2': [4, 5], >>> 'col3': [5, 6]}) >>> >>> data_2 = pd.DataFrame({'col1': [7, 8], 'col2': [5, 7]}) >>> >>> regular_ds = ExcelDataSet(filepath="/tmp/test.xlsx") >>> appendable_ds = AppendableExcelDataSet( >>> filepath="/tmp/test.xlsx", >>> save_args={"sheet_name": "my_sheet"}, >>> load_args={"sheet_name": "my_sheet"} >>> ) >>> >>> regular_ds.save(data_1) >>> appendable_ds.save(data_2) >>> reloaded = appendable_ds.load() >>> assert data_2.equals(reloaded) 
""" DEFAULT_LOAD_ARGS = {"engine": "openpyxl"} DEFAULT_SAVE_ARGS = {"index": False} def __init__( self, filepath: str, load_args: Dict[str, Any] = None, save_args: Dict[str, Any] = None, ) -> None: """Creates a new instance of ``AppendableExcelDataSet`` pointing to an existing local Excel file to be opened in append mode. Args: filepath: Filepath in POSIX format to an existing local Excel file. load_args: Pandas options for loading Excel files. Here you can find all available arguments: https://pandas.pydata.org/pandas-docs/stable/generated/pandas.read_excel.html All defaults are preserved, but "engine", which is set to "openpyxl". save_args: Pandas options for saving Excel files. Here you can find all available arguments: https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_excel.html All defaults are preserved, but "index", which is set to False. If you would like to specify options for the `ExcelWriter`, you can include them under "writer" key. Here you can find all available arguments: https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.ExcelWriter.html Note: `mode` option of `ExcelWriter` is set to `a` and it can not be overridden. 
""" self._filepath = PurePosixPath(filepath) # Handle default load and save arguments self._load_args = deepcopy(self.DEFAULT_LOAD_ARGS) if load_args is not None: self._load_args.update(load_args) save_args = deepcopy(save_args) or {} self._save_args = deepcopy(self.DEFAULT_SAVE_ARGS) self._writer_args = save_args.pop("writer", {}) # type: Dict[str, Any] self._writer_args.setdefault("engine", "openpyxl") if save_args is not None: self._save_args.update(save_args) # Use only append mode self._writer_args["mode"] = "a" def _describe(self) -> Dict[str, Any]: return dict( filepath=self._filepath, load_args=self._load_args, save_args=self._save_args, writer_args=self._writer_args, ) def _load(self) -> pd.DataFrame: return pd.read_excel(str(self._filepath), **self._load_args) def _save(self, data: pd.DataFrame) -> None: # pylint: disable=abstract-class-instantiated try: with pd.ExcelWriter(str(self._filepath), **self._writer_args) as writer: data.to_excel(writer, **self._save_args) except FileNotFoundError as exc: raise DataSetError( f"`{self._filepath}` Excel file not found. The file cannot be opened in " f"append mode." ) from exc def _exists(self) -> bool: return Path(self._filepath.as_posix()).is_file()
39.766423
101
0.612518
3.03125
544ceb2743dbbb4ce474f25f42d566e60e7e9573
3,812
go
Go
Godeps/_workspace/src/github.com/ThomasRooney/gexpect/gexpect_test.go
maquanyi/rkt
d213d00ad591e9b2e1542c3b1615a79bab03633d
[ "Apache-2.0" ]
null
null
null
Godeps/_workspace/src/github.com/ThomasRooney/gexpect/gexpect_test.go
maquanyi/rkt
d213d00ad591e9b2e1542c3b1615a79bab03633d
[ "Apache-2.0" ]
null
null
null
Godeps/_workspace/src/github.com/ThomasRooney/gexpect/gexpect_test.go
maquanyi/rkt
d213d00ad591e9b2e1542c3b1615a79bab03633d
[ "Apache-2.0" ]
1
2022-03-22T09:16:50.000Z
2022-03-22T09:16:50.000Z
package gexpect import ( "strings" "testing" ) func TestHelloWorld(t *testing.T) { t.Logf("Testing Hello World... ") child, err := Spawn("echo \"Hello World\"") if err != nil { t.Fatal(err) } err = child.Expect("Hello World") if err != nil { t.Fatal(err) } } func TestDoubleHelloWorld(t *testing.T) { t.Logf("Testing Double Hello World... ") child, err := Spawn(`sh -c "echo Hello World ; echo Hello ; echo Hi"`) if err != nil { t.Fatal(err) } err = child.Expect("Hello World") if err != nil { t.Fatal(err) } err = child.Expect("Hello") if err != nil { t.Fatal(err) } err = child.Expect("Hi") if err != nil { t.Fatal(err) } } func TestHelloWorldFailureCase(t *testing.T) { t.Logf("Testing Hello World Failure case... ") child, err := Spawn("echo \"Hello World\"") if err != nil { t.Fatal(err) } err = child.Expect("YOU WILL NEVER FIND ME") if err != nil { return } t.Fatal("Expected an error for TestHelloWorldFailureCase") } func TestBiChannel(t *testing.T) { t.Logf("Testing BiChannel screen... ") child, err := Spawn("cat") if err != nil { t.Fatal(err) } sender, reciever := child.AsyncInteractChannels() wait := func(str string) { for { msg, open := <-reciever if !open { return } if strings.Contains(msg, str) { return } } } sender <- "echo\n" wait("echo") sender <- "echo2" wait("echo2") child.Close() // child.Wait() } func TestCommandStart(t *testing.T) { t.Logf("Testing Command... ") // Doing this allows you to modify the cmd struct prior to execution, for example to add environment variables child, err := Command("echo 'Hello World'") if err != nil { t.Fatal(err) } child.Start() child.Expect("Hello World") } var regexMatchTests = []struct { re string good string bad string }{ {`a`, `a`, `b`}, {`.b`, `ab`, `ac`}, {`a+hello`, `aaaahello`, `bhello`}, {`(hello|world)`, `hello`, `unknown`}, {`(hello|world)`, `world`, `unknown`}, } func TestRegexMatch(t *testing.T) { t.Logf("Testing Regular Expression Matching... 
") for _, tt := range regexMatchTests { runTest := func(input string) bool { var match bool child, err := Spawn("echo \"" + input + "\"") if err != nil { t.Fatal(err) } match, err = child.ExpectRegex(tt.re) if err != nil { t.Fatal(err) } return match } if !runTest(tt.good) { t.Errorf("Regex Not matching [%#q] with pattern [%#q]", tt.good, tt.re) } if runTest(tt.bad) { t.Errorf("Regex Matching [%#q] with pattern [%#q]", tt.bad, tt.re) } } } var regexFindTests = []struct { re string input string matches []string }{ {`he(l)lo wo(r)ld`, `hello world`, []string{"hello world", "l", "r"}}, {`(a)`, `a`, []string{"a", "a"}}, {`so.. (hello|world)`, `so.. hello`, []string{"so.. hello", "hello"}}, {`(a+)hello`, `aaaahello`, []string{"aaaahello", "aaaa"}}, {`\d+ (\d+) (\d+)`, `123 456 789`, []string{"123 456 789", "456", "789"}}, } func TestRegexFind(t *testing.T) { t.Logf("Testing Regular Expression Search... ") for _, tt := range regexFindTests { runTest := func(input string) []string { child, err := Spawn("echo \"" + input + "\"") if err != nil { t.Fatal(err) } matches, err := child.ExpectRegexFind(tt.re) if err != nil { t.Fatal(err) } return matches } matches := runTest(tt.input) if len(matches) != len(tt.matches) { t.Fatalf("Regex not producing the expected number of patterns.. got[%d] ([%s]) expected[%d] ([%s])", len(matches), strings.Join(matches, ","), len(tt.matches), strings.Join(tt.matches, ",")) } for i, _ := range matches { if matches[i] != tt.matches[i] { t.Errorf("Regex Expected group [%s] and got group [%s] with pattern [%#q] and input [%s]", tt.matches[i], matches[i], tt.re, tt.input) } } } }
22.826347
111
0.595226
3.296875
7828db01df90d87ce8957314f72541d2ebdb2520
1,260
swift
Swift
Sources/SATSCore/Extensions/SwiftUI/ViewData/ActionSheetViewData.swift
healthfitnessnordic/SATSCore-iOS
66ca055876bdc92c5df250d140e916d1575eab13
[ "MIT" ]
3
2021-05-18T07:31:59.000Z
2022-03-20T10:07:32.000Z
Sources/SATSCore/Extensions/SwiftUI/ViewData/ActionSheetViewData.swift
healthfitnessnordic/SATSCore-iOS
66ca055876bdc92c5df250d140e916d1575eab13
[ "MIT" ]
12
2021-08-02T08:53:22.000Z
2022-03-23T10:44:28.000Z
Sources/SATSCore/Extensions/SwiftUI/ViewData/ActionSheetViewData.swift
healthfitnessnordic/SATSCore-iOS
66ca055876bdc92c5df250d140e916d1575eab13
[ "MIT" ]
null
null
null
import SwiftUI public struct ActionSheetViewData: Identifiable, Equatable { public let id: String public let title: String public let message: String? public let actions: [ActionViewData] public init(id: String? = nil, title: String, message: String?, actions: [ActionViewData]) { self.id = id ?? UUID().uuidString self.title = title self.message = message self.actions = actions } public struct ActionViewData: Equatable { public let title: String public let perform: () -> Void public init(title: String, perform: @escaping () -> Void) { self.title = title self.perform = perform } public static func == (lhs: Self, rhs: Self) -> Bool { lhs.title == rhs.title } } } public extension ActionSheet { init(viewData: ActionSheetViewData) { var buttons: [Button] = viewData.actions .map { action in Button.default(Text(action.title), action: action.perform) } buttons.append(.cancel()) self.init( title: Text(viewData.title), message: viewData.message.map { Text($0) }, buttons: buttons ) } }
27.391304
96
0.577778
3.109375
410aa5cee9cac9a50c2dba28317823f60b93253f
794
c
C
LeetCode/0064_minimum-path-sum/0064_minimum-path-sum.c
kenjin/DSAlgo
f4f58d57eebc5d7d1ce78f842e08cec360f403a4
[ "MIT" ]
13
2020-08-10T08:25:07.000Z
2022-03-22T07:47:46.000Z
LeetCode/0064_minimum-path-sum/0064_minimum-path-sum.c
kenjin/DSAlgo
f4f58d57eebc5d7d1ce78f842e08cec360f403a4
[ "MIT" ]
null
null
null
LeetCode/0064_minimum-path-sum/0064_minimum-path-sum.c
kenjin/DSAlgo
f4f58d57eebc5d7d1ce78f842e08cec360f403a4
[ "MIT" ]
5
2021-01-05T01:58:04.000Z
2022-03-22T07:47:49.000Z
#define MIN(a, b) (a < b ? a : b) int minPathSum(int **grid, int grid_sz, int *grid_col_sz) { /* sanity check */ if (grid_sz == 0) return 0; int **dp = malloc(sizeof(int *) * grid_sz); int col_sz = grid_col_sz[0], sum = 0; for (int i = 0; i < grid_sz; i++) { dp[i] = malloc(sizeof(int) * col_sz); sum += grid[i][0]; dp[i][0] = sum; } sum = 0; for (int i = 0; i < col_sz; i++) { sum += grid[0][i]; dp[0][i] = sum; } for (int i = 1; i < grid_sz; i++) { for (int j = 1; j < col_sz; j++) dp[i][j] = MIN(dp[i - 1][j], dp[i][j - 1]) + grid[i][j]; } int ret = dp[grid_sz - 1][col_sz - 1]; for (int i = 0; i < grid_sz; i++) free(dp[i]); free(dp); return ret; }
23.352941
68
0.440806
3
3ce2becf1f32314524e24bf314d8c9206b194655
11,946
lua
Lua
resources/[race]/race_random/random_c.lua
AfuSensi/MTA-Resources
e4a0f3981ddc92c8f15c3d93140196c6a8589fa8
[ "MIT", "0BSD" ]
null
null
null
resources/[race]/race_random/random_c.lua
AfuSensi/MTA-Resources
e4a0f3981ddc92c8f15c3d93140196c6a8589fa8
[ "MIT", "0BSD" ]
null
null
null
resources/[race]/race_random/random_c.lua
AfuSensi/MTA-Resources
e4a0f3981ddc92c8f15c3d93140196c6a8589fa8
[ "MIT", "0BSD" ]
null
null
null
--Sweeper local function playRunSound() local sound = playSound("files/run.wma") end addEvent("playRunSound", true) addEventHandler("playRunSound", root, playRunSound) --Launch in air/Send to Heaven local function playHallelujahSound() local sound = playSound("files/hallelujah.wma") end addEvent("playHallelujahSound", true) addEventHandler("playHallelujahSound", root, playHallelujahSound) --Darkness local function enableDarkness() fadeCamera(false, 3, 0, 0, 0) local function disableDarkness() fadeCamera(true, 3, 0, 0 , 0) end setTimer(disableDarkness, 6000, 1) end addEvent("serverDarkness", true) addEventHandler("serverDarkness", root, enableDarkness) --Teleport back local function playTeleportSound() sound = playSound("files/tpsound.wma") end addEvent("playTeleportSound", true) addEventHandler("playTeleportSound", root, playTeleportSound) --Turn player around local function playerTurnAround() if not (getPedOccupiedVehicle(localPlayer) and isElement(getPedOccupiedVehicle(localPlayer))) then return end local vehicle = getPedOccupiedVehicle(getLocalPlayer()) if not vehicle then return end local RotX, RotY, RotZ = getElementRotation(vehicle) setElementRotation(vehicle, RotX, RotY, RotZ+180) end addEvent("playerTurnAround", true) addEventHandler("playerTurnAround", root, playerTurnAround) --Massive Slap local function playerMassiveSlap() if not (getPedOccupiedVehicle(localPlayer) and isElement(getPedOccupiedVehicle(localPlayer))) then return end local vehicle = getPedOccupiedVehicle(getLocalPlayer()) setElementVelocity(vehicle, 0, 0, 0.2) setElementHealth(vehicle, getElementHealth(vehicle) - 100) end addEvent("playerMassiveSlap", true) addEventHandler("playerMassiveSlap", root, playerMassiveSlap) local function serverM() if getElementData(localPlayer,"state") ~= "alive" then local sound = playSound("files/votesound.wav") end end addEvent( "serverN", true ) addEventHandler( "serverN", root, serverM ) local function serverFloat() local vehicle = 
getPedOccupiedVehicle(localPlayer) if not vehicle or not isElement(vehicle) then return end setVehicleGravity(vehicle, 0, 0, 0) setTimer(setVehicleGravity, 15000, 1, vehicle, 0, 0, -1) end addEvent( "serverGravityFloat", true ) addEventHandler( "serverGravityFloat", root, serverFloat ) local function serverFloatStairwell() local vehicle = getPedOccupiedVehicle(localPlayer) if not vehicle or not isElement(vehicle) then return end setVehicleGravity(vehicle, 0, 0, 1) setTimer(setVehicleGravity,8000,1,vehicle,0,0,-1 ) end addEvent( "serverGravityFloatStairwell", true ) addEventHandler( "serverGravityFloatStairwell", root, serverFloatStairwell ) local function serverSleepWithFish() local vehicle = getPedOccupiedVehicle(source) if not vehicle or not isElement(vehicle) then return end for _, object in ipairs(getElementsByType('object')) do setElementCollidableWith(vehicle, object, false) end end addEvent( "serverSleepWithFish", true ) addEventHandler( "serverSleepWithFish", root, serverSleepWithFish ) function noBrakes() local time = { [1] = 10000, [2] = 12000, [3] = 14000, [4] = 16000, [5] = 20000 } -- revert time local theTime = time[math.random(1,5)] toggleControl( "handbrake", false ) toggleControl( "brake_reverse", false ) exports.messages:outputGameMessage("You have no brakes for "..tostring(theTime/1000).." seconds!",2,255,0,0) setTimer(function() toggleControl( "handbrake", true ) toggleControl( "brake_reverse", true ) exports.messages:outputGameMessage("Your brakes returned!",2,0,255,0) end, theTime, 1) end addEvent( "serverNoBrakes", true ) addEventHandler( "serverNoBrakes", resourceRoot, noBrakes ) function Nuke(Amount) playRunSound() exports.messages:outputGameMessage("There are "..tostring(Amount).." 
missiles coming your way!",2.5,255,0,0) setTimer(function() local veh = getPedOccupiedVehicle( localPlayer ) if veh then local px,py,pz = getElementPosition( veh ) setTimer(function() createProjectile( veh, 19, px, py, pz+100, 1,localPlayer,0,0,0,0,0,-45 ) end, 200, 1) end end,1000,Amount) end addEvent( "serverNuke", true ) addEventHandler( "serverNuke", resourceRoot, Nuke ) addEvent("clientRemovePickups", true) function c_removePickups() local pickups = exports["race"]:e_getPickups() for f, u in pairs(pickups) do setElementPosition(f, 0,0,-10000) -- Hides colshape to -10000 Z setElementPosition(u["object"],0,0,-10000) -- Hides pickup to -10000 Z end exports.messages:outputGameMessage("All pickups are removed!",2,255,255,255) end addEventHandler("clientRemovePickups", resourceRoot, c_removePickups) addEvent("onRavebreakStart",true) addEvent("stopRaveBreak",true) function c_ravebreak(t) rb_soundVolumes = {} local rb_sounds = getElementsByType( "sound" ) for _,snd in pairs(rb_sounds) do rb_soundVolumes[snd] = getSoundVolume( snd ) setSoundVolume( snd, 0 ) end local screenWidth, screenHeight = guiGetScreenSize() raveBreakBrowser = createBrowser(screenWidth, screenHeight, true, true) ravebreak = playSound("files/ravebreak"..tostring(math.random(1,4))..".mp3") shakeTimer = setTimer ( function() if getElementData(localPlayer,"state") == "alive" then -- If player's alive, explode under car, otherwise check for camera pos px,py,pz = getElementPosition(getLocalPlayer()) createExplosion(px, py, pz-30, 0, false, 2.5, false) else px,py,pz = getCameraMatrix() createExplosion(px-30, py-30, pz-30, 0, false, 2.5, false) end end, 1000, 0 ) colorTimer = setTimer ( function() fadeCamera(false, 0.9, math.random(0,180), math.random(0,180), math.random(0,180) ) end, 250, 0 ) resetTimer = setTimer ( function() fadeCamera(true, 0.3 ) end, 320, 0 ) end addEventHandler("onRavebreakStart",root,c_ravebreak) function renderRaveBreak() local screenWidth, screenHeight = guiGetScreenSize() 
dxDrawImage(0, 0, screenWidth , screenHeight, raveBreakBrowser, 0, 0, 0, tocolor(255,255,255,180), false) end addEventHandler("onClientBrowserCreated", root, function() if source ~= raveBreakBrowser then return end loadBrowserURL(raveBreakBrowser, "http://mta/local/ravebreak.html") addEventHandler("onClientRender", root, renderRaveBreak) end ) function stopRaveBreak() stopSound( ravebreak ) killTimer(shakeTimer) killTimer(colorTimer) killTimer(resetTimer) fadeCamera(true, 0.5 ) for sound,volume in pairs(rb_soundVolumes) do if isElement(sound) and getElementType(sound) == "sound" and tonumber(volume) then setSoundVolume( sound, volume ) end end removeEventHandler("onClientRender", root, renderRaveBreak) if isElement(raveBreakBrowser) then destroyElement(raveBreakBrowser) end end addEventHandler("stopRaveBreak",root,stopRaveBreak) addCommandHandler("freeravebreak", function() c_ravebreak() setTimer ( stopRaveBreak, 10000, 1 ) end) -- nuked http://community.mtasa.com/index.php?p=resources&s=details&id=71 N_loops = 0 N_cloudRotationAngle = 0 NFlashDelay = 0 stopNFlash = false function FireN ( x, y, z ) local sound = playSound3D( "files/BOMB_SIREN-BOMB_SIREN-247265934.mp3", x, y, z) setSoundMaxDistance(sound, 100) setTimer(destroyElement, 3000, 1, sound) NBeaconX = x --these are for the render function NBeaconY = y NBeaconZ = z N_Cloud = NBeaconZ setTimer ( function() setTimer ( NExplosion, 170, 35 ) end, 2700, 1 ) -- wait 2700 seconds then 35 loops @ 170ms setTimer ( NShot, 500, 1 ) end addEvent("ClientFireN",true) addEventHandler("ClientFireN", getRootElement(), FireN) function NShot () NukeObjectA = createObject ( 16340, NBeaconX, NBeaconY, NBeaconZ + 200 ) NukeObjectB = createObject ( 3865, NBeaconX + 0.072265, NBeaconY + 0.013731, NBeaconZ + 196.153122 ) NukeObjectC = createObject ( 1243, NBeaconX + 0.060547, NBeaconY - 0.017578, NBeaconZ + 189.075554 ) setElementRotation ( NukeObjectA, math.deg(3.150001), math.deg(0), math.deg(0.245437) ) setElementRotation ( 
NukeObjectB, math.deg(-1.575), math.deg(0), math.deg(1.938950) ) setElementRotation ( NukeObjectC, math.deg(0), math.deg(0), math.deg(-1.767145) ) shotpath = NBeaconZ - 200 moveObject ( NukeObjectA, 5000, NBeaconX, NBeaconY, shotpath, 0, 0, 259.9 ) moveObject ( NukeObjectB, 5000, NBeaconX + 0.072265, NBeaconY + 0.013731, shotpath - 3.846878, 0, 0, 259.9 ) moveObject ( NukeObjectC, 5000, NBeaconX + 0.060547, NBeaconY - 0.017578, shotpath - 10.924446, 0, 0, 259.9 ) end function NExplosion () N_loops = N_loops + 1 r = math.random(1.5, 4.5) angleup = math.random(0, 35999)/100 explosionXCoord = r*math.cos(angleup) + NBeaconX ExplosionYCoord = r*math.sin(angleup) + NBeaconY if N_loops == 1 then N_Cloud = NBeaconZ createExplosion ( explosionXCoord, ExplosionYCoord, N_Cloud, 7 ) killXPosRadius = NBeaconX + 35 killXNegRadius = NBeaconX - 35 killYPosRadius = NBeaconY + 35 killYNegRadius = NBeaconY - 35 --+/- 35 x/y killZPosRadius = NBeaconZ + 28-- +28 killZNegRadius = NBeaconZ - 28-- -28 local x, y, z = getElementPosition ( localPlayer ) if ( x < killXPosRadius ) and ( x > killXNegRadius ) and ( y < killYPosRadius ) and ( y > killYNegRadius ) and ( z < killZPosRadius ) and ( z > killZNegRadius ) then --triggerServerEvent ( "serverKillNukedPlayer", localPlayer ) end elseif N_loops == 2 then N_Cloud = NBeaconZ + 4 createExplosion ( explosionXCoord, ExplosionYCoord, N_Cloud, 7 ) destroyElement ( NukeObjectA ) --Exploded, get rid of objects destroyElement ( NukeObjectB ) destroyElement ( NukeObjectC ) elseif N_loops > 20 then N_cloudRotationAngle = N_cloudRotationAngle + 22.5 if N_explosionLimiter == false then N_cloudRadius = 7 explosionXCoord = N_cloudRadius*math.cos(N_cloudRotationAngle) + NBeaconX --recalculate ExplosionYCoord = N_cloudRadius*math.sin(N_cloudRotationAngle) + NBeaconY --recalculate createExplosion ( explosionXCoord, ExplosionYCoord, N_Cloud, 7 ) N_explosionLimiter = true elseif N_explosionLimiter == true then N_explosionLimiter = false end N_cloudRadius2 = 16 
explosionXCoord2 = N_cloudRadius2*math.cos(N_cloudRotationAngle) + NBeaconX ExplosionYCoord2 = N_cloudRadius2*math.sin(N_cloudRotationAngle) + NBeaconY createExplosion ( explosionXCoord2, ExplosionYCoord2, N_Cloud, 7 ) else N_Cloud = N_Cloud + 4 createExplosion ( explosionXCoord, ExplosionYCoord, N_Cloud, 7 ) end if N_loops == 1 then NExplosionFlash = createMarker ( NBeaconX, NBeaconY, NBeaconZ, "corona", 0, 255, 255, 255, 255 ) N_FlashSize = 1 addEventHandler ( "onClientRender", root, NFlash ) elseif N_loops == 35 then stopNFlash = true end end function NFlash () --Corona "flare". Grows after cp marker B grows a little if ( stopNFlash == false ) then if N_FlashSize > 60 then --beginning flash must grow fast, then delayed if NFlashDelay == 2 then N_FlashSize = N_FlashSize + 1 NFlashDelay = 0 else NFlashDelay = NFlashDelay + 1 end else N_FlashSize = N_FlashSize + 1 end else N_FlashSize = N_FlashSize - 1 end setMarkerSize ( NExplosionFlash, N_FlashSize ) if N_FlashSize == 0 then removeEventHandler ( "onClientRender", root, NFlash ) destroyElement ( NExplosionFlash ) N_loops = 0 --reset stuff N_cloudRotationAngle = 0 --reset stuff stopNFlash = false --reset stuff NFlashDelay = 0 --reset stuff --triggerServerEvent ( "serverNukeFinished", getRootElement() ) end end function serverLowFPS(limit, duration) setTimer(setFPSLimit, 10 * 1000, 1, getFPSLimit() ) setFPSLimit ( 30 ) end addEvent( "serverLowFPS", true ) addEventHandler( "serverLowFPS", resourceRoot, serverLowFPS ) -- Quick spectate victims addEvent("onSpectateVictim",true) function spectateVictim(name) if name then executeCommandHandler("s",name) end end addEventHandler("onSpectateVictim",resourceRoot,spectateVictim)
31.028571
133
0.738825
3.078125
b063af9621c085fe9d87be3e0f9946a5380e6991
1,489
rs
Rust
src/identity/bin/password_authenticator/src/keys.rs
allansrc/fuchsia
a2c235b33fc4305044d496354a08775f30cdcf37
[ "BSD-2-Clause" ]
2
2022-02-24T16:24:29.000Z
2022-02-25T22:33:10.000Z
src/identity/bin/password_authenticator/src/keys.rs
allansrc/fuchsia
a2c235b33fc4305044d496354a08775f30cdcf37
[ "BSD-2-Clause" ]
1
2022-03-01T01:12:04.000Z
2022-03-01T01:17:26.000Z
src/identity/bin/password_authenticator/src/keys.rs
allansrc/fuchsia
a2c235b33fc4305044d496354a08775f30cdcf37
[ "BSD-2-Clause" ]
null
null
null
// Copyright 2021 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use {async_trait::async_trait, fidl_fuchsia_identity_account as faccount, thiserror::Error}; #[derive(Debug, Error)] pub enum KeyError { // TODO(zarvox): remove once NullKey support is removed // This is only needed for NullKeyDerivation -- once we no longer have a key derivation that // would otherwise ignore the password provided, we can simply handle all authentication // failures by letting the resulting derived-key simply not match what the partition will // require to be unsealed. #[error("Password did not meet precondition")] PasswordError, #[error("Failed to derive key from password")] KeyDerivationError, } /// A 256-bit key. pub type Key = [u8; 32]; /// The `KeyDerivation` trait provides a mechanism for deriving a key from a password. /// The returned key is suitable for use with a zxcrypt volume. #[async_trait] pub trait KeyDerivation { /// Derive a key from the given password. The returned key will be 256 bits long. async fn derive_key(&self, password: &str) -> Result<Key, KeyError>; } impl From<KeyError> for faccount::Error { fn from(e: KeyError) -> Self { match e { KeyError::PasswordError => faccount::Error::FailedAuthentication, KeyError::KeyDerivationError => faccount::Error::Internal, } } }
36.317073
96
0.706514
3.140625
0ba3c4d7d4d48cd32673696a0d4ce0dedcefcaca
21,354
py
Python
pootlestuff/watchables.py
pootle/pootles_utils
bb47103e71ccc4fa01269259b73ca1932184af84
[ "UPL-1.0" ]
null
null
null
pootlestuff/watchables.py
pootle/pootles_utils
bb47103e71ccc4fa01269259b73ca1932184af84
[ "UPL-1.0" ]
null
null
null
pootlestuff/watchables.py
pootle/pootles_utils
bb47103e71ccc4fa01269259b73ca1932184af84
[ "UPL-1.0" ]
null
null
null
""" This module provides classes that support observers, smart value handling and debug functions All changes to values nominate an agent, and observers nominate the agent making changes they are interested in. It supercedes the pvars module """ import logging, sys, threading, pathlib, math, json from enum import Enum, auto as enumauto, Flag class loglvls(Enum): """ A class for logging levels so data is self identfying """ VAST = logging.DEBUG-1 DEBUG = logging.DEBUG INFO = logging.INFO WARN = logging.WARN ERROR = logging.ERROR FATAL = logging.FATAL NONE = 0 class myagents(Flag): NONE = 0 app = enumauto() user = enumauto() class wflags(Flag): NONE = 0 DISABLED = enumauto() class watchable(): """ provides a 'smart' object that provides basic observer functionality around an object. Changes to the value can be policed, and updates have to provide an agent that is performing the update. Observers can then request to be notified when the value is changed by specific agents. """ def __init__(self, value, app, flags=wflags.NONE, loglevel=loglvls.INFO): """ creates a new watchable. Initialises the internal value and sets an empty observers list value: the initial value for the object. Not validated! app : the app instance for this. Used for logging and for validating agents """ self._val=value self.app=app self.observers=None self.oblock=threading.Lock() self.flags=flags self.loglevel=loglevel self.log(loglvls.DEBUG, 'watchable type %s setup with value %s' % (type(self).__name__, self._val)) def setValue(self, value, agent): """ Updates the value of a watchable or the loglevel. if not a loglevel, this validates and converts (if relevant) the requested value. If the value is valid and different from the current value, checks for and calls any observers interested in changes by the given agent. 
""" if isinstance(value, loglvls): self.loglevel = value return False if isinstance(value, wflags): self.flags=value return False assert isinstance(agent, self.app.agentclass), 'unexpected value %s of type %s in setValue' % (value, type(value).__name__) newvalue=self.validValue(value, agent) if newvalue != self._val: self.notify(newvalue, agent) return True else: self.log(loglvls.DEBUG,'value unchanged (%s)' % self._val) return False def getValue(self): return self._val def validValue(self, value, agent=None): """ validates the given value and returns the canonical value which will be stored. Raise an exception if the value is invalid 'Real' classes must implement this """ raise NotImplementedError() def notify(self, newvalue, agent): if self.observers: clist=None with self.oblock: if agent in self.observers: clist=self.observers[agent].copy() oldvalue=self._val self._val=newvalue if clist: for ob in clist: ob(oldValue=oldvalue, newValue=newvalue, agent=agent, watched=self) self.log(loglvls.DEBUG,'value changed (%s)- observers called' % self._val) else: self._val=newvalue self.log(loglvls.DEBUG,'value changed (%s)- no observers' % self._val) def addNotify(self, callback, agent): assert callable(callback) assert isinstance(agent, self.app.agentclass) self.log(loglvls.DEBUG,'added watcher %s' % callback.__name__) with self.oblock: if self.observers is None: self.observers={agent:[callback]} elif agent in self.observers: self.observers[agent].append(callback) else: self.observers[agent]=[callback] def dropNotify(self, callback, agent): with self.oblock: aglist=self.observers[agent] ix = aglist.index(callback) aglist.pop(ix) def log(self, loglevel, *args, **kwargs): """ request a logging operation. This does nothing if the given loglevel is < the loglevel set in the object """ if loglevel.value >= self.loglevel.value: self.app.log(loglevel, *args, **kwargs) class textWatch(watchable): """ A refinement of watchable for text strings. 
""" def validValue(self, value, agent): """ value : the requested new value for the field, can be anything that str() takes, but None will fail. agent : who asked for then change (ignored here) returns : the valid new value (this is always a str) raises : Any error that str() can raise """ if value is None: raise ValueError('None is not a valid textVar value') return str(value) class floatWatch(watchable): """ A refinement of watchable that restricts the value to numbers - simple floating point. """ def __init__(self, *, maxv=sys.float_info.max, minv=-sys.float_info.max, clamp=False, allowNaN=True, **kwargs): """ Makes a float given min and max values. The value can be set clamped to prevent failures minv : the lowest allowed value - use 0 to allow only positive numbers maxv : the highest value allowed clamp : if True all values that can float() are accepted for updating, but are restricted to be between minv and maxv """ self.maxv=float(maxv) self.minv=float(minv) self.clamp=clamp==True self.allowNaN=allowNaN super().__init__(**kwargs) def validValue(self, value, agent): """ value : the requested new value for the field, can be anything that float(x) can handle that is between minv and maxv - or if clamp is True, any value agent : who asked for then change (ignored here) returns : the valid new value (this is always a float) raises : ValueError if the provided value is invalid """ av=float(value) if math.isnan(av) and self.allowNaN: return av if self.clamp: return self.minv if av < self.minv else self.maxv if av > self.maxv else av if self.minv <= av <= self.maxv: return av raise ValueError('value {} is outside range {} to {}'.format(value, self.minv, self.maxv)) class intWatch(watchable): """ A refinement of watchable that restricts the field value to integer numbers optionally within a range. 
""" def __init__(self, maxv=None, minv=None, clamp=False, **kwargs): """ creates an integer var maxv: None if unbounded maximum else anything that int() accepts minv: None if unbounded minimum else anything that int() accepts clamp: if True then value is clamped to maxv and minv (either can be None for unbounded in either 'direction' """ self.maxv=maxv if maxv is None else int(maxv) self.minv=minv if minv is None else int(minv) self.clamp=clamp==True super().__init__(**kwargs) def validValue(self, value, agent): """ value : the requested new value for the field, can be anything that int() can handle that is between minv and maxv - or if clamp is True, any value agent : who asked for then change (ignored here) returns : the valid new value (this is always an int) raises : ValueError if the provided value is invalid """ av=int(value) if self.clamp: if not self.minv is None and av < self.minv: return self.minv if not self.maxv is None and av > self.maxv: return self.maxv return av if (self.minv is None or av >= self.minv) and (self.maxv is None or av <= self.maxv): return av raise ValueError('value {} is outside range {} to {} for watchable'.format(value, self.minv, self.maxv)) def increment(self, agent, count=1): incer=int(count) newval=self.getValue()+incer self.setValue(newval, agent) return newval class enumWatch(watchable): """ a watchable that can only take a specific set of values, and can wrap / clamp values. 
It also allows values to be cycled through """ def __init__(self, vlist, wrap=True, clamp=False, **kwargs): self.wrap=wrap == True self.clamp=clamp == True self.vlist=vlist super().__init__(**kwargs) def validValue(self, value, agent): if not value in self.vlist: raise ValueError('value (%s) not valid' % value) return value def getIndex(self): return self.vlist.index(self._val) def increment(self, agent, inc=1): newi=self.getIndex()+inc if 0 <= newi < len(self.vlist): return self.setValue(self.vlist[newi], agent) elif self.wrap: if newi < 0: useval = self.vlist[-1] else: useval = self.vlist[0] elif self.clamp: if newi < 0: useval = self.vlist[0] else: useval = self.vlist[-1] else: raise ValueError('operation exceeds list boundary') self.setValue(useval, agent) def setIndex(self, ival, agent): if 0 <= ival < len(self.vlist): return self.setValue(self.vlist[ival], agent) else: raise ValueError('index out of range') class btnWatch(watchable): """ For simple click buttons that always notify """ def setValue(self, value, agent): if isinstance(value, loglvls): self.loglevel = value return False if isinstance(value, wflags): self.flags=value return False assert isinstance(agent, self.app.agentclass) self.notify(self._val, agent) return True class folderWatch(watchable): """ Internally. the value is a pathlib path to a folder (subfolders are created automatically). 
""" def __init__(self, value, **kwargs): super().__init__(value=self.validValue(value, None), **kwargs) def validValue(self, value, agent): tp=pathlib.Path(value).expanduser() if tp.exists(): if tp.is_dir(): return tp else: raise ValueError('%s is not a folder' % str(tp)) else: tp.mkdir(parents=True, exist_ok=True) return tp def getValue(self): return str(self._val) def getFolder(self): return self._val def currentfilenames(self, includes=None, excludes=None): """ returns names of files currently in this folder """ return [pp.name for pp in self.getValue().iterdir() if pp.is_file() and (True if includes is None else [1 for x in includes if pp.name.endswith(x)]) and (True if excludes is None else [1 for x in excludes if not pp.name.endswith(x)])] class watchablegroup(object): def __init__(self, value, wabledefs, loglevel=None): """ value : dict of preferred values for watchables in this activity (e.g. from saved settings file) wabledefs: a list of 5-tuples that define each watchable with the following entries: 0: name of the watchable 1: class of the watchable 2: default value of the watchable 3: True if the watchable is returned by fetchsettings (as a dict member) 4: kwargs to use when setting up the watchable """ self.perslist=[] self.loglevel=loglvls.INFO if loglevel is None else loglevel for awable in wabledefs: ch=self.makeChild(defn=awable, value=awable[2] if value is None else value.get(awable[0], awable[2])) if ch is None: raise ValueError('child construction failed - see log') setattr(self, awable[0], ch) if awable[3]: self.perslist.append(awable[0]) def makeChild(self, value, defn): """ returns a new object with this object as the app using a definition list value : value for the defn: a list of 5-tuples that define each watchable with the following entries: 0: name of the watchable - not used 1: class of the watchable 2: default value of the watchable - only used if value is None 3: True if then watchable is returned by fetchsettings (as a dict member) - 
not used 4: kwargs to use when setting up the watchable """ deflen=len(defn) if deflen==4: params={} elif deflen==5: params=defn[4] else: raise ValueError('there are not 4 or 5 entries in this definition for class %s: %s' % (type(self).__name__, defn)) try: vv=defn[2] if value is None else value return defn[1](app=self, value=vv, **params) except: print('Exception in makeChild for class %s' % defn[1], ('using defn value (%s)' % defn[2]) if value is None else str(vv)) print('extra keyword args', params) print('input values:', value) self.log(loglvls.ERROR,'class %s exception making variable %s' % (type(self).__name__, defn[0]), exc_info=True, stack_info=True) return None def fetchsettings(self): return {kv: getattr(self,kv).getValue() for kv in self.perslist} def applysettings(self, settings, agent): for k,v in settings: if k in self.perslist: getattr(self, k).setValue(v, agent) class watchablesmart(watchablegroup): """ This class can act as a complete app, or as a part of an app. For a complete app: sets up logging for the app for a component of an app: passes logging calls up to the app. 
value: for the top level (app is None), if a string, this is the file name for json file which should yield a dict with the settings to be applied in construction otherwise id should be a dict with the settings lower levels always expect a dict app: If app is None, this node is the app, otherwise it should be the app object (which provides logging and save / restore settings """ def __init__(self, value, app=None, loglevel=loglvls.INFO, **kwargs): if app==None: # this is the real (top level) app if loglevel is None or loglevel is loglvls.NONE: self.logger=None print('%s no logging' % type(self).__name__) else: self.agentclass=myagents self.logger=logging.getLogger(__loader__.name+'.'+type(self).__name__) chandler=logging.StreamHandler() chandler.setFormatter(logging.Formatter(fmt= '%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s', datefmt= "%M:%S")) self.logger.addHandler(chandler) self.logger.setLevel(loglevel.value) self.log(loglvls.INFO,'logging level is %s' % loglevel) self.startsettings, lmsg, self.settingsfrom = loadsettings(value) self.log(loglvls.INFO, lmsg) else: self.app=app self.agentclass=app.agentclass self.startsettings=value super().__init__(value=self.startsettings, loglevel=loglevel, **kwargs) def log(self, level, msg, *args, **kwargs): if hasattr(self,'app'): if self.loglevel.value <= level.value: self.app.log(level, msg, *args, **kwargs) else: if self.logger: self.logger.log(level.value, msg, *args, **kwargs) elif level.value >= loglvls.WARN: print(msg) def savesettings(self, oldValue, newValue, agent, watched): if hasattr(self, 'app'): raise ValueError('only the app level can save settings') try: setts = self.fetchsettings() except: self.log(loglvls.WARN,'fetchsettings failed', exc_info=True, stack_info=True) setts = None if not setts is None: try: settstr=json.dumps(setts, indent=4) except: self.log(loglvls.WARN,'json conversion of these settings failed', exc_info=True, stack_info=True) 
self.log(loglvls.WARN,str(setts)) settstr=None if not settstr is None: try: with self.settingsfrom.open('w') as sfo: sfo.write(settstr) except: self.log(loglvls.WARN,'save settings failed to write file', exc_info=True, stack_info=True) return self.log(loglvls.INFO,'settings saved to file %s' % str(self.settingsfrom)) class watchablepigpio(watchablesmart): """ a root class that adds in pigpio setup to watchablesmart """ def __init__(self, app=None, pigp=None, **kwargs): """ if the app has a pio attribute, (an instance of pigpio.pi), that is used otherwise one is set up. """ if not app is None and hasattr(app,'pio'): self.pio=app.pio self.mypio=False elif pigp is None: import pigpio ptest=pigpio.pi() if not ptest.connected: raise ValueError('pigpio failed to initialise') self.pio=ptest self.mypio=True else: self.pio=pigp self.mypio=False if not self.pio.connected: raise ValueError('pigpio is not connected') super().__init__(app=app, **kwargs) def close(self): if self.mypio: self.pio.stop() self.mypio=False self.pio=None class watchableAct(watchablegroup): """ An app can have a number of optional activities (that can have their own threads, watched vars etc. This class provides useful common bits for such activities. It provides: A way to set up the watchable variables for the class, using passed in values (for saved settings for example) with defaults if a value isn't passed. A way to automatically retrieve values for a subset of watchable variables (e.g. to save values as a known config) logging via the parent app using Python's standard logging module """ def __init__(self, app, **kwargs): self.app=app self.agentclass=app.agentclass super().__init__(**kwargs) def log(self, loglevel, *args, **kwargs): """ request a logging operation. 
This does nothing if the given loglevel is < the loglevel set in the object """ if self.loglevel.value <= loglevel.value: self.app.log(loglevel, *args, **kwargs) class watchableApp(object): def __init__(self, agentclass=myagents, loglevel=None): self.agentclass=agentclass if loglevel is None or loglevel is loglvls.NONE: self.logger=None print('%s no logging' % type(self).__name__) else: self.logger=logging.getLogger(__loader__.name+'.'+type(self).__name__) chandler=logging.StreamHandler() chandler.setFormatter(logging.Formatter(fmt= '%(asctime)s %(levelname)7s (%(process)d)%(threadName)12s %(module)s.%(funcName)s: %(message)s', datefmt= "%M:%S")) self.logger.addHandler(chandler) self.logger.setLevel(loglevel.value) def log(self, level, msg, *args, **kwargs): if self.logger: self.logger.log(level.value, msg, *args, **kwargs) def loadsettings(value): if isinstance(value, str): spath=pathlib.Path(value).expanduser() settingsfrom=spath if spath.is_file(): try: with spath.open('r') as spo: startsettings=json.load(spo) return startsettings, 'app settings loaded from file %s' % spath, spath except: return {}, 'failed to load settings from %s - default values used' % spath, spath else: return {}, 'app settings file %s not found - default values used' % str(spath), spath elif hasattr(value,'keys'): return value, 'using settings from passed object', None elif value is None: return {}, 'settings not specified, default values used', None else: return {}, 'setings not processed from passed %s' % type(values).__name__, None
38.475676
177
0.594924
3.234375
4a67ee91447ce787f0f3b06732979750489cf134
826
js
JavaScript
client/src/js/render/clock.js
yg-0103/pomodoro-clone
1ce5a0edf42927afea2fa41a9b89cca7b0f88d28
[ "MIT" ]
1
2021-01-15T08:52:00.000Z
2021-01-15T08:52:00.000Z
client/src/js/render/clock.js
yg-0103/pomodoro-clone
1ce5a0edf42927afea2fa41a9b89cca7b0f88d28
[ "MIT" ]
58
2021-01-18T01:23:33.000Z
2021-01-26T08:39:34.000Z
client/src/js/render/clock.js
yg-0103/pomodoro-clone
1ce5a0edf42927afea2fa41a9b89cca7b0f88d28
[ "MIT" ]
4
2021-01-16T03:03:54.000Z
2021-01-18T17:28:28.000Z
import Pomodoro from '../time'; import fetch from '../axios/fetch'; export default async function () { // 서버에서 설정된 시간들을 가져온다. try { const { long_interval, auto_start } = await fetch.settings(); // 상태에 따라 어떤시간을 렌더링할지 정한다. // 설정된 시간이 0분이면 1분을 넣어준다. const curTime = (await fetch.curClockTime()) || 1; const pomodoro = new Pomodoro(curTime, long_interval, auto_start); // 설정된 시간을 랜더링한다. pomodoro.setTimeText(); const $nav = document.querySelector('.main__btn-group'); $nav.addEventListener('click', async (e) => { if (e.target === e.currentTarget) return; // 네비게이션 버튼이 클릭되면 현재 설정된 시간으로 초기화 되고 초기화 된 시간을 다시 랜더링한다. pomodoro.minute = await fetch.curClockTime(); pomodoro.second = 0; pomodoro.setTimeText(); }); } catch (e) { console.error(e); } }
28.482759
70
0.631961
3.0625
f01e8e597dc20bba7caf3b9b0fddc57695c216de
5,316
py
Python
train.py
ThiruRJST/Deformed-Yolo
c9eb4e8c090dff0e9fc4f8652897ff2c59dce889
[ "MIT" ]
1
2021-09-10T17:20:09.000Z
2021-09-10T17:20:09.000Z
train.py
ThiruRJST/Deformed-Yolo
c9eb4e8c090dff0e9fc4f8652897ff2c59dce889
[ "MIT" ]
1
2021-09-10T17:19:54.000Z
2021-09-11T08:17:14.000Z
wandb/run-20210904_163431-3lkn6hoe/files/code/train.py
ThiruRJST/Deformed-Yolo
c9eb4e8c090dff0e9fc4f8652897ff2c59dce889
[ "MIT" ]
null
null
null
from pandas.core.algorithms import mode import torch import torch.nn as nn from albumentations import Compose,Resize,Normalize from albumentations.pytorch import ToTensorV2 import wandb import time import torchvision import torch.nn.functional as F import torch.optim as optim from torch.cuda.amp import autocast,GradScaler import os import numpy as np from tqdm import tqdm from callbacks import EarlyStopping import pandas as pd from torch.utils.data import Dataset, DataLoader import cv2 import torch.nn.functional as F import random from build_model import Deformed_Darknet53 torch.manual_seed(2021) np.random.seed(2021) random.seed(2021) torch.backends.cudnn.benchmark = True torch.backends.cudnn.deterministic = True DEVICE = "cuda:0" if torch.cuda.is_available() else "cpu" TOTAL_EPOCHS = 100 scaler = GradScaler() early_stop = EarlyStopping() wandb.init(project='deformed-darknet',entity='tensorthug',name='new-darknet-256x256_32') print("***** Loading the Model in {} *****".format(DEVICE)) Model = Deformed_Darknet53().to(DEVICE) print("Model Shipped to {}".format(DEVICE)) data = pd.read_csv("data.csv") train_loss_fn = nn.BCEWithLogitsLoss() val_loss_fn = nn.BCEWithLogitsLoss() optim = torch.optim.Adam(Model.parameters()) wandb.watch(Model) class dog_cat(Dataset): def __init__(self,df,mode="train",folds=0,transforms=None): super(dog_cat,self).__init__() self.df = df self.mode = mode self.folds = folds self.transforms = transforms if self.mode == "train": self.data = self.df[self.df.folds != self.folds].reset_index(drop=True) else: self.data = self.df[self.df.folds == self.folds].reset_index(drop=True) def __len__(self): return len(self.data) def __getitem__(self,idx): img = cv2.imread(self.data.loc[idx,"Paths"]) label = self.data.loc[idx,'Labels'] if self.transforms is not None: image = self.transforms(image=img)['image'] return image,label def train_loop(epoch,dataloader,model,loss_fn,optim,device=DEVICE): model.train() epoch_loss = 0 epoch_acc = 0 #start_time = 
time.time() pbar = tqdm(enumerate(dataloader),total=len(dataloader)) for i,(img,label) in pbar: optim.zero_grad() img = img.to(DEVICE).float() label = label.to(DEVICE).float() #LOAD_TIME = time.time() - start_time with autocast(): yhat = model(img) #Loss Calculation train_loss = loss_fn(input = yhat.flatten(), target = label) out = (yhat.flatten().sigmoid() > 0.5).float() correct = (label == out).float().sum() scaler.scale(train_loss).backward() scaler.step(optim) scaler.update() epoch_loss += train_loss.item() epoch_acc += correct.item() / out.shape[0] train_epoch_loss = epoch_loss / len(dataloader) train_epoch_acc = epoch_acc / len(dataloader) wandb.log({"Training_Loss":train_epoch_loss}) wandb.log({"Training_Acc":train_epoch_acc}) #print(f"Epoch:{epoch}/{TOTAL_EPOCHS} Epoch Loss:{epoch_loss / len(dataloader):.4f} Epoch Acc:{epoch_acc / len(dataloader):.4f}") return train_epoch_loss,train_epoch_acc def val_loop(epoch,dataloader,model,loss_fn,device = DEVICE): model.eval() val_epoch_loss = 0 val_epoch_acc = 0 pbar = tqdm(enumerate(dataloader),total=len(dataloader)) with torch.no_grad(): for i,(img,label) in pbar: img = img.to(device).float() label = label.to(device).float() yhat = model(img) val_loss = loss_fn(input=yhat.flatten(),target=label) out = (yhat.flatten().sigmoid()>0.5).float() correct = (label == out).float().sum() val_epoch_loss += val_loss.item() val_epoch_acc += correct.item() / out.shape[0] val_lossd = val_epoch_loss / len(dataloader) val_accd = val_epoch_acc / len(dataloader) wandb.log({"Val_Loss":val_lossd,"Epoch":epoch}) wandb.log({"Val_Acc":val_accd/len(dataloader),"Epoch":epoch}) return val_lossd,val_accd if __name__ == "__main__": train_per_epoch_loss,train_per_epoch_acc = [],[] val_per_epoch_loss,val_per_epoch_acc = [],[] train = dog_cat(data,transforms=Compose([Resize(256,256),Normalize(),ToTensorV2()])) val = dog_cat(data,mode='val',transforms=Compose([Resize(256,256),Normalize(),ToTensorV2()])) train_load = 
DataLoader(train,batch_size=32,shuffle=True,num_workers=4) val_load = DataLoader(val,batch_size=32,num_workers=4) for e in range(TOTAL_EPOCHS): train_loss,train_acc = train_loop(e,train_load,Model,train_loss_fn,optim) val_loss,val_acc = val_loop(e,val_load,Model,val_loss_fn) train_per_epoch_loss.append(train_loss) train_per_epoch_acc.append(train_acc) val_per_epoch_loss.append(val_loss) val_per_epoch_acc.append(val_acc) print(f"TrainLoss:{train_loss:.4f} TrainAcc:{train_acc:.4f}") print(f"ValLoss:{val_loss:.4f} ValAcc:{val_acc:.4f}") early_stop(Model,val_loss) if early_stop.early_stop: break
29.04918
133
0.659518
3.234375
0ce58d7de1508c5e2496368e37a432c416830c42
2,183
py
Python
lib_dsp/iir/iir/design/iir.py
PyGears/lib-dsp
a4c80882f5188799233dc9108f91faa4bab0ac57
[ "MIT" ]
3
2019-08-26T17:32:33.000Z
2022-03-19T02:05:02.000Z
pygears_dsp/lib/iir.py
bogdanvuk/pygears-dsp
ca107d3f9e8d02023e9ccd27f7bc95f10b5aa995
[ "MIT" ]
null
null
null
pygears_dsp/lib/iir.py
bogdanvuk/pygears-dsp
ca107d3f9e8d02023e9ccd27f7bc95f10b5aa995
[ "MIT" ]
5
2019-09-18T18:00:13.000Z
2022-03-28T11:07:26.000Z
from pygears import gear, Intf from pygears.lib import dreg, decouple, saturate, qround @gear def iir_1dsos(din, *, a, b, gain): # add input gain and init delayed inputs zu0 = din * gain zu1 = zu0 | dreg(init=0) zu2 = zu1 | dreg(init=0) # perform b coefficient sum a1 = (zu1 * b[1]) + (zu2 * b[2]) a2 = a1 + (zu0 * b[0]) # declare output interface and its type y = Intf(a2.dtype) # init delayed outputs zy1 = y | decouple(init=0) zy2 = zy1 | dreg(init=0) # perform a coefficient sum b1 = (zy2 * a[2]) + (zy1 * a[1]) # add both sums and set output y |= (a2 - b1) | qround(fract=a2.dtype.fract) | saturate(t=a2.dtype) return y @gear def iir_2tsos(din, *, a, b, gain): # add input gain x = din * gain # declare output interface and its type y = Intf(din.dtype) # perform first tap multiplication and sum z0 = ((x * b[2]) - (y * a[2])) # delay first sum output z0_delayed = z0 | dreg(init=0) # perform second tap multiplication and sum z1 = ((x * b[1]) + z0_delayed - (y * a[1])) # delay second sum output z1_delayed = z1 | decouple(init=0) # perform final sum and set output y |= ((x * b[0]) + z1_delayed) | qround(fract=din.dtype.fract) | saturate(t=din.dtype) return y @gear def iir_df1dsos(din, *, a, b, gain, ogain): # init temp temp = din # add cascades for all b coefficients for i in range(len(b)): # format every cascaded output as input temp = temp | iir_1dsos(a=a[i], b=b[i], gain=gain[i]) | qround(fract=din.dtype.fract) | saturate(t=din.dtype) # add output gain and format as input dout = (temp * ogain) | qround(fract=din.dtype.fract) | saturate(t=din.dtype) return dout @gear def iir_df2tsos(din, *, a, b, gain, ogain): # init temp temp = din # add cascades for all b coefficients for i in range(len(b)): # format every cascaded output as input temp = temp | iir_2tsos(a=a[i], b=b[i], gain=gain[i]) # add output gain and format as input dout = (temp * ogain) | qround(fract=din.dtype.fract) | saturate(t=din.dtype) return dout
24.255556
117
0.601466
3.28125
72352eac308cfa7475d88b80208a2f269df1b337
4,711
lua
Lua
Light Sword.lua
xVoid-xyz/Roblox-Scripts
7eb176fa654f2ea5fbc6bcccced1b15df7ed82c2
[ "BSD-3-Clause" ]
70
2021-02-09T17:21:32.000Z
2022-03-28T12:41:42.000Z
Light Sword.lua
xVoid-xyz/Roblox-Scripts
7eb176fa654f2ea5fbc6bcccced1b15df7ed82c2
[ "BSD-3-Clause" ]
4
2021-08-19T22:05:58.000Z
2022-03-19T18:58:01.000Z
Light Sword.lua
xVoid-xyz/Roblox-Scripts
7eb176fa654f2ea5fbc6bcccced1b15df7ed82c2
[ "BSD-3-Clause" ]
325
2021-02-26T22:23:41.000Z
2022-03-31T19:36:12.000Z
Player = game:GetService("Players").LocalPlayer Cha = Player.Character RShoulder = Cha.Torso['Right Shoulder'] Tool = Instance.new("HopperBin",Player.Backpack) Tool.Name = "ice sword" function onKeyDown(key) key = key:lower() if key == "e" then wal = not wal if wal == true then wl=Instance.new("Part",workspace) wl.BrickColor=BrickColor.new("Toothpaste") wl.Material="Ice" wl.Size=Vector3.new(10,7,2) wl.Anchored=true wl.CFrame=Cha.Torso.CFrame*CFrame.new(0,0,-5) wl2=wl:Clone() wl2.Parent=Workspace wl2.Size=Vector3.new(2,7,10) wl2.CFrame=Cha.Torso.CFrame*CFrame.new(-5,0,0) wl3=wl2:Clone() wl3.Parent=Workspace wl3.CFrame=Cha.Torso.CFrame*CFrame.new(5,0,0) wl4=wl:Clone() wl4.Parent=Workspace wl4.CFrame=Cha.Torso.CFrame*CFrame.new(0,0,5) else for i=1,10 do wait() wl.Transparency=wl.Transparency +.1 wl2.Transparency=wl2.Transparency +.1 wl3.Transparency=wl3.Transparency +.1 wl4.Transparency=wl4.Transparency +.1 wait() end wl:remove() wl2:remove() wl3:remove() wl4:remove() end end end function onClicked(mouse) if (not vDebounce) then vDebounce = true wa = Instance.new("Part",Char) wa.Transparency=1 wa.CanCollide = false wa.Size = Vector3.new(1, 1, 1) wa:BreakJoints() Weld3 = Instance.new("Weld",wa) Weld3.Part0 = Blade Weld3.Part1 = wa Weld3.C0 = CFrame.new(0, 0, -2) * CFrame.Angles(0, 0, 0) function touch(hit) if hit.Parent:findFirstChild("Humanoid") ~= nil then hit.Parent.Humanoid.Health=hit.Parent.Humanoid.Health-5 end end wa.Touched:connect(touch) animation = Instance.new("Animation") animation.Name = "SlashAnim" animation.AnimationId = "http://www.roblox.com/Asset?ID=94161088" animTrack = Cha.Humanoid:LoadAnimation(animation) animTrack:Play() for i = 1,26 do wait() p = Instance.new("Part",workspace) p.FormFactor="Custom" p.Size=Vector3.new(.5,.5,.5) p.TopSurface = 0 p.BottomSurface = 0 p.BrickColor=BrickColor.new("Toothpaste") p.Transparency=.3 p.CanCollide=false p.Anchored=true p.CFrame 
=(Blade.CFrame*CFrame.new(0,0,-2))*CFrame.Angles(math.random(-3,3),math.random(-3,3),math.random(-3,3)) game.Debris:AddItem(p,.1) end wa:remove() vDebounce = false end end Tool.Selected:connect(function(mouse) mouse.Button1Down:connect(function() onClicked(mouse) end) mouse.KeyDown:connect(onKeyDown) --==THE ASSIMBLE==-- Char=Instance.new("Model",Cha) -- CHA not CHAR Handle = Instance.new("Part", Char) Handle.FormFactor = "Custom" Handle.Size = Vector3.new(1, -1, 1) Handle.TopSurface = "Smooth" Handle.BottomSurface = "Smooth" Handle.BrickColor = BrickColor.new("Toothpaste") Handle.Reflectance = 0 Handle:BreakJoints() Handle.CanCollide=false Mesh = Instance.new("SpecialMesh", Handle) Mesh.MeshType = "Cylinder" Mesh.Scale = Vector3.new(1, 1, 1) HandleWeld = Instance.new("Weld", Char) HandleWeld.Part0 = Cha["Right Arm"] HandleWeld.Part1 = Handle HandleWeld.C0 = CFrame.new(0, -1, 0) * CFrame.Angles(0, math.pi/2, 0) Power = Instance.new("Part", Char) Power.FormFactor = "Custom" Power.Size = Vector3.new(1, 1, 1) Power.TopSurface = "Smooth" Power.BottomSurface = "Smooth" Power.BrickColor = BrickColor.new("Institutional white") Power.Reflectance = 0 Power:BreakJoints() Power.CanCollide=false Mesh = Instance.new("SpecialMesh", Power) Mesh.MeshType = "Sphere" Mesh.Scale = Vector3.new(1, 1, 1) PowerWeld = Instance.new("Weld", Char) PowerWeld.Part0 = Cha["Right Arm"] PowerWeld.Part1 = Power PowerWeld.C0 = CFrame.new(0, -1, 1) * CFrame.Angles(0, 0, 0) Detail = Instance.new("Part", Char) Detail.FormFactor = "Custom" Detail.Size = Vector3.new(1, -1, 1) Detail.TopSurface = "Smooth" Detail.BottomSurface = "Smooth" Detail.BrickColor = BrickColor.new("Institutional white") Detail.Reflectance = 0 Detail:BreakJoints() Detail.CanCollide=false Mesh = Instance.new("SpecialMesh", Detail) Mesh.MeshType = "Cylinder" Mesh.Scale = Vector3.new(1, 1, 1) DetailWeld = Instance.new("Weld", Char) DetailWeld.Part0 = Cha["Right Arm"] DetailWeld.Part1 = Detail DetailWeld.C0 = CFrame.new(0, -1, 
math.rad(-30)) * CFrame.Angles(0, 0, math.rad(90)) Blade = Instance.new("Part", Char) Blade.FormFactor = "Custom" Blade.Size = Vector3.new(-1, -2, 4) Blade.TopSurface = "Smooth" Blade.BottomSurface = "Smooth" Blade.BrickColor = BrickColor.new("Institutional white") Blade.Reflectance = 0 Blade:BreakJoints() Blade.CanCollide=false Mesh = Instance.new("BlockMesh", Blade) Mesh.Scale = Vector3.new(1, 1, 1) BladeWeld = Instance.new("Weld", Char) BladeWeld.Part0 = Cha["Right Arm"] BladeWeld.Part1 = Blade BladeWeld.C0 = CFrame.new(0, -1, -2) * CFrame.Angles(0, 0, math.rad(90)) end) Tool.Deselected:connect(function(mouse) Char:remove() end)
15.445902
112
0.714286
3.265625
2a898e9ca996780380ac7e002b690ae74c005d3f
1,462
java
Java
chapter_007/src/main/java/ru/job4j/nonblockingcache/NonBlockingCache.java
danailKondov/dkondov
14b3d2940638b2f69072dbdc0a9d7f8ba1b3748b
[ "Apache-2.0" ]
1
2018-05-24T06:36:30.000Z
2018-05-24T06:36:30.000Z
chapter_007/src/main/java/ru/job4j/nonblockingcache/NonBlockingCache.java
danailKondov/dkondov
14b3d2940638b2f69072dbdc0a9d7f8ba1b3748b
[ "Apache-2.0" ]
null
null
null
chapter_007/src/main/java/ru/job4j/nonblockingcache/NonBlockingCache.java
danailKondov/dkondov
14b3d2940638b2f69072dbdc0a9d7f8ba1b3748b
[ "Apache-2.0" ]
1
2018-11-08T23:33:17.000Z
2018-11-08T23:33:17.000Z
package ru.job4j.nonblockingcache; import java.util.concurrent.ConcurrentHashMap; import java.util.function.BiFunction; /** * Class for simple DIY non-blocking cache. * * @since 11/10/2017 * @version 1 */ public class NonBlockingCache { /** * Task storage. */ private ConcurrentHashMap<Integer, Task> storage = new ConcurrentHashMap<>(); /** * Adds new task to storage. If task with same ID * is already present in storage nothing will be added. * * @param task to add. */ public void add(Task task) { storage.putIfAbsent(task.getiD(), task); } /** * Deletes task with same ID. * @param id of task to remove */ public void delete(int id) { storage.remove(id); } /** * Updates task in storage. * @param task to update * @throws OptimisticException if task was already modified */ public void update(Task task) throws OptimisticException { Task result = storage.computeIfPresent(task.getiD(), new BiFunction<Integer, Task, Task>() { @Override public Task apply(Integer integer, Task oldTask) { Task result = null; if(oldTask.getVersion() + 1 == task.getVersion()) { result = task; } else { throw new OptimisticException(); } return result; } }); } }
25.206897
100
0.570451
3.109375
c3c80a79b861ce9007f26e8463ab326a5ddb90fc
1,945
go
Go
portxo/keygen.go
tnakagawa/lit
57c63ed5cc9584bff083047c8fc0b5be1c4fde2f
[ "MIT" ]
560
2016-11-16T02:10:02.000Z
2022-03-26T16:28:58.000Z
portxo/keygen.go
tnakagawa/lit
57c63ed5cc9584bff083047c8fc0b5be1c4fde2f
[ "MIT" ]
374
2016-11-29T21:42:49.000Z
2021-02-16T13:30:44.000Z
portxo/keygen.go
tnakagawa/lit
57c63ed5cc9584bff083047c8fc0b5be1c4fde2f
[ "MIT" ]
126
2016-12-15T21:26:19.000Z
2022-02-22T21:23:03.000Z
package portxo import ( "bytes" "encoding/binary" "fmt" ) // KeyGen describes how to get to the key from the master / seed. // it can be used with bip44 or other custom schemes (up to 5 levels deep) // Depth must be 0 to 5 inclusive. Child indexes of 0 are OK, so we can't just // terminate at the first 0. type KeyGen struct { Depth uint8 `json:"depth"` // how many levels of the path to use. 0 means privkey as-is Step [5]uint32 `json:"steps"` // bip 32 / 44 path numbers PrivKey [32]byte `json:"privkey"` // private key } // Bytes returns the 53 byte serialized key derivation path. // always works func (k KeyGen) Bytes() []byte { var buf bytes.Buffer binary.Write(&buf, binary.BigEndian, k.Depth) binary.Write(&buf, binary.BigEndian, k.Step[0]) binary.Write(&buf, binary.BigEndian, k.Step[1]) binary.Write(&buf, binary.BigEndian, k.Step[2]) binary.Write(&buf, binary.BigEndian, k.Step[3]) binary.Write(&buf, binary.BigEndian, k.Step[4]) buf.Write(k.PrivKey[:]) return buf.Bytes() } // KeyGenFromBytes turns a 53 byte array into a key derivation path. Always works // (note a depth > 5 path is invalid, but this just deserializes & doesn't check) func KeyGenFromBytes(b [53]byte) (k KeyGen) { buf := bytes.NewBuffer(b[:]) binary.Read(buf, binary.BigEndian, &k.Depth) binary.Read(buf, binary.BigEndian, &k.Step[0]) binary.Read(buf, binary.BigEndian, &k.Step[1]) binary.Read(buf, binary.BigEndian, &k.Step[2]) binary.Read(buf, binary.BigEndian, &k.Step[3]) binary.Read(buf, binary.BigEndian, &k.Step[4]) copy(k.PrivKey[:], buf.Next(32)) return } // String turns a keygen into a string func (k KeyGen) String() string { var s string // s = fmt.Sprintf("\tkey derivation path: m") for i := uint8(0); i < k.Depth; i++ { if k.Step[i]&0x80000000 != 0 { // high bit means hardened s += fmt.Sprintf("/%d'", k.Step[i]&0x7fffffff) } else { s += fmt.Sprintf("/%d", k.Step[i]) } } return s }
32.416667
96
0.677635
3.046875
85bdbc597d8ba42af73b254481cdc2da2315503d
1,949
h
C
source/Fenghui_Zhang_Core/inc/renderable_object.h
9prady9/duotone
53d5d8daa9a90ca7ca39698766c267d5b03849cb
[ "BSD-3-Clause" ]
null
null
null
source/Fenghui_Zhang_Core/inc/renderable_object.h
9prady9/duotone
53d5d8daa9a90ca7ca39698766c267d5b03849cb
[ "BSD-3-Clause" ]
null
null
null
source/Fenghui_Zhang_Core/inc/renderable_object.h
9prady9/duotone
53d5d8daa9a90ca7ca39698766c267d5b03849cb
[ "BSD-3-Clause" ]
null
null
null
#ifndef GEOMETRY_RENDERABLE_OBJECT__ #define GEOMETRY_RENDERABLE_OBJECT__ #include "topology_object.h" #include <map> #include <vector> class Face; class Vertex; /** * For regular rendering, we need * (1) coordinates of each vertex * (2) list of faces, each of them has a list of vertices (corners) * (3) the normal of each face for planar modeling * (4) the normal of each corner for polygonal modeling * (5) the material of the object, or each face * (6) color of the object, or each face * (7) texture * * Many of these do not need to be stored with faces or vertices. We * can use property maps instead. These non-core properties are not * critical, i.e., they can be replace or reconstructed easily. * * The Topological Object keeps tracks of the core component * (1) vertices (2) edges (3) rotations * --- Good! * OR * (1) vertices (2) faces with vertex-lists. * --- not good enough, we could have multiple edges between vertices. * OR * DLFL --- the current presentation is bad, but can be fixed. * * Hence it needs * (1) add/delete vertices * (2) add/delete edges */ // Object class. class RenderableObject : public TopologyObject { public: RenderableObject(); std::set<Face*> GetFaces(); float* GetVertexCoordinates(Vertex* v); float* GetFaceNormal(Face* f); void ReComputeFaces(); bool SetCoords(Vertex* v, float*); protected: std::set<Face*> faces_; // property map to store coordinates. // vertex_ID -> coordinates. // TODO: when the vertex is being removed, we have to delete coords pointers. std::map<Vertex*, float*> coords_; // face_ID -> normal. // TODO: when the face is being removed, we have to delete normal pointers. std::map<Face*, float*> normals_; // vertex_ID -> vertex_ID -> face_ID. std::map<Vertex*, std::map<Vertex*, Face*> > face_map_; // TODO: We need to know if the faces are up to date. }; #endif // GEOMETRY_RENDERABLE_OBJECT__
29.530303
79
0.694202
3.0625
6298cfef83d50e3c288ccabb25265909198db02e
1,400
rs
Rust
rust/robot-name/src/lib.rs
TheTonttu/exercism-solutions
25420fc86d4b4a12e45f14f7472546f10f8864ea
[ "MIT" ]
null
null
null
rust/robot-name/src/lib.rs
TheTonttu/exercism-solutions
25420fc86d4b4a12e45f14f7472546f10f8864ea
[ "MIT" ]
null
null
null
rust/robot-name/src/lib.rs
TheTonttu/exercism-solutions
25420fc86d4b4a12e45f14f7472546f10f8864ea
[ "MIT" ]
null
null
null
use once_cell::sync::Lazy; use rand::Rng; use std::collections::HashSet; use std::sync::Mutex; static NAME_REGISTRY: Lazy<Mutex<HashSet<String>>> = Lazy::new(|| Mutex::new(HashSet::new())); #[derive(Default)] pub struct Robot { name: String, } impl Robot { pub fn new() -> Self { Self { name: gen_unique_name(), } } pub fn name(&self) -> &str { self.name.as_str() } pub fn reset_name(&mut self) { unregister_name(&self.name); self.name = gen_unique_name(); } } // Unregister name when robot goes out of scope. impl Drop for Robot { fn drop(&mut self) { unregister_name(&self.name); } } fn unregister_name(name: &str) { NAME_REGISTRY.lock().unwrap().remove(name); } fn gen_unique_name() -> String { let mut registry = NAME_REGISTRY.lock().unwrap(); loop { let new_name = gen_random_name(); if registry.insert(new_name.clone()) { return new_name; } } } fn gen_random_name() -> String { const LETTER_COUNT: usize = 2; const NUMBER_COUNT: usize = 3; let mut rng = rand::thread_rng(); let letters: String = (0..LETTER_COUNT) .map(|_| rng.gen_range('A'..='Z')) .collect(); let numbers: String = (0..NUMBER_COUNT) .map(|_| rng.gen_range('0'..='9')) .collect(); [letters, numbers].concat() }
21.212121
94
0.580714
3.21875
71c93cb15243802170fe3df03855d238e9ee1949
3,557
kt
Kotlin
http4k-aws/src/test/kotlin/org/http4k/aws/AwsRealChunkKeyContentsIfRequiredTest.kt
savagematt/http4k
9cc8ef11121bfbe10a1cd0ca58a17885c297af52
[ "Apache-2.0" ]
null
null
null
http4k-aws/src/test/kotlin/org/http4k/aws/AwsRealChunkKeyContentsIfRequiredTest.kt
savagematt/http4k
9cc8ef11121bfbe10a1cd0ca58a17885c297af52
[ "Apache-2.0" ]
null
null
null
http4k-aws/src/test/kotlin/org/http4k/aws/AwsRealChunkKeyContentsIfRequiredTest.kt
savagematt/http4k
9cc8ef11121bfbe10a1cd0ca58a17885c297af52
[ "Apache-2.0" ]
null
null
null
package org.http4k.aws import com.natpryce.hamkrest.assertion.assertThat import com.natpryce.hamkrest.containsSubstring import com.natpryce.hamkrest.equalTo import org.http4k.client.ApacheClient import org.http4k.core.BodyMode import org.http4k.core.HttpHandler import org.http4k.core.Method.DELETE import org.http4k.core.Method.GET import org.http4k.core.Method.PUT import org.http4k.core.Request import org.http4k.core.Status.Companion.NO_CONTENT import org.http4k.core.Status.Companion.OK import org.http4k.core.then import org.http4k.filter.ChunkKeyContentsIfRequired import org.http4k.filter.ClientFilters import org.http4k.filter.DebuggingFilters import org.http4k.filter.Payload import org.junit.jupiter.api.Disabled import org.junit.jupiter.api.Test class AwsRealChunkKeyContentsIfRequiredTest : AbstractAwsRealS3TestCase() { @Test fun `default usage`() { val requestBodyMode = BodyMode.Memory bucketLifecycle(ClientFilters.ChunkKeyContentsIfRequired(requestBodyMode = requestBodyMode) .then(awsClientFilter(Payload.Mode.Signed)) .then(DebuggingFilters.PrintResponse()) .then(ApacheClient(requestBodyMode = requestBodyMode))) } @Test @Disabled fun `streaming usage`() { val requestBodyMode = BodyMode.Stream bucketLifecycle(ClientFilters.ChunkKeyContentsIfRequired(requestBodyMode = requestBodyMode) .then(awsClientFilter(Payload.Mode.Unsigned)) .then(DebuggingFilters.PrintResponse()) .then(ApacheClient(requestBodyMode = requestBodyMode))) } private fun bucketLifecycle(client: HttpHandler) { val aClient = aClient() val contentOriginal = (1..10 * 1024 * 1024).map { 'a' }.joinToString("") assertThat( "Bucket should not exist in root listing", aClient(Request(GET, s3Root)).bodyString(), !containsSubstring(bucketName)) assertThat( "Put of bucket should succeed", aClient(Request(PUT, bucketUrl)).status, equalTo(OK)) assertThat( "Bucket should exist in root listing", aClient(Request(GET, s3Root)).bodyString(), containsSubstring(bucketName)) assertThat( "Key should not exist in bucket 
listing", aClient(Request(GET, bucketUrl)).bodyString(), !containsSubstring(key)) client(Request(PUT, keyUrl) .body(contentOriginal.byteInputStream(), contentOriginal.length.toLong())) assertThat( "Key should appear in bucket listing", aClient(Request(GET, bucketUrl)).bodyString(), containsSubstring(key)) assertThat( "Key contents should be as expected", aClient(Request(GET, keyUrl)).bodyString().length, equalTo(contentOriginal.length)) assertThat( "Delete of key should succeed", aClient(Request(DELETE, keyUrl)).status, equalTo(NO_CONTENT)) assertThat( "Key should no longer appear in bucket listing", aClient(Request(GET, bucketUrl)).bodyString(), !containsSubstring(key)) assertThat( "Delete of bucket should succeed", aClient(Request(DELETE, bucketUrl)).status, equalTo(NO_CONTENT)) assertThat( "Bucket should no longer exist in root listing", aClient(Request(GET, s3Root)).bodyString(), !containsSubstring(bucketName)) } }
37.442105
99
0.66826
3.03125
df971cb7f86de38ee4e25e3e47bf4158c7fece5a
1,723
ts
TypeScript
src/commands/takeoff/destroy-project.ts
Takeoff-Env/takeoff
5cfac571159c3ee7815582c6888096422460a015
[ "MIT" ]
101
2017-09-26T03:23:23.000Z
2022-01-23T05:06:49.000Z
src/commands/takeoff/destroy-project.ts
Takeoff-Env/takeoff
5cfac571159c3ee7815582c6888096422460a015
[ "MIT" ]
18
2017-09-27T17:02:01.000Z
2018-12-24T11:02:37.000Z
src/commands/takeoff/destroy-project.ts
Takeoff-Env/takeoff
5cfac571159c3ee7815582c6888096422460a015
[ "MIT" ]
12
2017-10-11T16:59:19.000Z
2018-05-16T12:46:33.000Z
import { CommandResult, TakeoffCommand } from 'commands'; import { TakeoffCmdParameters } from 'takeoff'; import { ExitCode } from 'task'; /** * Destroys an project in a non-reversable way */ export = ({ shell, args, opts, rcFile, pathExists, printMessage, runCommand, }: TakeoffCmdParameters): TakeoffCommand => ({ args: '<name>', command: 'destroy', description: 'Destroys the docker containers for a project. Can also optionally remove the folder, this operation cannot be reversed.', group: 'takeoff', options: [ { description: 'Also removes the directory, otherwise only docker images and volumes are destroyed', option: '-r, --remove-dir', }, ], handler(): CommandResult { const [project]: string[] = args.length > 0 ? args : ['default']; printMessage(`Destroying project ${project}`); const envDir = `${rcFile.rcRoot}/projects/${project}`; if (!pathExists(envDir)) { return { code: ExitCode.Error, fail: `The project ${project} doesn't exist` }; } const runCmd = runCommand(`docker-compose -f docker/docker-compose.yml down --rmi all`, envDir); if (runCmd.code !== 0) { return { extra: runCmd.stderr, code: runCmd.code, fail: `Error destroying ${project}` }; } if (opts['r'] || opts['remove-dir']) { printMessage(`Removing folder ${envDir}`); const removeFolder = shell.rm('-rf', `${envDir}`); if (removeFolder.code !== 0) { return { extra: removeFolder.stderr, code: removeFolder.code, fail: `Error deleting ${project}` }; } printMessage(`Folder ${envDir} removed`); } return { code: ExitCode.Success, success: `Successfully destroyed ${project}` }; }, });
30.22807
126
0.639582
3.1875
d29a64249662933eed11e9ead4c0a07c4caae362
1,040
php
PHP
app/Role.php
apurv4193/RYEC-Backend
8682463777afad323d30f832693d5802f00c1dcd
[ "MIT" ]
null
null
null
app/Role.php
apurv4193/RYEC-Backend
8682463777afad323d30f832693d5802f00c1dcd
[ "MIT" ]
null
null
null
app/Role.php
apurv4193/RYEC-Backend
8682463777afad323d30f832693d5802f00c1dcd
[ "MIT" ]
null
null
null
<?php namespace App; use Illuminate\Database\Eloquent\Model; use Auth; use DB; use Config; class Role extends Model { protected $table = 'roles'; protected $fillable = ['id', 'slug', 'name', 'created_by', 'updated_by', 'status']; public function insertUpdate($role) { if (isset($data['id']) && $data['id'] != '' && $data['id'] > 0) { $updateData = []; foreach ($this->fillable as $field) { if (array_key_exists($field, $data)) { $updateData[$field] = $data[$field]; } } return Role::where('id', $data['id'])->update($updateData); } else { return Role::create($data); } } public function getAllRoles() { $roles = Role::where('status', '<>', Config::get('constant.DELETED_FLAG')) ->orderBy('id', 'DESC') ->paginate(Config::get('constant.ADMIN_RECORD_PER_PAGE')); return $roles; } }
25.365854
90
0.493269
3.046875
9cd8960e7fa0ed1792d1b9fe84ef85aa1dd1c2fa
1,422
lua
Lua
frameworks/cocos2d-x/cocos/scripting/lua-bindings/auto/api/WorldClock.lua
TshineZheng/DragonbonesCocos2dx
cf5e251092d23161dd4876353fa26dfe6425ff18
[ "MIT" ]
6
2016-12-28T08:38:00.000Z
2019-03-28T04:51:54.000Z
frameworks/cocos2d-x/cocos/scripting/lua-bindings/auto/api/WorldClock.lua
TshineZheng/DragonbonesCocos2dx
cf5e251092d23161dd4876353fa26dfe6425ff18
[ "MIT" ]
2
2017-02-10T03:48:11.000Z
2017-03-03T10:14:35.000Z
frameworks/cocos2d-x/cocos/scripting/lua-bindings/auto/api/WorldClock.lua
TshineZheng/DragonbonesCocos2dx
cf5e251092d23161dd4876353fa26dfe6425ff18
[ "MIT" ]
5
2017-04-20T07:31:39.000Z
2022-01-16T15:38:14.000Z
-------------------------------- -- @module WorldClock -- @extend IAnimateble -- @parent_module db -------------------------------- -- -- @function [parent=#WorldClock] clear -- @param self -- @return WorldClock#WorldClock self (return value: db.WorldClock) -------------------------------- -- -- @function [parent=#WorldClock] contains -- @param self -- @param #db.IAnimateble value -- @return bool#bool ret (return value: bool) -------------------------------- -- -- @function [parent=#WorldClock] advanceTime -- @param self -- @param #float passedTime -- @return WorldClock#WorldClock self (return value: db.WorldClock) -------------------------------- -- -- @function [parent=#WorldClock] remove -- @param self -- @param #db.Armature armature -- @return WorldClock#WorldClock self (return value: db.WorldClock) -------------------------------- -- -- @function [parent=#WorldClock] add -- @param self -- @param #db.Armature armature -- @return WorldClock#WorldClock self (return value: db.WorldClock) -------------------------------- -- -- @function [parent=#WorldClock] getInstance -- @param self -- @return WorldClock#WorldClock ret (return value: db.WorldClock) -------------------------------- -- -- @function [parent=#WorldClock] WorldClock -- @param self -- @return WorldClock#WorldClock self (return value: db.WorldClock) return nil
26.333333
67
0.548523
3.40625
9d84bf32c2b80a5f28d98f3e880852db8d2e82e4
1,611
swift
Swift
SwiftBooster/Classes/Extension/Helpers/JSON.swift
3pehrbehroozi/SwiftBooster
33651a5718fb2d8334ba6551facd1d02c3cf2001
[ "Apache-2.0" ]
null
null
null
SwiftBooster/Classes/Extension/Helpers/JSON.swift
3pehrbehroozi/SwiftBooster
33651a5718fb2d8334ba6551facd1d02c3cf2001
[ "Apache-2.0" ]
null
null
null
SwiftBooster/Classes/Extension/Helpers/JSON.swift
3pehrbehroozi/SwiftBooster
33651a5718fb2d8334ba6551facd1d02c3cf2001
[ "Apache-2.0" ]
null
null
null
// // JSON.swift // SwiftBooster // // Created by Sepehr Behroozi on 4/17/19. // Copyright © 2019 ayantech.ir. All rights reserved. // import Foundation /// Typealias for [String: Any] public typealias JSONObject = [String: Any] /// Typealias for [Any] public typealias JSONArray = [Any] public func getValue<T>(input: Any?, subscripts: Any...) -> T? { var extractingValue = input subscripts.forEach { (key) in if let intKey = key as? Int { extractingValue = (extractingValue as? [Any])?[intKey] } if let stringKey = key as? String { extractingValue = (extractingValue as? [String: Any])?[stringKey] } } switch T.self { case is Int.Type: let result = extractingValue as? Int ?? (extractingValue as? String)?.toInt() return result as? T case is Double.Type: let result = extractingValue as? Double ?? (extractingValue as? String)?.toDouble() return result as? T case is String.Type: let result = extractingValue as? String ?? (extractingValue as? Int)?.toString() ?? (extractingValue as? Double)?.toString() if result == "null" { return nil } else { return result as? T } case is Bool.Type: var result = extractingValue as? Bool ?? (extractingValue as? String)?.toBool() if result == nil { if let resultInt = extractingValue as? Int { result = resultInt == 1 } } return result as? T default: return extractingValue as? T } }
28.767857
132
0.58473
3.015625
d3bbf78c213e4cecf5fe1674415c41ebcc0c8580
1,079
lua
Lua
csv.lua
prototux/haproxy-summary
db04e6d9416388c1c0f4b386288111cf2b23764b
[ "BSD-2-Clause" ]
1
2017-01-28T17:19:03.000Z
2017-01-28T17:19:03.000Z
csv.lua
prototux/haproxy-summary
db04e6d9416388c1c0f4b386288111cf2b23764b
[ "BSD-2-Clause" ]
null
null
null
csv.lua
prototux/haproxy-summary
db04e6d9416388c1c0f4b386288111cf2b23764b
[ "BSD-2-Clause" ]
null
null
null
-- Small CSV helper -- Source: http://lua-users.org/wiki/LuaCsv local csv = {} function csv.parse(line, sep) local res = {} local pos = 1 sep = sep or ',' while true do local c = string.sub(line,pos,pos) if (c == "") then break end if (c == '"') then local txt = "" repeat local startp,endp = string.find(line,'^%b""',pos) txt = txt..string.sub(line,startp+1,endp-1) pos = endp + 1 c = string.sub(line,pos,pos) if (c == '"') then txt = txt..'"' end until (c ~= '"') table.insert(res,txt) assert(c == sep or c == "") pos = pos + 1 else local startp,endp = string.find(line,sep,pos) if (startp) then table.insert(res,string.sub(line,pos,startp-1)) pos = endp + 1 else table.insert(res,string.sub(line,pos)) break end end end return res end return csv
27.666667
65
0.453197
3.171875
df8d1ae2242076bdf69f9fa487f491b863946bc4
6,998
tsx
TypeScript
app/components/zaposleni/components/zaposleniModal/zaposleniModal.tsx
BogMil/racunovodja
1ba95eafb2a04056ea279f7a93fb1b034564060a
[ "MIT" ]
1
2020-09-19T19:21:58.000Z
2020-09-19T19:21:58.000Z
app/components/zaposleni/components/zaposleniModal/zaposleniModal.tsx
BogMil/racunovodja
1ba95eafb2a04056ea279f7a93fb1b034564060a
[ "MIT" ]
3
2021-01-28T21:01:59.000Z
2022-02-08T17:50:37.000Z
app/components/zaposleni/components/zaposleniModal/zaposleniModal.tsx
BogMil/racunovodja
1ba95eafb2a04056ea279f7a93fb1b034564060a
[ "MIT" ]
null
null
null
import React from 'react'; import { Button, Modal, Form, Row, Col } from 'react-bootstrap'; import { useSelector, useDispatch } from 'react-redux'; import { close, updateZaposleniState, setErrors } from './zaposleniModal.actions'; import { AppStore } from '../../../../reducers'; import { reloadEmployees } from '../../zaposleni.actions'; import * as Service from '../../zaposleni.service'; import { handleResponse } from '../../../../utils/responseHandler'; import { CREATE_MODE, EDIT_MODE } from '../../../../constants/modalModes'; import { ErrorText } from '../../../common/errorText'; import { User } from '../../../auth/auth.store.types'; export default function ZaposleniModalComponent() { const dispatch = useDispatch(); const { zaposleni, mode, show, title, opstine, errors } = useSelector( (state: AppStore) => { return state.zaposleniPage.zaposleniModal; } ); const { prava_pristupa } = useSelector((state: AppStore) => { return state.auth.user as User; }); const handleClose = () => { dispatch(close()); }; const handleChange = (e: any) => { let value = e.target.value; let name = e.target.name; if (name == 'active') value = e.target.checked; dispatch(updateZaposleniState(name, value)); }; const handleSave = async () => { if (mode == CREATE_MODE) handleResponse( await Service.createEmployee(zaposleni), () => { dispatch(reloadEmployees()); dispatch(close()); }, () => {}, (response: any) => { dispatch(setErrors(response.data.errors)); } ); else if (mode == EDIT_MODE) handleResponse( await Service.updateEmployee(zaposleni), () => { dispatch(reloadEmployees()); dispatch(close()); }, () => {}, (response: any) => { dispatch(setErrors(response.data.errors)); } ); }; return ( <Modal backdrop="static" centered show={show} onHide={handleClose} className="noselect" > <Modal.Header closeButton style={{}}> <Modal.Title as="h5">{title}</Modal.Title> </Modal.Header> <Modal.Body> <Form> <Row> <Col md={6}> <Form.Group> <Form.Label>JMBG</Form.Label> <Form.Control name="jmbg" 
placeholder="Unesite JMBG" value={zaposleni.jmbg} onChange={handleChange} /> <ErrorText text={errors?.jmbg} /> </Form.Group> </Col> <Col md={6}> <Form.Group> <Form.Label>Broj zaposlenog</Form.Label> <Form.Control name="sifra" onChange={handleChange} placeholder="Unesite broj zaposlenog" value={zaposleni.sifra} /> <ErrorText text={errors?.sifra} /> </Form.Group> </Col> </Row> <Row> <Col md={7}> <Form.Group> <Form.Label>Prezime</Form.Label> <Form.Control name="prezime" onChange={handleChange} placeholder="Unesite prezime" value={zaposleni.prezime} /> <ErrorText text={errors?.prezime} /> </Form.Group> </Col> <Col md={5}> <Form.Group> <Form.Label>Ime</Form.Label> <Form.Control name="ime" onChange={handleChange} placeholder="Unesite ime" value={zaposleni.ime} /> <ErrorText text={errors?.ime} /> </Form.Group> </Col> </Row> <Row> <Col md={6}> <Form.Group> <Form.Label>Broj računa</Form.Label> <Form.Control name="bankovni_racun" onChange={handleChange} placeholder="Unesite broj računa" value={zaposleni.bankovni_racun} /> <ErrorText text={errors?.bankovni_racun} /> </Form.Group> </Col> <Col md={6}> {prava_pristupa.opiro && ( <Form.Group> <Form.Label>Opština stanovanja</Form.Label> <Form.Control as="select" custom name="id_opstine" onChange={handleChange} value={zaposleni.id_opstine} > <> <option value="">---</option> {opstine.map(opstina => { return ( <option key={opstina.id} value={opstina.id}> {opstina.naziv} </option> ); })} </> </Form.Control> <ErrorText text={errors?.opstina_id} /> </Form.Group> )} </Col> </Row> <Row> <Col md={9}> <Form.Group> <Form.Label>Email</Form.Label> <Form.Control name="email1" onChange={handleChange} placeholder="Unesite Email adresu" value={zaposleni.email1 ?? ''} /> <ErrorText text={errors?.email1} /> </Form.Group> </Col> <Col md={3}> <Form.Group controlId="formBasicCheckbox" style={{ marginTop: 35 }} > <Form.Check custom name="aktivan" type="checkbox" label="Aktivan?" 
checked={zaposleni.aktivan} onChange={handleChange} /> <ErrorText text={errors?.aktivan} /> </Form.Group> </Col> </Row> <Row> <Col md={9}> <Form.Group> <Form.Label>Email 2 (opciono)</Form.Label> <Form.Control name="email2" onChange={handleChange} placeholder="Unesite Email adresu" value={zaposleni.email2 ?? ''} /> <ErrorText text={errors?.email2} /> </Form.Group> </Col> </Row> </Form> </Modal.Body> <Modal.Footer> <Button variant="primary" onClick={handleSave}> Sačuvaj </Button> </Modal.Footer> </Modal> ); }
30.294372
74
0.438697
3.1875
d8d02339b668d288a2c120877157fe869cbd3ed5
7,623
lua
Lua
soccar/gamestates/lobby.lua
Dummiesman/KissMP-Soccar
07eec8951a117daac4f74ecf596ff8f537f78ac9
[ "MIT" ]
2
2021-04-25T20:01:14.000Z
2021-04-26T02:31:40.000Z
soccar/gamestates/lobby.lua
Dummiesman/KissMP-Soccar
07eec8951a117daac4f74ecf596ff8f537f78ac9
[ "MIT" ]
null
null
null
soccar/gamestates/lobby.lua
Dummiesman/KissMP-Soccar
07eec8951a117daac4f74ecf596ff8f537f78ac9
[ "MIT" ]
null
null
null
-- Lobby gamestate for the KissMP soccar mod: players pick teams via chat
-- commands; once everyone has a team (and there are >= 2 players) a short
-- countdown runs and the match starts.
local M = {}
M.name = "Lobby"

-- Seconds accumulated while all clients are ready (countdown to start).
local readyTimer = 0
-- client_id -> team constant, for clients that picked a team this lobby.
local lobbyTeamMap = {}

-- team related stuff

-- Returns the first client id found on `team`, or nil if the team is empty.
local function getFirstIdOnTeam(team)
  for client_id, team2 in pairs(lobbyTeamMap) do
    if team2 == team then return client_id end
  end
  return nil
end

-- Number of clients currently assigned to `team`.
local function getTeamMemberCount(team)
  local c = 0
  for _, team2 in pairs(lobbyTeamMap) do
    if team2 == team then c = c + 1 end
  end
  return c
end

-- True when every connected client has picked a team.
local function allClientsOnTeams()
  local cc = 0
  local ctc = 0
  for client_id, connection in pairs(getConnections()) do
    if lobbyTeamMap[client_id] then ctc = ctc + 1 end
    cc = cc + 1
  end
  return cc == ctc
end

-- List of client ids that have not picked a team yet.
local function getClientsTableWithoutTeam()
  local t = {}
  for client_id, connection in pairs(getConnections()) do
    if not lobbyTeamMap[client_id] then table.insert(t, client_id) end
  end
  return t
end

-- True when `team` cannot accept more members. A missing TEAM_LIMITS entry
-- is treated as "full" (invalid team); a negative limit means "unlimited".
local function checkTeamFull(team)
  local limit = TEAM_LIMITS[team]
  if not limit then return true end
  if limit < 0 then return false end
  return getTeamMemberCount(team) >= limit
end

-- Assigns `client` to `team` and sends the appropriate chat feedback
-- (changed / already on it / newly set).
local function setTeam(client, team)
  local currentTeam = lobbyTeamMap[client:getID()]
  local newTeamName = TEAM_NAMES[team]
  lobbyTeamMap[client:getID()] = team
  if currentTeam and currentTeam ~= team then
    local currentTeamName = TEAM_NAMES[currentTeam]
    sendChatMessage(client, "Changed team from " .. currentTeamName .. " to " .. newTeamName .. ".", {r=1,g=1})
  elseif currentTeam and currentTeam == team then
    sendChatMessage(client, "You're already on the " .. newTeamName .. " team.", {r=1,g=1})
  else
    sendChatMessage(client, "Set team to " .. newTeamName .. ".", {r=1,g=1})
  end
end

-- game start function
local function startGame()
  -- first off, move someone off their team if
  -- the other team is empty
  local cc = getConnectionCount()
  if cc > 1 then
    local rc = getTeamMemberCount(TEAM_RED)
    local bc = getTeamMemberCount(TEAM_BLUE)
    if rc == cc or bc == cc then
      -- We must reassign someone
      if rc == cc then
        local id = getFirstIdOnTeam(TEAM_RED)
        lobbyTeamMap[id] = TEAM_BLUE
        sendChatMessage(getConnection(id), "*** Your team has been reassigned because everyone was on one team. Your new team is Blue ***", {r=1,g=1})
      else
        local id = getFirstIdOnTeam(TEAM_BLUE)
        lobbyTeamMap[id] = TEAM_RED
        sendChatMessage(getConnection(id), "*** Your team has been reassigned because everyone was on one team Your new team is Red ***", {r=1,g=1})
      end
    end
  end

  -- clear existing game participants leftover from any previous runs
  GameData.reset()

  -- add everyone to participants list
  for client_id, _ in pairs(getConnections()) do
    local participant = GameData.createPlayer(client_id)
    GameData.participants[client_id] = participant
    GameData.teams[lobbyTeamMap[client_id]].participants[client_id] = participant
    GameData.participants[client_id].team = lobbyTeamMap[client_id]
  end

  -- remove players 2nd+ vehicles
  -- (keeps each player's first vehicle and the ball vehicle)
  local removeVehiclesTable = {}
  for client_id, _ in pairs(getConnections()) do
    local vc = 0
    for vehicle_id, vehicle in pairs(vehicles) do
      if vehicle:getData():getOwner() == client_id and vehicle:getData():getID() ~= GameData.ballVehicleId then
        vc = vc + 1
        if vc > 1 then table.insert(removeVehiclesTable, vehicle) end
      end
    end
  end
  for _, vehicle in pairs(removeVehiclesTable) do
    vehicle:remove()
  end

  -- move to running state
  StateManager.switchToState(GAMESTATE_RUNNING)
end

-- state stuff

-- Drop the disconnecting client's team assignment.
local function onPlayerDisconnected(client_id)
  lobbyTeamMap[client_id] = nil
end

-- Fresh lobby: clear all team picks and the ready countdown.
local function onEnterState()
  lobbyTeamMap = {}
  readyTimer = 0
end

-- Chat command handler. Returning "" consumes the message (it is not
-- broadcast); returning nothing lets normal chat through.
local function onChatMessage(client_id, message)
  local messageLower = message:lower()

  -- debug
  if GameData.DEBUG_MODE then
    if message == "/s" then
      startGame()
      return ""
    end
  end

  -- team assignment
  if messageLower == "/team blue" or messageLower == "/blue" then
    if not checkTeamFull(TEAM_BLUE) then
      setTeam(getConnection(client_id), TEAM_BLUE)
    else
      sendChatMessage(getConnection(client_id), "This team is full", {r=1})
    end
    return ""
  end
  if messageLower == "/team red" or messageLower == "/red" then
    if not checkTeamFull(TEAM_RED) then
      setTeam(getConnection(client_id), TEAM_RED)
    else
      sendChatMessage(getConnection(client_id), "This team is full", {r=1})
    end
    return ""
  end
  if messageLower == "/random" then
    -- Coin flip, falling back to the other team (then to "full") as needed.
    local r = math.random()
    local attemptTeam = nil
    local alternateTeam = nil
    if r > 0.5 then
      attemptTeam = TEAM_RED
      alternateTeam = TEAM_BLUE
    else
      attemptTeam = TEAM_BLUE
      alternateTeam = TEAM_RED
    end
    if checkTeamFull(attemptTeam) then attemptTeam = alternateTeam end
    if checkTeamFull(attemptTeam) then
      -- can't assign any team?
      sendChatMessage(getConnection(client_id), "All teams are full", {r=1})
    else
      sendChatMessage(getConnection(client_id), "The randomizer assigns you to the " .. TEAM_NAMES[attemptTeam] .. " team.", {r=1, g=1})
      setTeam(getConnection(client_id), attemptTeam)
    end
    return ""
  end

  -- ball assignment
  if messageLower == "/setball" or messageLower == "/ball" then
    -- get clients active vehicle and set it as ballVehicleId
    local client = getConnection(client_id)
    local vehicleId = vehicleIdWrapper(client:getCurrentVehicle())
    if not vehicleId then
      sendChatMessage(getConnection(client_id), "Failed to set ball vehicle", {r=1})
      return ""
    end
    local vehicle = vehicles[vehicleId]
    if not vehicle then
      sendChatMessage(getConnection(client_id), "Failed to set ball vehicle", {r=1})
      return ""
    end
    sendChatMessage(getConnection(client_id), "Ball vehicle set", {g=1})
    GameData.ballVehicleId = vehicle:getData():getID()
    return ""
  end
end

-- Per-tick update: runs the start countdown while everyone is ready,
-- otherwise nags unassigned players roughly once a minute.
local function update(dt)
  local ready = allClientsOnTeams()
  local connectionCount = getConnectionCount()
  if ready and connectionCount >= 2 then
    -- if the timer is 0, we've just entered ready state. Notify clients.
    local startTime = GameData.DEBUG_MODE and 5 or 10
    if readyTimer == 0 then
      broadcastChatMessageAndToast("The game will start in " .. tostring(startTime) .. " second(s)", {r=1,g=1})
    end
    readyTimer = readyTimer + dt

    -- start game after timer ends
    if readyTimer > startTime then
      startGame()
    end
  else
    -- if the timer is not 0, we *were* in ready state, and something happened
    if readyTimer ~= 0 then
      broadcastChatMessageAndToast("Start timer interrupted. All clients are no longer ready.")
    end

    -- notify players that they need a team
    -- (fires each time StateManager.timeInState wraps a 60-second boundary)
    local lobbyNotifTimer = StateManager.timeInState % 60
    local lobbyNotifTimerNext = (StateManager.timeInState + dt) % 60
    if lobbyNotifTimerNext < lobbyNotifTimer then
      broadcastChatMessage("In lobby mode. Waiting for all players to assign a team.")

      -- get the players who have no team
      local noTeamMap = getClientsTableWithoutTeam()
      local noTeamNameMap = {}
      for _,id in pairs(noTeamMap) do
        table.insert(noTeamNameMap, getConnection(id):getName())
      end
      broadcastChatMessage("The following players have not assigned a team yet: " .. strTableToStr(noTeamNameMap), {r=1})
    end
    -- NOTE(review): the reset below is commented out, so after an interruption
    -- the countdown resumes from its previous value (and the "interrupted"
    -- message repeats every tick until ready again) — confirm this is intended.
    -- readyTimer = 0
  end
end

M.onEnterState = onEnterState
M.onChatMessage = onChatMessage
M.onPlayerDisconnected = onPlayerDisconnected
M.update = update

return M
30.987805
150
0.685819
3.375
e7f06cecae55d479e6604b53a295b76a9bdf0276
5,005
py
Python
backend/tests/unit/protocols/application/test_lists.py
pez-globo/pufferfish-software
b42fecd652731dd80fbe366e95983503fced37a4
[ "Apache-2.0" ]
1
2020-10-20T23:47:23.000Z
2020-10-20T23:47:23.000Z
backend/tests/unit/protocols/application/test_lists.py
pez-globo/pufferfish-software
b42fecd652731dd80fbe366e95983503fced37a4
[ "Apache-2.0" ]
242
2020-10-23T06:44:01.000Z
2022-01-28T05:50:45.000Z
backend/tests/unit/protocols/application/test_lists.py
pez-globo/pufferfish-vent-software
f1e5e47acf1941e7c729adb750b85bf26c38b274
[ "Apache-2.0" ]
1
2021-04-12T02:10:18.000Z
2021-04-12T02:10:18.000Z
"""Test the functionality of protocols.application.lists classes."""

from ventserver.protocols.application import lists
from ventserver.protocols.protobuf import mcu_pb as pb


def test_send_new_elements() -> None:
    """Test adding new elements to a list for sending."""
    # 20 log events are pushed into a synchronizer that keeps at most 10
    # elements and serves them in segments of at most 5.
    example_sequence = [
        lists.UpdateEvent(new_elements=[pb.LogEvent(id=i)])
        for i in range(20)
    ]
    synchronizer = lists.SendSynchronizer(
        segment_type=pb.NextLogEvents, max_len=10, max_segment_len=5
    )
    # No output before any request arrives.
    assert synchronizer.output() is None
    for update_event in example_sequence:
        synchronizer.input(update_event)
    assert synchronizer.output() is None
    # The first 10 events should've been discarded
    for next_expected in range(10):
        synchronizer.input(lists.UpdateEvent(next_expected=next_expected))
        output = synchronizer.output()
        assert isinstance(output, pb.NextLogEvents)
        assert output.next_expected == next_expected
        assert output.total == 10
        assert output.remaining == 10
        for (i, event) in enumerate(output.elements):
            # Requests for discarded ids are served from the oldest kept
            # element (id 10) onwards.
            assert event.id == 10 + i
    # Segments should be returned as requested
    for next_expected in range(10, 20):
        synchronizer.input(lists.UpdateEvent(next_expected=next_expected))
        output = synchronizer.output()
        assert isinstance(output, pb.NextLogEvents)
        assert output.next_expected == next_expected
        assert output.total == 10
        assert output.remaining == 10 - (next_expected - 10)
        for (i, event) in enumerate(output.elements):
            assert event.id == next_expected + i
        if next_expected <= 15:
            # Full segments while >= 5 elements remain...
            assert len(output.elements) == 5
        else:
            # ...then progressively shorter tail segments.
            assert len(output.elements) == 5 - (next_expected - 15)
    # New elements should be in the segment resulting from a repeated request
    assert synchronizer.output() is None
    synchronizer.input(lists.UpdateEvent(
        new_elements=[pb.LogEvent(id=20)], next_expected=19
    ))
    output = synchronizer.output()
    assert isinstance(output, pb.NextLogEvents)
    assert output.next_expected == 19
    assert output.total == 10
    assert output.remaining == 2
    for (i, event) in enumerate(output.elements):
        assert event.id == 19 + i
    assert len(output.elements) == 2
    # TODO: add a test where we send all events, then reset expected event to 0.
    # All events should be sent again.


def test_receive_new_elements() -> None:
    """Test adding new elements to a list from receiving."""
    # Six incoming segments: in-order delivery, an overlapping resend,
    # a stale (fully duplicate) segment, and a session-id rollover.
    example_sequence = [
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(0, 5)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(5, 10)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(7, 11)]
        ),
        pb.NextLogEvents(
            session_id=0, elements=[pb.LogEvent(id=i) for i in range(0, 4)]
        ),
        pb.NextLogEvents(session_id=1),
        pb.NextLogEvents(
            session_id=1, elements=[pb.LogEvent(id=i) for i in range(0, 4)]
        ),
    ]
    synchronizer: lists.ReceiveSynchronizer[pb.LogEvent] = \
        lists.ReceiveSynchronizer()
    assert synchronizer.output() is None
    for segment in example_sequence:
        synchronizer.input(segment)
    # Segment 1: all five events are new.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 0
    assert update_event.next_expected == 5
    assert len(update_event.new_elements) == 5
    for (i, element) in enumerate(update_event.new_elements):
        assert element.id == i
    # Segment 2: the next five events are new.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 0
    assert update_event.next_expected == 10
    assert len(update_event.new_elements) == 5
    for (i, element) in enumerate(update_event.new_elements):
        assert element.id == 5 + i
    # Segment 3: overlapping resend (ids 7-10) yields only the new id 10.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 0
    assert update_event.next_expected == 11
    assert len(update_event.new_elements) == 1
    assert update_event.new_elements[0].id == 10
    # Segment 4: fully duplicate segment yields no new elements.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 0
    assert update_event.next_expected == 11
    assert len(update_event.new_elements) == 0
    # Segment 5: new session with no elements resets next_expected to 0.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 1
    assert update_event.next_expected == 0
    assert len(update_event.new_elements) == 0
    # Segment 6: ids 0-3 are new again under the new session.
    update_event = synchronizer.output()
    assert update_event is not None
    assert update_event.session_id == 1
    assert update_event.next_expected == 4
    assert len(update_event.new_elements) == 4
    for (i, element) in enumerate(update_event.new_elements):
        assert element.id == i
36.532847
77
0.675524
3.296875
e754832dab77d8c61522e8d0299d1eb2b720fed1
22,157
js
JavaScript
source/actions.js
RahavLussato/redux-react-firebase
f66b2ca8b0b39b5a51e0538e7e2ca43ab25d1556
[ "MIT" ]
287
2016-01-13T12:20:08.000Z
2022-02-21T03:15:31.000Z
source/actions.js
enkuush-ca/redux-react-firebase
004bfd20d60e6ac8c5793c7aa66b161ae6fc9f03
[ "MIT" ]
60
2016-01-26T15:01:45.000Z
2018-10-18T22:34:14.000Z
source/actions.js
enkuush-ca/redux-react-firebase
004bfd20d60e6ac8c5793c7aa66b161ae6fc9f03
[ "MIT" ]
48
2016-01-18T17:38:37.000Z
2021-03-23T23:46:45.000Z
// Redux action creators bridging Firebase Realtime Database listeners and
// Firebase Auth to the store. Watcher bookkeeping lives on `firebase._`
// (watchers, timeouts, aggregation buffers, pending events).
import {
  SET,
  SET_REQUESTED,
  SET_PROFILE,
  LOGIN,
  LOGOUT,
  LOGIN_ERROR,
  PERMISSION_DENIED_ERROR,
  START,
  INIT_BY_PATH
  // NO_VALUE
} from './constants'
import { Promise } from 'es6-promise'
import _ from 'lodash'

// Builds the canonical watcher key "<event>:/<clean path>" used to index
// firebase._.watchers and firebase._.shouldClearAfterOnce.
const getWatchPath = (event, path) =>
  event + ':' + ((getCleanPath(path).substring(0, 1) === '/') ? '' : '/') + getCleanPath(path)

// Registers (or refcounts) a watcher for the given connect id.
const setWatcher = (firebase, event, path, ConnectId = 'Manual') => {
  const id = getWatchPath(event, path)
  firebase._.watchers[id] = firebase._.watchers[id] || {}
  if (Object.keys(firebase._.watchers[id]).includes(ConnectId)) {
    firebase._.watchers[id][ConnectId]++
  } else {
    firebase._.watchers[id][ConnectId] = 1
  }
  return firebase._.watchers[id]
}

// Decrements the once-watcher refcount and flushes any listener teardowns
// that were deferred until the pending `once` read completed.
const cleanOnceWatcher = (firebase, dispatch, event, path, ConnectId) => {
  const id = getWatchPath(event, path)
  if (firebase._.watchers[id]) {
    if (firebase._.watchers[id][ConnectId] <= 1) {
      delete firebase._.watchers[id][ConnectId]
      if (Object.keys(firebase._.watchers[id]).length === 0) {
        delete firebase._.watchers[id]
      }
    } else if (firebase._.watchers[id][ConnectId]) {
      firebase._.watchers[id][ConnectId]--
    }
  }
  if (firebase._.shouldClearAfterOnce[id]) {
    for (let clean of firebase._.shouldClearAfterOnce[id]) {
      firebase.database().ref().child(clean.path).off(clean.event)
      if (!clean.isSkipClean) {
        dispatch({ type: INIT_BY_PATH, path: clean.path })
      }
    }
    delete firebase._.shouldClearAfterOnce[id]
  }
  return firebase._.watchers[id]
}

// Number of distinct connect ids currently watching event:path.
const getWatcherCount = (firebase, event, path) => {
  const id = getWatchPath(event, path)
  const watchers = firebase._.watchers[id]
  return watchers && Object.keys(watchers).length
}

// Strips the "#query" suffix from a watch path.
const getCleanPath = (path) => {
  let pathSplitted = path.split('#')
  return pathSplitted[0]
}

// Unregisters a watcher. When the last connect id is gone (or a new query /
// CleanAll replaces the registration) the Firebase listener is detached and
// the redux subtree is optionally reset via INIT_BY_PATH.
const unsetWatcher = (firebase, dispatch, event, path, ConnectId = 'Manual', isSkipClean = false, isNewQuery = false) => {
  const id = getWatchPath(event, path)
  const onceEvent = getWatchPath('once', path)
  path = path.split('#')[0]
  if ((firebase._.watchers[id] && firebase._.watchers[id][ConnectId] <= 1) || isNewQuery || ConnectId === 'CleanAll') {
    const aggregationId = getWatchPath('child_aggregation', path)
    if (firebase._.timeouts && firebase._.timeouts[aggregationId]) {
      clearTimeout(firebase._.timeouts[aggregationId])
      firebase._.timeouts[aggregationId] = undefined
    }
    ConnectId !== 'CleanAll' && delete firebase._.watchers[id][ConnectId]
    const countWatchers = ConnectId !== 'CleanAll' ? Object.keys(firebase._.watchers[id]).length : 0
    if (countWatchers === 0 || isNewQuery) {
      countWatchers === 0 && delete firebase._.watchers[id]
      if (event !== 'once') {
        if (!firebase._.watchers[onceEvent]) {
          event !== 'all' && firebase.database().ref().child(path).off(event)
          if (!isSkipClean) {
            dispatch({ type: INIT_BY_PATH, path })
          }
        } else {
          // A `once` read is still pending on this path: defer the teardown
          // until cleanOnceWatcher runs.
          firebase._.shouldClearAfterOnce[onceEvent] = firebase._.shouldClearAfterOnce[onceEvent] || []
          firebase._.shouldClearAfterOnce[onceEvent].push({ path, event, isSkipClean })
        }
      }
    }
  } else if (firebase._.watchers[id] && firebase._.watchers[id][ConnectId]) {
    firebase._.watchers[id][ConnectId]--
  }
}

// True when at least one connect id is watching event:path.
// BUG FIX: `watchers[id]` is a {ConnectId: count} object, so the previous
// `watchers[id] > 0` comparison was always false.
export const isWatchPath = (firebase, dispatch, event, path) => {
  const id = getWatchPath(event, path)
  const watchers = firebase._.watchers[id]
  return !!watchers && Object.keys(watchers).length > 0
}

function isNumeric(n) {
  return !isNaN(n - parseFloat(n))
}

// Attaches a Firebase listener (value / child_* / once) for `path`, parsing
// an optional "#param=value&…" query suffix, and dispatches SET/SET_REQUESTED
// actions as snapshots arrive. `isListenOnlyOnDelta` skips the initial
// child_added backlog; `isAggregation` batches child events for 1s before a
// bulk dispatch; `setFunc` lets the caller handle snapshots itself.
export const watchEvent = (firebase, dispatch, event, path, ConnectId = 'Manual', isListenOnlyOnDelta = false, isAggregation = false, setFunc = undefined, setOptions = undefined) => {
  if (!path) return

  const isNewQuery = path.includes('#')
  const isNewSet = setOptions !== undefined
  let queryParams = []
  if (isNewQuery) {
    let pathSplitted = path.split('#')
    path = pathSplitted[0]
    queryParams = pathSplitted[1].split('&')
  }
  const watchPath = path
  const counter = getWatcherCount(firebase, event, watchPath)
  if (counter > 0) {
    if (isNewQuery || isNewSet) {
      // A changed query/set replaces the existing registration.
      unsetWatcher(firebase, dispatch, event, path, ConnectId, false, isNewQuery || isNewSet)
    } else {
      // Already watched: just bump the refcount.
      setWatcher(firebase, event, watchPath, ConnectId)
      return
    }
  }
  setWatcher(firebase, event, watchPath, ConnectId)

  let query = firebase.database().ref().child(path)
  if (isNewQuery) {
    let doNotParse = false
    queryParams.forEach((param) => {
      param = param.split('=')
      switch (param[0]) {
        case 'doNotParse':
          doNotParse = true
          break
        case 'orderByValue':
          query = query.orderByValue()
          doNotParse = true
          break
        case 'orderByPriority':
          query = query.orderByPriority()
          doNotParse = true
          break
        case 'orderByKey':
          query = query.orderByKey()
          doNotParse = true
          break
        case 'orderByChild':
          query = query.orderByChild(param[1])
          break
        case 'limitToFirst':
          query = query.limitToFirst(parseInt(param[1], 10))
          break
        case 'limitToLast':
          query = query.limitToLast(parseInt(param[1], 10))
          break
        case 'equalTo': {
          let equalToParam = (!doNotParse && isNumeric(param[1])) ? parseFloat(param[1]) || (param[1] === '0' ? 0 : param[1]) : param[1]
          equalToParam = equalToParam === 'null' ? null : equalToParam
          query = param.length === 3 ? query.equalTo(equalToParam, param[2]) : query.equalTo(equalToParam)
          break
        }
        case 'startAt': {
          let startAtParam = (!doNotParse && isNumeric(param[1])) ? parseFloat(param[1]) || (param[1] === '0' ? 0 : param[1]) : param[1]
          startAtParam = startAtParam === 'null' ? null : startAtParam
          query = param.length === 3 ? query.startAt(startAtParam, param[2]) : query.startAt(startAtParam)
          break
        }
        case 'endAt': {
          let endAtParam = (!doNotParse && isNumeric(param[1])) ? parseFloat(param[1]) || (param[1] === '0' ? 0 : param[1]) : param[1]
          endAtParam = endAtParam === 'null' ? null : endAtParam
          query = param.length === 3 ? query.endAt(endAtParam, param[2]) : query.endAt(endAtParam)
          break
        }
        default:
          break
      }
    })
  }

  const runQuery = (q, e, p) => {
    dispatch({ type: START, timestamp: Date.now(), requesting: true, requested: false, path })
    let aggregationId = getWatchPath('child_aggregation', path)
    if (e === 'once') {
      q.once('value')
        .then(snapshot => {
          cleanOnceWatcher(firebase, dispatch, event, watchPath, ConnectId)
          if (snapshot.val() !== null) {
            if (setFunc) {
              setFunc(snapshot, 'value', dispatch, setOptions)
              dispatch({ type: SET_REQUESTED, path: p, key: snapshot.key, timestamp: Date.now(), requesting: false, requested: true })
            } else {
              dispatch({
                type: SET,
                path: p,
                data: snapshot.val(),
                snapshot: Object.assign(snapshot, { _event: 'value' }),
                key: snapshot.key,
                timestamp: Date.now(),
                requesting: false,
                requested: true,
                isChild: false,
                isMixSnapshot: false,
                isMergeDeep: false
              })
            }
          }
        }, dispatchPermissionDeniedError)
    } else if (e === 'child_added' && isListenOnlyOnDelta) {
      // Delta mode: ignore the initial child_added backlog; initial state is
      // delivered in one shot by the once('value') read below.
      let newItems = false
      q.on(e, snapshot => {
        if (!newItems) return
        let tempSnapshot = Object.assign(snapshot, { _event: e })
        if (isAggregation) {
          if (!firebase._.timeouts[aggregationId]) {
            firebase._.aggregatedData[aggregationId] = {}
            firebase._.aggregatedSnapshot[aggregationId] = {}
            firebase._.timeouts[aggregationId] = setTimeout(() => { dispatchBulk(p, aggregationId) }, 1000)
          }
          firebase._.aggregatedData[aggregationId][snapshot.key] = snapshot.val()
          firebase._.aggregatedSnapshot[aggregationId][snapshot.key] = tempSnapshot
        } else {
          if (setFunc) {
            setFunc(snapshot, 'child_added', dispatch, setOptions)
            dispatch({ type: SET_REQUESTED, path: p, key: snapshot.key, timestamp: Date.now(), requesting: false, requested: true })
          } else {
            dispatch({
              type: SET,
              path: p,
              data: snapshot.val(),
              snapshot: tempSnapshot,
              key: snapshot.key,
              timestamp: Date.now(),
              requesting: false,
              requested: true,
              isChild: true,
              isMixSnapshot: true,
              isMergeDeep: false
            })
          }
        }
      }, dispatchPermissionDeniedError)
      q.once('value')
        .then(snapshot => {
          newItems = true
          if (snapshot.val() !== null) {
            if (setFunc) {
              setFunc(snapshot, 'value', dispatch, setOptions)
              dispatch({ type: SET_REQUESTED, path: p, key: snapshot.key, timestamp: Date.now(), requesting: false, requested: true })
            } else {
              dispatch({
                type: SET,
                path: p,
                data: snapshot.val(),
                snapshot: Object.assign(snapshot, { _event: 'value' }),
                key: snapshot.key,
                timestamp: Date.now(),
                requesting: false,
                requested: true,
                isChild: false,
                isMixSnapshot: true,
                isMergeDeep: false
              })
            }
          }
        }, dispatchPermissionDeniedError)
    } else {
      q.on(e, snapshot => {
        // child_removed keeps a sentinel value so reducers can drop the key.
        let data = (e === 'child_removed') ? '_child_removed' : snapshot.val()
        let tempSnapshot = Object.assign(snapshot, { _event: e })
        if (e !== 'value' && isAggregation) {
          if (!firebase._.timeouts[aggregationId]) {
            firebase._.aggregatedData[aggregationId] = {}
            firebase._.aggregatedSnapshot[aggregationId] = {}
            firebase._.timeouts[aggregationId] = setTimeout(() => { dispatchBulk(p, aggregationId) }, 1000)
          }
          firebase._.aggregatedData[aggregationId][snapshot.key] = data
          firebase._.aggregatedSnapshot[aggregationId][snapshot.key] = tempSnapshot
        } else {
          if (setFunc) {
            setFunc(tempSnapshot, e, dispatch, setOptions)
          } else {
            dispatch({
              type: SET,
              path: p,
              data,
              snapshot: tempSnapshot,
              key: snapshot.key,
              timestamp: Date.now(),
              requesting: false,
              requested: true,
              isChild: e !== 'value',
              isMixSnapshot: isListenOnlyOnDelta,
              isMergeDeep: false
            })
          }
        }
      }, (permError) => dispatchPermissionDeniedError(permError, p))
    }
  }

  // Flushes the 1s aggregation buffer as a single SET/SET_REQUESTED.
  const dispatchBulk = (p, aggregationId) => {
    if (setFunc) {
      setFunc(firebase._.aggregatedSnapshot[aggregationId], 'aggregated', dispatch, setOptions)
      dispatch({ type: SET_REQUESTED, path: p, key: '_NONE', timestamp: Date.now(), requesting: false, requested: true })
    } else {
      dispatch({
        type: SET,
        path: p,
        data: firebase._.aggregatedData[aggregationId],
        snapshot: firebase._.aggregatedSnapshot[aggregationId],
        key: '_NONE',
        timestamp: Date.now(),
        requesting: false,
        requested: true,
        isChild: false,
        isMixSnapshot: true,
        isMergeDeep: true
      })
    }
    firebase._.timeouts[aggregationId] = undefined
  }

  // Dispatches a PERMISSION_DENIED_ERROR action and rethrows.
  const dispatchPermissionDeniedError = (permError, p) => {
    if (permError && permError.code === 'PERMISSION_DENIED' && permError.message && !permError.message.includes('undefined')) {
      dispatch({
        type: PERMISSION_DENIED_ERROR,
        data: undefined,
        snapshot: { val: () => undefined },
        path: p,
        timestamp: Date.now(),
        requesting: false,
        requested: true,
        permError
      })
    }
    throw permError
  }

  runQuery(query, event, path)
}

export const unWatchEvent = (firebase, dispatch, event, path, ConnectId, isSkipClean = false) => {
  unsetWatcher(firebase, dispatch, event, path, ConnectId, isSkipClean)
}

export const watchEvents = (firebase, dispatch, events, ConnectId = 'Manual') =>
  events.forEach(event => watchEvent(firebase, dispatch, event.name, event.path, ConnectId, event.isListenOnlyOnDelta, event.isAggregation, event.setFunc, event.setOptions))

export const unWatchEvents = (firebase, dispatch, events, ConnectId = 'Manual', isUnmount = false) =>
  events.forEach(event => unWatchEvent(firebase, dispatch, event.name, event.path, ConnectId, isUnmount ? !!event.isSkipCleanOnUnmount : event.isSkipClean))

const dispatchLoginError = (dispatch, authError) =>
  dispatch({ type: LOGIN_ERROR, authError })

const dispatchLogin = (dispatch, auth) =>
  dispatch({ type: LOGIN, auth, authError: null })

// Detaches the profile listener attached by watchUserProfile.
const unWatchUserProfile = (firebase) => {
  const authUid = firebase._.authUid
  const userProfile = firebase._.config.userProfile
  if (firebase._.profileWatch) {
    firebase.database().ref().child(`${userProfile}/${authUid}`).off('value', firebase._.profileWatch)
    firebase._.profileWatch = null
  }
}

// Streams the logged-in user's profile node into the store (SET_PROFILE).
const watchUserProfile = (dispatch, firebase) => {
  const authUid = firebase._.authUid
  const userProfile = firebase._.config.userProfile
  unWatchUserProfile(firebase)
  if (firebase._.config.userProfile) {
    firebase._.profileWatch = firebase.database().ref().child(`${userProfile}/${authUid}`).on('value', snap => {
      dispatch({ type: SET_PROFILE, profile: snap.val() })
    })
  }
}

// Picks the sign-in method from the shape of `credentials`: a string is a
// custom token; an {email, password} object is a password login.
const createLoginPromise = (firebase, credentials) => {
  const auth = firebase.auth()
  if (_.isString(credentials)) {
    return auth.signInWithCustomToken(credentials)
  } else if (_.has(credentials, "email") && _.has(credentials, "password")) {
    // BUG FIX: previously called signInWithEmailAndPassword(email, password)
    // with bare identifiers that are undefined in this scope — the values
    // must come from the credentials object.
    return auth.signInWithEmailAndPassword(credentials.email, credentials.password)
  } else {
    return Promise.reject(new Error(`Malformed credentials or unsupported way of logging in: ${credentials}`))
  }
}

export const login = (dispatch, firebase, credentials) => {
  return new Promise((resolve, reject) => {
    dispatchLoginError(dispatch, null)
    createLoginPromise(firebase, credentials)
      .then(resolve)
      .catch(err => {
        dispatchLoginError(dispatch, err)
        reject(err)
      })
  })
}

// Subscribes to auth state changes: dispatches LOGIN/LOGOUT, starts the
// profile watcher, and attaches watchers queued before auth completed.
export const init = (dispatch, firebase) => {
  firebase.auth().onAuthStateChanged(authData => {
    // Run onAuthStateChanged if it exists in config.
    // BUG FIX: this call used to sit outside the callback, where `authData`
    // is not in scope (ReferenceError whenever the hook was configured).
    if (firebase._.config.onAuthStateChanged) {
      firebase._.config.onAuthStateChanged(authData, firebase)
    }
    if (!authData) {
      return dispatch({ type: LOGOUT })
    }
    firebase._.authUid = authData.uid
    watchUserProfile(dispatch, firebase)
    if (!!firebase._.firebasePendingEvents) {
      // Attach watchers that were requested before auth completed.
      for (let key of Object.keys(firebase._.firebasePendingEvents)) {
        watchEvents(firebase, dispatch, firebase._.firebasePendingEvents[key], key)
      }
      firebase._.firebasePendingEvents = undefined
    }
    dispatchLogin(dispatch, authData)
  })
}

export const logout = (dispatch, firebase, preserve = [], remove = []) => {
  firebase.auth().signOut()
  dispatch({ type: LOGOUT, preserve, remove })
  firebase._.authUid = null
  unWatchUserProfile(firebase)
}

// Creates an auth user, optionally stores `profile` under the configured
// userProfile root, then logs in and resolves with the new uid.
export const createUser = (dispatch, firebase, credentials, profile) =>
  new Promise((resolve, reject) => {
    dispatchLoginError(dispatch, null)
    firebase.auth().createUserWithEmailAndPassword(credentials.email, credentials.password)
      .then((userData) => {
        if (profile && firebase._.config.userProfile) {
          firebase.database().ref().child(`${firebase._.config.userProfile}/${userData.uid}`).set(profile)
        }
        login(dispatch, firebase, credentials)
          .then(() => resolve(userData.uid))
          .catch(err => reject(err))
      })
      .catch(err => {
        dispatchLoginError(dispatch, err)
        return reject(err)
      })
  })

export const resetPassword = (dispatch, firebase, email) => {
  dispatchLoginError(dispatch, null)
  return firebase.auth().sendPasswordResetEmail(email).catch((err) => {
    if (err) {
      switch (err.code) {
        case 'INVALID_USER':
          dispatchLoginError(dispatch, new Error('The specified user account does not exist.'))
          break
        default:
          dispatchLoginError(dispatch, err)
      }
      return
    }
  })
}

export default { watchEvents, unWatchEvents, init, logout, createUser, resetPassword, isWatchPath }
38.33391
175
0.463104
3.046875
f16a4c0abcbe9f3811a1d34fd2b925a33028ae43
2,477
rb
Ruby
app/controllers/metrics_controller.rb
tkowark/repmine
6d358e1178892fb715ece18e5bc5722c6eb882c9
[ "MIT" ]
3
2017-10-24T18:49:46.000Z
2020-12-22T17:35:32.000Z
app/controllers/metrics_controller.rb
tkowark/repmine
6d358e1178892fb715ece18e5bc5722c6eb882c9
[ "MIT" ]
7
2016-05-02T14:26:41.000Z
2016-05-03T13:52:31.000Z
app/controllers/metrics_controller.rb
tkowark/repmine
6d358e1178892fb715ece18e5bc5722c6eb882c9
[ "MIT" ]
1
2020-05-09T13:48:43.000Z
2020-05-09T13:48:43.000Z
# Controller for creating and editing metrics and the graph of metric
# nodes (measurables and operators) that make them up.
class MetricsController < ApplicationController
  autocomplete :tag, :name, :class_name => 'ActsAsTaggableOn::Tag'

  # Creates an empty metric (validations deliberately skipped so a blank
  # draft can be persisted) and redirects to its page.
  def create
    @metric = Metric.new
    @metric.save(validate: false)
    redirect_to metric_path(@metric)
  end

  # Shows a metric together with its grouped measurables and the existing
  # parent/child connections between its nodes.
  def show
    @metric = Metric.find(params[:id])
    @measurable_groups = Metric.grouped([@metric.id]).merge(Pattern.grouped){|key, val1, val2| val1 + val2}
    @existing_connections = []
    @metric.metric_nodes.each do |node|
      node.children.each do |child|
        @existing_connections << {:source => node.id, :target => child.id}
      end
    end
    @title = @metric.name.blank? ? "New metric" : "Metric '#{@metric.name}'"
  end

  # Updates the metric; responds with an empty JSON body and communicates
  # the outcome via flash messages plus the HTTP status code.
  def update
    metric = Metric.find(params[:id])
    if metric.update_attributes(params[:metric])
      flash[:notice] = "Successfully saved metric!"
      render json: {}
    else
      flash[:error] = "Could not save metric! <br/> #{metric.errors.full_messages.join("<br />")}"
      render json: {}, :status => :unprocessable_entity
    end
  end

  # Connects two metric nodes: source becomes the parent of target.
  def create_connection
    source = MetricNode.find(params[:source_id])
    target = MetricNode.find(params[:target_id])
    target.parent = source
    target.save(validate: false)
    render :nothing => true, :status => 200, :content_type => 'text/html'
  end

  # Removes the parent link between two nodes. Kept best-effort (a missing
  # node is not treated as fatal), but failures are now logged and only
  # StandardError is rescued — the original bare `rescue Exception` with an
  # empty body silently swallowed every error, including Interrupt.
  def destroy_connection
    begin
      source = MetricNode.find(params[:source_id])
      target = MetricNode.find(params[:target_id])
      target.parent = nil
      target.save
    rescue StandardError => e
      logger.warn("destroy_connection failed: #{e.class}: #{e.message}")
    end
    render :nothing => true, :status => 200, :content_type => 'text/html'
  end

  # Calculates the metric for a repository and streams the result as CSV.
  def download_csv
    repository = Repository.find(params[:repository_id])
    metric = Metric.find(params[:metrics].first)
    metric.calculate(repository)
    send_data(
      File.open(metric.metrics_path("csv", repository)).read,
      :type => 'text/csv; charset=utf-8; header=present',
      :filename => metric.fancy_metric_file_name(repository)
    )
  end

  # Adds a measurable node to the metric and renders its partial.
  def create_node
    metric = Metric.find(params[:metric_id])
    measurable = Measurable.find(params[:pattern_id])
    node = metric.create_node(measurable)
    render :partial => "metric_nodes/show", :layout => false, :locals => {:node => node}
  end

  # Adds an operator node to the metric and renders its partial.
  def create_operator
    metric = Metric.find(params[:metric_id])
    node = MetricOperatorNode.create(:operator_cd => params[:operator])
    metric.metric_nodes << node
    render :partial => "metric_nodes/show", :layout => false, :locals => {:node => node}
  end
end
32.592105
107
0.664514
3.296875
12e2ad4fc6525e0a31ce4b9546d519cb7efc7a48
2,551
kt
Kotlin
koma-core-api/common/src/koma/internal/default/generated/matrix/DefaultIntMatrixFactory.kt
drmoose/koma
765dfb206cada4b682a94e140a40ba6c6e95667b
[ "Apache-2.0" ]
233
2017-05-03T16:54:08.000Z
2021-12-04T03:20:04.000Z
koma-core-api/common/src/koma/internal/default/generated/matrix/DefaultIntMatrixFactory.kt
drmoose/koma
765dfb206cada4b682a94e140a40ba6c6e95667b
[ "Apache-2.0" ]
70
2017-05-07T20:07:37.000Z
2021-08-11T20:33:13.000Z
koma-core-api/common/src/koma/internal/default/generated/matrix/DefaultIntMatrixFactory.kt
drmoose/koma
765dfb206cada4b682a94e140a40ba6c6e95667b
[ "Apache-2.0" ]
31
2017-05-18T09:04:56.000Z
2021-05-07T22:40:26.000Z
/**
 * THIS FILE IS AUTOGENERATED, DO NOT MODIFY. EDIT THE FILES IN templates/
 * AND RUN ./gradlew :codegen INSTEAD!
 */
package koma.internal.default.generated.matrix

import koma.*
import koma.matrix.*
import koma.extensions.*
import koma.internal.notImplemented
import koma.internal.getRng
import koma.internal.syncNotNative

// Factory producing Int-element matrices backed by DefaultIntMatrix.
// NOTE: generated code — any fixes belong in templates/, not here.
class DefaultIntMatrixFactory: MatrixFactory<Matrix<Int>> {
    // Zero-initialized rows x cols matrix (DefaultIntMatrix default-fills with 0).
    override fun zeros(rows: Int, cols: Int) = DefaultIntMatrix(rows, cols)

    // Builds a 1-row matrix from an integer range, one element per value.
    override fun create(data: IntRange): Matrix<Int> {
        val input = data.map { it.toInt() }
        val out = DefaultIntMatrix(1, input.size)
        input.forEachIndexed { idx, ele -> out[idx] = ele }
        return out
    }

    // Builds a 1-row matrix from doubles, truncating each value via toInt().
    override fun create(data: DoubleArray): Matrix<Int> {
        val out = DefaultIntMatrix(1, data.size)
        data.forEachIndexed { idx, ele -> out[idx] = ele.toInt() }
        return out
    }

    // Builds a 2-D matrix from a row-major array of rows; assumes all rows
    // have data[0].size columns (shorter rows would fail at assignment).
    override fun create(data: Array<DoubleArray>): Matrix<Int> {
        val out = DefaultIntMatrix(data.size, data[0].size)
        data.forEachIndexed { rowIdx, row ->
            row.forEachIndexed { colIdx, ele -> out[rowIdx, colIdx] = ele.toInt() }
        }
        return out
    }

    // All-ones matrix.
    override fun ones(rows: Int, cols: Int): Matrix<Int> = zeros(rows, cols).fill { _, _ -> 1.toInt() }

    // Square identity delegates to the rectangular overload.
    override fun eye(size: Int): Matrix<Int> = eye(size, size)

    // Rectangular identity: 1 on the diagonal, 0 elsewhere.
    override fun eye(rows: Int, cols: Int): Matrix<Int> = zeros(rows, cols)
        .fill { row, col -> if (row == col) 1.toInt() else 0.toInt() }

    // Uniform random fill; syncNotNative guards the shared RNG on non-native
    // targets. NOTE(review): nextDoubleUnsafe() is in [0,1), so toInt()
    // truncates every entry to 0 — presumably an artifact of the Int template
    // instantiation; confirm against templates/.
    override fun rand(rows: Int, cols: Int): Matrix<Int> {
        val array = zeros(rows, cols)
        val rng = getRng()
        syncNotNative(rng) {
            array.fill { _, _ -> rng.nextDoubleUnsafe().toInt() }
        }
        return array;
    }

    // Gaussian random fill, truncated to Int (same caveat as rand above).
    override fun randn(rows: Int, cols: Int): Matrix<Int> {
        val array = zeros(rows, cols)
        val rng = getRng()
        syncNotNative(rng) {
            array.fill { _, _ -> rng.nextGaussianUnsafe().toInt() }
        }
        return array;
    }

    // Range constructors are not implemented for the Int specialization.
    override fun arange(start: Double, stop: Double, increment: Double): Matrix<Int> {
        error(notImplemented)
    }

    override fun arange(start: Double, stop: Double): Matrix<Int> {
        error(notImplemented)
    }

    override fun arange(start: Int, stop: Int, increment: Int): Matrix<Int> {
        error(notImplemented)
    }

    override fun arange(start: Int, stop: Int): Matrix<Int> {
        error(notImplemented)
    }
}
29.321839
86
0.596629
3.15625
e8fc72a77fdd416f9afae8ddad6132491cc5fabf
4,405
py
Python
c4/system/history.py
Brewgarten/c4-system-manager
6fdec33ced4b1cb32d82a24cd168447a899b7e10
[ "MIT" ]
null
null
null
c4/system/history.py
Brewgarten/c4-system-manager
6fdec33ced4b1cb32d82a24cd168447a899b7e10
[ "MIT" ]
1
2017-10-17T21:51:40.000Z
2017-10-17T21:51:40.000Z
c4/system/history.py
Brewgarten/c4-system-manager
6fdec33ced4b1cb32d82a24cd168447a899b7e10
[ "MIT" ]
null
null
null
"""
Copyright (c) IBM 2015-2017. All Rights Reserved.
Project name: c4-system-manager
This project is licensed under the MIT License, see LICENSE
"""
from abc import ABCMeta, abstractmethod

class DeviceHistory(object):
    """
    Device manager history

    Abstract interface for storing and querying status history of device
    managers, keyed by (node, device-manager name).
    """
    # NOTE(review): `__metaclass__` only takes effect under Python 2; on
    # Python 3 abstractness is NOT enforced. Era of the codebase suggests
    # Python 2 — confirm before porting.
    __metaclass__ = ABCMeta

    @abstractmethod
    def add(self, node, name, status, ttl=None):
        """
        Add status for device manager with specified name on specified node

        :param node: node name
        :type node: str
        :param name: device manager name
        :type name: str
        :param status: status
        :type status: :class:`DeviceManagerStatus`
        :param ttl: time to live (in seconds), infinite by default
        :type ttl: int
        """

    @abstractmethod
    def get(self, node, name, limit=None):
        """
        Get status history for device manager with specified name on specified node

        :param node: node name
        :type node: str
        :param name: device manager name
        :type name: str
        :param limit: number of statuses to return
        :type limit: int
        :returns: list of history entries
        :rtype: [:class:`Entry`]
        """

    @abstractmethod
    def getAll(self):
        """
        Get status history for all device managers on all nodes

        :returns: list of history entries
        :rtype: [:class:`Entry`]
        """

    @abstractmethod
    def getLatest(self, node, name):
        """
        Get latest status for device manager with specified name on specified node

        :param node: node name
        :type node: str
        :param name: device manager name
        :type name: str
        :returns: history entry
        :rtype: :class:`Entry`
        """

    @abstractmethod
    def remove(self, node=None, name=None):
        """
        Remove status history for device managers with specified names on
        specified nodes. The two optional filters combine as follows:

        node and name: remove history for specific device on a specific node
        node and no name: remove history for all devices on a specific node
        no node and name: remove history for specific device on all nodes
        no node and no name: remove history for all devices on all nodes

        :param node: node name
        :type node: str
        :param name: device manager name
        :type name: str
        """

class Entry(object):
    """
    History entry with timestamp and status information

    :param timestamp: datetime instance
    :type timestamp: :class:`Datetime`
    :param status: status
    :type status: :class:`SystemManagerStatus` or :class:`DeviceManagerStatus`
    """
    def __init__(self, timestamp, status):
        # when the status was recorded
        self.timestamp = timestamp
        # the recorded status object
        self.status = status

class NodeHistory(object):
    """
    System manager history

    Abstract interface for storing and querying status history of system
    managers, keyed by node only (no device name).
    """
    # NOTE(review): Python-2-only metaclass declaration; see note above.
    __metaclass__ = ABCMeta

    @abstractmethod
    def add(self, node, status, ttl=None):
        """
        Add status for system manager with on specified node

        :param node: node name
        :type node: str
        :param status: status
        :type status: :class:`SystemManagerStatus`
        :param ttl: time to live (in seconds), infinite by default
        :type ttl: int
        """

    @abstractmethod
    def get(self, node, limit=None):
        """
        Get status history for system manager on specified node

        :param node: node name
        :type node: str
        :param limit: number of statuses to return
        :type limit: int
        :returns: list of history entries
        :rtype: [:class:`Entry`]
        """

    @abstractmethod
    def getAll(self):
        """
        Get status history for all system managers on all nodes

        :returns: list of history entries
        :rtype: [:class:`Entry`]
        """

    @abstractmethod
    def getLatest(self, node):
        """
        Get latest status for system manager on specified node

        :param node: node name
        :type node: str
        :returns: history entry
        :rtype: :class:`Entry`
        """

    @abstractmethod
    def remove(self, node=None):
        """
        Remove status history for system managers on specified nodes.

        node: remove history for specific node
        no node: remove history for all nodes

        :param node: node name
        :type node: str
        """
26.065089
90
0.595687
3.078125
e7ea5fbf2a5ea893fa5d02bc075a60e6e8983358
4,580
py
Python
app/request.py
angelakarenzi5/News-Highlight
3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62
[ "Unlicense" ]
null
null
null
app/request.py
angelakarenzi5/News-Highlight
3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62
[ "Unlicense" ]
null
null
null
app/request.py
angelakarenzi5/News-Highlight
3eae6f743f9e5d9eb4ea80b29ae0e2c57dd0aa62
[ "Unlicense" ]
null
null
null
from app import app
import urllib.request,json
from .models import source
from .models import article

Source = source.Source
Article = article.Article

# Getting api key
api_key = app.config['NEWS_API_KEY']

# Getting the source base url
base_url = app.config["SOURCE_API_BASE_URL"]
article_url = app.config["ARTICLE_API_BASE_URL"]


def process_results(source_list):
    '''
    Function that processes the source result and transform them to a list of Objects

    Args:
        source_list: A list of dictionaries that contain source details

    Returns :
        source_results: A list of source objects
    '''
    source_results = []
    for source_item in source_list:
        id = source_item.get('id')
        name = source_item.get('name')
        description = source_item.get('description')
        url = source_item.get('url')
        category = source_item.get('category')
        language = source_item.get('language')
        country = source_item.get('country')

        # skip entries without a url — they cannot be linked to
        if url:
            source_object = Source(id, name, description, url, category, language, country)
            source_results.append(source_object)

    return source_results


def get_sources(category):
    '''
    Function that gets the json response to our url request

    Args:
        category: the news category used to fill the SOURCE_API_BASE_URL template

    Returns:
        A list of Source objects, or None if the response had no 'sources' key data
    '''
    get_sources_url = base_url.format(category, api_key)

    with urllib.request.urlopen(get_sources_url) as url:
        get_sources_data = url.read()
        get_sources_response = json.loads(get_sources_data)

        source_results = None

        if get_sources_response['sources']:
            source_results_list = get_sources_response['sources']
            source_results = process_results(source_results_list)

    return source_results


def get_source(id):
    '''
    Function that fetches the details of a single source and returns it as a
    Source object.

    NOTE(review): this formats ARTICLE_API_BASE_URL with a source id — it
    looks like it should use a source-detail endpoint instead; verify the
    config templates before relying on this.
    '''
    get_sources_details_url = article_url.format(id, api_key)

    with urllib.request.urlopen(get_sources_details_url) as url:
        source_details_data = url.read()
        source_details_response = json.loads(source_details_data)

        source_object = None
        if source_details_response:
            id = source_details_response.get('id')
            name = source_details_response.get('name')
            description = source_details_response.get('description')
            url = source_details_response.get('url')
            category = source_details_response.get('category')
            language = source_details_response.get('language')
            country = source_details_response.get('country')

            source_object = Source(id, name, description, url, category, language, country)

    return source_object


def process_articles(article_list):
    '''
    Function that processes the article result and transform them to a list of Objects

    Args:
        article_list: A list of dictionaries that contain article details

    Returns :
        article_results: A list of article objects
    '''
    article_results = []
    for article_item in article_list:
        author = article_item.get('author')
        title = article_item.get('title')
        description = article_item.get('description')
        url = article_item.get('url')
        urlToImage = article_item.get('urlToImage')
        publishedAt = article_item.get('publishedAt')
        content = article_item.get('content')

        if url:
            article_object = Article(author, title, description, url, urlToImage, publishedAt, content)
            article_results.append(article_object)

    return article_results


# Bug fix: the module previously defined get_articles twice. The first
# definition (which wrongly ran articles through process_results, the
# *source* processor) was dead code — it was immediately shadowed by this
# one, so removing it preserves runtime behavior exactly.
def get_articles(source):
    '''
    Function that gets the json response to our url request

    Args:
        source: the source id used to fill the ARTICLE_API_BASE_URL template

    Returns:
        A list of Article objects, or None if the response had no 'articles' data
    '''
    get_articles_url = article_url.format(source, api_key)

    with urllib.request.urlopen(get_articles_url) as url:
        get_articles_data = url.read()
        get_articles_response = json.loads(get_articles_data)

        article_results = None

        if get_articles_response['articles']:
            article_results_list = get_articles_response['articles']
            article_results = process_articles(article_results_list)

    return article_results
31.156463
98
0.691921
3.140625
16c5ad86cd82ede7749039f69f9eab6aa6ad6753
1,381
kt
Kotlin
stopwatch-core/src/main/java/com/danielbostwick/stopwatch/core/service/DefaultStopwatchService.kt
bostwick/android-stopwatch
94735805b22592f077a40b5578d661983a57b42a
[ "BSD-3-Clause" ]
29
2015-02-25T23:04:31.000Z
2021-02-17T11:28:06.000Z
stopwatch-core/src/main/java/com/danielbostwick/stopwatch/core/service/DefaultStopwatchService.kt
bostwick/android-stopwatch
94735805b22592f077a40b5578d661983a57b42a
[ "BSD-3-Clause" ]
4
2016-04-28T18:25:07.000Z
2018-02-15T08:44:15.000Z
stopwatch-core/src/main/java/com/danielbostwick/stopwatch/core/service/DefaultStopwatchService.kt
bostwick/android-stopwatch
94735805b22592f077a40b5578d661983a57b42a
[ "BSD-3-Clause" ]
21
2015-02-25T23:04:28.000Z
2021-12-15T05:51:26.000Z
package com.danielbostwick.stopwatch.core.service

import com.danielbostwick.stopwatch.core.model.Stopwatch
import com.danielbostwick.stopwatch.core.model.StopwatchState.PAUSED
import com.danielbostwick.stopwatch.core.model.StopwatchState.STARTED
import org.joda.time.DateTime
import org.joda.time.Duration
import org.joda.time.Interval

/**
 * Stopwatch state machine. A stopwatch accumulates elapsed time in `offset`
 * while paused; while started, elapsed time is `offset` plus the interval
 * since `startedAt`.
 */
class DefaultStopwatchService : StopwatchService {
    /** New stopwatch: paused, zero elapsed time. */
    override fun create() = Stopwatch(PAUSED, DateTime.now(), Duration.ZERO)

    /**
     * Starts a paused stopwatch; starting an already-started one is a no-op.
     * Bug fix: the caller-supplied [startedAt] was previously ignored in
     * favor of DateTime.now(), making the parameter meaningless and the
     * method untestable with a fixed clock.
     */
    override fun start(stopwatch: Stopwatch, startedAt: DateTime) = when (stopwatch.state) {
        PAUSED -> Stopwatch(STARTED, startedAt, stopwatch.offset)
        STARTED -> stopwatch
    }

    /**
     * Pauses a started stopwatch, folding the running interval into the
     * offset; pausing an already-paused one is a no-op. The recorded
     * timestamp is now [pausedAt] (consistent with the offset computation,
     * which already used it); while PAUSED the timestamp is not read by
     * [timeElapsed], so observable timing behavior is unchanged.
     */
    override fun pause(stopwatch: Stopwatch, pausedAt: DateTime) = when (stopwatch.state) {
        PAUSED -> stopwatch
        STARTED -> Stopwatch(PAUSED, pausedAt, newOffset(stopwatch.offset, stopwatch.startedAt, pausedAt))
    }

    /** Resetting discards all accumulated time. */
    override fun reset(stopwatch: Stopwatch) = create()

    /** Elapsed time: just the offset when paused, offset + running interval when started. */
    override fun timeElapsed(stopwatch: Stopwatch, now: DateTime): Duration = when (stopwatch.state) {
        PAUSED -> stopwatch.offset
        STARTED -> stopwatch.offset.plus(Interval(stopwatch.startedAt, now).toDuration())
    }

    /** Adds the [startedAt, pausedAt] interval to the existing accumulated offset. */
    private fun newOffset(existingOffset: Duration, startedAt: DateTime, pausedAt: DateTime) =
        existingOffset.plus(Interval(startedAt, pausedAt).toDuration())
}
39.457143
102
0.738595
3.21875
5b175b219a0692a2df8cd6b1cedfce902d4856fd
2,932
c
C
src/pack.c
macton/shannon-fano
e82a26939f34180aff25f5676dd9cc6392b78367
[ "BSD-3-Clause" ]
null
null
null
src/pack.c
macton/shannon-fano
e82a26939f34180aff25f5676dd9cc6392b78367
[ "BSD-3-Clause" ]
null
null
null
src/pack.c
macton/shannon-fano
e82a26939f34180aff25f5676dd9cc6392b78367
[ "BSD-3-Clause" ]
1
2019-06-13T12:15:36.000Z
2019-06-13T12:15:36.000Z
#include "main.h"

/* Symbol probability table, sorted by ptablebuild()/quicksort(). */
static ptab ptable[MAPSIZE];
/* codes[c] holds the '0'/'1' bit string assigned to byte value c. */
static char codes[MAPSIZE][256];

/*
 * Shannon-Fano compress `input` into `output`.
 *
 * Output layout: one byte (symbol count - 1), then per symbol the 8-bit
 * symbol value, the 8-bit (code length - 1), and the code bits; then the
 * encoded payload; finally the last partial byte (if any) and a byte
 * holding the bit count of that final buffer.
 *
 * NOTE(review): the input is opened with mode "r", not "rb" — on platforms
 * that distinguish text/binary (Windows) this would corrupt binary input;
 * confirm the intended targets.
 */
void pack(const char *input, const char *output)
{
#ifdef STAT
    clock_t time1, time2;
    time1 = clock();
#endif
    int c, i, j;
    FILE *infile = fopen(input, "r");
    assert(infile);
    /* First pass: build the probability table, then assign codes. */
    int size = ptablebuild(infile, ptable);
    encode(0, size - 1);
    printf("code table size: %d\n", size);
#ifdef STAT
    FILE *codetable = fopen("codetable", "wb");
    assert(codetable);
    for (i = 0; i < size; ++i) {
        fprintf(codetable, "%c %s %f \n", ptable[i].ch, codes[ptable[i].ch], ptable[i].p);
        printf("%c->%s\n", ptable[i].ch, codes[ptable[i].ch]);
    }
    fclose(codetable);
#endif
    for (i = 0; i < size; ++i)
        printf("%c->%s\n", ptable[i].ch, codes[ptable[i].ch]);
    FILE *outfile = fopen(output, "wb");
    assert(outfile);
    /* Header: number of distinct symbols minus one (fits one byte). */
    putc(size - 1, outfile);
    buffer buff;
    buff.size = buff.v = 0;
    char codesize[8], codebit[8], *ch;
    /* Emit the code table. */
    for (i = 0; i < size; ++i) {
        c = ptable[i].ch;
        chartobit(c, codebit);
        for (j = 0; j < 8; ++j)
            writebit(outfile, &buff, codebit[j]); // 8 bits of the symbol value
        chartobit(strlen(codes[c]) - 1, codesize);
        for (j = 0; j < 8; ++j)
            writebit(outfile, &buff, codesize[j]); // 8 bits: code length - 1
        j = -1;
        ch = codes[c];
        while (ch[++j] != '\0')
            writebit(outfile, &buff, ch[j]); // the code bits themselves
    }
    /* Second pass over the input: emit the encoded payload. */
    fseek(infile, 0, SEEK_SET);
    while ((c = getc(infile)) != EOF) {
        ch = codes[c];
        j = -1;
        while (ch[++j] != '\0')
            writebit(outfile, &buff, ch[j]);
    }
    /* Flush the final partial bit buffer, then record how many bits it held
       so the decoder knows where the stream ends. */
    if (buff.size != 8)
        putc(buff.v, outfile);
    putc(buff.size, outfile);
    fclose(outfile);
    fclose(infile);
#ifdef STAT
    time2 = clock();
    printf("time:%f\n", (double)(time2 - time1) / (double)CLOCKS_PER_SEC);
#endif
}

/*
 * Scan `infile` once, counting byte frequencies, and fill `ptable` with
 * (symbol, probability) pairs for every byte that occurs at least once.
 * Returns the number of distinct symbols.
 *
 * NOTE(review): quicksort is called as (array, 0, size) — whether the upper
 * bound is inclusive or exclusive depends on the quicksort defined
 * elsewhere; confirm it expects an exclusive end index.
 */
int ptablebuild(FILE *infile, ptab ptable[])
{
    int freq_table[MAPSIZE], i, c;
    unsigned long total = 0;
    for (i = 0; i < MAPSIZE; ++i)
        freq_table[i] = 0;
    while ((c = getc(infile)) != EOF) {
        freq_table[c]++;
        total++;
    }
    double ftot = (double)total;
    int size = 0;
    for (i = 0; i < MAPSIZE; ++i) {
        if (!freq_table[i])
            continue;
        ptable[size].ch = i;
        ptable[size].p = (double)freq_table[i] / ftot;
        size++;
    }
    quicksort(ptable, 0, size);
    return size;
}

/*
 * Recursively assign Shannon-Fano codes to ptable[li..ri] (inclusive).
 * The range is split where the cumulative probability crosses half the
 * range's total; the lower part gets '0' appended, the upper part '1'.
 * `isp < 0` after the loop means the split never triggered (e.g. all mass
 * in the first entry), so the split point is forced to li + 1.
 */
void encode(int li, int ri)
{
    if (li == ri)
        return;
    int i, isp;
    float p, phalf;
    if (ri - li == 1) {
        /* Two symbols: trivially '0' and '1'. */
        charcat(codes[ptable[li].ch], '0');
        charcat(codes[ptable[ri].ch], '1');
    } else {
        phalf = 0;
        for (i = li; i <= ri; ++i)
            phalf += ptable[i].p;
        p = 0;
        isp = -1;
        phalf *= 0.5f;
        for (i = li; i <= ri; ++i) {
            if (p <= phalf)
                charcat(codes[ptable[i].ch], '0');
            else {
                charcat(codes[ptable[i].ch], '1');
                if (isp < 0)
                    isp = i; /* first index in the upper half */
            }
            p += ptable[i].p;
        }
        if (isp < 0)
            isp = li + 1;
        encode(li, isp - 1);
        encode(isp, ri);
    }
}

/*
 * Append character `t` to NUL-terminated string `s` in place.
 * Caller must guarantee capacity (codes[] rows are 256 bytes).
 */
void charcat(char s[], char t)
{
    int i = 0;
    while (s[i] != '\0')
        i++;
    s[i++] = t;
    s[i++] = '\0';
}
16.947977
85
0.536835
3.21875
3ef74a45487ef684eda81fe738c3e8bee9e2a584
2,861
h
C
src/ScoreTracker.h
syi47/spacebilliards
39a0a55761917144920a0a5ac4ff145a83d69a55
[ "Apache-2.0" ]
null
null
null
src/ScoreTracker.h
syi47/spacebilliards
39a0a55761917144920a0a5ac4ff145a83d69a55
[ "Apache-2.0" ]
null
null
null
src/ScoreTracker.h
syi47/spacebilliards
39a0a55761917144920a0a5ac4ff145a83d69a55
[ "Apache-2.0" ]
null
null
null
/* Copyright 2009 Tatham Johnson

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
#pragma once

#include <string>
#include <vector>

/// A single high-score record: a time (lower is better) and the player's name.
class Score
{
public:
    ///Constructor
    /**
    @param time Time, in Milliseconds, of the score
    @param name The name of the Player who owns the score
    **/
    Score(int time, const std::string& name) : m_Time(time), m_Name(name) {}

    int Time() const { return m_Time; } ///< The time of the score
    const std::string& Name() const { return m_Name; } ///< The name of the player who owns the score

    ///Orders scores by time only (name is ignored); used to sort the scores
    bool operator<(const Score& rvalue) const { return m_Time < rvalue.Time(); }

private:
    int m_Time;          ///< Time in milliseconds
    std::string m_Name;  ///< Owning player's name
};

/// Loads, stores, ranks and persists a list of Score records backed by a file.
/// Method bodies live in the corresponding .cpp file.
class ScoreTracker
{
public:
    ///Constructor
    /**
    @param fileName The location of the file to save and load scores to/from
    **/
    ScoreTracker(const std::string& fileName);

    ///Destructor
    ~ScoreTracker(void);

    ///Saves the scores to the file
    void save();

    ///Loads the scores from the file
    void load();

    ///Adds a score to the current scores.
    /** Note: does not automatically save the scores **/
    void addScore(int time, const std::string& name);

    ///Rates a score against the current high scores
    /**
    @param time The time to test against the current high scores
    @return The high score that the time would displace, or -1 if not a high score
    **/
    int rateScore(int time);

    ///Gets the number of scores currently stored
    /** @return The number of scores (note: size_t narrowed to int) **/
    int count() const {return m_Scores.size(); }

    ///Clears all the high scores
    /** This will clear all the scores stored inside the file as well **/
    void clearScores();

    ///Gets the score at the given index
    const Score& at(int index) const;

    ///Gets the score at the given index, using square bracket operators
    const Score& operator[](int index) const { return at(index); }

    ///Returns the index of the last score added
    int lastScoreIndex() { return m_LastScoreIndex; }

// NOTE(review): the duplicated access specifiers below are redundant but
// harmless; kept as-is to leave the declarations byte-identical.
private:
private:
    ///Sorts the scores lowest (best) to highest (worst)
    void sortScores();

    ///Removes scores from memory, but does not delete scores from disk
    void removeCachedScores();

private:
    std::string m_FileName;       ///< Backing file path
    std::vector<Score> m_Scores;  ///< In-memory score list
    int m_LastScoreIndex;         ///< Index of the most recently added score

    typedef std::vector<Score>::iterator ScoreIterator;
};
29.802083
107
0.692765
3.265625
72ba66a1ffade0abd7d9777fb505ec5a703b5441
4,797
lua
Lua
layout/stack.lua
ErikRoelofs/renderer
f0702d05752859a6d097f39e08ed97256e677f09
[ "MIT" ]
1
2016-09-26T18:49:36.000Z
2016-09-26T18:49:36.000Z
layout/stack.lua
ErikRoelofs/looky
f0702d05752859a6d097f39e08ed97256e677f09
[ "MIT" ]
null
null
null
layout/stack.lua
ErikRoelofs/looky
f0702d05752859a6d097f39e08ed97256e677f09
[ "MIT" ]
null
null
null
-- Stack layout: draws all children on top of each other, optionally "tilted"
-- (offset per child) along x and/or y.

-- Renders the background, then each child translated to its scaffolded offset.
local renderChildren = function(self)
  self:renderBackground()
  -- NOTE(review): result unused; call kept in case the method has side effects.
  local locX, locY = self:startCoordsBasedOnGravity()
  for k, v in ipairs(self.children) do
    love.graphics.push()
    love.graphics.translate( self.scaffold[v][1], self.scaffold[v][2])
    v:render()
    love.graphics.pop()
  end
end

-- Precomputes each child's (x, y) offset into self.scaffold, applying the
-- configured tilt per axis ("start" stacks toward the first child, "end"
-- toward the last, "none" disables the offset).
local function scaffoldViews(self)
  local tilt = function (number, direction)
    if self.tiltDirection[direction] == "start" then
      return (self.tiltAmount[direction] * (#self.children-1)) - (self.tiltAmount[direction] * number)
    elseif self.tiltDirection[direction] == "none" then
      return 0
    elseif self.tiltDirection[direction] == "end" then
      return self.tiltAmount[direction] * number
    end
  end
  local locX, locY = self:startCoordsBasedOnGravity()
  for k, v in ipairs(self.children) do
    self.scaffold[v] = { locX + tilt(k-1, 1), locY + tilt(k-1, 2) }
  end
end

-- Sizes every child ("fill" takes all available space, numbers are clamped
-- to it), lets each child lay itself out, then rebuilds the scaffold.
local function layout(self, children)
  local maxWidth = self:availableWidth()
  local maxHeight = self:availableHeight()
  for k, v in ipairs(children) do
    local childWidth, childHeight
    if v:desiredWidth() == "fill" then
      childWidth = maxWidth
    else
      childWidth = math.min(maxWidth, v:desiredWidth())
    end
    if v:desiredHeight() == "fill" then
      childHeight = maxHeight
    else
      childHeight = math.min(maxHeight, v:desiredHeight())
    end
    v:setDimensions(childWidth, childHeight)
  end
  for k, v in ipairs(children) do
    v:layoutingPass()
  end
  self:scaffoldViews()
end

-- Content width: widest child plus total horizontal tilt; "fill" propagates.
local function containerWidth(self)
  local width = 0
  for k, v in ipairs(self.children) do
    if v:desiredWidth() == "fill" then
      return "fill"
    else
      if v:desiredWidth() > width then
        width = v:desiredWidth()
      end
    end
  end
  return width + (self.tiltAmount[1] * #self.children)
end

-- Content height: tallest child plus total vertical tilt; "fill" propagates.
local function containerHeight(self)
  local height = 0
  for k, v in ipairs(self.children) do
    if v:desiredHeight() == "fill" then
      return "fill"
    else
      if v:desiredHeight() > height then
        height = v:desiredHeight()
      end
    end
  end
  height = height + (self.tiltAmount[2] * #self.children)
  return height
end

-- True when (x, y) in this container's space falls inside `child`'s granted box.
-- Bug fix: previously indexed self.scaffold[v] where `v` was an undefined
-- global, so every lookup hit scaffold[nil].
local function clickShouldTargetChild(self, x, y, child)
  local relativeX = x - self.scaffold[child][1]
  local relativeY = y - self.scaffold[child][2]
  return relativeX > 0 and relativeY > 0 and relativeX < child:getGrantedWidth() and relativeY < child:getGrantedHeight()
end

-- Forwards a positioned signal to every child whose box contains the point,
-- translating the coordinates into the child's space.
-- Bug fix: the loop variable is `v`; the body previously referenced an
-- undefined global `child`.
local function signalTargetedChildren(self, signal, payload)
  for i, v in ipairs(self:getChildren()) do
    if clickShouldTargetChild(self, payload.x, payload.y, v) then
      local thisPayload = {
        x = payload.x - self.scaffold[v][1],
        y = payload.y - self.scaffold[v][2]
      }
      v:receiveSignal(signal, thisPayload)
    end
  end
end

return function(looky)
  return {
    build = function (options)
      local base = looky:makeBaseLayout(options)
      base.renderCustom = renderChildren
      base.layoutingPass = function(self) layout(self, self.children) end
      base.contentWidth = containerWidth
      base.contentHeight = containerHeight
      base.tiltDirection = options.tiltDirection or {"none", "none"}
      base.tiltAmount = options.tiltAmount or {0,0}
      base.scaffoldViews = scaffoldViews
      base.scaffold = {}
      -- Removed: `base.getLocationOffset = getLocationOffset` assigned an
      -- undefined global (nil), which is a no-op in Lua.
      if not options.signalHandlers then
        options.signalHandlers = {}
        if not options.signalHandlers.leftclick then
          options.signalHandlers.leftclick = signalTargetedChildren
        end
      end
      base.signalHandlers = options.signalHandlers
      base.update = function(self, dt)
        for k, v in ipairs(self.children) do
          v:update(dt)
        end
      end
      base.translateCoordsToChild = function(self, child, x, y)
        return x - self.scaffold[child][1], y - self.scaffold[child][2]
      end
      base.translateCoordsFromChild = function(self, child, x, y)
        return x + self.scaffold[child][1], y + self.scaffold[child][2]
      end
      return base
    end,
    schema = looky:extendSchema("base", {
      tiltAmount = {
        required = false,
        schemaType = "table",
        options = {
          { required = true, schemaType = "number" },
          { required = true, schemaType = "number" },
        }
      },
      tiltDirection = {
        required = false,
        schemaType = "table",
        options = {
          { required = true, schemaType = "fromList", list = { "start", "none", "end" } },
          { required = true, schemaType = "fromList", list = { "start", "none", "end" } }
        }
      }
    })
  }
end
29.611111
111
0.631228
3.34375
77e57aae25476783412603e36caaa852e987551e
1,174
rs
Rust
contract/src/geohash.rs
enigmampc/safetraceV2
6d0598e75c109cab0300e67366074656b9d9b64a
[ "MIT" ]
4
2020-11-22T08:37:05.000Z
2021-01-21T09:20:04.000Z
contract/src/geohash.rs
enigmampc/safetraceV2
6d0598e75c109cab0300e67366074656b9d9b64a
[ "MIT" ]
null
null
null
contract/src/geohash.rs
enigmampc/safetraceV2
6d0598e75c109cab0300e67366074656b9d9b64a
[ "MIT" ]
null
null
null
use cosmwasm_std::{StdError, StdResult}; use geohash::{encode, Coordinate}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; const PRECISION: usize = 9usize; /// return the geohash to a precision degree specified by `PRECISION`. /// 7 ~ 76m /// 8 ~ 20m /// 9 ~ 7m /// 10 ~ 1m pub fn ghash(x: f64, y: f64) -> StdResult<String> { encode( Coordinate { x, // lng y, // lat }, PRECISION, ) .map_err(|_| StdError::generic_err(format!("Cannot encode data to geohash ({}, {})", x, y))) } pub fn neighbors(geohash: &String) -> StdResult<Vec<String>> { let mut all: Vec<String> = vec![]; let positions = geohash::neighbors(geohash) .map_err(|_| StdError::generic_err("Failed to decode geohash"))?; all.push(positions.n); all.push(positions.ne); all.push(positions.e); all.push(positions.se); all.push(positions.s); all.push(positions.sw); all.push(positions.w); all.push(positions.nw); Ok(all) } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct GeoLocationTime { pub geohash: String, pub timestamp_ms: u64, }
24.458333
96
0.625213
3.203125
dfdf0a0793736d8413235dfb32e0be3aa6c6d834
2,778
ts
TypeScript
projects/ng-translation/src/lib/components/ng-trans-subcontent/ng-trans-subcontent.component.spec.ts
wjx774326739/ng-translation
adc0c53a14bb40cf256f75b082a0a3eb69cea37b
[ "MIT" ]
3
2022-02-28T14:30:53.000Z
2022-02-28T14:53:39.000Z
projects/ng-translation/src/lib/components/ng-trans-subcontent/ng-trans-subcontent.component.spec.ts
wjx774326739/ng-translation
adc0c53a14bb40cf256f75b082a0a3eb69cea37b
[ "MIT" ]
3
2022-03-02T13:04:02.000Z
2022-03-11T14:45:22.000Z
projects/ng-translation/src/lib/components/ng-trans-subcontent/ng-trans-subcontent.component.spec.ts
bigBear713/ng-translation
adc0c53a14bb40cf256f75b082a0a3eb69cea37b
[ "MIT" ]
null
null
null
import { ChangeDetectorRef, Component, TemplateRef, ViewChild } from '@angular/core';
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { NgTransTestingModule } from '../../testing';
import { NgTransSubcontentComponent } from './ng-trans-subcontent.component';

/**
 * Host component exposing two template refs for feeding the subcontent
 * component under test: a plain one, and one that renders a list supplied
 * through the `list` context key.
 */
@Component({
  selector: 'mock-tpl-ref',
  template: `
    <ng-template #tplRef>{{content}}</ng-template>
    <ng-template #tplRefWithList let-list="list">
      <p *ngFor="let item of list">{{item}}</p>
    </ng-template>
  `,
})
export class MockTplRefComponent {
  @ViewChild('tplRef') tplRef!: TemplateRef<any>;
  @ViewChild('tplRefWithList') tplRefWithList!: TemplateRef<any>;
  content = 'mock templateRef content';
}

describe('Component: NgTransSubcontent', () => {
  let component: NgTransSubcontentComponent;
  let fixture: ComponentFixture<NgTransSubcontentComponent>;
  let hostEle: HTMLElement;

  // Creates and renders the mock host so its @ViewChild refs are resolved.
  const createMockHost = (): MockTplRefComponent => {
    const mockFixture = TestBed.createComponent(MockTplRefComponent);
    mockFixture.detectChanges();
    return mockFixture.componentInstance;
  };

  // Marks the component dirty and flushes the view.
  const detectChanges = (): void => {
    const changeDR = fixture.componentRef.injector.get(ChangeDetectorRef);
    changeDR.markForCheck();
    fixture.detectChanges();
  };

  beforeEach(async () => {
    await TestBed.configureTestingModule({
      imports: [NgTransTestingModule],
      declarations: [MockTplRefComponent],
    }).compileComponents();
  });

  beforeEach(() => {
    fixture = TestBed.createComponent(NgTransSubcontentComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
    hostEle = fixture.debugElement.nativeElement;
  });

  it('should be created', () => {
    expect(component).toBeTruthy();
  });

  it('the content is a string value', () => {
    const expectedText = 'test content';
    component.content = expectedText;
    detectChanges();
    expect(hostEle.textContent?.trim()).toEqual(expectedText);
  });

  it('the content is a templateRef type value', () => {
    const mockHost = createMockHost();
    component.content = mockHost.tplRef;
    detectChanges();
    expect(hostEle.textContent?.trim()).toEqual(mockHost.content);
  });

  it('the content is a templateRef type value with string list param', () => {
    const expectedList = ['mock list 1', 'mock list 2'];
    const mockHost = createMockHost();
    component.content = mockHost.tplRefWithList;
    component.list = expectedList;
    detectChanges();
    const renderedList = Array.from(hostEle.querySelectorAll('p')).map((item) => item.textContent?.trim());
    expect(renderedList).toEqual(expectedList);
  });
});
30.195652
104
0.703744
3.171875
4f9a40ddaeef2dfe752121a12c272ab3436c571f
1,978
lua
Lua
hammerspoon/init.lua
inoc603/dotfiles
a7c7592a59d51bb45a8d7977e51293ac2e845e1b
[ "MIT" ]
1
2017-04-20T13:24:50.000Z
2017-04-20T13:24:50.000Z
hammerspoon/init.lua
inoc603/dotfiles
a7c7592a59d51bb45a8d7977e51293ac2e845e1b
[ "MIT" ]
null
null
null
hammerspoon/init.lua
inoc603/dotfiles
a7c7592a59d51bb45a8d7977e51293ac2e845e1b
[ "MIT" ]
null
null
null
-- Hammerspoon config: reload hotkey plus Alacritty window management.

hs.hotkey.bind({"cmd", "ctrl"}, "r", function()
  hs.reload()
end)
hs.alert.show("Config loaded")

-- Returns the x coordinate of a screen's origin.
function posX(screen)
  -- Bug fix: x and y were implicit globals; declared local.
  local x, y = screen:position()
  return x
end

-- Picks the middle screen when screens are ordered left to right.
function screenAtCenter()
  local screens = hs.screen.allScreens()
  table.sort(screens, function(a, b)
    return posX(a) < posX(b)
  end)
  return screens[math.ceil(#screens/2)]
end

local wf=hs.window.filter

-- True when `str` begins with `start`.
function startsWith(str, start)
  return str:sub(1, #start) == start
end

-- Title prefix identifying windows this config manages.
local alacrittyPrefix = "alacritty-"
-- Cache of managed windows keyed by full title.
local appCache = {}

-- Moves a managed alacritty window to the center screen.
function moveToCenter(w)
  if startsWith(w:title(), alacrittyPrefix) then
    w:moveToScreen(screenAtCenter(), 0)
  end
end

-- when alacritty windows is created or focused by hot key, make sure it's in the center screen.
local alacritty = wf.new(false):setAppFilter('Alacritty', {allowTitles=1})
alacritty:subscribe(wf.windowCreated, moveToCenter)
alacritty:subscribe(wf.windowFocused, moveToCenter)

-- Focuses the alacritty window with the given title, launching a new one
-- running `commands` (argv list) if it does not exist yet.
function launchAlacritty(title, commands)
  title = alacrittyPrefix .. title
  -- Bug fix: `app` and `params` were implicit globals; declared local so
  -- concurrent hotkey invocations cannot trample each other's state.
  local app = appCache[title]
  if app == nil then
    app = hs.window.get(title)
    appCache[title] = app
  end
  if app == nil then
    local params = {"-t", title, "--config-file", os.getenv("HOME") .. "/.alacritty.yml"}
    if commands then
      table.insert(params, "-e")
      for i, v in ipairs(commands) do
        table.insert(params, v)
      end
    end
    hs.task.new(
      "/Applications/Alacritty.app/Contents/MacOS/alacritty",
      function()
        print("STOPPED", title)
        appCache[title] = nil
      end,
      params
    ):start()
  else
    app:focus()
  end
end

-- ssh to devbox and attach to the last used tmux session.
hs.hotkey.bind({"cmd", "ctrl"}, "k", function()
  launchAlacritty("remote", {"ssh", "t"})
end)

-- attach to the last used tmux session or create one from home directory if there is none.
hs.hotkey.bind({"cmd", "ctrl"}, "l", function()
  launchAlacritty("local", {"zsh", "--login", "-i", "-c", "ta"})
end)
25.037975
96
0.651668
3.484375
161a26670648f222d07a3cc7d9d0d73373cc4f96
9,445
ts
TypeScript
src/blockchain/transactions.ts
OasisDEX/xDex
e75d9fe69a9f9ceda4097546c9f6a8308b599a8d
[ "Apache-2.0" ]
null
null
null
src/blockchain/transactions.ts
OasisDEX/xDex
e75d9fe69a9f9ceda4097546c9f6a8308b599a8d
[ "Apache-2.0" ]
1
2022-02-21T14:14:10.000Z
2022-02-22T06:16:16.000Z
src/blockchain/transactions.ts
OasisDEX/xDex
e75d9fe69a9f9ceda4097546c9f6a8308b599a8d
[ "Apache-2.0" ]
1
2021-08-01T16:29:45.000Z
2021-08-01T16:29:45.000Z
/* * Copyright (C) 2020 Maker Ecosystem Growth Holdings, INC. */ import * as _ from 'lodash'; import { fromPairs } from 'ramda'; import { bindNodeCallback, combineLatest, fromEvent, merge, Observable, of, Subject, timer } from 'rxjs'; import { takeWhileInclusive } from 'rxjs-take-while-inclusive'; import { ajax } from 'rxjs/ajax'; import { catchError, filter, first, map, mergeMap, scan, shareReplay, startWith, switchMap } from 'rxjs/operators'; import { UnreachableCaseError } from '../utils/UnreachableCaseError'; import { account$, context$, onEveryBlock$ } from './network'; import { web3 } from './web3'; export enum TxStatus { WaitingForApproval = 'WaitingForApproval', CancelledByTheUser = 'CancelledByTheUser', Propagating = 'Propagating', WaitingForConfirmation = 'WaitingForConfirmation', Success = 'Success', Error = 'Error', Failure = 'Failure', } export function isDone(state: TxState) { return [TxStatus.CancelledByTheUser, TxStatus.Error, TxStatus.Failure, TxStatus.Success].indexOf(state.status) >= 0; } export function isDoneButNotSuccessful(state: TxState) { return [TxStatus.CancelledByTheUser, TxStatus.Error, TxStatus.Failure].indexOf(state.status) >= 0; } export function isSuccess(state: TxState) { return TxStatus.Success === state.status; } export function getTxHash(state: TxState): string | undefined { if ( state.status === TxStatus.Success || state.status === TxStatus.Failure || state.status === TxStatus.Error || state.status === TxStatus.WaitingForConfirmation ) { return state.txHash; } return undefined; } export enum TxRebroadcastStatus { speedup = 'speedup', cancel = 'cancel', } export type TxState = { account: string; txNo: number; networkId: string; meta: any; start: Date; end?: Date; lastChange: Date; dismissed: boolean; } & ( | { status: TxStatus.WaitingForApproval; } | { status: TxStatus.CancelledByTheUser; error: any; } | { status: TxStatus.WaitingForConfirmation | TxStatus.Propagating; txHash: string; broadcastedAt: Date; } | { status: 
TxStatus.Success; txHash: string; blockNumber: number; receipt: any; confirmations: number; safeConfirmations: number; rebroadcast?: TxRebroadcastStatus; } | { status: TxStatus.Failure; txHash: string; blockNumber: number; receipt: any; } | { status: TxStatus.Error; txHash: string; error: any; } ); let txCounter: number = 1; type NodeCallback<I, R> = (i: I, callback: (err: any, r: R) => any) => any; interface TransactionReceiptLike { transactionHash: string; status: boolean; blockNumber: number; } type GetTransactionReceipt = NodeCallback<string, TransactionReceiptLike>; interface TransactionLike { hash: string; nonce: number; input: string; blockHash: string; } type GetTransaction = NodeCallback<string, TransactionLike | null>; function txRebroadcastStatus({ hash, nonce, input }: TransactionLike) { return combineLatest(externalNonce2tx$, onEveryBlock$).pipe( map(([externalNonce2tx]) => { if (externalNonce2tx[nonce] && externalNonce2tx[nonce].hash !== hash) { return [ externalNonce2tx[nonce].hash, input === externalNonce2tx[nonce].callData ? TxRebroadcastStatus.speedup : TxRebroadcastStatus.cancel, ]; } return [hash, undefined]; }), ) as Observable<[string, undefined | TxRebroadcastStatus]>; } export function send( account: string, networkId: string, meta: any, method: (...args: any[]) => any, // Any contract method ): Observable<TxState> { const common = { account, networkId, meta, txNo: txCounter += 1, start: new Date(), lastChange: new Date(), }; function successOrFailure( txHash: string, receipt: TransactionReceiptLike, rebroadcast: TxRebroadcastStatus | undefined, ): Observable<TxState> { const end = new Date(); if (!receipt.status) { // TODO: failure should be confirmed! return of({ ...common, txHash, receipt, end, lastChange: end, blockNumber: receipt.blockNumber, status: TxStatus.Failure, } as TxState); } // TODO: error handling! 
return combineLatest(context$, onEveryBlock$).pipe( mergeMap(([context, blockNumber]) => of({ ...common, txHash, receipt, end, rebroadcast, lastChange: new Date(), blockNumber: receipt.blockNumber, status: TxStatus.Success, confirmations: Math.max(0, blockNumber - receipt.blockNumber), safeConfirmations: context.safeConfirmations, } as TxState), ), takeWhileInclusive((state) => state.status === TxStatus.Success && state.confirmations < state.safeConfirmations), ); } const promiEvent = method(); const result: Observable<TxState> = merge(fromEvent(promiEvent, 'transactionHash'), promiEvent).pipe( map((txHash: string) => [txHash, new Date()]), first(), mergeMap(([txHash, broadcastedAt]: [string, Date]) => timer(0, 1000).pipe( switchMap(() => bindNodeCallback(web3.eth.getTransaction as GetTransaction)(txHash)), filter((transaction) => !!transaction), first(), mergeMap( (transaction: TransactionLike) => (txRebroadcastStatus(transaction).pipe( switchMap(([hash, rebroadcast]) => bindNodeCallback(web3.eth.getTransactionReceipt as GetTransactionReceipt)(hash).pipe( filter((receipt) => receipt && !!receipt.blockNumber), mergeMap((receipt) => successOrFailure(hash, receipt, rebroadcast)), ), ), first(), startWith({ ...common, broadcastedAt, txHash, status: TxStatus.WaitingForConfirmation, } as TxState), catchError((error) => { return of({ ...common, error, txHash: transaction.hash, end: new Date(), lastChange: new Date(), status: TxStatus.Error, } as TxState); }), ) as any) as Observable<TxState>, ), startWith({ ...common, broadcastedAt, txHash, status: TxStatus.Propagating, } as TxState), ), ), startWith({ ...common, status: TxStatus.WaitingForApproval, }), shareReplay(1), catchError((error) => { if ((error.message as string).indexOf('User denied transaction signature') === -1) { console.error(error); } return of({ ...common, error, end: new Date(), lastChange: new Date(), status: TxStatus.CancelledByTheUser, }); }), ); result.subscribe((state) => transactionObserver.next({ 
state, kind: 'newTx' })); return result; } interface NewTransactionChange { kind: 'newTx'; state: TxState; } interface DismissedChange { kind: 'dismissed'; txNo: number; } export const transactionObserver: Subject<TransactionsChange> = new Subject(); type TransactionsChange = NewTransactionChange | DismissedChange; export const transactions$: Observable<TxState[]> = combineLatest( transactionObserver.pipe( scan((transactions: TxState[], change: TransactionsChange) => { switch (change.kind) { case 'newTx': { const newState = change.state; const result = [...transactions]; const i = result.findIndex((t) => t.txNo === newState.txNo); if (i >= 0) { result[i] = newState; } else { result.push(newState); } return result; } case 'dismissed': { const result = [...transactions]; const i = result.findIndex((t) => t.txNo === change.txNo); result[i].dismissed = true; return result; } default: throw new UnreachableCaseError(change); } }, []), ), account$, context$, ).pipe( map(([transactions, account, context]) => transactions.filter((t: TxState) => t.account === account && t.networkId === context.id), ), startWith([]), shareReplay(1), ); interface ExternalNonce2tx { [nonce: number]: { hash: string; callData: string }; } const externalNonce2tx$: Observable<ExternalNonce2tx> = combineLatest( context$, account$, onEveryBlock$.pipe(first()), onEveryBlock$, ).pipe( switchMap(([context, account, firstBlock]) => ajax({ url: `${context.etherscan.apiUrl}?module=account` + `&action=txlist` + `&address=${account}` + `&startblock=${firstBlock}` + `&sort=desc` + `&apikey=${context.etherscan.apiKey}`, }), ), map(({ response }) => response.result), map((transactions: Array<{ hash: string; nonce: string; input: string }>) => fromPairs( _.map( transactions, (tx) => [tx.nonce, { hash: tx.hash, callData: tx.input }] as [string, { hash: string; callData: string }], ), ), ), catchError((error) => { console.error(error); return of({}); }), shareReplay(1), );
27.456395
120
0.602859
3.125
24c6dec269d7c632d90e9801d7fd371e425dba33
14,669
go
Go
make-plural.go
gotnospirit/makeplural
a5f48d94d976801ab2251014c9a626d1c86d7e22
[ "BSD-2-Clause" ]
1
2020-04-02T01:08:35.000Z
2020-04-02T01:08:35.000Z
make-plural.go
gotnospirit/makeplural
a5f48d94d976801ab2251014c9a626d1c86d7e22
[ "BSD-2-Clause" ]
null
null
null
make-plural.go
gotnospirit/makeplural
a5f48d94d976801ab2251014c9a626d1c86d7e22
[ "BSD-2-Clause" ]
2
2019-04-12T07:23:21.000Z
2020-04-01T08:17:57.000Z
package main import ( "encoding/json" "flag" "fmt" "io/ioutil" "net/http" "os" "sort" "strconv" "strings" "text/template" "time" ) type ( Source interface { Culture() string CultureId() string Code() string } Test interface { toString() string } FuncSource struct { culture, vars, impl string } UnitTestSource struct { culture string tests []Test } UnitTest struct { ordinal bool expected, value string } Op struct { previous_logic, left, operator, right, next_logic string } ) func (x FuncSource) Culture() string { return x.culture } func (x FuncSource) CultureId() string { return sanitize(x.culture) } func (x FuncSource) Code() string { result := "" if "" != x.vars { result += x.vars + "\n" } result += x.impl return result } func (x UnitTestSource) Culture() string { return x.culture } func (x UnitTestSource) CultureId() string { return sanitize(x.culture) } func (x UnitTestSource) Code() string { var result []string for _, child := range x.tests { result = append(result, "\t\t"+child.toString()) } return strings.Join(result, "\n") } func (x UnitTest) toString() string { return fmt.Sprintf( "testNamedKey(t, fn, %s, `%s`, `%s`, %v)", x.value, x.expected, fmt.Sprintf("fn("+x.value+", %v)", x.ordinal), x.ordinal, ) } func sanitize(input string) string { var result string for _, char := range input { switch { case char >= 'a' && char <= 'z', char >= 'A' && char <= 'Z': result += string(char) } } return result } func (x Op) conditions() []string { var result []string conditions := strings.Split(x.right, ",") for _, condition := range conditions { pos := strings.Index(condition, "..") if -1 != pos { lower_bound, upper_bound := condition[:pos], condition[pos+2:] lb, _ := strconv.Atoi(lower_bound) ub, _ := strconv.Atoi(upper_bound) r := rangeCondition(x.left, lb, ub, x.operator) result = append(result, r...) 
} else { result = append(result, fmt.Sprintf("%s %s %s", x.left, x.operator, condition)) } } return result } func get(url, key string, headers *string) (map[string]map[string]string, error) { fmt.Print("GET ", url) response, err := http.Get(url) if err != nil { return nil, err } defer response.Body.Close() if 200 != response.StatusCode { return nil, fmt.Errorf(response.Status) } contents, err := ioutil.ReadAll(response.Body) var document map[string]map[string]json.RawMessage err = json.Unmarshal([]byte(contents), &document) if nil != err { return nil, err } if _, ok := document["supplemental"]; !ok { return nil, fmt.Errorf("Data does not appear to be CLDR data") } *headers += fmt.Sprintf("//\n// URL: %s\n", url) { var version map[string]string err = json.Unmarshal(document["supplemental"]["version"], &version) if nil != err { return nil, err } *headers += fmt.Sprintf("// %s\n", version["_number"]) } { var generation map[string]string err = json.Unmarshal(document["supplemental"]["generation"], &generation) if nil != err { return nil, err } *headers += fmt.Sprintf("// %s\n", generation["_date"]) } var data map[string]map[string]string err = json.Unmarshal(document["supplemental"]["plurals-type-"+key], &data) if nil != err { return nil, err } return data, nil } func rangeCondition(varname string, lower, upper int, operator string) []string { var result []string for i := lower; i <= upper; i++ { result = append(result, fmt.Sprintf("%s %s %d", varname, operator, i)) } return result } func pattern2code(input string, ptr_vars *[]string) []string { left, short, operator, logic := "", "", "", "" var ops []Op buf := "" loop: for _, char := range input { switch char { default: buf += string(char) case '@': break loop case ' ': case '=': if "" != buf { left, operator, buf = buf, "==", "" short = toVar(left, ptr_vars) } case '!': left, operator, buf = buf, "!=", "" short = toVar(left, ptr_vars) } if "" != buf { pos := strings.Index(buf, "and") if -1 != pos { ops = append(ops, 
Op{logic, short, operator, buf[:pos], "AND"}) buf, left, operator, logic = "", "", "", "AND" } else { pos = strings.Index(buf, "or") if -1 != pos { ops = append(ops, Op{logic, short, operator, buf[:pos], "OR"}) buf, left, operator, logic = "", "", "", "OR" } } } } if "" != buf { ops = append(ops, Op{logic, short, operator, buf, ""}) } if 1 == len(ops) { conditions := ops[0].conditions() if "==" == ops[0].operator { return conditions } else { return []string{strings.Join(conditions, " && ")} } } var result []string var buffer []string buffer_length := 0 for _, o := range ops { conditions := o.conditions() logic = o.previous_logic nextLogic := o.next_logic operator := o.operator if "OR" == logic && buffer_length > 0 { result = append(result, strings.Join(buffer, ", ")) buffer = []string{} buffer_length = 0 } if ("" == logic && "OR" == nextLogic) || ("OR" == logic && "OR" == nextLogic) || ("OR" == logic && "" == nextLogic) { if "==" == operator { buffer = append(buffer, conditions...) } else { buffer = append(buffer, strings.Join(conditions, " && ")) } buffer_length = len(buffer) } else if "AND" == logic && ("AND" == nextLogic || "" == nextLogic) { if "==" == operator { buffer[buffer_length-1] += " && " + joinOr(conditions) } else { buffer[buffer_length-1] += " && " + strings.Join(conditions, " && ") } } else if "" == logic && "AND" == nextLogic { if "==" == operator { buffer = append(buffer, joinOr(conditions)) } else { buffer = append(buffer, strings.Join(conditions, " && ")) } buffer_length = len(buffer) } else if "OR" == logic && "AND" == nextLogic { if "==" == operator { if len(conditions) > 1 { buffer = append(buffer, joinOr(conditions)) } else { buffer = append(buffer, conditions...) 
} } else { buffer = append(buffer, strings.Join(conditions, " && ")) } buffer_length = len(buffer) } else if "AND" == logic && "OR" == nextLogic { if "==" == operator { buffer[buffer_length-1] += " && " + joinOr(conditions) } else { buffer[buffer_length-1] += " && " + strings.Join(conditions, " && ") } } } if len(buffer) > 0 { if "OR" == logic { result = append(result, buffer...) } else { result = append(result, strings.Join(buffer, " && ")) } } return result } func joinOr(data []string) string { if len(data) > 1 { return "(" + strings.Join(data, " || ") + ")" } return data[0] } func rule2code(key string, data map[string]string, ptr_vars *[]string, padding string) string { if input, ok := data["pluralRule-count-"+key]; ok { result := "" if "other" == key { if 1 == len(data) { return padding + "return \"other\"\n" } result += padding + "default:\n" } else { cases := pattern2code(input, ptr_vars) result += "\n" + padding + "case " + strings.Join(cases, ", ") + ":\n" } result += padding + "\treturn \"" + key + "\"\n" return result } return "" } func map2code(data map[string]string, ptr_vars *[]string, padding string) string { if 1 == len(data) { return rule2code("other", data, ptr_vars, padding) } result := padding + "switch {\n" result += rule2code("other", data, ptr_vars, padding) result += rule2code("zero", data, ptr_vars, padding) result += rule2code("one", data, ptr_vars, padding) result += rule2code("two", data, ptr_vars, padding) result += rule2code("few", data, ptr_vars, padding) result += rule2code("many", data, ptr_vars, padding) result += padding + "}\n" return result } func splitValues(input string) []string { var result []string pos := -1 for idx, char := range input { switch { case (char >= '0' && char <= '9') || '.' 
== char: if -1 == pos { pos = idx } // Inutile de générer un interval lorsque l'on rencontre '~' :) case ' ' == char || ',' == char || '~' == char: if -1 != pos { result = append(result, input[pos:idx]) pos = -1 } } } if -1 != pos { result = append(result, input[pos:]) } return result } func pattern2test(expected, input string, ordinal bool) []Test { var result []Test patterns := strings.Split(input, "@") for _, pattern := range patterns { if strings.HasPrefix(pattern, "integer") { for _, value := range splitValues(pattern[8:]) { result = append(result, UnitTest{ordinal, expected, value}) } } else if strings.HasPrefix(pattern, "decimal") { for _, value := range splitValues(pattern[8:]) { result = append(result, UnitTest{ordinal, expected, "\"" + value + "\""}) } } } return result } func map2test(ordinals, plurals map[string]string) []Test { var result []Test for _, rule := range []string{"one", "two", "few", "many", "zero", "other"} { if input, ok := ordinals["pluralRule-count-"+rule]; ok { result = append(result, pattern2test(rule, input, true)...) } if input, ok := plurals["pluralRule-count-"+rule]; ok { result = append(result, pattern2test(rule, input, false)...) } } return result } func culture2code(ordinals, plurals map[string]string, padding string) (string, string, []Test) { var code string var vars []string if nil == ordinals { code = map2code(plurals, &vars, padding) } else { code = padding + "if ordinal {\n" code += map2code(ordinals, &vars, padding+"\t") code += padding + "}\n\n" code += map2code(plurals, &vars, padding) } tests := map2test(ordinals, plurals) str_vars := "" max := len(vars) if max > 0 { // http://unicode.org/reports/tr35/tr35-numbers.html#Operands // // Symbol Value // n absolute value of the source number (integer and decimals). // i integer digits of n. // v number of visible fraction digits in n, with trailing zeros. // w number of visible fraction digits in n, without trailing zeros. 
// f visible fractional digits in n, with trailing zeros. // t visible fractional digits in n, without trailing zeros. var_f := varname('f', vars) var_i := varname('i', vars) var_n := varname('n', vars) var_v := varname('v', vars) var_t := varname('t', vars) var_w := varname('w', vars) if "_" != var_f || "_" != var_v || "_" != var_t || "_" != var_w { str_vars += padding + fmt.Sprintf("%s, %s, %s, %s, %s, %s := finvtw(value)\n", var_f, var_i, var_n, var_v, var_t, var_w) } else { if "_" != var_n { if "_" != var_i { str_vars += padding + "flt := float(value)\n" str_vars += padding + "n := math.Abs(flt)\n" str_vars += padding + "i := int64(flt)\n" } else { str_vars += padding + "n := math.Abs(float(value))\n" } } else if "_" != var_i { str_vars += padding + "i := int64(float(value))\n" } } for i := 0; i < max; i += 2 { k := vars[i] v := vars[i+1] if k != v { str_vars += padding + k + " := " + v + "\n" } } } return str_vars, code, tests } func addVar(varname, expr string, ptr_vars *[]string) string { exists := false for i := 0; i < len(*ptr_vars); i += 2 { if (*ptr_vars)[i] == varname { exists = true break } } if !exists { *ptr_vars = append(*ptr_vars, varname, expr) } return varname } func toVar(expr string, ptr_vars *[]string) string { var varname string if pos := strings.Index(expr, "%"); -1 != pos { k, v := expr[:pos], expr[pos+1:] varname = k + v if "n" == k { expr = "mod(n, " + v + ")" } else { expr = k + " % " + v } } else { varname = expr } return addVar(varname, expr, ptr_vars) } func varname(char uint8, vars []string) string { for i := 0; i < len(vars); i += 2 { if char == vars[i][0] { return string(char) } } return "_" } func createGoFiles(headers string, ptr_plurals, ptr_ordinals *map[string]map[string]string) error { var cultures []string if "*" == *user_culture { // On sait que len(ordinals) <= len(plurals) for culture, _ := range *ptr_plurals { cultures = append(cultures, culture) } } else { for _, culture := range strings.Split(*user_culture, ",") { 
culture = strings.TrimSpace(culture) if _, ok := (*ptr_plurals)[culture]; !ok { return fmt.Errorf("Aborted, `%s` not found...", culture) } cultures = append(cultures, culture) } } sort.Strings(cultures) if 0 == len(cultures) { return fmt.Errorf("Not enough data to create source...") } var items []Source var tests []Source for _, culture := range cultures { fmt.Print(culture) plurals := (*ptr_plurals)[culture] if nil == plurals { fmt.Println(" \u2717 - Plural not defined") } else if _, ok := plurals["pluralRule-count-other"]; !ok { fmt.Println(" \u2717 - Plural missing mandatory `other` choice...") } else { ordinals := (*ptr_ordinals)[culture] if nil != ordinals { if _, ok := ordinals["pluralRule-count-other"]; !ok { fmt.Println(" \u2717 - Ordinal missing the mandatory `other` choice...") continue } } vars, code, unit_tests := culture2code(ordinals, plurals, "\t\t") items = append(items, FuncSource{culture, vars, code}) fmt.Println(" \u2713") if len(unit_tests) > 0 { tests = append(tests, UnitTestSource{culture, unit_tests}) } } } if len(tests) > 0 { err := createSource("plural_test.tmpl", "plural/func_test.go", headers, tests) if nil != err { return err } } return createSource("plural.tmpl", "plural/func.go", headers, items) } func createSource(tmpl_filepath, dest_filepath, headers string, items []Source) error { source, err := template.ParseFiles(tmpl_filepath) if nil != err { return err } file, err := os.Create(dest_filepath) if nil != err { return err } defer file.Close() return source.Execute(file, struct { Headers string Timestamp string Items []Source }{ headers, time.Now().Format(time.RFC1123Z), items, }) } var user_culture = flag.String("culture", "*", "Culture subset") func main() { flag.Parse() var headers string ordinals, err := get("https://github.com/unicode-cldr/cldr-core/raw/master/supplemental/ordinals.json", "ordinal", &headers) if nil != err { fmt.Println(" \u2717") fmt.Println(err) } else { fmt.Println(" \u2713") plurals, err := 
get("https://github.com/unicode-cldr/cldr-core/raw/master/supplemental/plurals.json", "cardinal", &headers) if nil != err { fmt.Println(" \u2717") fmt.Println(err) } else { fmt.Println(" \u2713") err = createGoFiles(headers, &plurals, &ordinals) if nil != err { fmt.Println(err, "(╯°□°)╯︵ ┻━┻") } else { fmt.Println("Succeed (ッ)") } } } }
23.4704
125
0.602631
3.234375
149e40eb324765f72b30ac300899143823cc1062
1,109
kt
Kotlin
app/src/main/java/com/mapswithme/maps/search/BookingFilterParams.kt
dnemov/omim.kt
8b75114193e141aee14fcbc207a208c4a39de1db
[ "Apache-2.0" ]
1
2020-03-06T13:56:02.000Z
2020-03-06T13:56:02.000Z
app/src/main/java/com/mapswithme/maps/search/BookingFilterParams.kt
dnemov/omim.kt
8b75114193e141aee14fcbc207a208c4a39de1db
[ "Apache-2.0" ]
null
null
null
app/src/main/java/com/mapswithme/maps/search/BookingFilterParams.kt
dnemov/omim.kt
8b75114193e141aee14fcbc207a208c4a39de1db
[ "Apache-2.0" ]
null
null
null
package com.mapswithme.maps.search import android.os.Parcel import android.os.Parcelable import com.mapswithme.util.ConnectionState import kotlinx.android.parcel.Parcelize @Parcelize class BookingFilterParams (val mCheckinMillisec: Long, val mCheckoutMillisec: Long, vararg val mRooms: Room) : Parcelable { @Parcelize class Room(val mAdultsCount: Int, val mAgeOfChild: Int) : Parcelable { constructor(adultsCount: Int) : this(adultsCount, NO_CHILDREN) companion object { // This value is corresponds to AvailabilityParams::Room::kNoChildren in core. const val NO_CHILDREN = -1 @JvmField val DEFAULT = Room(2) } } class Factory { fun createParams( checkIn: Long, checkOut: Long, vararg rooms: Room ): BookingFilterParams? { return if (ConnectionState.isConnected) BookingFilterParams( checkIn, checkOut, *rooms ) else null } } }
29.972973
90
0.593327
3.078125
cb493c42819d31b414ebda897c1b87d112dc3574
1,718
swift
Swift
XLsn0wQuora/Classes/APIManager/APIManager.swift
XLsn0w/XLsn0wQuora
93d803a321b1696d0507df8294581c85058a2a3e
[ "MIT" ]
10
2017-10-25T08:49:59.000Z
2018-06-16T01:21:24.000Z
XLsn0wQuora/Classes/APIManager/APIManager.swift
XLsn0w/XLsn0wQuora
93d803a321b1696d0507df8294581c85058a2a3e
[ "MIT" ]
null
null
null
XLsn0wQuora/Classes/APIManager/APIManager.swift
XLsn0w/XLsn0wQuora
93d803a321b1696d0507df8294581c85058a2a3e
[ "MIT" ]
1
2018-03-05T07:21:09.000Z
2018-03-05T07:21:09.000Z
import Foundation import Moya //: URL基地址 let BASE_URL = "http://english.6ag.cn/" enum APIManager { case getLaunchImg case getNewsList case getMoreNews(String) case getThemeList case getThemeDesc(Int) case getNewsDesc(Int) } extension APIManager: TargetType { /// The target's base `URL`. var baseURL: URL { return URL.init(string: "http://news-at.zhihu.com/api/")! } /// The path to be appended to `baseURL` to form the full `URL`. var path: String { switch self { case .getLaunchImg: return "7/prefetch-launch-images/750*1142" case .getNewsList: return "4/news/latest" case .getMoreNews(let date): return "4/news/before/" + date case .getThemeList: return "4/themes" case .getThemeDesc(let id): return "4/theme/\(id)" case .getNewsDesc(let id): return "4/news/\(id)" } } /// The HTTP method used in the request. var method: Moya.Method { return .get } /// The parameters to be incoded in the request. var parameters: [String: Any]? { return nil } /// The method used for parameter encoding. var parameterEncoding: ParameterEncoding { return URLEncoding.default } /// Provides stub data for use in testing. var sampleData: Data { return "".data(using: String.Encoding.utf8)! } /// The type of HTTP task to be performed. var task: Task { return .request } /// Whether or not to perform Alamofire validation. Defaults to `false`. var validate: Bool { return false } }
23.861111
76
0.582072
3
3334b81c461f0f87292473253147e7b45b17a48f
1,705
py
Python
scripts/wsi_bot_show_regions.py
higex/qpath
0377f2fdadad6e02ecde8ba2557fe9b957280fa1
[ "MIT" ]
6
2017-03-18T19:17:42.000Z
2019-05-05T14:57:31.000Z
WSItk/tools/wsi_bot_show_regions.py
vladpopovici/WSItk
02db9dbf1148106a576d7b4dd7965c73607efdae
[ "MIT" ]
null
null
null
WSItk/tools/wsi_bot_show_regions.py
vladpopovici/WSItk
02db9dbf1148106a576d7b4dd7965c73607efdae
[ "MIT" ]
4
2015-11-29T14:47:25.000Z
2019-11-28T03:16:39.000Z
# -*- coding: utf-8 -*- """ SHOW_REGIONS Emphasizes some regions in the image, by decreasing the importance of the rest. @author: vlad """ from __future__ import (absolute_import, division, print_function, unicode_literals) from builtins import * import argparse as opt import skimage.io import numpy as np from util.storage import ModelPersistence from util.visualization import enhance_patches __author__ = 'vlad' __version__ = 0.1 def main(): p = opt.ArgumentParser(description=""" Emphasizes the patches with a given code (from BoT) by reducing the contrast of the rest of the image. """ ) p.add_argument('image', action='store', help='image file name') p.add_argument('res_image', action='store', help='name of the resulting image') p.add_argument('bot_result', action='store', help='a file with BoT coding for regions') p.add_argument('bot_code', action='store', help='the code of the regions to be emphasized', type=int) p.add_argument('-g', '--gamma', action='store', nargs=1, type=float, help='the gamma level of the background regions', default=0.2) args = p.parse_args() img = skimage.io.imread(args.image) regs = [] with ModelPersistence(args.bot_result, 'r', format='pickle') as d: block_codes = d['l1_codes'] regs = d['regs'] #print(block_codes) #print(args.bot_code) # filter regions of interest: roi = [ regs[k] for k in np.where(np.array(block_codes, dtype=np.int) == args.bot_code)[0] ] #print(roi) img = enhance_patches(img, roi, _gamma=args.gamma) skimage.io.imsave(args.res_image, img) return if __name__ == '__main__': main()
28.898305
106
0.674487
3.046875
042014a0d273822380e8703c8cba04e1914fcb3d
5,230
js
JavaScript
Develop/app.js
PopSizzle/TeamProfileGenerator
5cfa650fc37824f934211b676fcd5c7a5984fe8d
[ "MIT" ]
null
null
null
Develop/app.js
PopSizzle/TeamProfileGenerator
5cfa650fc37824f934211b676fcd5c7a5984fe8d
[ "MIT" ]
1
2021-05-11T10:37:20.000Z
2021-05-11T10:37:20.000Z
Develop/app.js
PopSizzle/TeamProfileGenerator
5cfa650fc37824f934211b676fcd5c7a5984fe8d
[ "MIT" ]
null
null
null
const Manager = require("./lib/Manager");
const Engineer = require("./lib/Engineer");
const Intern = require("./lib/Intern");
const inquirer = require("inquirer");
const path = require("path");
const fs = require("fs");

// Accumulates every employee created during this session.
let employees = [];
const OUTPUT_DIR = path.resolve(__dirname, "output");
// NOTE(review): this path names team.html but the writer below emits
// ./output/index.html; kept unchanged for compatibility — confirm which
// filename the html templates expect.
const outputPath = path.join(OUTPUT_DIR, "team.html");
const render = require("./lib/htmlRenderer");

// Print a welcome banner, then start the member-entry loop.
function beginTeam() {
  console.log("Welcome to the team creator!");
  console.log("We will now begin to construct your team profile");
  console.log("------------------------------------------");
  enterTeamMember();
}

// Prompt for one member's shared details (name/id/email/role), then the
// role-specific detail, and push the resulting class instance onto `employees`.
function enterTeamMember() {
  inquirer
    .prompt([
      { type: "input", message: "Please enter the name of your employee.", name: "name" },
      { type: "input", message: "Please enter the id of your employee.", name: "id" },
      { type: "input", message: "Please enter the email of your employee.", name: "email" },
      {
        type: "list",
        message: "Please select this employee's role on your team.",
        name: "role",
        choices: ["Manager", "Engineer", "Intern"],
      },
    ])
    .then(function (response) {
      switch (response.role) {
        case "Manager": {
          // A team may only have one manager; bounce back to the menu if one exists.
          const existingManagers = employees.filter(
            (employee) => employee.getRole() === "Manager"
          );
          if (existingManagers.length > 0) {
            console.log("Your team already has a manager, please go back and select a different option.");
            return nextStep();
          }
          inquirer
            .prompt([
              { type: "input", message: "Please enter your manager's office number.", name: "officeNumber" },
            ])
            .then(function (response1) {
              const manager = new Manager(response.name, response.id, response.email, response1.officeNumber);
              employees.push(manager);
              nextStep();
            });
          break;
        }
        case "Engineer":
          inquirer
            .prompt([
              { type: "input", message: "Please enter your Engineer's Github username.", name: "github" },
            ])
            .then(function (response1) {
              const engineer = new Engineer(response.name, response.id, response.email, response1.github);
              employees.push(engineer);
              nextStep();
            });
          break;
        default:
          // Any role that is not Manager or Engineer is an Intern.
          inquirer
            .prompt([
              { type: "input", message: "Please enter your Intern's school.", name: "school" },
            ])
            .then(function (response1) {
              const intern = new Intern(response.name, response.id, response.email, response1.school);
              employees.push(intern);
              nextStep();
            });
      }
    });
}

// Ask whether to add another member, render the team to HTML, or exit.
function nextStep() {
  inquirer
    .prompt([
      {
        type: "list",
        message: "what would you like to do now?",
        name: "continue",
        choices: ["Add another employee", "Print my team to an html file", "Exit"],
      },
    ])
    .then(function (response) {
      switch (response.continue) {
        case "Add another employee":
          enterTeamMember();
          break;
        case "Print my team to an html file":
          // Fix: create the output directory first — fs.writeFile fails with
          // ENOENT when ./output does not exist yet.
          fs.mkdirSync(OUTPUT_DIR, { recursive: true });
          fs.writeFile("./output/index.html", render(employees), function (err) {
            if (err) {
              return console.log(err);
            }
            console.log("Success!");
          });
          break;
        default:
          return;
      }
    });
}

beginTeam();
31.317365
111
0.484512
3.1875
1e581aecf2f32077037563a16d8b0ce759776e2a
2,765
lua
Lua
CountDownLua.lua
yuzh0816/Count-Down
c546915bea08a1a43356380095ad2a0d2047ce75
[ "MIT" ]
1
2020-12-18T01:07:41.000Z
2020-12-18T01:07:41.000Z
CountDownLua.lua
yuzh0816/Count-Down
c546915bea08a1a43356380095ad2a0d2047ce75
[ "MIT" ]
null
null
null
CountDownLua.lua
yuzh0816/Count-Down
c546915bea08a1a43356380095ad2a0d2047ce75
[ "MIT" ]
null
null
null
-- Rainmeter skin script: counts down to a fixed target date/time, updating
-- string meters for days/hours/minutes/seconds and widening a progress meter
-- as time elapses between a fixed start time and the target.

-- Placeholder property table (zeroed defaults; not read elsewhere in this file).
PROPERTIES = {year=0, month=0, day=0, hour=0, min=0, sec=0}
totalTime = 0        -- total seconds between start time and target time
startTime = 0        -- epoch seconds of the (hard-coded) start moment
isWorkOvertime = false
-- Target date/time of the countdown.
YYYY = 2021
MM = 1
DD = 5
H = 8
M = 0
S = 0

-- Rainmeter entry point: cache the string meters and precompute the
-- countdown window (start, target, total span).
function Initialize()
	stringDate = tolua.cast(SKIN:GetMeter("Date"), "CMeterString")
	stringHour = tolua.cast(SKIN:GetMeter("Hour"), "CMeterString")
	stringMinute = tolua.cast(SKIN:GetMeter("Minute"), "CMeterString")
	stringSecond = tolua.cast(SKIN:GetMeter("Second"), "CMeterString")
	stringmSecond = tolua.cast(SKIN:GetMeter("mSecond"), "CMeterString")
	startTime = os.time(getStartWorkTime())
	countdownTime = getOffWorkTime()
	totalTime = os.time(countdownTime)-startTime
	progress = 0
end -- function Initialize

-- Rainmeter update tick: recompute remaining time, clamp at zero, and push
-- the pieces into the meters; also advances the progress bar width.
function Update()
	local rLeft = os.time(countdownTime) - os.time()
	if rLeft < 0 then rLeft = 0 end
	local dLeft = math.floor(rLeft/60/60/24)
	local hLeft = math.floor(rLeft/60/60)%24
	local mLeft = math.floor(rLeft/60)%60
	local sLeft = math.floor(rLeft)%60
	-- Fake millisecond display derived from os.clock(); purely cosmetic.
	local msLeft = math.floor(1000-(os.clock()*1000)%1000)
	if rLeft == 0 then stringmSecond:SetText(0) else stringmSecond:SetText(msLeft) end
	if totalTime > 0 and progress <= 1 then
		progress = (os.time()-startTime)/totalTime
		local progressWidth = getMeterWidth() * progress
		progressMeter = SKIN:GetMeter("progress")
		progressMeter:SetW(progressWidth)
		-- NOTE(review): `color` is computed but never applied — the two
		-- SetSolidColor calls below are commented out, so getCurrentColor()
		-- is currently dead weight. Confirm whether coloring should be enabled.
		local color = getCurrentColor(progress)
		--myMeter:SetSolidColor(color)
		--myMeter:SetOption('SolidColor', color)
	end
	stringDate:SetText(dLeft)
	stringHour:SetText(hLeft)
	stringMinute:SetText(mLeft)
	stringSecond:SetText(sLeft)
end -- function Update

-- Total width of the countdown strip: sum of all text meters' widths.
function getMeterWidth()
	local meterWidth = SKIN:GetMeter("Note"):GetW() + SKIN:GetMeter("Date"):GetW() + SKIN:GetMeter("Hour"):GetW() + SKIN:GetMeter("Minute"):GetW() + SKIN:GetMeter("Second"):GetW()
	return meterWidth
end

-- Returns the countdown target as an os.time()-compatible table.
-- NOTE(review): the weekday/overtime logic below computes `hour` but the
-- return value ignores it and always uses the fixed YYYY/MM/DD/H/M/S target —
-- the first half of this function is dead code. Presumably left over from an
-- earlier "off-work time" version; confirm before removing.
function getOffWorkTime()
	local w = os.date("%w")
	local hour = 21
	if w == "5" then hour = 18 end
	if isWorkOvertime == false then hour = 18 end
	return {year=YYYY, month=MM, day=DD, hour=H, min=M, sec=S}
end

-- Hard-coded start moment of the countdown window.
function getStartWorkTime()
	return {year=2020, month=12, day=16, hour=15, min=33, sec=35}
end

-- Linear blend between a fixed start color and end color at `progress` (0..1).
-- Returns a table with r/g/b fields (currently unused — see Update()).
function getCurrentColor(progress)
	local startR = 30
	local startG = 199
	local startB = 230
	local endR = 146
	local endG = 185
	local endB = 1
	local currentR = getCurrentValue(startR, endR, progress)
	local currentG = getCurrentValue(startG, endG, progress)
	local currentB = getCurrentValue(startB, endB, progress)
	local RGB = {}
	RGB.r = currentR
	RGB.g = currentG
	RGB.b = currentB
	return RGB
end

-- Linear interpolation between startValue and endValue at `progress`.
function getCurrentValue(startValue, endValue, progress)
	local left = endValue - startValue
	if left == 0 then return startValue end
	local currentValue = startValue + left * progress
	return currentValue
end
23.235294
69
0.713924
3.40625
ebaa870ae82fce283159efbef5235534c057cc7e
1,271
rs
Rust
src/sinks/aws_lambda/run.rs
savaki/oura
05527037c8ba2e2810554684f492339487b14a19
[ "Apache-2.0" ]
124
2021-12-04T11:13:09.000Z
2022-03-26T10:51:19.000Z
src/sinks/aws_lambda/run.rs
savaki/oura
05527037c8ba2e2810554684f492339487b14a19
[ "Apache-2.0" ]
95
2021-12-14T05:44:16.000Z
2022-03-31T22:38:28.000Z
src/sinks/aws_lambda/run.rs
savaki/oura
05527037c8ba2e2810554684f492339487b14a19
[ "Apache-2.0" ]
16
2021-12-09T19:07:55.000Z
2022-03-28T16:01:05.000Z
use aws_sdk_lambda::{types::Blob, Client}; use serde_json::json; use std::sync::Arc; use crate::{model::Event, pipelining::StageReceiver, utils::Utils, Error}; async fn invoke_lambda_function( client: Arc<Client>, function_name: &str, event: &Event, ) -> Result<(), Error> { let body = json!(event).to_string(); let req = client .invoke() .function_name(function_name) .payload(Blob::new(body)); let res = req.send().await?; log::trace!("Lambda invoke response: {:?}", res); Ok(()) } pub fn writer_loop( input: StageReceiver, client: Client, function_name: &str, utils: Arc<Utils>, ) -> Result<(), Error> { let client = Arc::new(client); let rt = tokio::runtime::Builder::new_current_thread() .enable_time() .enable_io() .build()?; for event in input.iter() { // notify the pipeline where we are utils.track_sink_progress(&event); let client = client.clone(); let result = rt.block_on(invoke_lambda_function(client, function_name, &event)); if let Err(err) = result { log::error!("unrecoverable error invoking lambda function: {:?}", err); return Err(err); } } Ok(()) }
23.109091
88
0.592447
3.046875
f06f16ee399ccb9faac16cda8b08d3cc4df552cb
1,480
py
Python
projectenv/main/forms.py
rzsaglam/project-env
f4c02b15cf924ba5d69d8a4a89efcc686b73aa9c
[ "MIT" ]
null
null
null
projectenv/main/forms.py
rzsaglam/project-env
f4c02b15cf924ba5d69d8a4a89efcc686b73aa9c
[ "MIT" ]
null
null
null
projectenv/main/forms.py
rzsaglam/project-env
f4c02b15cf924ba5d69d8a4a89efcc686b73aa9c
[ "MIT" ]
null
null
null
from django import forms from django.contrib.auth import models from django.db.models.base import Model from django.forms import ModelForm, fields from .models import Paint from django import forms from django.contrib.auth.models import User from django.contrib.auth.forms import UserCreationForm, AuthenticationForm class StockForm(forms.ModelForm): class Meta: model = Paint fields = "__all__" class PaintForm(forms.ModelForm): class Meta: model = Paint fields = "__all__" def save(self, commit=True): paint = super(PaintForm, self).save(commit=False) if commit: paint.save() return paint class NewUserForm(UserCreationForm): username = forms.CharField(max_length=200, required=True, widget=forms.TextInput( attrs={'class': 'input-group-text'})) class Meta: model = User fields = ("username", "password1", "password2") def save(self, commit=True): user = super(NewUserForm, self).save(commit=False) if commit: user.save() return user class LoginForm(AuthenticationForm): username = forms.CharField(max_length=200, required=True, widget=forms.TextInput( attrs={'class': 'input-group-text'})) password = forms.CharField(max_length=200, required=True, widget=forms.TextInput( attrs={'class': 'input-group-text'})) class Meta: model = User fields = ("username", "password")
27.924528
85
0.667568
3.28125
3302f95944549893e6c718830b8f06c614895c10
8,700
py
Python
Python/cs611python.py
david145/CS6112018
7a74c239bf5157507594157b5871c9d0c70fcc23
[ "MIT" ]
null
null
null
Python/cs611python.py
david145/CS6112018
7a74c239bf5157507594157b5871c9d0c70fcc23
[ "MIT" ]
1
2018-10-29T17:41:08.000Z
2018-10-29T17:41:08.000Z
Python/cs611python.py
david145/CS6112018
7a74c239bf5157507594157b5871c9d0c70fcc23
[ "MIT" ]
null
null
null
# Homework transcript script: each exercise prints an expression, its result,
# and an explanation. Output formatting is the deliverable, so all string
# literals (including the stray "\ " continuation artifacts inside them) are
# preserved exactly as written.

print("\n")
print("PythonExercises-v2 by David Bochan")
print("\n")

# --- Exercise 1: integer/float division and modulus ---
print("=== EXERCISE 1 ===")
print("\n")
print("(a) 5 / 3 = " + str(5 / 3))
print("=> with python3 you can receive a float even if you divide two \ integers")
print("\n")
print("(b) 5 % 3 = " + str(5 % 3))
print("=> % is the modulus which divides left hand operand by right hand \ operand and returns remainder")
print("\n")
print("(c) 5.0 / 3 = " + str(5.0 / 3))
print("=> outputs a float number.. there is no difference if a plain 5 or 5.0 \ is used")
print("\n")
print("(d) 5 / 3.0 = " + str(5 / 3.0))
print("=> outputs a float number.. there is no difference if a plain 3 or 3.0 \ is used")
print("\n")
print("(e) 5.2 % 3 = " + str(5.2 % 3))
print("=> % is the modulus which divides left hand operand by right hand \ operand and returns remainder")
print("\n")

# --- Exercise 2: float overflow and rounding ---
print("=== EXERCISE 2 ===")
print("\n")
print("(a) 2000.3 ** 200 = ...")
try:
    print(str(2000.3 ** 200))
except OverflowError as e:
    print("=> The python3 interpreter throws a OverflowError " + str(e))
print("\n")
print("(b) 1.0 + 1.0 - 1.0 = " + str(1.0 + 1.0 - 1.0))
print("=> Addition and substraction of float values which results in another \ float value")
print("\n")
print("(c) 1.0 + 1.0e20 - 1.0e20 = " + str(1.0 + 1.0e20 - 1.0e20))
print("=> 1.0 + 1.0e20 is rounded as close as possible, which is 1.0e20 and \ after substraction of it again it results in 0.0")
print("\n")

# --- Exercise 3: type conversions ---
print("=== EXERCISE 3 ===")
print("\n")
print("(a) float(123) = " + str(float(123)))
print("=> Takes the integer value 123 as input and casts it to the float \ value 123.0")
print("\n")
print("(b) float('123') = " + str(float('123')))
print("=> Takes the string '123' as input and casts it to the float value \ 123.0")
print("\n")
print("(c) float('123.23') = " + str(float('123.23')))
print("=> Takes the string '123.23' as input and casts it to the float value \ 123.23")
print("\n")
print("(d) int(123.23) = " + str(int(123.23)))
print("=> Takes the float 123.23 as input and casts it to the integer value \ 123")
print("\n")
print("(e) int('123.23') = ...")
try:
    int('123.23')
except ValueError as e:
    print("=> The int() function can't cast a string to float to int and thus \ throws a ValueError (" + str(e) + ")")
print("\n")
# NOTE(review): the label below reads int(float('123.23')) but the code passes
# the float literal 123.23, not the string — the printed result is the same,
# yet the demo does not exercise the string path it claims to.
print("(f) int(float('123.23')) = " + str(int(float(123.23))))
print("=> As we cast the string to float first, we can use it as a input to \ the int() function and receive a integer")
print("\n")
print("(g) str(12) = " + str(12))
print("=> Takes the integer 12 as input and casts it to the string '12'")
print("\n")
print("(h) str(12.2) = " + str(12.2))
print("=> Takes the float 12.2 as input and casts it to the string '12.2'")
print("\n")
print("(i) bool('a') = " + str(bool('a')))
print("=> Because an actual value (the character 'a') is passed to the bool() \ function, True is returned")
print("\n")
print("(j) bool(0) = " + str(bool(0)))
print("=> The boolean value False equals 0 in python, thus False is returned")
print("\n")
print("(k) bool(0.1) = " + str(bool(0.1)))
print("=> Because a value != 0 is provided in the bool() function, \ it returns True")
print("\n")

# --- Exercise 4: range objects ---
print("=== EXERCISE 4 ===")
print("\n")
print("range(5) = {}".format(range(5)))
print("=> range(5) returns a sequence of integers from 0 to 4. for i in \ range(5) is consequently iterating over the sequence of integers")
print("\n")
print("type(range(5)) = {}".format(type(range(5))))
print("=> The type function returns an object's class. For range(5) the class \ range is returned")
print("\n")

# --- Exercise 5: print the first `max_found` multiples of any list entry ---
print("=== EXERCISE 5 ===")
print("\n")

def div_by_number(numbers_list, max_found):
    # Walk x = 1, 2, 3, ... printing x whenever it is divisible by any entry,
    # until max_found hits have been printed.
    number_found = 0
    x = 1
    while number_found < max_found:
        for number in numbers_list:
            if x % number == 0:
                print(x)
                number_found = number_found + 1
        x = x + 1

numbers_list = [5, 7, 11]
print("div_by_number({}, 20)\n".format(numbers_list))
div_by_number(numbers_list, 20)
print("\n")

# --- Exercise 6: primality testing and prime enumeration ---
print("=== EXERCISE 6 ===")
print("\n")
print("(a) & (b)\n")

def is_prime(n):
    # 6k±1 trial division: handle n <= 3, reject multiples of 2 and 3,
    # then test divisors i and i+2 for i = 5, 11, 17, ...
    if n <= 3:
        return n > 1
    elif n % 2 == 0 or n % 3 == 0:
        return False
    i = 5
    while i * i <= n:
        if n % i == 0 or n % (i + 2) == 0:
            return False
        i = i + 6
    return True

print("is_prime(0) = {}\n".format(is_prime(0)))
print("is_prime(1) = {}\n".format(is_prime(1)))
print("is_prime(3) = {}\n".format(is_prime(3)))
print("is_prime(7) = {}\n".format(is_prime(7)))
print("is_prime(8) = {}\n".format(is_prime(8)))
print("is_prime(112331) = {}".format(is_prime(112331)))

def primes_up_to(n):
    # All primes strictly below n.
    primes = []
    for i in range(0, n):
        if is_prime(i):
            primes.append(i)
    return primes

print("\n(c) primes_up_to(100) = {}".format(primes_up_to(100)))

def first_primes(n):
    # The first n primes, found by scanning upward from 0.
    primes = []
    i = 0
    while len(primes) < n:
        if is_prime(i):
            primes.append(i)
        i = i + 1
    return primes

print("\n(d) first_primes(12) = {}".format(first_primes(12)))
print("\n")

# --- Exercise 7: simple list traversal helpers ---
print("=== EXERCISE 7 ===")
print("\n")
print("(a) print_elements(elements_list)\n")

def print_elements(elements):
    # Print each element on its own line.
    for element in elements:
        print(element)

elements_list = [12, "abc", 92.2, "hello"]
print_elements(elements_list)
print("\n(b) print_elements_reverse(elements_list)\n")

def print_elements_reverse(elements):
    # Print elements in reverse order via a reversed slice.
    for element in elements[::-1]:
        print(element)

print_elements_reverse(elements_list)
print("\n(c) len_elements(elements_list)\n")

def len_elements(elements):
    # Hand-rolled len(): count by iterating.
    count = 0
    for _ in elements:
        count = count + 1
    return count

print("len_elements(elements_list) = {}".format(len_elements(elements_list)))
print("\n")

# --- Exercise 8: list aliasing vs. copying ---
print("=== EXERCISE 8 ===")
a = [12, "abc", 92.2, "hello"]
print("\n")
print("(a) a = {}".format(a))
print("\n(b) b = a")
b = a
print("\n(c) b[1] = 'changed'")
b[1] = "changed"
print("\n(d) a = {}".format(a))
print("=> b is binding to the same object as a, so when b[1] was changed \ a[1] also shows the change")
print("\n(e) c = a[:]")
c = a[:]
print("\n(f) c[2] = 'also changed'")
c[2] = "also changed"
print("\n(g) a = {}".format(a))
print("=> A copy of the list a was created with a[:] and assigned to c, thus \ a[2] did not change when c[2] changed")

def set_first_elem_to_zero(l):
    # Mutates the caller's list in place (demonstrates shared binding).
    if len(l) > 0:
        l[0] = 0
    return l

numbers = [12, 21, 214, 3]
print("\n...")
print("\nnumbers = {}".format(numbers))
print("set_first_elem_to_zero(numbers) = \ {}".format(set_first_elem_to_zero(numbers)))
print("numbers = {}".format(numbers))
print("=> The original list also changed, even though we did not assign \ the returned list to it (same binding)")
print("\n")

# --- Exercise 9: flattening a nested list with a comprehension ---
print("=== EXERCISE 9 ===")
elements = [[1,3], [3,6]]
print("\n")
print("elements = {}".format(elements))
flat_list = lambda l: [element for sublist in l for element in sublist]
print("flat_list(elements) = {}".format(flat_list(elements)))
print("\n")

# --- Exercise 10: plot sin(x-2)^2 * e^(-x^2) (opens a window via plt.show) ---
print("=== EXERCISE 10 ===")
import matplotlib.pyplot as plt
import numpy as np
t = np.arange(0.0, 2.0, 0.01)
s = np.sin(t - 2) ** 2 * np.e ** (-t ** 2)
fig, ax = plt.subplots()
ax.plot(t, s)
ax.set(xlabel='x', ylabel='y', title='Exercise 10')
plt.show()
print("\n")
print("See Figure_1.png")
print("\n")

# --- Exercise 11: product of a list, iteratively and with reduce ---
print("=== EXERCISE 11 ===")

def product_iteration(numbers):
    # NOTE(review): pops the last element, i.e. mutates the caller's list;
    # the script re-creates `numbers` afterwards to compensate. Returns 0
    # for an empty list (matching product_recursive).
    product = 0
    if len(numbers) > 0:
        product = numbers.pop()
    for number in numbers:
        product = product * number
    return product

from functools import reduce

def product_recursive(numbers):
    # Folds with multiplication; returns 0 for an empty list by convention.
    if len(numbers) > 0:
        return reduce((lambda x, y: x * y), numbers)
    else:
        return 0

numbers = [21, 12, 10, 128, 2]
empty_list = []
print("\n")
print("product_iteration(numbers) = {}".format(product_iteration(numbers)))
print("product_iteration(empty_list) = \ {}".format(product_iteration(empty_list)))
numbers = [21, 12, 10, 128, 2]
print("\n")
print("product_recursive(numbers) = {}".format(product_recursive(numbers)))
print("product_recursive(empty_list) = \ {}".format(product_recursive(empty_list)))
print("\n")

print("=== EXERCISE 12 ===")
print("\n\nGood to know!")
print("\n")

# --- Exercise 13: read a file and extract e-mail addresses with a regex ---
print("=== EXERCISE 13 ===")

def read_file(filename):
    # Reads the whole file and joins all lines into one string.
    with open(filename, 'r') as myfile:
        data=myfile.read().replace('\n', '')
    return data

file_content = read_file("emails.txt")
print("\n\nread_file('emails.txt')\n\n{}".format(file_content))

import re

def extract_email(string):
    # Returns every substring that looks like local-part@domain.tld.
    match = re.findall(r'[\w\.-]+@[\w\.-]+\.\w+', string)
    return match

print("\nextract_email(file_content)\ \n\n{}".format(extract_email(file_content)))
23.138298
79
0.608046
3.21875
c3da8241c82bf2dfbd9560002b07070e56d88b16
3,040
go
Go
agingMap_test.go
520MianXiangDuiXiang520/agingMap
baf954f604bef9c0e3a9040e5fa331bf736495d4
[ "MIT" ]
null
null
null
agingMap_test.go
520MianXiangDuiXiang520/agingMap
baf954f604bef9c0e3a9040e5fa331bf736495d4
[ "MIT" ]
null
null
null
agingMap_test.go
520MianXiangDuiXiang520/agingMap
baf954f604bef9c0e3a9040e5fa331bf736495d4
[ "MIT" ]
null
null
null
package agingMap

import (
	"fmt"
	"math/rand"
	"sync"
	"testing"
	"time"
)

// Example: an entry can be removed explicitly before its TTL expires.
func ExampleAgingMap_Delete() {
	am := NewAgingMap()
	am.Store("key", "value", time.Second)
	am.Delete("key")
}

// Example: storing a value with a one-second TTL.
func ExampleAgingMap_Store() {
	am := NewAgingMap()
	am.Store("key", "value", time.Second)
}

// Example: concurrent producers storing timestamped keys while consumers load
// them back.
// NOTE(review): this example loops forever in both the producers and the
// final for-loop; it has no "// Output:" comment, so `go test` compiles it
// but never executes it — confirm this is intentional.
func ExampleAgingMap_Load() {
	am := NewAgingMap()
	ch := make(chan string, 10)
	for i := 0; i < 10; i++ {
		go func(i int) {
			for {
				key := fmt.Sprintf("%d: %d", i, time.Now().UnixNano())
				ch <- key
				am.Store(key, i, time.Second)
				time.Sleep(time.Duration(rand.Int63n(2000)) * time.Millisecond)
			}
		}(i)
	}
	for i := 0; i < 10; i++ {
		go func(i int) {
			for {
				key := <-ch
				val, ok := am.Load(key)
				fmt.Println(val, ok)
			}
		}(i)
	}
	for {
		key := <-ch
		val, ok := am.Load(key)
		fmt.Println(val, ok)
	}
}

// Expired entries must not be returned by Load (lazy-delete variant).
func TestAgingMap(t *testing.T) {
	aMap := NewWithLazyDelete()
	aMap.Store("key", "val", time.Second)
	time.Sleep(time.Second)
	v, ok := aMap.Load("key")
	if ok || v != nil {
		t.Error("get expired data")
	}
}

// The background sweeper must physically remove expired entries from the
// underlying map (checked via the internal _map, not Load).
func TestAgingMap_AutoDelete(t *testing.T) {
	aMap := NewBaseAgingMap(time.Second, 1)
	for i := 0; i < 7; i++ {
		aMap.Store(i, "val", time.Second)
	}
	time.Sleep(time.Second * 2)
	for i := 0; i < 7; i++ {
		v, ok := aMap._map.Load(i)
		if ok || v != nil {
			t.Error("get expired data")
		}
	}
}

// LoadOrStore: first call stores, second call loads the existing value,
// and after expiry a fresh call stores again.
// (Failure messages are intentionally kept in the original Chinese.)
func TestAgingMap_LoadOrStore(t *testing.T) {
	aMap := NewBaseAgingMap(time.Second, 1)
	_, _, stored := aMap.LoadOrStore("key", 1, time.Second)
	if !stored {
		t.Errorf("第一次未存储")
	}
	v, _, stored := aMap.LoadOrStore("key", 1, time.Second)
	if v != 1 || stored {
		t.Errorf("第二次存储")
	}
	time.Sleep(time.Second)
	_, _, stored = aMap.LoadOrStore("key", 1, time.Second)
	if !stored {
		t.Errorf("第一次未存储")
	}
}

// Two goroutines race LoadOrStore on the same key: exactly one must win the
// store (s1 XOR s2) and both must observe the same value. wg.Wait() inside
// the loop synchronizes before v1/v2/s1/s2 are read.
func TestAgingMap_LoadOrStore_concurrent(t *testing.T) {
	aMap := NewBaseAgingMap(time.Second, 1)
	wg := sync.WaitGroup{}
	for i := 0; i < 100; i++ {
		var v1, v2 interface{}
		var s1, s2 bool
		wg.Add(2)
		go func(i int) {
			defer wg.Done()
			v1, _, s1 = aMap.LoadOrStore(i, fmt.Sprintf("F%d", i), time.Second)
		}(i)
		go func(i int) {
			defer wg.Done()
			v2, _, s2 = aMap.LoadOrStore(i, fmt.Sprintf("S%d", i), time.Second)
		}(i)
		wg.Wait()
		if v1 != v2 {
			t.Errorf("两次值一样, V1 = %v, V2 = %v", v1, v2)
		}
		if s1 && s2 {
			t.Errorf("true true")
		}
		if !(s1 || s2) {
			t.Errorf("false false")
		}
	}
}

// Manual/observational test: prints the live entries mid-way and after the
// sweeper has had time to run.
// NOTE(review): sleeps ~65 seconds total — far too slow for CI; consider a
// shorter TTL/interval or a -short guard.
func TestAgingMap_Store(t *testing.T) {
	aMap := NewBaseAgingMap(time.Minute, 0.5)
	go func() {
		for i := 0; i < 7; i++ {
			aMap.Store(i, "val", time.Second*10)
			fmt.Println("Store: ", i)
			time.Sleep(10 * time.Second)
		}
	}()
	time.Sleep(45 * time.Second)
	aMap.Range(func(k, v interface{}) bool {
		fmt.Println(k, v)
		return true
	})
	fmt.Println("------")
	time.Sleep(20 * time.Second)
	aMap.Range(func(k, v interface{}) bool {
		fmt.Println(k, v)
		return true
	})
}

// Observational test: prints the value plus its remaining deadline every
// 10 seconds, past the one-minute TTL.
// NOTE(review): also very slow (~700 seconds); see note above.
func TestAgingMap_LoadWithDeadline(t *testing.T) {
	am := NewAgingMap()
	am.Store(1, 2, time.Minute)
	for i := 0; i < 70; i++ {
		fmt.Println(am.LoadWithDeadline(1))
		time.Sleep(time.Second * 10)
	}
}
20
70
0.590461
3.328125
078126e28455007d4256937f05d51acba62cf889
3,027
swift
Swift
LNSideMenu/Classes/LNPanelViewController.swift
luannguyenkhoa/LNSideMenu
ef22b77871ad5dc22e3725438c6b0ee83db34e95
[ "MIT" ]
112
2016-03-22T12:02:20.000Z
2021-01-31T03:22:28.000Z
LNSideMenu/Classes/LNPanelViewController.swift
luannguyenkhoa/LNSideMenu
ef22b77871ad5dc22e3725438c6b0ee83db34e95
[ "MIT" ]
19
2016-08-04T06:32:08.000Z
2020-04-14T10:34:59.000Z
LNSideMenu/Classes/LNPanelViewController.swift
luannguyenkhoa/LNSideMenu
ef22b77871ad5dc22e3725438c6b0ee83db34e95
[ "MIT" ]
18
2016-07-19T22:22:43.000Z
2019-11-01T20:45:23.000Z
//
//  LNPanelViewController.swift
//  LNSideMenuEffect
//
//  Created by Luan Nguyen on 6/22/16.
//  Copyright © 2016 Luan Nguyen. All rights reserved.
//

import UIKit

/// Hosts the side-menu view: owns its items, colors, and frame, and lazily
/// builds the menu the first time the controller appears.
public final class LNPanelViewController: UIViewController {

  // MARK: Properties
  // Menu row titles supplied at init time.
  fileprivate var items: [String] = []
  // Guards one-time menu construction in viewWillAppear.
  fileprivate var didInit = false
  weak var delegate: LNSMDelegate?
  var position: Position = .left
  // Toggling translucency moves the view below/under the nav bar; the
  // didSet re-lays-out the menu accordingly.
  var isTranslucent = false {
    didSet {
      updateFrame()
    }
  }

  // MARK: Colors
  public var menuBgColor = LNColor.bgView.color
  public var itemBgColor = LNColor.bgItem.color
  public var highlightColor = LNColor.highlight.color
  public var titleColor = LNColor.title.color

  lazy var sideMenuView: LNSideMenuView = LNSideMenuView()

  /// Creates the panel with its menu items, side, and (optionally) the index
  /// of the row to highlight by default (Int.max = none).
  convenience init(items: Array<String>, menuPosition: Position, highlightCellAtIndex: Int = Int.max) {
    self.init()
    self.items = items
    self.position = menuPosition
    self.sideMenuView.indexOfDefaultCellHighlight = highlightCellAtIndex
  }

  override public func viewDidLoad() {
    super.viewDidLoad()
    self.view.backgroundColor = .clear
    self.view.autoresizingMask = [.flexibleHeight, .flexibleWidth]
  }

  public override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    // Build the menu exactly once, on first appearance.
    if !didInit {
      didInit = true
      initialSideMenu()
    }
  }

  /** Initial side menu with components */
  fileprivate func initialSideMenu() {
    sideMenuView.items = items
    _ = setViewFrame()
    // Config colors
    sideMenuView.bgColor = menuBgColor
    sideMenuView.titleColor = titleColor
    sideMenuView.itemBgColor = itemBgColor
    sideMenuView.highlightColor = highlightColor
    // Setup menu
    sideMenuView.setupMenu(view, position: position)
    sideMenuView.delegate = self
  }

  /// Positions the view under the nav/status bar (or at 0 when translucent).
  /// Returns true iff the frame actually changed.
  internal func setViewFrame() -> Bool {
    // Set frame for view
    let distance: CGFloat = isTranslucent ? 0 : 44 + UIApplication.shared.statusBarFrame.size.height
    if view.y != distance {
      view.y = distance
      view.height = screenHeight - view.y
      return true
    }
    return false
  }

  internal func updateFrame() {
    // Just refresh side menu iff the view frame has already changed
    if setViewFrame() {
      sideMenuView.refreshMenuWithFrame(view.frame, translucent: isTranslucent)
    }
  }

  // Moving all items out of container view bounds before performing animation
  internal func prepareForAnimation() {
    sideMenuView.prepareForAnimation()
  }

  internal func animateContents(completion: @escaping Completion) {
    // Animate items when it's about diplayed
    sideMenuView.animateContents(completion: completion)
  }

  internal func transitionToView() {
    // TODO: implementing set contentViewController effection
  }
}

extension LNPanelViewController: LNSMDelegate {
  func didSelectItemAtIndex(SideMenu: LNSideMenuView, index: Int) {
    // Forward did select item at index action
    delegate?.didSelectItemAtIndex(SideMenu: SideMenu, index: index)
  }
}
27.518182
103
0.707631
3.125
233695968d2c7784c04d5fabcfb8fca3500d015f
708
sql
SQL
sqls/access methods.sql
tomi/presentation-postgres-indexes
d2be24142e989f67bc40a2781b17ecbb652b8128
[ "MIT" ]
2
2019-07-02T06:40:59.000Z
2019-08-01T17:40:12.000Z
sqls/access methods.sql
tomi/presentation-postgres-indexes
d2be24142e989f67bc40a2781b17ecbb652b8128
[ "MIT" ]
2
2020-07-17T08:00:41.000Z
2021-05-09T05:13:00.000Z
sqls/access methods.sql
tomi/presentation-postgres-indexes
d2be24142e989f67bc40a2781b17ecbb652b8128
[ "MIT" ]
null
null
null
------ Scanning techniques ------ DROP INDEX IF EXISTS t_a_idx; -- Sequential scan EXPLAIN(costs off) SELECT * FROM t WHERE a = 5; -- Add index CREATE INDEX t_a_idx ON t(a); -- Index scan EXPLAIN(costs off) SELECT * FROM t WHERE a = 5; -- Bitmap scan EXPLAIN(costs off) SELECT * FROM t WHERE a <= 100; -- Index only-scan EXPLAIN(costs off) SELECT a FROM t WHERE a = 5; ------ Partial indexes ------ CREATE INDEX IF NOT EXISTS t_c_idx ON t(c); EXPLAIN(costs off) SELECT * FROM t WHERE c; EXPLAIN(costs off) SELECT * FROM t WHERE NOT c; -- Check number of pages SELECT relpages FROM pg_class WHERE relname='t_c_idx'; DROP INDEX IF EXISTS t_c_idx; CREATE INDEX IF NOT EXISTS t_c_idx ON t(c) WHERE c;
22.125
54
0.696328
3
e51e96650379da19d23b73ca6b7e943b66e5d48a
2,592
ts
TypeScript
src/chat/api/chat.gateway.ts
ArmNem/fullstackDev2021-Backend-master
ba6287756af8510b5b431ab5d7d6ae3ed04cfa77
[ "MIT" ]
null
null
null
src/chat/api/chat.gateway.ts
ArmNem/fullstackDev2021-Backend-master
ba6287756af8510b5b431ab5d7d6ae3ed04cfa77
[ "MIT" ]
null
null
null
src/chat/api/chat.gateway.ts
ArmNem/fullstackDev2021-Backend-master
ba6287756af8510b5b431ab5d7d6ae3ed04cfa77
[ "MIT" ]
null
null
null
import {
  ConnectedSocket,
  MessageBody,
  OnGatewayConnection,
  OnGatewayDisconnect,
  SubscribeMessage,
  WebSocketGateway,
  WebSocketServer,
} from '@nestjs/websockets';
import { Socket } from 'socket.io';
import { ChatService } from '../core/services/chat.service';
import { WelcomeDto } from './dto/welcome.dto';
import {
  IChatService,
  IChatServiceProvider,
} from '../core/primary-ports/chat.service.interface';
import { Inject } from '@nestjs/common';
import { JoinChatDto } from './dto/join-chat.dto';
import { ChatClientModule } from '../core/models/chat.client.module';

/**
 * WebSocket gateway for the chat: relays messages and typing indicators,
 * and broadcasts the client list on join/connect/disconnect.
 */
@WebSocketGateway()
export class ChatGateway implements OnGatewayConnection, OnGatewayDisconnect {
  constructor(
    @Inject(IChatServiceProvider) private chatService: IChatService,
  ) {}

  @WebSocketServer()
  server;

  /** Record a new message for `client` and broadcast it to everyone. */
  @SubscribeMessage('message')
  handleChatEvent(
    @MessageBody() message: string,
    @ConnectedSocket() client: Socket,
  ): void {
    const chatMessage = this.chatService.newMessage(message, client.id);
    this.server.emit('newmessages', chatMessage);
  }

  /** Broadcast the sender's typing state (only if the client is known). */
  @SubscribeMessage('typing')
  handleTypingEvent(
    @MessageBody() typing: boolean,
    @ConnectedSocket() client: Socket,
  ): void {
    const chatClient = this.chatService.updateTyping(typing, client.id);
    if (chatClient) {
      this.server.emit('clientTyping', chatClient);
    }
  }

  /**
   * Register a new chat client: send it a welcome payload (all clients +
   * message history) and broadcast the updated client list.
   * On failure (e.g. nickname taken) the error is sent back to the socket.
   */
  @SubscribeMessage('joinchat')
  async handleJoinChatEvent(
    @MessageBody() joinChatClientDto: JoinChatDto,
    @ConnectedSocket() client: Socket,
  ): Promise<void> {
    try {
      // Structural clone of the DTO into the model shape.
      let chatClient: ChatClientModule = JSON.parse(
        JSON.stringify(joinChatClientDto),
      );
      chatClient = await this.chatService.newClient(chatClient);
      const chatClients = await this.chatService.getClients();
      const welcome: WelcomeDto = {
        clients: chatClients,
        messages: this.chatService.getMessages(),
        client: chatClient,
      };
      client.emit('welcome', welcome);
      this.server.emit('clients', chatClients);
    } catch (e) {
      client.error(e.message);
    }
  }

  /** On connect: replay the message history and broadcast the client list. */
  async handleConnection(client: Socket, ...args: any[]): Promise<any> {
    console.log('Client Connect', client.id);
    client.emit('allMessages', this.chatService.getMessages());
    this.server.emit('clients', await this.chatService.getClients());
  }

  /** On disconnect: drop the client and broadcast the updated client list. */
  async handleDisconnect(client: Socket): Promise<any> {
    await this.chatService.delete(client.id);
    // Fix: getClients() is async (awaited everywhere else in this class);
    // previously the unresolved Promise itself was emitted to clients.
    const chatClients = await this.chatService.getClients();
    this.server.emit('clients', chatClients);
    console.log('Client Disconnect', chatClients);
  }
}
30.857143
78
0.697531
3
dd540f79ba514c8330c098b284a6473469eed5ba
2,285
go
Go
deepfence_agent/tools/apache/scope/probe/process/walker_darwin.go
tuapuikia/ThreatMapper
22c473e133e2a57a402f27a12d44e1787a2895cc
[ "Apache-2.0" ]
1,281
2020-04-08T17:07:21.000Z
2022-03-31T11:22:16.000Z
deepfence_agent/tools/apache/scope/probe/process/walker_darwin.go
tuapuikia/ThreatMapper
22c473e133e2a57a402f27a12d44e1787a2895cc
[ "Apache-2.0" ]
180
2020-04-06T15:40:16.000Z
2022-03-31T02:19:34.000Z
probe/process/walker_darwin.go
Pradeepkumarbk/scope11
0d87f2b54fe8f291fec0d13ccda5d9db3c91c273
[ "Apache-2.0" ]
148
2020-04-08T21:38:39.000Z
2022-03-30T18:04:50.000Z
package process

import (
	"fmt"
	"os/exec"
	"strconv"
	"strings"
)

// NewWalker returns a Darwin (lsof-based) walker.
func NewWalker(_ string, _ bool) Walker {
	return &walker{}
}

type walker struct{}

const (
	lsofBinary = "lsof"
	lsofFields = "cn" // parseLSOF() depends on the order

	netstatBinary = "netstat"
)

// These functions copied from procspy.

// IsProcInAccept returns true if the process has a at least one thread
// blocked on the accept() system call
func IsProcInAccept(procRoot, pid string) (ret bool) {
	// Not implemented on darwin
	return false
}

// Walk shells out to lsof, parses its field-mode output, and calls f once per
// discovered connection (the second Process argument is always empty).
func (walker) Walk(f func(Process, Process)) error {
	out, err := exec.Command(
		lsofBinary,
		"-i",       // only Internet files
		"-n", "-P", // no number resolving
		"-w", // no warnings
		"-F", lsofFields, // \n based output of only the fields we want.
	).CombinedOutput()
	if err != nil {
		return err
	}

	procs, err := parseLSOF(string(out))
	if err != nil {
		return err
	}
	for _, proc := range procs {
		f(proc, Process{})
	}
	return nil
}

// parseLSOF turns `lsof -F cn` field output into a map keyed by local
// address. Field lines carry a one-byte tag: 'p' (pid) and 'c' (command)
// update the current process; 'n' (name/address) commits an entry for
// connected sockets and skips pure listen entries.
func parseLSOF(output string) (map[string]Process, error) {
	byLocalAddr := map[string]Process{} // Local addr -> Proc
	var current Process

	for _, line := range strings.Split(output, "\n") {
		if len(line) <= 1 {
			continue
		}
		tag, value := line[0], line[1:]
		switch tag {
		case 'p':
			pid, err := strconv.Atoi(value)
			if err != nil {
				return nil, fmt.Errorf("invalid 'p' field in lsof output: %#v", value)
			}
			current.PID = pid
		case 'c':
			current.Name = value
		case 'n':
			// 'n' is the last field, with '-F cn'
			// format examples:
			// "192.168.2.111:44013->54.229.241.196:80"
			// "[2003:45:2b57:8900:1869:2947:f942:aba7]:55711->[2a00:1450:4008:c01::11]:443"
			// "*:111" <- a listen
			parts := strings.SplitN(value, "->", 2)
			if len(parts) != 2 {
				// That's a listen entry.
				continue
			}
			byLocalAddr[parts[0]] = Process{
				PID:  current.PID,
				Name: current.Name,
			}
		default:
			return nil, fmt.Errorf("unexpected lsof field: %c in %#v", tag, value)
		}
	}

	return byLocalAddr, nil
}

// GetDeltaTotalJiffies returns 0 - darwin doesn't have jiffies.
func GetDeltaTotalJiffies() (uint64, float64, error) {
	return 0, 0.0, nil
}
21.35514
83
0.629322
3.25