{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \n \n );\n }\n}\n\nMyDocument.getInitialProps = async (\n ctx: DocumentContext\n): Promise => {\n const sheets: ServerStyleSheets = new ServerStyleSheets();\n const originalRenderPage: RenderPage = ctx.renderPage;\n\n ctx.renderPage = () => {\n return originalRenderPage({\n enhanceApp: (\n App: NextComponentType<\n AppContextType,\n AppInitialProps,\n AppPropsType\n >\n ) => (props: React.PropsWithChildren>) => {\n return sheets.collect();\n }\n });\n };\n\n const initialProps: DocumentInitialProps = await Document.getInitialProps(\n ctx\n );\n\n return {\n ...initialProps,\n styles: [\n ...React.Children.toArray(initialProps.styles),\n sheets.getStyleElement()\n ]\n 
};\n};\n"}}},{"rowIdx":1821,"cells":{"text":{"kind":"string","value":"\r\n\r\n# Defensive Programming, Assertions and Exceptions \r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n# Defensive programming\r\n“Programming today is a race between software engineers striving to build bigger and better idiot-proof programs, and the Universe trying to produce bigger and better idiots. So far, the Universe is winning.” \r\n\r\n\\- Rick Cook, The Wizardry Compiled\r\n\r\n\r\n\r\n# Table of Contents\r\n- What is Defensive Programming?\r\n- Assertions and **Debug.Assert(…)**\r\n- Exceptions Handling Principles\r\n- Error Handling Strategies\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n# What is Defensive Programming?\r\n - Similar to defensive driving – you are never sure what other drivers will do\r\n - **Expect incorrect input** and handle it correctly\r\n - Think not only about the usual execution flow, but consider also **unusual** situations!\r\n\r\n\r\n\r\n\r\n\r\n# Protecting from
Invalid Input\r\n- “Garbage in &rarr; garbage out” – **Wrong!**\r\n - Garbage in &rarr; nothing out / exception out / error message out / no garbage allowed in\r\n- Check the values of all data from external sources (from user, file, internet, DB, etc.)\r\n\r\n\r\n\r\n\r\n\r\n\r\n- Check the values of all **routine input parameters**\r\n- Decide how to handle **bad inputs**\r\n - Return neutral value\r\n - Substitute with valid data\r\n - Throw an exception\r\n - Display error message, log it, etc.\r\n- The best form of defensive coding is not inserting error at first place\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n# Assertions\r\n- **Assertion** – a statement placed in the code that **must always be true** at that moment\r\n - Assertions are used during development\r\n - Removed in release builds\r\n - Assertions check for bugs in code\r\n\r\n```cs\r\npublic double GetAverageStudentGrade()\r\n{\r\n Debug.Assert(studentGrades.Count > 0,\r\n \"Student grades are not initialized!\");\r\n return studentGrades.Average();\r\n}\r\n```\r\n\r\n\r\n\r\n\r\n\r\n\r\n - Use assertions for conditions that **should never occur** in practice\r\n - Failed assertion indicates a **fatal error** in the program (usually unrecoverable)\r\n- Use assertions to **document assumptions** made in code (preconditions & postconditions) \r\n\r\n```cs\r\nprivate Student GetRegisteredStudent(int id)\r\n{\r\n\t\tDebug.Assert(id > 0);\r\n\t\tStudent student = registeredStudents[id];\r\n\t\tDebug.Assert(student.IsRegistered);\r\n}\r\n```\r\n\r\n\r\n\r\n - Failed assertion indicates a **fatal error** in the program (usually unrecoverable)\r\n- Avoid putting executable code in assertions \r\n\r\n```cs\r\nDebug.Assert(PerformAction(), \"Could not perform action\");\r\n``` \r\n - Won’t be compiled in production. 
Better use:\r\n\r\n```cs\r\nbool actionPerformed = PerformAction();\r\nDebug.Assert(actionPerformed, \"Could not perform action\");\r\n```\r\n - Assertions should fail loud\r\n - It is fatal error &rarr; total crash\r\n\r\n\r\n\r\n# Assertions\r\n## [Demo]()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n## Best Practices for Exception Handling\r\n\r\n\r\n\r\n\r\n# Exceptions\r\n- **Exceptions** provide a way to inform the caller about an error or exceptional events\r\n - Can be caught and processed by the callers\r\n- Methods can **throw** exceptions:\r\n\r\n```cs\r\npublic void ReadInput(string input)\r\n{\r\n if (input == null)\r\n {\r\n throw new ArgumentNullException(\"input\"); }\r\n …\r\n}\r\n```\r\n\r\n\r\n\r\n\r\n\r\n- Use **try-catch** statement to handle exceptions:\r\n- You can use multiple **catch** blocks to specify handlers for different exceptions\r\n- Not handled exceptions propagate to the caller\r\n\r\n```cs\r\nvoid PlayNextTurn()\r\n{\r\n try\r\n {\r\n readInput(input);\r\n …\r\n }\r\n catch (ArgumentException e)\r\n {\r\n Console.WriteLine(\"Invalid argument!\");\r\n }\r\n}\r\n```\r\n\r\n
Exception thrown here
\r\n
The code here will not be executed
\r\n\r\n\r\n\r\n\r\n- Use **finally** block to execute code even if exception occurs (not supported in C++):\r\n- Perfect place to perform cleanup for any resources allocated in the **try** block\r\n\r\n```cs\r\nvoid PlayNextTurn()\r\n{\r\n try\r\n {\r\n … }\r\n finally\r\n {\r\n Console.WriteLine(\"Hello from finally!\");\r\n }\r\n}\r\n```\r\n\r\n
Exceptions can be eventually thrown here
\r\n
The code here is always executed
\r\n\r\n\r\n\r\n\r\n- Use exceptions to notify the other parts of the program about errors\r\n - Errors that should not be ignored\r\n- Throw an exception only for conditions that are **truly exceptional**\r\n - Should I throw an exception when I check for user name and password? &rarr; better return false\r\n- Don’t use exceptions as control flow mechanisms\r\n\r\n\r\n\r\n\r\n- Throw exceptions at the right **level of abstraction**\r\n\r\n```cs\r\nclass Employee\r\n{\r\n\t// Bad\r\n …\r\n public TaxId\r\n { get { throw new NullReferenceException(…); }\r\n}\r\n```\r\n```cs\r\nclass Employee\r\n{\r\n\t// Better\r\n …\r\n public TaxId\r\n { get { throw new EmployeeDataNotAvailable(…); }\r\n}\r\n```\r\n\r\n\r\n\r\n- Use **descriptive error messages**\r\n - Incorrect example:\r\n\t```cs\r\n\tthrow new Exception(\"Error!\");\r\n\t```\r\n - _Example_:\r\n\t```cs\r\n\tthrow new ArgumentException(\"The speed should be a number \" +\r\n \"between \" + MIN_SPEED + \" and \" + MAX_SPEED + \".\");\r\n\t```\r\n - Avoid **empty catch blocks**\r\n\t```cs\r\n\ttry\r\n{\r\n …\r\n}\r\ncatch (Exception ex)\r\n{\r\n}\r\n\t```\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n- Always include the exception **cause** when throwing a new exception\r\n```cs\r\ntry\r\n{\r\n \tWithdrawMoney(account, amount);\r\n}\r\ncatch (DatabaseException dbex)\r\n{\r\n\t throw new WithdrawException(String.Format(\r\n\t \"Can not withdraw the amount {0} from acoount {1}\",\r\n\t amount, account), dbex);\r\n}\r\n```\r\n
We chain the original exception (the source of the problem)
\r\n\r\n\r\n\r\n\r\n- Catch only exceptions that you are capable to process correctly\r\n - Do not catch all exceptions!\r\n - Incorrect example: \r\n\r\n\t```cs\r\n\ttry\r\n\t{\r\n\t ReadSomeFile();\r\n\t}\r\n\tcatch\r\n\t{\r\n\t Console.WriteLine(\"File not found!\");\r\n\t}\r\n\t``` \r\n\r\n - What about **OutOfMemoryException**?\r\n\r\n\r\n\r\n\r\n\r\n\r\n- Have an exception handling strategy for all unexpected / unhandled exceptions:\r\n - Consider logging (e.g. Log4Net)\r\n - Display to the end users only messages that they could understand\r\n\r\n\r\n\r\n\r\n\r\n# Exceptions\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n## Assertions vs. Exceptions vs. Other Techniques\r\n\r\n\r\n\r\n\r\n\r\n\r\n# Error Handling Techniques\r\n- How to handle **errors that you expect** to occur?\r\n - Depends on the situation:\r\n - Throw an **exception** (in OOP)\r\n - The most typical action you can do\r\n - Return a neutral value, e.g. **-1** in **IndexOf(…)**\r\n - Substitute the next piece of valid data (e.g. file)\r\n - Return the same answer as the previous time\r\n - Substitute the closest legal value\r\n - Return an error code (in old languages / APIs)\r\n - Display an error message in the UI\r\n - Call method / Log a warning message to a file\r\n - Crash / shutdown / reboot\r\n\r\n\r\n\r\n# Assertions vs. 
Exceptions\r\n- **Exceptions** are announcements about error condition or unusual event\r\n - Inform the caller about error or exceptional event\r\n - Can be caught and application can continue working\r\n- **Assertions** are fatal errors\r\n - Assertions always indicate bugs in the code\r\n - Can not be caught and processed\r\n - Application can’t continue in case of failed assertion\r\n- When in doubt &rarr; throw an exception\r\n\r\n\r\n\r\n# Assertions in C#\r\n- Assertions in C# are rarely used\r\n - In C# prefer throwing an **exception** when the input data / internal object state are invalid\r\n - Exceptions are used in C# and Java instead of **preconditions checking**\r\n - Prefer using **unit testing** for testing the code instead of **postconditions checking**\r\n- Assertions are popular in C / C++\r\n - Where exceptions & unit testing are not popular\r\n- In JS there are no built-in assertion mechanism\r\n\r\n\r\n\r\n# Error Handling Strategy\r\n- Choose your **error handling strategy** and follow it consistently\r\n - Assertions / exceptions / error codes / other\r\n- In C#, .NET and OOP prefer using **exceptions**\r\n - Assertions are rarely used, only as additional checks for fatal error\r\n - Throw an exception for incorrect input / incorrect object state / invalid operation\r\n- In JavaScript use exceptions: **try-catch-finally**\r\n- In non-OOP languages use error codes\r\n\r\n\r\n\r\n# Robustness vs. Correctness\r\n- How will you handle error while calculating single pixel color in a computer game?\r\n- How will you handle error in financial software? Can you afford to lose money?\r\n- **Correctness** == never returning wrong result\r\n - Try to achieve correctness as a primary goal\r\n- **Robustness** == always trying to do something that will allow the software to keep running\r\n - Use as last resort, for non-critical errors\r\n\r\n\r\n\r\n# Assertions vs. 
Exceptions\r\n\r\n```cs\r\npublic string Substring(string str, int startIndex, int length)\r\n{\r\n if (str == null)\r\n {\r\n throw new NullReferenceException(\"Str is null.\");\r\n }\r\n if (startIndex >= str.Length)\r\n {\r\n throw new ArgumentException(\r\n \"Invalid startIndex:\" + startIndex);\r\n }\r\n if (startIndex + count > str.Length)\r\n {\r\n throw new ArgumentException(\"Invalid length:\" + length);\r\n }\r\n …\r\n Debug.Assert(result.Length == length);\r\n}\r\n```\r\n\r\n
Check the input and preconditions
\r\n
Perform the method main logic
\r\n
Check the postconditions
\r\n\r\n\r\n\r\n# Error Barricades\r\n- Barricade your program to stop the damage caused by incorrect data\r\n- Consider same approach for class design\r\n - Public methods &rarr; validate the data\r\n - Private methods &rarr; assume the data is safe\r\n - Consider using exceptions for public methods and assertions for private\r\n- **public methods / functions**\r\n- **private methods / functions**\r\n\r\n\r\n\r\n\r\n\r\n# Being Defensive About Defensive Programming\r\n- Too much defensive programming is not good\r\n - Strive for balance\r\n- How much defensive programming to leave in production code?\r\n - Remove the code that results in hard crashes\r\n - Leave in code that checks for important errors\r\n - Log errors for your technical support personnel\r\n - See that the error messages you show are user-friendly\r\n\r\n\r\n\r\n\r\n\r\n\r\n# Free Trainings @ Telerik Academy\r\n- C# Programming @ Telerik Academy\r\n - [HQC-Part II course](http://academy.telerik.com/student-courses/programming/high-quality-code-part-2/about)\r\n - Telerik Software Academy\r\n - [telerikacademy.com](https://telerikacademy.com)\r\n - Telerik Academy @ Facebook\r\n - [facebook.com/TelerikAcademy](facebook.com/TelerikAcademy)\r\n - Telerik Software Academy Forums\r\n - [forums.academy.telerik.com](forums.academy.telerik.com)\r\n"}}},{"rowIdx":1822,"cells":{"text":{"kind":"string","value":"my $channel = Channel.new();\n$channel.send($_) for 0..10;\n$channel.close;\n\nmy @readers;\nfor 1..3 {\n push @readers, start {\n while 1 {\n my $value = $channel.poll;\n last if $value === Any;\n say \"$value² = \" ~ $value * $value;\n }\n };\n}\n\nawait @readers;\n"}}},{"rowIdx":1823,"cells":{"text":{"kind":"string","value":"import 'database.dart';\n\nconst DEFAULT_DURATION_MINUTES=30;\n\nclass Visit {\n static final empty = Visit(code: '', name: '', address: '', startDate: DateTime.now());\n static final table = 'visit';\n\n int id;\n final DateTime createDate;\n final String code;\n final String 
name;\n final String address;\n final String? latitude;\n final String? longitude;\n final DateTime startDate;\n final DateTime endDate;\n\n Visit(\n {int? id,\n required this.code,\n required this.name,\n required this.address,\n this.latitude,\n this.longitude,\n required this.startDate,\n DateTime? endDate,\n DateTime? created}):\n this.id = id ?? 0,\n this.createDate = created ?? DateTime.now(),\n this.endDate = endDate ?? startDate.add(Duration(minutes: DEFAULT_DURATION_MINUTES));\n\n /// Create a visit instance from database field map\n Visit.fromDb(Map db) : \n this.id = db['id'] as int,\n this.code = db['code'] as String,\n this.name = db['name'] as String,\n this.address = db['address'] as String,\n this.latitude = db['latitude'] as String?,\n this.longitude = db['longitude'] as String?,\n this.createDate = DateTime.fromMillisecondsSinceEpoch(db['create_date'] as int),\n this.startDate = DateTime.fromMillisecondsSinceEpoch(db['start_date'] as int),\n this.endDate = DateTime.fromMillisecondsSinceEpoch(db['end_date'] as int);\n\n /// Create a new immutable Visit from permitted\n Visit from({DateTime? endDate}) {\n return Visit(\n id: this.id,\n code: this.code,\n name: this.name,\n address: this.address,\n longitude: this.longitude,\n latitude: this.latitude,\n startDate: this.startDate,\n endDate: endDate ?? this.endDate,\n created: this.createDate\n );\n }\n\n /// Create database field map from instance\n Map toDb() {\n return {\n 'id': this.id == 0 ? 
null : this.id,\n 'code': this.code,\n 'name': this.name,\n 'address': this.address,\n 'latitude': this.latitude,\n 'longitude': this.longitude,\n 'start_date': this.startDate.millisecondsSinceEpoch,\n 'end_date': this.endDate.millisecondsSinceEpoch,\n 'create_date': this.createDate.millisecondsSinceEpoch\n };\n }\n\n /// Create or update a visit row:\n /// - If the id is null, then insert; otherwise\n /// - Update the row\n createOrUpdate() async {\n var database = await DbManager().database;\n\n if (this.id == 0) {\n this.id = await database.insert(table, this.toDb());\n } else {\n await database.update(table, this.toDb());\n }\n }\n\n /// Delete the current visit from the database. Dispose of this instance\n /// afterwards.\n delete() async {\n var database = await DbManager().database;\n\n await database.delete(table, where: 'id=?', whereArgs: [this.id]);\n }\n\n /// Get a list of visits\n static Future> list(int start, {int limit : 20}) async {\n var database = await DbManager().database;\n\n var rows = await database.query(table,\n columns: ['id',\n 'code',\n 'name',\n 'address',\n 'latitude',\n 'longitude',\n 'start_date',\n 'end_date',\n 'create_date'],\n orderBy: 'start_date desc',\n offset: start,\n limit: limit\n );\n\n return rows.map( (r) => Visit.fromDb(r)).toList();\n }\n}\n"}}},{"rowIdx":1824,"cells":{"text":{"kind":"string","value":"package leetcode\n\n/**\n * https://leetcode.com/problems/check-if-numbers-are-ascending-in-a-sentence/\n */\nclass Problem2042 {\n fun areNumbersAscending(s: String): Boolean {\n val words = s.split(\" \")\n var number = 0\n for (word in words) {\n val num = word.toIntOrNull()\n if (num != null) {\n if (number >= num) {\n return false\n }\n number = num\n }\n }\n return true\n }\n}\n"}}},{"rowIdx":1825,"cells":{"text":{"kind":"string","value":"# Linux Server Configuration\n\n- IP: \t34.235.63.160\n- SSH Port: \t2200\n- App URL 34.235.63.160/catalog\n\n### Installed software\n\n- psycopg2\n- psycopg2-binary\n- python\n- 
apache2\n- postgresql\n- libapache2-mod-wsgi\n\n\n### Configuration\n\nhttps://github.com/ladytrell/LinuxServerConfig\n\n1. Created user grader\n\ta.\tcreate ssh keypair\n\tb.\tadd to sudo list\n2. Configured ssh to port 2200\n\ta. Changed port in /etc/ssh/sshd_config\n3. Configured ufw\n a.\tBlocked all incoming traffic\n\tb. Allow port 2200, 80, and 123\n4. Installed and configured Apache2\n\ta.\tConfigued to run WSGI scipt\n5. Install and config WSGI for python app\n\ta.\tWrote script to call catalog app\n6. Installed and configured PostgreSQL\n\ta. Created Users ubuntu and catalog\n\tb. Created catalog app\n\n\n### App Location\n\n1.\t/usr/local/www/catalog/\n\ta.\tcatalog.db\n\tb.\tcatalog.wsgi\n\tc.\tlotofitems.py\n\td.\tcatalogDB_Model.py\n\te.\tclient_secrets.google.json\n\tf.\tcatalogDB_Model.pyc\n\tg.\tfb_client_secrets.json \n\th.\tcatalog.py\n\ti.\tstatic\n\t\ti.\tresponsive.css\n\t\tii.\tstyles.css\n\tj.\ttemplates\n\t\ti.\tcatalog.html\n\t\tii.\titem.html\n\t\tv.\tcategory.html\n2.\t/etc/apache2/sites-available/\n\ta.\t000-default.conf\n\n\n\n### Referenced Sites\n\nhttps://modwsgi.readthedocs.io/en/develop/configuration-directives/WSGIScriptAlias.html\nhttps://modwsgi.readthedocs.io/en/develop/user-guides/quick-configuration-guide.html\nhttp://flask.pocoo.org/docs/1.0/deploying/mod_wsgi/\nhttps://realpython.com/flask-by-example-part-2-postgres-sqlalchemy-and-alembic/\nhttps://www.digitalocean.com/community/tutorials/how-to-deploy-a-flask-application-on-an-ubuntu-vps\nhttps://docs.sqlalchemy.org/en/latest/core/connections.html\nhttps://www.postgresql.org/docs/9.5/database-roles.html\nhttps://overiq.com/sqlalchemy-101/installing-sqlalchemy-and-connecting-to-database/\nhttps://tutorials.ubuntu.com/tutorial/install-and-configure-apache#2\nhttps://serverfault.com/questions/265410/ubuntu-server-message-says-packages-can-be-updated-but-apt-get-does-not-update\n\n\n\nLicense\n----\n\nMIT\n\n\n**Free 
Software**\n"}}},{"rowIdx":1826,"cells":{"text":{"kind":"string","value":"import mongoose, { Schema } from \"mongoose\";\nimport Bot from \"../types/Bot\";\n\nconst schema: Schema = new Schema({\n exchangeConnectionId: {\n type: mongoose.Schema.Types.ObjectId\n },\n startBalance: {\n type: Number,\n required: true\n },\n currentBalance: {\n type: Number,\n required: true\n },\n startDate: {\n type: Date,\n required: true\n },\n endDate: {\n type: Date\n },\n status: {\n type: String,\n enum: ['online', 'offline', 'ended'],\n required: true\n },\n strategy: {\n type: mongoose.Schema.Types.Mixed,\n required: true\n },\n type: {\n type: String,\n enum: ['TEST', 'LIVE'],\n required: true\n },\n userId: {\n type: mongoose.Schema.Types.ObjectId,\n required: true\n },\n quoteCurrency: {\n type: String,\n required: true\n }\n});\n\nschema.set('toJSON', {\n virtuals: true,\n versionKey: false,\n transform: (doc, ret) => {\n ret.id = ret._id.toString();\n delete ret._id;\n }\n});\n\nconst model = mongoose.model('bot', schema);\n\nexport default model;\n"}}},{"rowIdx":1827,"cells":{"text":{"kind":"string","value":"---\nfeaturedpath: \"/book2/main/page01.jpg\"\nfeatured: \"\"\npreview: \"/book2/preview/page01.jpg\"\ntitle: \"Book 2, Page 1\"\ncategories: [\"book2\"]\ntype: \"post\"\nlinktitle: \"\"\ndate: \"2018-03-23T22:01:03-05:00\"\nauthor: \"Maria Rice\"\nfeaturedalt: \"\"\ndescription2: []\n\n---\n\n# First colored Morphic page ever!\n\nWelcome back from the intermission! Hope you like the new character and the new coloring style.\n\nI managed to finish coloring page 2 and now I'm coloring pages 3 and 4 (got all the other drawing\nfor those pages done and only the coloring is left).\n\nI'm pleased with my progress so far, but now Spring Break is over and I have to buckle down for\nthe second half of the semester. I expect the school/work pace will pick up from here on out.\nNo worries, though! I expect I'll keep up with the update schedule. 
I only need four more pages\nto ensure that the schedule is covered until classes are over for the summer. I think I can push that.\n\n**In the meantime---enjoy the colors!** And thanks for reading! Return next week to find out who\nVix is talking to.\n"}}},{"rowIdx":1828,"cells":{"text":{"kind":"string","value":"\n\nimport 'package:anvil/src/build/build_data.dart';\nimport 'package:anvil/src/config.dart';\nimport 'package:anvil/src/content/page.dart';\nimport 'package:anvil/src/content/section.dart';\n\nimport 'build_page.dart';\n\nvoid buildSection(\n Config config, BuildData buildData, Section section) {\n if (section.index != null) {\n final children = section.children.map((e) => e.toMap()).toList();\n final pages = section.children\n .whereType()\n .map((content) => content.toMap())\n .toList();\n\n final sections = section.children\n .whereType
()\n .map((content) => content.toMap())\n .toList();\n\n buildPage(\n config,\n buildData,\n section.index!,\n extraData: {\n 'children': children,\n 'pages': pages,\n 'sections': sections,\n },\n );\n }\n\n try {\n for (final child in section.children) {\n child.when(\n config,\n buildData,\n section: buildSection,\n page: buildPage,\n );\n }\n } catch (e) {\n rethrow;\n }\n}"}}},{"rowIdx":1829,"cells":{"text":{"kind":"string","value":"#![allow(non_snake_case, non_upper_case_globals)]\n#![allow(non_camel_case_types)]\n//! MCU debug component\n//!\n//! Used by: stm32l412, stm32l4x1, stm32l4x2, stm32l4x3\n\nuse crate::{RORegister, RWRegister};\n#[cfg(not(feature = \"nosync\"))]\nuse core::marker::PhantomData;\n\n/// DBGMCU_IDCODE\npub mod IDCODE {\n\n /// Device identifier\n pub mod DEV_ID {\n /// Offset (0 bits)\n pub const offset: u32 = 0;\n /// Mask (12 bits: 0xfff << 0)\n pub const mask: u32 = 0xfff << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// Revision identifie\n pub mod REV_ID {\n /// Offset (16 bits)\n pub const offset: u32 = 16;\n /// Mask (16 bits: 0xffff << 16)\n pub const mask: u32 = 0xffff << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n}\n\n/// Debug MCU configuration register\npub mod CR {\n\n /// Debug Sleep mode\n pub mod DBG_SLEEP {\n /// Offset (0 bits)\n pub const offset: u32 = 0;\n /// Mask (1 bit: 1 << 0)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// Debug Stop mode\n pub mod DBG_STOP {\n /// Offset (1 bits)\n pub const offset: u32 = 1;\n /// Mask (1 bit: 1 << 1)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n 
pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// Debug Standby mode\n pub mod DBG_STANDBY {\n /// Offset (2 bits)\n pub const offset: u32 = 2;\n /// Mask (1 bit: 1 << 2)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// Trace pin assignment control\n pub mod TRACE_IOEN {\n /// Offset (5 bits)\n pub const offset: u32 = 5;\n /// Mask (1 bit: 1 << 5)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// Trace pin assignment control\n pub mod TRACE_MODE {\n /// Offset (6 bits)\n pub const offset: u32 = 6;\n /// Mask (2 bits: 0b11 << 6)\n pub const mask: u32 = 0b11 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n}\n\n/// Debug MCU APB1 freeze register1\npub mod APB1FZR1 {\n\n /// TIM2 counter stopped when core is halted\n pub mod DBG_TIM2_STOP {\n /// Offset (0 bits)\n pub const offset: u32 = 0;\n /// Mask (1 bit: 1 << 0)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// TIM6 counter stopped when core is halted\n pub mod DBG_TIM6_STOP {\n /// Offset (4 bits)\n pub const offset: u32 = 4;\n /// Mask (1 bit: 1 << 4)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// TIM7 counter stopped when core is halted\n pub mod DBG_TIM7_STOP {\n /// Offset (5 bits)\n pub const offset: u32 = 5;\n /// Mask (1 bit: 1 << 5)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// 
Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// RTC counter stopped when core is halted\n pub mod DBG_RTC_STOP {\n /// Offset (10 bits)\n pub const offset: u32 = 10;\n /// Mask (1 bit: 1 << 10)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// Window watchdog counter stopped when core is halted\n pub mod DBG_WWDG_STOP {\n /// Offset (11 bits)\n pub const offset: u32 = 11;\n /// Mask (1 bit: 1 << 11)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// Independent watchdog counter stopped when core is halted\n pub mod DBG_IWDG_STOP {\n /// Offset (12 bits)\n pub const offset: u32 = 12;\n /// Mask (1 bit: 1 << 12)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// I2C1 SMBUS timeout counter stopped when core is halted\n pub mod DBG_I2C1_STOP {\n /// Offset (21 bits)\n pub const offset: u32 = 21;\n /// Mask (1 bit: 1 << 21)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// I2C2 SMBUS timeout counter stopped when core is halted\n pub mod DBG_I2C2_STOP {\n /// Offset (22 bits)\n pub const offset: u32 = 22;\n /// Mask (1 bit: 1 << 22)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// I2C3 SMBUS timeout counter stopped when core is halted\n pub mod DBG_I2C3_STOP {\n /// Offset (23 bits)\n pub const offset: u32 = 23;\n /// Mask (1 bit: 1 
<< 23)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// bxCAN stopped when core is halted\n pub mod DBG_CAN_STOP {\n /// Offset (25 bits)\n pub const offset: u32 = 25;\n /// Mask (1 bit: 1 << 25)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// LPTIM1 counter stopped when core is halted\n pub mod DBG_LPTIM1_STOP {\n /// Offset (31 bits)\n pub const offset: u32 = 31;\n /// Mask (1 bit: 1 << 31)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n}\n\n/// Debug MCU APB1 freeze register 2\npub mod APB1FZR2 {\n\n /// LPTIM2 counter stopped when core is halted\n pub mod DBG_LPTIM2_STOP {\n /// Offset (5 bits)\n pub const offset: u32 = 5;\n /// Mask (1 bit: 1 << 5)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n}\n\n/// Debug MCU APB2 freeze register\npub mod APB2FZR {\n\n /// TIM1 counter stopped when core is halted\n pub mod DBG_TIM1_STOP {\n /// Offset (11 bits)\n pub const offset: u32 = 11;\n /// Mask (1 bit: 1 << 11)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n\n /// TIM15 counter stopped when core is halted\n pub mod DBG_TIM15_STOP {\n /// Offset (16 bits)\n pub const offset: u32 = 16;\n /// Mask (1 bit: 1 << 16)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n 
}\n\n /// TIM16 counter stopped when core is halted\n pub mod DBG_TIM16_STOP {\n /// Offset (17 bits)\n pub const offset: u32 = 17;\n /// Mask (1 bit: 1 << 17)\n pub const mask: u32 = 1 << offset;\n /// Read-only values (empty)\n pub mod R {}\n /// Write-only values (empty)\n pub mod W {}\n /// Read-write values (empty)\n pub mod RW {}\n }\n}\n#[repr(C)]\npub struct RegisterBlock {\n /// DBGMCU_IDCODE\n pub IDCODE: RORegister,\n\n /// Debug MCU configuration register\n pub CR: RWRegister,\n\n /// Debug MCU APB1 freeze register1\n pub APB1FZR1: RWRegister,\n\n /// Debug MCU APB1 freeze register 2\n pub APB1FZR2: RWRegister,\n\n /// Debug MCU APB2 freeze register\n pub APB2FZR: RWRegister,\n}\npub struct ResetValues {\n pub IDCODE: u32,\n pub CR: u32,\n pub APB1FZR1: u32,\n pub APB1FZR2: u32,\n pub APB2FZR: u32,\n}\n#[cfg(not(feature = \"nosync\"))]\npub struct Instance {\n pub(crate) addr: u32,\n pub(crate) _marker: PhantomData<*const RegisterBlock>,\n}\n#[cfg(not(feature = \"nosync\"))]\nimpl ::core::ops::Deref for Instance {\n type Target = RegisterBlock;\n #[inline(always)]\n fn deref(&self) -> &RegisterBlock {\n unsafe { &*(self.addr as *const _) }\n }\n}\n#[cfg(feature = \"rtic\")]\nunsafe impl Send for Instance {}\n"}}},{"rowIdx":1830,"cells":{"text":{"kind":"string","value":"#!/bin/bash\noutfile=RooUnfoldExample.cxx.ref\nRooUnfoldExample > $outfile\nbash ref/cleanup.sh $outfile\ndiff $outfile ref/$outfile\n"}}},{"rowIdx":1831,"cells":{"text":{"kind":"string","value":"using System.Threading;\nusing MediatR;\nusing NetCoreKit.Samples.TodoAPI.Domain;\n\nnamespace NetCoreKit.Samples.TodoAPI.v1.Services\n{\n public class EventSubscriber : INotificationHandler\n {\n public async System.Threading.Tasks.Task Handle(ProjectCreated @event, CancellationToken cancellationToken)\n {\n // do something with @event\n //...\n\n await System.Threading.Tasks.Task.FromResult(@event);\n }\n }\n}\n"}}},{"rowIdx":1832,"cells":{"text":{"kind":"string","value":"## 内存信息收集\n\n从Node v. 
12开始,可以收集Appium的内存使用信息来分析问题。 这对于分析内存泄漏问题非常有帮助。\n\n\n### 创建dump文件\n\n为了在任意时间创建dump文件,执行`node`进程时增加如下命令行参数,这会执行appium.js脚本:\n\n```\n--heapsnapshot-signal=&lt;signal&gt;\n```\n\n这里的 `signal` 可以是一个有效的自定义信号,例如 `SIGUSR2`。然后你就可以\n\n```\nkill -SIGUSR2 &lt;nodePID&gt;\n```\n\ndump文件会被存放在Appium主脚本执行路径下。文件扩展名为 `.heapsnapshot`,文件可以在Chrome Inspector中加载来进行分析。 \n\n### dump文件分析\n\n详细信息请查看[Rising Stack article](https://blog.risingstack.com/finding-a-memory-leak-in-node-js/)。\n"}}},{"rowIdx":1833,"cells":{"text":{"kind":"string","value":"getCheckoutId();\n }\n\n /**\n * Set the unified purse.\n *\n * @param $value\n * @return self\n */\n public function setPurse($value)\n {\n return $this->setCheckoutId($value);\n }\n\n /**\n * Get the merchant purse.\n *\n * @return string merchant purse\n */\n public function getCheckoutId()\n {\n return $this->getParameter('checkoutId');\n }\n\n /**\n * Set the merchant purse.\n *\n * @param string $purse merchant purse\n *\n * @return self\n */\n public function setCheckoutId($purse)\n {\n return $this->setParameter('checkoutId', $purse);\n }\n\n /**\n * Get the sign algorithm.\n *\n * @return string sign algorithm\n */\n public function getSignAlgorithm()\n {\n return strtolower($this->getParameter('signAlgorithm'));\n }\n\n /**\n * Set the sign algorithm.\n *\n * @param string $value sign algorithm\n *\n * @return self\n */\n public function setSignAlgorithm($value)\n {\n return $this->setParameter('signAlgorithm', $value);\n }\n\n /**\n * Get the sign key.\n *\n * @return string sign key\n */\n public function getSignKey()\n {\n return $this->getParameter('signKey');\n }\n\n /**\n * Set the sign key.\n *\n * @param string $value sign key\n *\n * @return self\n */\n public function setSignKey($value)\n {\n return $this->setParameter('signKey', $value);\n }\n\n /**\n * Get the test key.\n *\n * @return string test key\n */\n public function getTestKey()\n {\n return $this->getParameter('testKey');\n }\n\n /**\n * Set the test key.\n *\n * 
@param string $value test key\n *\n * @return self\n */\n public function setTestKey($value)\n {\n return $this->setParameter('testKey', $value);\n }\n\n /**\n * Get the method for success return.\n *\n * @return mixed\n */\n public function getReturnMethod()\n {\n return $this->getParameter('returnMethod');\n }\n\n /**\n * Sets the method for success return.\n *\n * @param $returnMethod\n * @return \\Omnipay\\Common\\Message\\AbstractRequest\n */\n public function setReturnMethod($returnMethod)\n {\n return $this->setParameter('returnMethod', $returnMethod);\n }\n\n /**\n * Get the method for canceled payment return.\n *\n * @return mixed\n */\n public function getCancelMethod()\n {\n return $this->getParameter('cancelMethod');\n }\n\n /**\n * Sets the method for canceled payment return.\n *\n * @param $cancelMethod\n * @return \\Omnipay\\Common\\Message\\AbstractRequest\n */\n public function setCancelMethod($cancelMethod)\n {\n return $this->setParameter('cancelMethod', $cancelMethod);\n }\n\n /**\n * Get the method for request notify.\n *\n * @return mixed\n */\n public function getNotifyMethod()\n {\n return $this->getParameter('notifyMethod');\n }\n\n /**\n * Sets the method for request notify.\n *\n * @param $notifyMethod\n * @return \\Omnipay\\Common\\Message\\AbstractRequest\n */\n public function setNotifyMethod($notifyMethod)\n {\n return $this->setParameter('notifyMethod', $notifyMethod);\n }\n\n /**\n * Calculates sign for the $data.\n *\n * @param array $data\n * @param string $signKey\n * @return string\n */\n public function calculateSign($data, $signKey)\n {\n unset($data['ik_sign']);\n ksort($data, SORT_STRING);\n array_push($data, $signKey);\n $signAlgorithm = $this->getSignAlgorithm();\n $signString = implode(':', $data);\n\n return base64_encode(hash($signAlgorithm, $signString, true));\n }\n}\n"}}},{"rowIdx":1834,"cells":{"text":{"kind":"string","value":"#!/usr/bin/env ruby\n# frozen_string_literal: true\n\nrequire 
File.expand_path(\"../config/boot.rb\", __dir__)\nrequire File.expand_path(\"../config/environment.rb\", __dir__)\nrequire File.expand_path(\"../app/extensions/extensions.rb\", __dir__)\n\ndef do_report(year, do_labels = false)\n warn(\"Doing #{year.inspect}...\")\n query = Query.lookup(:Observation, :all, date: year)\n report = ObservationReport::Symbiota.new(query: query).render\n report.sub!(/^[^\\n]*\\n/, \"\") unless do_labels\n puts report\n warn(\" #{query.num_results} observations\\n\")\n sleep 60\nend\n\ndo_report(%w[1000 1999], :do_labels)\n(2000..2019).each do |year|\n do_report([year.to_s, year.to_s])\nend\nexit 0\n"}}},{"rowIdx":1835,"cells":{"text":{"kind":"string","value":"/*\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n * \n * Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0\n * Changes may cause incorrect behavior and will be lost if the code is\n * regenerated.\n// TODO: Include PageTemplateModels here too?? Probably\n */\n\n\n/**\n * @class\n * Initializes a new instance of the Sku class.\n * @constructor\n * SKU details\n *\n * @member {string} name SKU name to specify whether the key vault is a\n * standard vault or a premium vault. Possible values include: 'standard',\n * 'premium'\n * \n */\nexport interface Sku {\n name: string;\n}\n\n/**\n * @class\n * Initializes a new instance of the AccessPolicyEntry class.\n * @constructor\n * An identity that have access to the key vault. All identities in the array\n * must use the same tenant ID as the key vault's tenant ID.\n *\n * @member {uuid} tenantId The Azure Active Directory tenant ID that should be\n * used for authenticating requests to the key vault.\n * \n * @member {uuid} objectId The object ID of a user, service principal or\n * security group in the Azure Active Directory tenant for the vault. 
The\n * object ID must be unique for the list of access policies.\n * \n * @member {uuid} [applicationId] Application ID of the client making request\n * on behalf of a principal\n * \n * @member {object} permissions Permissions the identity has for keys, secrets\n * and certificates.\n * \n * @member {array} [permissions.keys] Permissions to keys\n * \n * @member {array} [permissions.secrets] Permissions to secrets\n * \n * @member {array} [permissions.certificates] Permissions to certificates\n * \n */\nexport interface AccessPolicyEntry {\n tenantId: string;\n objectId: string;\n applicationId?: string;\n permissions: Permissions;\n}\n\n/**\n * @class\n * Initializes a new instance of the Permissions class.\n * @constructor\n * Permissions the identity has for keys, secrets and certificates.\n *\n * @member {array} [keys] Permissions to keys\n * \n * @member {array} [secrets] Permissions to secrets\n * \n * @member {array} [certificates] Permissions to certificates\n * \n */\nexport interface Permissions {\n keys?: string[];\n secrets?: string[];\n certificates?: string[];\n}\n\n/**\n * @class\n * Initializes a new instance of the VaultProperties class.\n * @constructor\n * Properties of the vault\n *\n * @member {string} [vaultUri] The URI of the vault for performing operations\n * on keys and secrets.\n * \n * @member {uuid} tenantId The Azure Active Directory tenant ID that should be\n * used for authenticating requests to the key vault.\n * \n * @member {object} sku SKU details\n * \n * @member {string} [sku.name] SKU name to specify whether the key vault is a\n * standard vault or a premium vault. Possible values include: 'standard',\n * 'premium'\n * \n * @member {array} accessPolicies An array of 0 to 16 identities that have\n * access to the key vault. 
All identities in the array must use the same\n * tenant ID as the key vault's tenant ID.\n * \n * @member {boolean} [enabledForDeployment] Property to specify whether Azure\n * Virtual Machines are permitted to retrieve certificates stored as secrets\n * from the key vault.\n * \n * @member {boolean} [enabledForDiskEncryption] Property to specify whether\n * Azure Disk Encryption is permitted to retrieve secrets from the vault and\n * unwrap keys.\n * \n * @member {boolean} [enabledForTemplateDeployment] Property to specify\n * whether Azure Resource Manager is permitted to retrieve secrets from the\n * key vault.\n * \n */\nexport interface VaultProperties {\n vaultUri?: string;\n tenantId: string;\n sku: Sku;\n accessPolicies: AccessPolicyEntry[];\n enabledForDeployment?: boolean;\n enabledForDiskEncryption?: boolean;\n enabledForTemplateDeployment?: boolean;\n}\n\n/**\n * @class\n * Initializes a new instance of the VaultCreateOrUpdateParameters class.\n * @constructor\n * Parameters for creating or updating a vault\n *\n * @member {string} location The supported Azure location where the key vault\n * should be created.\n * \n * @member {object} [tags] The tags that will be assigned to the key vault.\n * \n * @member {object} properties Properties of the vault\n * \n * @member {string} [properties.vaultUri] The URI of the vault for performing\n * operations on keys and secrets.\n * \n * @member {uuid} [properties.tenantId] The Azure Active Directory tenant ID\n * that should be used for authenticating requests to the key vault.\n * \n * @member {object} [properties.sku] SKU details\n * \n * @member {string} [properties.sku.name] SKU name to specify whether the key\n * vault is a standard vault or a premium vault. Possible values include:\n * 'standard', 'premium'\n * \n * @member {array} [properties.accessPolicies] An array of 0 to 16 identities\n * that have access to the key vault. 
All identities in the array must use\n * the same tenant ID as the key vault's tenant ID.\n * \n * @member {boolean} [properties.enabledForDeployment] Property to specify\n * whether Azure Virtual Machines are permitted to retrieve certificates\n * stored as secrets from the key vault.\n * \n * @member {boolean} [properties.enabledForDiskEncryption] Property to specify\n * whether Azure Disk Encryption is permitted to retrieve secrets from the\n * vault and unwrap keys.\n * \n * @member {boolean} [properties.enabledForTemplateDeployment] Property to\n * specify whether Azure Resource Manager is permitted to retrieve secrets\n * from the key vault.\n * \n */\nexport interface VaultCreateOrUpdateParameters extends BaseResource {\n location: string;\n tags?: { [propertyName: string]: string };\n properties: VaultProperties;\n}\n\n/**\n * @class\n * Initializes a new instance of the Resource class.\n * @constructor\n * Key Vault resource\n *\n * @member {string} [id] The Azure Resource Manager resource ID for the key\n * vault.\n * \n * @member {string} name The name of the key vault.\n * \n * @member {string} [type] The resource type of the key vault.\n * \n * @member {string} location The supported Azure location where the key vault\n * should be created.\n * \n * @member {object} [tags] The tags that will be assigned to the key vault.\n * \n */\nexport interface Resource extends BaseResource {\n id?: string;\n name: string;\n type?: string;\n location: string;\n tags?: { [propertyName: string]: string };\n}\n\n/**\n * @class\n * Initializes a new instance of the Vault class.\n * @constructor\n * Resource information with extended details.\n *\n * @member {object} properties Properties of the vault\n * \n * @member {string} [properties.vaultUri] The URI of the vault for performing\n * operations on keys and secrets.\n * \n * @member {uuid} [properties.tenantId] The Azure Active Directory tenant ID\n * that should be used for authenticating requests to the key 
vault.\n * \n * @member {object} [properties.sku] SKU details\n * \n * @member {string} [properties.sku.name] SKU name to specify whether the key\n * vault is a standard vault or a premium vault. Possible values include:\n * 'standard', 'premium'\n * \n * @member {array} [properties.accessPolicies] An array of 0 to 16 identities\n * that have access to the key vault. All identities in the array must use\n * the same tenant ID as the key vault's tenant ID.\n * \n * @member {boolean} [properties.enabledForDeployment] Property to specify\n * whether Azure Virtual Machines are permitted to retrieve certificates\n * stored as secrets from the key vault.\n * \n * @member {boolean} [properties.enabledForDiskEncryption] Property to specify\n * whether Azure Disk Encryption is permitted to retrieve secrets from the\n * vault and unwrap keys.\n * \n * @member {boolean} [properties.enabledForTemplateDeployment] Property to\n * specify whether Azure Resource Manager is permitted to retrieve secrets\n * from the key vault.\n * \n */\nexport interface Vault extends Resource {\n properties: VaultProperties;\n}\n"}}},{"rowIdx":1836,"cells":{"text":{"kind":"string","value":"class ArticleCategory {\n String name;\n \n ArticleCategory({required this.name});\n}\n"}}},{"rowIdx":1837,"cells":{"text":{"kind":"string","value":"import {ICache} from './ICache';\n\nexport class LRUMemCache implements ICache {\n list: { key: string, value: T }[] = [];\n hash: { [key: string]: T } = {};\n\n constructor(private size: number) {\n\n }\n\n get(key: string): Promise {\n if (this.hash[key]) {\n const index = this.list.findIndex(i => i.key === key);\n const item = this.list.splice(index, 1)[0];\n this.list.unshift(item);\n }\n return Promise.resolve(this.hash[key]);\n }\n\n set(key: string, value: T): Promise {\n if (this.list.length >= this.size) {\n delete this.hash[this.list.pop().key];\n }\n const item = {key: key, value: value};\n this.list.unshift(item);\n this.hash[key] = value;\n return 
Promise.resolve();\n }\n}\n"}}},{"rowIdx":1838,"cells":{"text":{"kind":"string","value":"errorFormat($status , 'Not Found');\n }elseif ($status === 403){\n $this->errorFormat($status ,'Forbidden');\n }elseif ($status === 401){\n $this->errorFormat($status ,'Unauthorized');\n }elseif ($status === 400){\n $this->errorFormat($status ,'Bad Request');\n }elseif ($status === 408){\n $this->errorFormat($status ,'Request Timeout');\n }elseif ($status === 501){\n $this->errorFormat($status ,'Not Implemented');\n }elseif ($status === 502){\n $this->errorFormat($status ,'Bad Gateway');\n }elseif ($status === 503){\n $this->errorFormat($status ,'Service Unavailable');\n }\n }\n\n private function errorFormat($status , $message){\n echo '

'.$status.'
'.$message.'

';\n }\n}"}}},{"rowIdx":1839,"cells":{"text":{"kind":"string","value":"#!/bin/bash\n\n# Based on\n# https://github.com/docker-32bit/debian/blob/i386/build-image.sh\n# and\n# https://github.com/docker/docker/blob/master/contrib/mkimage.sh\n\n# Other resources:\n# https://l3net.wordpress.com/2013/09/21/how-to-build-a-debian-livecd/\n# https://www.opengeeks.me/2015/04/build-your-hybrid-debian-distro-with-xorriso/\n# https://www.reversengineered.com/2014/05/17/building-and-booting-debian-live-over-the-network/\n\nif [ \"$(id -u)\" != \"0\" ]; then\n echo \"This script must be run as root\" 1>&2\n exit 1\nfi\n\nT_START=$(date +'%s')\n\n# Make functions in the files below available for use\n. chroot/chroot_functions.sh\n. chroot/image_functions.sh\n\nOWNER=$1\nDISTRO=$2\nSUITE=$3\nTGZ1=$4\nTGZ2=$5\nUNAME=$6\n\n# Settings\nARCH=i386\nDIR_CHROOT=\"/var/chroot/$SUITE/min\"\nAPT_MIRROR='http://httpredir.debian.org/debian'\nDOCKER_IMAGE=\"$OWNER/32bit-$DISTRO-$SUITE-min\"\n\necho '-----------------'\necho 'Build parameters:'\necho \"Architecture: $ARCH\"\necho \"Suite: $SUITE\"\necho \"Chroot directory: $DIR_CHROOT\"\necho \"Apt-get mirror: $APT_MIRROR\"\necho \"Docker image: $DOCKER_IMAGE\"\necho '---------------------------'\n\n# CHROOT OPERATIONS\ncreate_debian $OWNER $SUITE $DIR_CHROOT\n\nDIR_ROOT=$(dirname $PWD)\nDIR_USR_LOCAL_BIN=$DIR_ROOT/usr_local_bin\n\ncp_user_local_bin () {\n SCRIPT_TO_COPY=$1\n DIR_ROOT=$(dirname $PWD)\n cp $DIR_ROOT/min/usr_local_bin/* $DIR_CHROOT/usr/local/bin\n chmod a+x $DIR_CHROOT/usr/local/bin/$SCRIPT_TO_COPY\n}\n\ncp_user_local_bin 'aptget'\ncp_user_local_bin 'finalize-root'\ncp_user_local_bin 'finalize-user'\ncp_user_local_bin 'min-root'\ncp_user_local_bin 'min-user'\ncp_user_local_bin 'check-min'\n\nexec_chroot $DIR_CHROOT /usr/local/bin/min-root\n\nT_END=$(date +'%s')\nT_ELAPSED=$(($T_END-$T_START))\necho '-------------'\necho 'Time elapsed:'\necho \"$(($T_ELAPSED / 60)) minutes and $(($T_ELAPSED % 60)) seconds\"\n\n# CHROOT -> 
TGZ\nTGZ_SHORT=$TGZ1\nTGZ_LONG=$TGZ2\ncreate_tgz $TGZ_LONG $DIR_CHROOT\nrm $TGZ_SHORT\ncp $TGZ_LONG $TGZ_SHORT\n\n# OUTPUT FILES: change ownership to user\nchown $UNAME:users $TGZ_SHORT\nchown $UNAME:users $TGZ_LONG\nchown $UNAME:users $TGZ_LONG.md5sum\n\nT_END=$(date +'%s')\nT_ELAPSED=$(($T_END-$T_START))\necho '-------------'\necho 'Time elapsed:'\necho \"$(($T_ELAPSED / 60)) minutes and $(($T_ELAPSED % 60)) seconds\"\n\n# TGZ -> IMAGE\nimport_local_image $TGZ_LONG $DOCKER_IMAGE\n\nT_END=$(date +'%s')\nT_ELAPSED=$(($T_END-$T_START))\necho '-------------'\necho 'Time elapsed:'\necho \"$(($T_ELAPSED / 60)) minutes and $(($T_ELAPSED % 60)) seconds\"\n"}}},{"rowIdx":1840,"cells":{"text":{"kind":"string","value":"using System.Collections;\nusing System.Collections.Generic;\nusing UnityEngine;\n\n\nnamespace viva{\n\npublic class itemSphereClothInteraction : MonoBehaviour\n{\n \n public Cloth cloth;\n [Range(1,4)]\n [SerializeField]\n private int maxColliders = 3;\n [SerializeField]\n private float minimumRadius = 0.04f;\n\n private Set sphereColliders = new Set();\n private Set capsuleColliders = new Set();\n\n\n private void OnTriggerEnter( Collider collider ){\n var newSphere = collider.GetComponentInChildren();\n if( newSphere && !newSphere.isTrigger && newSphere.radius > 0.04f && sphereColliders.Count < maxColliders ){\n sphereColliders.Add( newSphere );\n UpdateSphereArray();\n }\n \n var newCapsule = collider.GetComponentInChildren();\n if( newCapsule && capsuleColliders.Count < maxColliders ){\n capsuleColliders.Add( newCapsule );\n UpdateCapsuleArray();\n }\n }\n \n private void OnTriggerExit( Collider collider ){\n var newSphere = collider.GetComponentInChildren();\n if( newSphere && !newSphere.isTrigger ){\n sphereColliders.Remove( newSphere );\n UpdateSphereArray();\n }\n var newCapsule = collider.GetComponentInChildren();\n if( newCapsule ){\n capsuleColliders.Remove( newCapsule );\n UpdateCapsuleArray();\n }\n }\n\n private void UpdateSphereArray(){\n var 
list = new List();\n foreach( var sphere in sphereColliders.objects ){\n list.Add( new ClothSphereColliderPair( sphere ) );\n }\n cloth.sphereColliders = list.ToArray();\n }\n\n private void UpdateCapsuleArray(){\n for( int i=capsuleColliders.Count; i-->0; ){\n if( capsuleColliders.objects[i] == null ){\n capsuleColliders.objects.RemoveAt(i);\n }\n }\n\n cloth.capsuleColliders = capsuleColliders.objects.ToArray();\n }\n}\n\n}"}}},{"rowIdx":1841,"cells":{"text":{"kind":"string","value":"#ifndef __DHT11_H__\r\n#define __DHT11_H__\r\n\r\n#include \"stm32f10x_gpio.h\"\r\n\r\ntypedef struct\r\n{\r\n\tGPIO_TypeDef*DATA_GPIO;\r\n\tuint16_t DATA_Pin;\r\n}DHT11;\r\n\r\ntypedef struct\r\n{\r\n\tuint8_t HumidityInteger;\t\r\n\tuint8_t HumidityDecimal;\r\n\tuint8_t TemperatureInteger;\r\n\tuint8_t TemperatureDecimal;\r\n\tuint8_t Check;\r\n}DHT11_Data;\r\n\r\nvoid DHT11_Init(DHT11*dht11);\r\nvoid DHT11_Get(DHT11*dht11,DHT11_Data*data);\r\n\r\n#endif\r\n"}}},{"rowIdx":1842,"cells":{"text":{"kind":"string","value":"import 'package:flutter/cupertino.dart';\nimport 'package:flutter/material.dart';\nimport 'package:flutter/scheduler.dart';\nimport 'package:flutter_news_app/EventsTabs.dart';\nimport 'package:flutter_news_app/NewsTabs.dart';\nimport 'package:flutter_news_app/PodcastPage.dart';\nimport 'package:flutter_news_app/page_view.dart';\nimport 'package:flutter_news_app/util.dart';\n\n\nclass PodcastTabs extends StatefulWidget {\n @override\n PodcastPageState createState() => new PodcastPageState();\n}\n\nclass PodcastPageState extends State {\n Util newUtil = new Util();\n static String _podCastApi;\n String _urlStringPodCast = \"http://api.digitalpodcast.com/v2r/search/?format=json&appid=\";\n String _keyword = \"&keywords=\";\n String _search;\n int _currentIndex = 3;\n\n\n @override\n void initState() {\n super.initState();\n _podCastApi = newUtil.podCastApi;\n }\n\n @override\n Widget build(BuildContext context) {\n timeDilation = 1.0;\n return MaterialApp(\n 
debugShowCheckedModeBanner: false,\n home: DefaultTabController(\n length: 13,\n child: Scaffold(\n appBar: AppBar(\n backgroundColor: Color.fromRGBO(128, 0, 128, 50.0),\n leading: new IconButton(\n icon: new Icon(Icons.arrow_back),\n onPressed: () {\n Navigator.pop(context, true);\n }),\n bottom: new TabBar(\n isScrollable: true,\n indicatorColor: Color.fromRGBO(128, 0, 128, 50.0),\n tabs: [\n\n new Tab(text: \"Music\"),\n new Tab(text: \"Business\"),\n new Tab(text: \"Educational\"),\n new Tab(text: \"Comedy\"),\n new Tab(text: \"News & Politics\",),\n new Tab(text: \"Science & Medicine\"),\n new Tab(text: \"Sports\"),\n new Tab(text: \"Technology & Gadgets\",),\n new Tab(text: \"Television\",),\n new Tab(text: \"Film & Entertainment\",),\n new Tab(text: \"Charity & Causes\",),\n new Tab(text: \"Religion & Spirituality\",),\n new Tab(text: \"Arts\",)\n ],\n labelStyle: TextStyle(\n fontSize: 20.0, fontFamily: 'RobotoMono',),\n ),\n title: Text(\"Podcast\", style: new TextStyle(\n fontWeight: FontWeight.bold,\n fontFamily: 'Raleway',\n fontSize: 22.0,\n color: Colors.white,\n ),)),\n body: TabBarView(\n children: [\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword + \"music\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword +\n \"business\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword +\n \"educational\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword + \"comedy\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword +\n \"news & politics\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword +\n \"science & medicine\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword + \"sports\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword +\n \"technology & gadgets\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword +\n \"television\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword +\n \"film & 
entertainment\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword +\n \"charity & causes\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword +\n \"religion & spirituality\"),\n new HomePage(\n url: _urlStringPodCast + _podCastApi + _keyword + \"arts\")\n ],\n ),\n bottomNavigationBar: BottomNavigationBar(\n currentIndex: _currentIndex,\n onTap: (newIndex) =>\n setState(() {\n _currentIndex = newIndex;\n switch (_currentIndex) {\n case 0:\n print(\"In the intropage\");\n Navigator.push(\n context,\n MaterialPageRoute(\n builder: (context) => IntroPageView()),\n );\n break;\n case 1:\n print(\"In the newstabs\");\n Navigator.of(context, rootNavigator: true).push(\n new CupertinoPageRoute(\n fullscreenDialog: false,\n builder: (BuildContext context) =>\n new NewsTabs(country: 'us',)),\n );\n break;\n case 2:\n print(\"In the eventstabs\");\n Navigator.of(context, rootNavigator: true).push(\n new CupertinoPageRoute(\n fullscreenDialog: false,\n builder: (\n BuildContext context) => new EventsTabs()),\n );\n break;\n }\n print(_currentIndex);\n }),\n items: [\n BottomNavigationBarItem(\n icon: new Icon(Icons.home),\n title: new Text('Home'),\n backgroundColor: Color.fromRGBO(128, 0, 128, 50.0)\n ),\n BottomNavigationBarItem(\n icon: new Icon(Icons.book),\n title: new Text('News'),\n backgroundColor: Color.fromRGBO(128, 0, 128, 50.0)\n ),\n BottomNavigationBarItem(\n icon: new Icon(Icons.event),\n title: new Text('Events'),\n backgroundColor: Color.fromRGBO(128, 0, 128, 50.0)\n ),\n BottomNavigationBarItem(\n icon: Icon(Icons.headset),\n title: Text('Podcast'),\n backgroundColor: Color.fromRGBO(128, 0, 128, 50.0)\n\n ),\n ],\n ),\n ),\n )\n\n );\n }\n}\n\nString searchKeyword(String searchCode) {\n String _searchCode;\n switch (searchCode) {\n case 'Music':\n _searchCode = \"music\";\n break;\n case 'Business':\n _searchCode = \"business\";\n break;\n case 'Educational':\n _searchCode = \"educational\";\n break;\n case 'Comedy':\n 
_searchCode = \"comedy\";\n break;\n case 'News & Politics':\n _searchCode = \"news & politics\";\n break;\n case 'Science & Medicine':\n _searchCode = \"science & medicine\";\n break;\n case 'Sports':\n _searchCode = \"sports\";\n break;\n case 'Technology & Gadgets':\n _searchCode = \"technology & gadgets\";\n break;\n case 'Television':\n _searchCode = \"television\";\n break;\n case 'Film & Entertainment':\n _searchCode = \"film & entertainment\";\n break;\n case 'Charity & Causes':\n _searchCode = \"charity & causes\";\n break;\n case 'Religion & Spirituality':\n _searchCode = \"religion & spirituality\";\n break;\n case 'Arts':\n _searchCode = \"arts\";\n break;\n }\n return _searchCode;\n}\n\n"}}},{"rowIdx":1843,"cells":{"text":{"kind":"string","value":"//\n// RunsNetworkMonitor.h\n// OU_iPad\n//\n// Created by runs on 2017/10/12.\n// Copyright © 2017年 Olacio. All rights reserved.\n//\n\n#import \n#import \"Reachability.h\"\n\nFOUNDATION_EXTERN NSString * const RunsNetworkMonitorDidChangeMessage; //object NSNumber(NetworkStatus)\n\ntypedef void(^RunsNetworkChangeCallback)(NetworkStatus status);\n\n@interface RunsNetworkMonitor : NSObject\n+ (BOOL)isReachable;\n+ (BOOL)isReachableViaWWAN;\n+ (BOOL)isReachableViaWiFi;\n+ (BOOL)NetworkIsReachableWithShowTips:(BOOL)isShow;\n+ (void)NetWorkMonitorWithReachableBlock:(NetworkReachable)reachable unreachableBlock:(NetworkUnreachable)unreachable;\n@end\n"}}},{"rowIdx":1844,"cells":{"text":{"kind":"string","value":"import {\n all,\n fork,\n call,\n delay,\n takeLatest,\n put,\n actionChannel,\n throttle,\n} from 'redux-saga/effects';\nimport { http } from './httpHelper';\nimport { actionTypes } from '../reducers/actionTypes';\nimport { Dictionary } from '../typings/Dictionary';\nimport { AxiosResponse } from 'axios';\nimport {\n JsonResult,\n ListResult,\n PostModel,\n CategoryModel,\n TagModel,\n ImageModel,\n} from '../typings/dto';\nimport { BaseAction } from '../typings/BaseAction';\n\nfunction 
loadMyPostsApi(query) {\n const { page, limit, keyword } = query;\n\n return http().get(\n `/me/posts?page=${page}&limit=${limit}&keyword=${encodeURIComponent(\n keyword,\n )}`,\n );\n}\n\nfunction* loadMyPosts(action: BaseAction) {\n try {\n const { page, limit, keyword } = action.data;\n\n const result = yield call(loadMyPostsApi, {\n page: page || '1',\n limit: limit || 10,\n keyword: keyword,\n });\n\n const resultData = result.data as JsonResult>;\n const { success, data, message } = resultData;\n\n if (!success) {\n throw new Error(message);\n }\n\n yield put({\n type: actionTypes.LOAD_MY_POSTS_DONE,\n data: {\n ...data,\n page: page || 1,\n },\n });\n } catch (e) {\n // console.error(e);\n yield put({\n type: actionTypes.LOAD_MY_POSTS_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchLoadMyPosts() {\n yield takeLatest(actionTypes.LOAD_MY_POSTS_CALL, loadMyPosts);\n}\n\nfunction writePostApi(formData) {\n return http().post('/me/post', formData);\n}\n\nfunction* writePost(action) {\n try {\n const result = yield call(writePostApi, action.data);\n const resultData = result.data as JsonResult;\n const { success, data, message } = resultData;\n if (success) {\n yield put({\n type: actionTypes.WRITE_POST_DONE,\n data: data,\n });\n } else {\n yield put({\n type: actionTypes.WRITE_POST_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n yield put({\n type: actionTypes.WRITE_POST_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchWritePost() {\n yield takeLatest(actionTypes.WRITE_POST_CALL, writePost);\n}\n\nfunction loadCategoriesApi(query) {\n const { limit, keyword, page } = query;\n\n return http().get(\n `/me/categories?page=${page}&limit=${limit}&keyword=${encodeURIComponent(\n keyword,\n )}`,\n );\n}\n\nfunction* loadCategories(action: BaseAction) {\n try {\n const { limit, keyword, page } = action.data;\n // console.debug('[DEBUG]: category ==> ', action.data);\n const result: 
AxiosResponse<\n JsonResult>\n > = yield call(loadCategoriesApi, {\n page: page || 1,\n limit: limit || 10,\n keyword: keyword || '',\n });\n\n const { success, data, message } = result.data;\n\n // console.debug('[DEBUG]: categories ==> ', data);\n\n if (!success) {\n throw new Error(message);\n }\n\n yield put({\n type: actionTypes.LOAD_MY_CATEGORIES_DONE,\n data: {\n ...data,\n page: page || 1,\n },\n });\n } catch (e) {\n console.error(e);\n yield put({\n type: actionTypes.LOAD_MY_CATEGORIES_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchLoadCategories() {\n yield takeLatest(actionTypes.LOAD_MY_CATEGORIES_CALL, loadCategories);\n}\n\nfunction loadTagsApi() {\n return http().get('/me/tags');\n}\n\nfunction* loadTags(action) {\n try {\n const result: AxiosResponse<\n JsonResult>\n > = yield call(loadTagsApi);\n // const resultData = result.data as IJsonResult>;\n const { success, data, message } = result.data;\n if (success) {\n yield put({\n type: actionTypes.LOAD_MY_TAGS_DONE,\n data: data,\n });\n } else {\n yield put({\n type: actionTypes.LOAD_MY_TAGS_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n yield put({\n type: actionTypes.LOAD_MY_TAGS_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchLoadTags() {\n yield takeLatest(actionTypes.LOAD_MY_TAGS_CALL, loadTags);\n}\n\nfunction editPostApi(id, data) {\n return http().patch(`/me/post/${id}`, data);\n}\n\nfunction* editPost(action) {\n try {\n const result = yield call(editPostApi, action.id, action.data);\n const resultData = result.data as JsonResult;\n const { success, data, message } = resultData;\n if (success) {\n yield put({\n type: actionTypes.EDIT_POST_DONE,\n data: data,\n });\n } else {\n yield put({\n type: actionTypes.EDIT_POST_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n yield put({\n type: actionTypes.EDIT_POST_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* 
watchEditPost() {\n yield takeLatest(actionTypes.EDIT_POST_CALL, editPost);\n}\n\n/**\n * 글을 삭제합니다.\n *\n * @param {number} id 글 식별자 Post.Id\n *\n */\nfunction deletePostApi(id) {\n return http().delete(`/me/post/${id}`);\n}\n\nfunction* deletePost(action) {\n try {\n const result = yield call(deletePostApi, action.data);\n const resultData = result.data as JsonResult;\n const { success, data, message } = resultData;\n if (success) {\n yield put({\n type: actionTypes.DELETE_POST_DONE,\n data: { id: data },\n });\n } else {\n yield put({\n type: actionTypes.DELETE_POST_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n // console.error(e);\n yield put({\n type: actionTypes.DELETE_POST_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchDeletePost() {\n yield takeLatest(actionTypes.DELETE_POST_CALL, deletePost);\n}\n\nfunction loadMyPostApi(query) {\n const { id } = query;\n return http().get(`/me/post/${id}`);\n}\n\nfunction* loadMyPost(action) {\n try {\n const { id } = action.data;\n const result: AxiosResponse> = yield call(\n loadMyPostApi,\n { id },\n );\n\n const { success, data, message } = result.data;\n if (!success) {\n yield put({\n type: actionTypes.LOAD_MY_POST_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n\n yield put({\n type: actionTypes.LOAD_MY_POST_DONE,\n data: {\n post: data,\n },\n });\n } catch (e) {\n // console.error(e);\n yield put({\n type: actionTypes.LOAD_MY_POST_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchLoadMyPost() {\n yield takeLatest(actionTypes.LOAD_MY_POST_CALL, loadMyPost);\n}\n\nfunction* writeNewPost(action) {\n try {\n yield put({\n type: actionTypes.WRITE_NEW_POST_DONE,\n });\n } catch (e) {\n yield put({\n type: actionTypes.WRITE_NEW_POST_FAIL,\n error: e,\n });\n }\n}\n\nfunction* watchWriteNewPost() {\n yield takeLatest(actionTypes.WRITE_NEW_POST_CALL, writeNewPost);\n}\n\nfunction uploadMyMediaFilesApi(data) {\n return 
http().post('/me/media', data);\n}\n\nfunction* uploadMyMediaFiles(action) {\n try {\n // console.log('==========> form data:', action.data);\n const result = yield call(uploadMyMediaFilesApi, action.data);\n const resultData = result.data as JsonResult>;\n const { success, data, message } = resultData;\n if (success) {\n yield put({\n type: actionTypes.UPLOAD_MY_MEDIA_FILES_DONE,\n data: {\n ...data,\n },\n });\n } else {\n yield put({\n type: actionTypes.UPLOAD_MY_MEDIA_FILES_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n // console.error(e);\n yield put({\n type: actionTypes.UPLOAD_MY_MEDIA_FILES_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchUploadMyMediaFiles() {\n yield takeLatest(\n actionTypes.UPLOAD_MY_MEDIA_FILES_CALL,\n uploadMyMediaFiles,\n );\n}\n\nfunction loadMediaFilesApi(query) {\n const { page, limit, keyword } = query;\n return http().get(\n `/me/media/?page=${page}&limit=${limit}&keyword=${encodeURIComponent(\n keyword,\n )}`,\n );\n}\n\nfunction* loadMediaFiles(action) {\n try {\n const { page, limit, keyword } = action.data;\n const result = yield call(loadMediaFilesApi, {\n page: page || 1,\n limit: limit || 10,\n keyword: keyword || '',\n });\n\n const resultData = result.data as JsonResult>;\n const { success, data, message } = resultData;\n if (success) {\n yield put({\n type: actionTypes.LOAD_MY_MEDIA_FILES_DONE,\n data: {\n ...data,\n page: page || 1,\n },\n });\n } else {\n yield put({\n type: actionTypes.LOAD_MY_MEDIA_FILES_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n console.error(e);\n yield put({\n type: actionTypes.LOAD_MY_MEDIA_FILES_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchLoadMediaFiles() {\n yield takeLatest(actionTypes.LOAD_MY_MEDIA_FILES_CALL, loadMediaFiles);\n}\n\nfunction deleteMediaFileApi(id) {\n return http().delete(`/me/media/${id}`);\n}\n\nfunction* deleteMediaFile(action) {\n try {\n const { 
id } = action.data;\n const result: AxiosResponse> = yield call(\n deleteMediaFileApi,\n id,\n );\n\n const { success, data, message } = result.data;\n if (success) {\n yield put({\n type: actionTypes.DELETE_MY_MEDIA_FILES_DONE,\n data: {\n id: data,\n },\n });\n } else {\n yield put({\n type: actionTypes.DELETE_MY_MEDIA_FILES_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n console.error(e);\n yield put({\n type: actionTypes.DELETE_MY_MEDIA_FILES_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchDeleteMediaFile() {\n yield takeLatest(actionTypes.DELETE_MY_MEDIA_FILES_CALL, deleteMediaFile);\n}\n\nfunction editCategoryApi(formData) {\n if (!!formData.id) {\n return http().patch(`/me/category/${formData.id}`, formData);\n } else {\n return http().post('/me/category', formData);\n }\n}\n\nfunction* editCategory(action) {\n try {\n const result = yield call(editCategoryApi, action.data);\n const resultData = result.data as JsonResult;\n const { success, data, message } = resultData;\n if (success) {\n yield put({\n type: actionTypes.EDIT_MY_CATEGORY_DONE,\n data: {\n category: data,\n },\n });\n } else {\n yield put({\n type: actionTypes.EDIT_MY_CATEGORY_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n // console.error(e);\n yield put({\n type: actionTypes.EDIT_MY_CATEGORY_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* wacthEditCategory() {\n yield takeLatest(actionTypes.EDIT_MY_CATEGORY_CALL, editCategory);\n}\n\nfunction deleteCategoryApi(id) {\n return http().delete(`/me/category/${id}`);\n}\n\nfunction* deleteCategory(action) {\n try {\n const { id } = action.data;\n const result: AxiosResponse> = yield call(\n deleteCategoryApi,\n id,\n );\n\n const { success, data, message } = result.data;\n if (!success) {\n throw new Error(message);\n }\n\n yield put({\n type: actionTypes.DELETE_MY_CATEGORY_DONE,\n data: {\n id: data,\n },\n });\n } catch (e) {\n // 
console.error(e);\n yield put({\n type: actionTypes.DELETE_MY_CATEGORY_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchDeleteCategory() {\n yield takeLatest(actionTypes.DELETE_MY_CATEGORY_CALL, deleteCategory);\n}\n\nfunction loadLikedPostsApi(query) {\n const { limit, keyword, page } = query;\n\n return http().get(\n `/me/liked?&page=${page}&limit=${limit}&keyword=${encodeURIComponent(\n keyword,\n )}`,\n );\n}\n\nfunction* loadLikedPosts(action) {\n try {\n const { limit, keyword, page } = action.data;\n\n const result = yield call(loadLikedPostsApi, {\n page: page || 1,\n limit: limit || 10,\n keyword: keyword || '',\n });\n\n const resultData = result.data as JsonResult>;\n const { success, data, message } = resultData;\n if (success) {\n yield put({\n type: actionTypes.LOAD_LIKED_POSTS_DONE,\n data: {\n ...data,\n keyword: keyword,\n page: page || 1,\n },\n });\n } else {\n yield put({\n type: actionTypes.LOAD_LIKED_POSTS_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n // console.error(e);\n yield put({\n type: actionTypes.LOAD_LIKED_POSTS_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchLoadLikedPosts() {\n yield takeLatest(actionTypes.LOAD_LIKED_POSTS_CALL, loadLikedPosts);\n}\n\nfunction loadStatGeneralApi(query) {\n return http().get('/me/stat/general');\n}\n\nfunction* loadStatGeneral(action) {\n try {\n const result = yield call(loadStatGeneralApi, action.data);\n const resultData = result.data as JsonResult>;\n const { success, data, message } = resultData;\n if (success) {\n yield put({\n type: actionTypes.LOAD_STAT_GENERAL_DONE,\n data: data,\n });\n } else {\n yield put({\n type: actionTypes.LOAD_STAT_GENERAL_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n // console.error(e);\n yield put({\n type: actionTypes.LOAD_STAT_GENERAL_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchLoadStatGeneral() {\n yield 
takeLatest(actionTypes.LOAD_STAT_GENERAL_CALL, loadStatGeneral);\n}\n\nfunction loadStatReadApi(query) {\n return http().get('/me/stat/postread');\n}\n\nfunction* loadStatRead(action) {\n try {\n const result = yield call(loadStatReadApi, action.data);\n const resultData = result.data as JsonResult>;\n const { success, data, message } = resultData;\n if (success) {\n yield put({\n type: actionTypes.LOAD_STAT_READ_DONE,\n data: data,\n });\n } else {\n yield put({\n type: actionTypes.LOAD_STAT_READ_FAIL,\n error: new Error(message),\n message: message,\n });\n }\n } catch (e) {\n // console.error(e);\n yield put({\n type: actionTypes.LOAD_STAT_READ_FAIL,\n error: e,\n message: e.message,\n });\n }\n}\n\nfunction* watchLoadStatRead() {\n yield takeLatest(actionTypes.LOAD_STAT_READ_CALL, loadStatRead);\n}\n\nexport default function* postSaga() {\n yield all([\n fork(watchLoadMyPosts),\n fork(watchLoadMyPost),\n fork(watchWritePost),\n fork(watchEditPost),\n fork(watchDeletePost),\n fork(watchLoadCategories),\n fork(watchLoadTags),\n fork(watchWriteNewPost),\n fork(watchUploadMyMediaFiles),\n fork(watchLoadMediaFiles),\n fork(watchDeleteMediaFile),\n fork(wacthEditCategory),\n fork(watchDeleteCategory),\n fork(watchLoadLikedPosts),\n fork(watchLoadStatGeneral),\n fork(watchLoadStatRead),\n ]);\n}\n"}}},{"rowIdx":1845,"cells":{"text":{"kind":"string","value":"# Omnipay: Instamojo\n\n**[Instamojo](https://www.instamojo.com/) driver for the Omnipay PHP payment processing library**\n\n[Omnipay](https://github.com/thephpleague/omnipay) is a framework agnostic, multi-gateway payment\nprocessing library for PHP 5.3+.\nThis package implements [Instamojo Payments API v1.1](https://docs.instamojo.com/docs/payments-api).\n\n## Installation\n\nOmnipay is installed via [Composer](http://getcomposer.org/). 
To install, simply run:\n\n```\ncomposer require gentor/omnipay-instamojo\n```\n\n## Purchase\n\n```php\nuse Omnipay\\Omnipay;\n\n// Setup payment gateway\n$gateway = Omnipay::create('Instamojo');\n$gateway->setApiKey('abc123');\n$gateway->setAuthToken('abc123');\n\n// Send purchase request\n$response = $gateway->purchase(\n [\n 'amount' => '10.00',\n 'purpose' => 'Instamojo Payment'\n ]\n)->send();\n\n// Process response\nif ($response->isSuccessful() && $response->isRedirect()) {\n\n // Redirect to offsite payment gateway\n // print_r($response->getData());\n // echo $response->getTransactionStatus();\n $response->redirect();\n\n} else {\n\n // Request failed\n echo $response->getMessage();\n}\n```\n\n## Complete Purchase\n\n```php\n// Send complete purchase request\n$response = $gateway->completePurchase(\n [\n 'transactionReference' => $_GET['payment_id'],\n ]\n)->send();\n\n// Process response\nif ($response->isSuccessful()) {\n\n // Request was successful\n print_r($response->getData());\n echo $response->getTransactionStatus();\n\n} else {\n\n // Request failed\n echo $response->getMessage();\n}\n```\n\n## Refund\n\n```php\n// Send refund request\n$response = $gateway->refund(\n [\n 'transactionReference' => $payment_id,\n ]\n)->send();\n\n// Process response\nif ($response->isSuccessful()) {\n\n // Request was successful\n print_r($response->getData());\n echo $response->getTransactionStatus();\n\n} else {\n\n // Request failed\n echo $response->getMessage();\n}\n```\n\n## Fetch Payment Request\n\n```php\n// Send fetch payment request\n$response = $gateway->fetchPaymentRequest(\n [\n 'transactionReference' => $payment_request_id,\n ]\n)->send();\n\n// Process response\nif ($response->isSuccessful()) {\n\n // Request was successful\n print_r($response->getData());\n echo $response->getTransactionStatus();\n\n} else {\n\n // Request failed\n echo $response->getMessage();\n}\n```\n\n## Webhook\n\n```php\nuse Omnipay\\Omnipay;\n\n// Setup payment 
gateway\n$gateway = Omnipay::create('Instamojo');\n$gateway->setSalt('abc123');\n\n// Payment notification request\n$response = $gateway->acceptNotification()->send();\n\n// Process response\nif ($response->isSuccessful()) {\n\n // Request was successful\n print_r($response->getData());\n echo $response->getTransactionReference();\n echo $response->getTransactionStatus();\n\n} else {\n\n // Request failed\n echo $response->getMessage();\n}\n```\n\n## [Instamojo API v1.1 Documentation](https://docs.instamojo.com/docs/payments-api)"}}},{"rowIdx":1846,"cells":{"text":{"kind":"string","value":"using Microsoft.Extensions.Configuration;\n\nnamespace Kubernetes.Configuration.Extensions.Configmap\n{\n public class ConfigmapConfigurationSource : IConfigurationSource\n {\n public string? Namespace { get; set; }\n public string? LabelSelector { get; set; }\n public string? Separator { get; set; }\n public bool ReloadOnChange { get; set; }\n public IConfigurationProvider Build(IConfigurationBuilder builder)\n {\n return new ConfigmapConfigurationProvider(Namespace, LabelSelector, Separator, ReloadOnChange);\n }\n }\n}"}}},{"rowIdx":1847,"cells":{"text":{"kind":"string","value":"# frozen_string_literal: true\n\nclass User < ApplicationRecord\n has_many :authentication_tokens, dependent: :destroy\n rolify before_add: :before_add_role, strict: true\n validates :email, presence: true\n validates :email, uniqueness: true, allow_blank: true\n\n devise :trackable, :token_authenticatable, :omniauthable, omniauth_providers: [:google_oauth2]\n\n def role_level_in(organization)\n levels = roles.global.map(&:level)\n levels << local_role_level_in(organization)\n levels.max\n end\n\n def role_in(organization)\n # Returns only an explicit role in the passed organization, not including global roles\n roles.find_by resource_id: organization.id\n end\n\n def administrator?(organization = nil)\n is_admin_of?(organization) || global_administrator?\n end\n\n def global_role?\n 
roles.global.present?\n end\n\n def global_role\n roles.global.first\n end\n\n def organizations\n return Organization.all if global_role?\n\n membership_organizations\n end\n\n def global_administrator?\n is_global_admin? || is_super_admin?\n end\n\n def membership_organizations\n # All organizations in which this user has an explicit role, not including global roles\n Organization.where(id: roles.pluck(:resource_id))\n end\n\n def member_of?(organization)\n roles.pluck(:resource_id).include?(organization.id)\n end\n\n def read_only?\n (roles.pluck(:name).map(&:to_sym) - Role::READ_ONLY_ROLES).empty?\n end\n\n private\n\n def before_add_role(role)\n raise ActiveRecord::Rollback if Role::LOCAL_ROLES[role.symbol].nil? && Role::GLOBAL_ROLES[role.symbol].nil?\n raise ActiveRecord::Rollback if roles.pluck(:resource_id).include?(role.resource_id)\n end\n\n def local_role_level_in(organization)\n # Role level in explicit organization, excluding global roles\n role = role_in organization\n return Role::MINIMAL_ROLE_LEVEL if role.nil?\n\n role.level\n end\n\n class << self\n def from_omniauth(auth)\n user = get_user_from_auth auth\n return update_user_from_omniauth user, auth if user\n return create_first_user auth if first_user?\n\n empty_user\n end\n\n def from_id_token(id_token)\n client = OAuth2::Client.new(Rails.configuration.google_client_id, Rails.configuration.google_client_secret)\n\n response = client.request(:get, Rails.configuration.google_token_info_url, params: { id_token: id_token }).parsed\n User.find_by(email: response['email'])\n end\n\n private\n\n def update_user_from_omniauth(user, auth)\n user.update auth_params auth\n user\n end\n\n def empty_user\n User.new\n end\n\n def create_first_user(auth)\n user = User.new auth_params auth\n user.save\n user.add_role :super_admin\n user\n end\n\n def get_user_from_auth(auth)\n User.find_by email: auth['info']['email']\n end\n\n def first_user?\n User.count.zero?\n end\n\n def auth_params(auth)\n {\n uid: 
auth['uid'],\n name: auth['info']['name'],\n email: auth['info']['email'],\n provider: auth.provider,\n image: auth['info']['image']\n }\n end\n end\nend\n"}}},{"rowIdx":1848,"cells":{"text":{"kind":"string","value":"import React from 'react';\nimport classes from './Spinner.module.css';\n\nconst Spinner = (props) => {\n const style = {\n backgroundColor: `var(--${props.variant})`,\n };\n return (\n
\n
\n
\n
\n );\n};\n\nexport default Spinner;\n"}}},{"rowIdx":1849,"cells":{"text":{"kind":"string","value":"\n
\n \n\n
\n
Add Note
\n View Profile\n
View Dashboard
\n
Email
\n
Disconnect
\n
\n
"}}},{"rowIdx":1850,"cells":{"text":{"kind":"string","value":"describe Coactive::Interface do\n context 'default' do\n let :interface_class do\n Variables::DefaultInterface\n end\n\n it 'sets default value' do\n interface = interface_class.new\n expect(interface.context.in).to eq('default value')\n end\n\n it 'sets default value by method' do\n interface = interface_class.new\n expect(interface.context.in_method).to eq('default value')\n end\n\n it 'sets default value by proc' do\n interface = interface_class.new\n expect(interface.context.in_proc).to eq('default value')\n end\n end\nend\n"}}},{"rowIdx":1851,"cells":{"text":{"kind":"string","value":"## v0.1.6\n\n* Further Opal 1.4 compatibility\n\n## v0.1.5\n\n* Opal 1.4 compatibility\n"}}},{"rowIdx":1852,"cells":{"text":{"kind":"string","value":"/** Michał Wójcik 2021 */\n/**\n * L-System zaimplementowany w języku javascript z wykorzystaniem\n * HTML5 Canvas i turtle-graphics-js [https://www.npmjs.com/package/turtle-graphics-js]\n *\n * Program przyjmuje parametry przez pola tekstowe na stronie\n * a następnie rysuje po wciśnięciu przycisku \"rysuj\"\n *\n * Składnia reguł:\n * Znak:Wartosc_do_zamiany;Znak:Wartosc_do_zamiany;\n * Dowolna liczba reguł\n *\n * Operacje:\n * - F - idź do przodu i rysuj\n * - f - idź do przodu (nie rysuj)\n * - + - obrót w prawo\n * - - - obrót w lewo\n * - [ - odłóż pozycję i rotację na stos\n * - ] - zdejmij pozycję i rotację ze stosu\n * - C - losuj nowy kolor\n * - L - zwiększ długość rysowanej linii\n */\n\nvar stack = [];\n\nvar turtle = new Turtle(document.getElementById(\"canvas\"));\nturtle.pen.color = \"#000\";\nturtle.pen.width = 2;\nturtle.moveTo(400, 300);\nvar initLoc = JSON.parse(JSON.stringify(turtle.loc));\n\n/**Attributes */\n\nvar lineLength = 1;\nvar rotation = 90;\nvar axiom = \"\";\nvar rules = \"\";\nvar iterations = 5;\n\n/**DOM references */\nvar lengthInput = document.getElementById(\"length\");\nvar rotationInput = document.getElementById(\"rotation\");\nvar 
iterationsInput = document.getElementById(\"iterations\");\nvar axiomInput = document.getElementById(\"axiom\");\nvar rulesInput = document.getElementById(\"rules\");\nvar drawButton = document.getElementById(\"drawButton\");\n\nvar inter = null;\n\ndrawButton.addEventListener(\"click\", () => {\n clearAll();\n drawLSystem();\n});\n\n/**Functions */\nconst drawLSystem = function () {\n getAttributes();\n let ruleObjectsArray = interpretRules(rules);\n axiom = applyRules(axiom, ruleObjectsArray, iterations);\n\n var i = 0;\n inter = setInterval(() => {\n if (i > axiom.length) {\n clearInterval(inter);\n return;\n }\n drawSign(axiom.charAt(i));\n i++;\n }, 6);\n};\n\nconst getAttributes = function () {\n lineLength = lengthInput.value !== undefined ? lengthInput.value : lineLength;\n rotation = rotationInput.value !== undefined ? rotationInput.value : rotaion;\n iterations =\n iterationsInput.value !== undefined ? iterationsInput.value : iterations;\n axiom = axiomInput.value.length > 0 ? axiomInput.value : axiom;\n rules = rulesInput.value.length > 0 ? 
rulesInput.value : rules;\n};\n\n/**Rule structure:\n * {\n * sign: string;\n * value: string;\n * }\n */\n\nconst interpretRules = function (rules) {\n let ruleArray = rules.split(\";\");\n let ruleObjectsArray = ruleArray.map((ruleString) => {\n let arr = ruleString.split(\":\");\n return { sign: arr[0], value: arr[1] };\n });\n return ruleObjectsArray;\n};\n\nconst applyRules = function (axiom, ruleObjectsArray, iterations) {\n let result = axiom;\n for (let i = 0; i < iterations; i++) {\n ruleObjectsArray.forEach((rule) => {\n result = result.replaceAll(rule.sign, rule.value);\n });\n }\n return result;\n};\n\nconst drawSign = function (sign) {\n switch (sign) {\n case \"F\":\n turtle.penDown();\n case \"f\":\n turtle.forward(lineLength);\n turtle.penUp();\n break;\n\n case \"+\":\n turtle.right(rotation);\n break;\n\n case \"-\":\n turtle.left(rotation);\n break;\n\n case \"[\":\n stack.push(JSON.parse(JSON.stringify(turtle.loc)));\n break;\n\n case \"]\":\n let loc = stack.pop();\n turtle.loc = loc;\n break;\n case \"C\":\n turtle.pen.color = getRandomColor();\n break;\n case \"L\":\n lineLength++;\n break;\n default:\n break;\n }\n return;\n};\n\nconst getRandomColor = function () {\n return `#${Math.floor(Math.random() * 16777215).toString(16)}`;\n};\n\nfunction clearAll() {\n clearInterval(inter);\n stack = [];\n turtle.moveTo(400, 300);\n turtle.angle = 0;\n turtle.pen.color = \"#000\";\n turtle.ctx.clearRect(\n 0,\n 0,\n canvas.width || canvas.style.width,\n canvas.height || canvas.style.height\n );\n}\n"}}},{"rowIdx":1853,"cells":{"text":{"kind":"string","value":"package output\n\nimport (\n\t\"encoding/json\"\n\t\"time\"\n\n\t\"github.com/shopspring/decimal\"\n)\n\ntype ReportInput struct {\n\tMetadata map[string]string\n\tRoot Root\n}\n\nfunc Load(data []byte) (Root, error) {\n\tvar out Root\n\terr := json.Unmarshal(data, &out)\n\treturn out, err\n}\n\nfunc Combine(currency string, inputs []ReportInput, opts Options) Root {\n\tvar combined 
Root\n\n\tvar totalHourlyCost *decimal.Decimal\n\tvar totalMonthlyCost *decimal.Decimal\n\n\tprojects := make([]Project, 0)\n\tsummaries := make([]*Summary, 0, len(inputs))\n\n\tfor _, input := range inputs {\n\n\t\tprojects = append(projects, input.Root.Projects...)\n\n\t\tsummaries = append(summaries, input.Root.Summary)\n\n\t\tif input.Root.TotalHourlyCost != nil {\n\t\t\tif totalHourlyCost == nil {\n\t\t\t\ttotalHourlyCost = decimalPtr(decimal.Zero)\n\t\t\t}\n\n\t\t\ttotalHourlyCost = decimalPtr(totalHourlyCost.Add(*input.Root.TotalHourlyCost))\n\t\t}\n\t\tif input.Root.TotalMonthlyCost != nil {\n\t\t\tif totalMonthlyCost == nil {\n\t\t\t\ttotalMonthlyCost = decimalPtr(decimal.Zero)\n\t\t\t}\n\n\t\t\ttotalMonthlyCost = decimalPtr(totalMonthlyCost.Add(*input.Root.TotalMonthlyCost))\n\t\t}\n\t}\n\n\tcombined.Version = outputVersion\n\tcombined.Currency = currency\n\tcombined.Projects = projects\n\tcombined.TotalHourlyCost = totalHourlyCost\n\tcombined.TotalMonthlyCost = totalMonthlyCost\n\tcombined.TimeGenerated = time.Now()\n\tcombined.Summary = MergeSummaries(summaries)\n\n\treturn combined\n}\n"}}},{"rowIdx":1854,"cells":{"text":{"kind":"string","value":"import produce from 'immer';\nimport {\n categoriesActionTypes,\n categoryState,\n SELECT_CATEGORY,\n} from './types';\n\nconst INITIAL_STATE: categoryState = {\n category: '',\n};\n\nexport default function optionReducer (\n state = INITIAL_STATE,\n action: categoriesActionTypes,\n): categoryState {\n return produce(state, draft => {\n switch (action.type) {\n case SELECT_CATEGORY: {\n draft.category = action.payload.category\n break;\n }\n default:\n }\n });\n};\n"}}},{"rowIdx":1855,"cells":{"text":{"kind":"string","value":"#ifndef _IOTEX_ABI_READ_CONTRACT_H_\n#define _IOTEX_ABI_READ_CONTRACT_H_\n\n#include \n\n#ifdef\t__cplusplus\nextern \"C\" {\n#endif\n\nuint64_t abi_get_order_start(const char *, size_t);\nuint32_t abi_get_order_duration(const char *, size_t);\nconst char *abi_get_order_endpoint(const 
char *input, size_t);\nconst char *abi_get_order_token(const char *input, size_t);\n\n#ifdef\t__cplusplus\n}\n#endif\n\n#endif /* _IOTEX_ABI_READ_CONTRACT_H_ */\n"}}},{"rowIdx":1856,"cells":{"text":{"kind":"string","value":"json([\n 'success' => true,\n 'message' => 'Data Request',\n 'data' => $requestor \n ], 200);\n }\n\n public function show_requests(Request $request){\n $reciever = DB::table('request_status')\n ->where('reciever', $request->reciever)\n ->get();\n\n if($reciever){\n return response()->json([\n 'success' => true,\n 'message' => 'Hasil penelusuran',\n 'data' => $reciever \n ], 200);\n }\n else{\n return response()->json([\n 'success' => false,\n 'message' => 'Gagal, tidak ada penelusuran',\n 'data' => $reciever \n ], 400);\n }\n }\n\n public function show_friends(Request $request){\n $requestor = DB::table('request_status')\n ->where('requestor', $request->requestor)\n ->get();\n\n if($requestor){\n return response()->json([\n 'success' => true,\n 'message' => 'Hasil penelusuran',\n 'data' => $requestor \n ], 200);\n }\n else{\n return response()->json([\n 'success' => false,\n 'message' => 'Gagal, tidak ada penelusuran',\n 'data' => $requestor \n ], 400);\n }\n }\n\n public function created(Request $request){\n $cek = DB::table('request_status')\n ->where('requestor', $request->requestor)\n ->where('reciever', $request->reciever)\n // ->where('status','=', 'pending','and','status','=', 'accepted')\n ->count();\n\n $requestor = new request_status;\n $requestor->requestor = $request->requestor;\n $requestor->reciever = $request->reciever;\n $requestor->status = $request->status;\n\n if($cek >= 1){\n return response()->json([\n 'success' => false,\n 'message' => 'Data Gagal Ditambahkan',\n 'data' => $requestor \n ], 400);\n }\n else{\n $requestor->save(); \n\n return response()->json([\n 'success' => true,\n 'message' => 'Data Berhasil Ditambahkan',\n 'data' => $requestor \n ], 200);\n }\n }\n\n public function update(Request $request, $reciever){\n 
$cek = DB::table('request_status')\n ->where('requestor', $request->requestor)\n ->where('reciever', $reciever)\n ->where('status','=', 'pending')\n ->count();\n\n if($cek >= 1){\n $update = DB::table('request_status')\n ->select('request_status_id')\n ->where('requestor', $request->requestor)\n ->where('reciever', $reciever)\n ->update(['status' => $request->status]);\n\n return response()->json([\n 'success' => true,\n 'message' => 'Data Berhasil Diubah'\n ], 200);\n }\n \n return response()->json([\n 'success' => false,\n 'message' => 'Data Gagal Diubah'\n ], 400);\n }\n\n public function delete($id){\n $requestor = request_status::find($id);\n $requestor->delete();\n\n return response()->json([\n 'success' => true,\n 'message' => 'Post Deleted',\n ], 200);\n }\n}\n"}}},{"rowIdx":1857,"cells":{"text":{"kind":"string","value":"/// Provides data structures for storing component data.\nlibrary component_data;\n\nimport 'dart:async';\nimport 'dart:collection';\n\nimport 'package:observable/observable.dart';\nimport 'package:quiver/core.dart';\n\npart 'src/component_data/linked_hash_map_store.dart';\n\n/// Registers [ComponentTypesStores] for component types.\n///\n/// Stores\nclass TypeStoreRegistry {\n final Map _typesStores = {};\n\n final ChangeNotifier _changeNotifier =\n new ChangeNotifier();\n\n /// A synchronous stream of the changes made to this [TypeStoreRegistry].\n ///\n /// A change is triggered when a [ComponentTypeStore] is added, removed or\n /// changed.\n Stream> get changes =>\n _changeNotifier.changes;\n\n /// The [ComponentTypeStore]s registered with this [TypeStoreRegistry].\n Iterable get stores => _typesStores.values;\n\n /// The types for which [ComponentTypeStore]s are registered with this\n /// [TypeStoreRegistry].\n Iterable get types => _typesStores.keys;\n\n /// Whether or not this [TypeStoreRegistry] contains a [ComponentTypeStore]\n /// for the [type].\n bool hasStore(Type type) => _typesStores.containsKey(type);\n\n /// Returns the 
[ComponentTypeStore] registered for the [type] or `null` if\n /// no [ComponentTypeStore] is currently registered for the [type].\n ComponentTypeStore getStore(Type type) => _typesStores[type]\n as ComponentTypeStore;\n\n /// Registers the [store] for type [type].\n ///\n /// If another [ComponentTypeStore] was already registered for the [type],\n /// then this other store is replaced with the [store].\n void add(Type type, ComponentTypeStore store) {\n final oldStore = _typesStores[type] as ComponentTypeStore;\n\n _typesStores[type] = store;\n\n if (oldStore == null) {\n _changeNotifier\n ..notifyChange(new TypeStoreRegistryChangeRecord.insert(type, store))\n ..deliverChanges();\n } else {\n _changeNotifier\n ..notifyChange(new TypeStoreRegistryChangeRecord(type, oldStore, store))\n ..deliverChanges();\n }\n }\n\n /// Removes the [ComponentTypeStore] associated with the [type] from this\n /// [TypeStoreRegistry].\n ComponentTypeStore remove(Type type) {\n final store = _typesStores[type] as ComponentTypeStore;\n\n if (store != null) {\n _typesStores.remove(type);\n _changeNotifier\n ..notifyChange(new TypeStoreRegistryChangeRecord.remove(type, store))\n ..deliverChanges();\n\n return store;\n } else {\n return null;\n }\n }\n}\n\n/// Stores component values of type [T] and associates them with entity IDs.\nabstract class ComponentTypeStore {\n /// Instantiates a new [ComponentTypeStore] using the default implementation,\n /// [LinkedHashMapStore].\n factory ComponentTypeStore() = LinkedHashMapStore;\n\n /// A synchronous stream of the changes made to this [ComponentTypeStore].\n ///\n /// A change is triggered when a component value is added, when a component\n /// value is removed, or when a component value is updated.\n ///\n /// See also [ComponentTypeStoreChangeRecord].\n Stream>> get changes;\n\n /// The number of component values currently stored in this\n /// [ComponentTypeStore].\n int get length;\n\n /// Whether this [ComponentTypeStore] is currently 
empty.\n bool get isEmpty;\n\n /// Whether there is currently at least 1 component value in this\n /// [ComponentTypeStore].\n bool get isNotEmpty;\n\n /// The component values currently stored in this [ComponentTypeStore].\n Iterable get components;\n\n /// The entity IDs for which a component value is currently stored in this\n /// [ComponentTypeStore].\n Iterable get entityIds;\n\n /// Returns a [ComponentStoreIterator] over this [ComponentTypeStore].\n ComponentStoreIterator get iterator;\n\n /// Executes the given function [f] for each ([entityId], [component]) pair\n /// stored in this [ComponentTypeStore].\n void forEach(void f(int entityId, T component));\n\n /// Whether or not this [ComponentTypeStore] contains a component value for\n /// the [entityId].\n bool containsComponentFor(int entityId);\n\n /// Removes the component value associated with the [entityId] from this\n /// [ComponentTypeStore].\n ///\n /// Does nothing if this [ComponentTypeStore] does not contain a component\n /// value for the [entityId].\n ///\n /// Returns the component value if this [ComponentTypeStore] did contain a\n /// component value for the [entityId], or `null` otherwise.\n T remove(int entityId);\n\n /// Returns the value associated with the [entityId] or `null` if this\n /// [ComponentTypeStore] does not currently contain a value for the\n /// [entityId].\n T operator [](int entityId);\n\n /// Associated the given [component] value with the [entityId] and stores it\n /// in this [ComponentTypeStore].\n void operator []=(int entityId, T component);\n}\n\n/// An iterator over a [ComponentTypeStore].\n///\n/// Extends an ordinary [Iterator] by also exposing the [currentEntityId] that\n/// is associated with the [current] component value.\nabstract class ComponentStoreIterator extends Iterator {\n int get currentEntityId;\n}\n\n/// A [ChangeRecord] that denotes adding, removing, or updating a\n/// [ComponentTypeStore].\nclass ComponentTypeStoreChangeRecord implements 
ChangeRecord {\n /// The entity id for which a component changed.\n final int entityId;\n\n /// The previous component value associated with this key.\n ///\n /// Is always `null` if [isInsert].\n final T oldValue;\n\n /// The new component value associated with this key.\n ///\n /// Is always `null` if [isRemove].\n final T newValue;\n\n /// True if this component value was inserted.\n final bool isInsert;\n\n /// True if this component value was removed.\n final bool isRemove;\n\n /// Create an update record of [entityId] from [oldValue] to [newValue].\n const ComponentTypeStoreChangeRecord(\n this.entityId, this.oldValue, this.newValue)\n : isInsert = false,\n isRemove = false;\n\n /// Create an insert record of [entityId] and [newValue].\n const ComponentTypeStoreChangeRecord.insert(this.entityId, this.newValue)\n : isInsert = true,\n isRemove = false,\n oldValue = null;\n\n /// Create a remove record of [entityId] with a former [oldValue].\n const ComponentTypeStoreChangeRecord.remove(this.entityId, this.oldValue)\n : isInsert = false,\n isRemove = true,\n newValue = null;\n\n /// Apply this change record to the [componentStore].\n void apply(ComponentTypeStore componentStore) {\n if (isRemove) {\n componentStore.remove(entityId);\n } else {\n componentStore[entityId] = newValue;\n }\n }\n\n bool operator ==(Object o) =>\n identical(this, o) ||\n o is ComponentTypeStoreChangeRecord &&\n entityId == o.entityId &&\n oldValue == o.oldValue &&\n newValue == o.newValue &&\n isInsert == o.isInsert &&\n isRemove == o.isRemove;\n\n int get hashCode => hashObjects([\n entityId,\n oldValue,\n newValue,\n isInsert,\n isRemove,\n ]);\n}\n\nclass TypeStoreRegistryChangeRecord extends ChangeRecord {\n /// The component type for which the store changed.\n final Type type;\n\n /// The previous store associated with the [type].\n ///\n /// Is always `null` if [isInsert].\n final ComponentTypeStore oldValue;\n\n /// The new value associated with the [type].\n ///\n /// Is 
always `null` if [isRemove].\n final ComponentTypeStore newValue;\n\n /// Whether or not this change concerns an insertion.\n final bool isInsert;\n\n /// Whether or not this change concerns a removal.\n final bool isRemove;\n\n /// Create an update record for [type] from [oldValue] to [newValue].\n const TypeStoreRegistryChangeRecord(this.type, this.oldValue, this.newValue)\n : isInsert = false,\n isRemove = false;\n\n /// Create an insert record for [type] and [newValue].\n const TypeStoreRegistryChangeRecord.insert(this.type, this.newValue)\n : isInsert = true,\n isRemove = false,\n oldValue = null;\n\n /// Create a remove record for [type] with a former [oldValue].\n const TypeStoreRegistryChangeRecord.remove(this.type, this.oldValue)\n : isInsert = false,\n isRemove = true,\n newValue = null;\n\n /// Apply this change record to the [typeStoreRegistry].\n void apply(TypeStoreRegistry typeStoreRegistry) {\n if (isRemove) {\n typeStoreRegistry.remove(type);\n } else {\n typeStoreRegistry.add(type, newValue);\n }\n }\n\n bool operator ==(Object other) =>\n identical(this, other) ||\n other is TypeStoreRegistryChangeRecord &&\n type == other.type &&\n oldValue == other.oldValue &&\n newValue == other.newValue &&\n isInsert == other.isInsert &&\n isRemove == other.isRemove;\n\n int get hashCode => hashObjects([\n type,\n oldValue,\n newValue,\n isInsert,\n isRemove,\n ]);\n}\n"}}},{"rowIdx":1858,"cells":{"text":{"kind":"string","value":"package Monitoring::GLPlugin::TableItem;\nour @ISA = qw(Monitoring::GLPlugin::Item);\n\nuse strict;\n\nsub new {\n my ($class, %params) = @_;\n my $self = {};\n bless $self, $class;\n foreach (keys %params) {\n $self->{$_} = $params{$_};\n }\n if ($self->can(\"finish\")) {\n $self->finish(%params);\n }\n return $self;\n}\n\nsub check {\n my ($self) = @_;\n # some tableitems are not checkable, they are only used to enhance other\n # items (e.g. 
sensorthresholds enhance sensors)\n # normal tableitems should have their own check-method\n}\n\n1;\n\n__END__\n"}}},{"rowIdx":1859,"cells":{"text":{"kind":"string","value":"require 'rails_helper'\nrequire 'email_spec/rspec'\nrequire 'timecop'\n\nrequire 'shared_context/stub_email_rendering'\n\n\nRSpec.describe EmailAlert, type: :model do\n\n let(:mock_log) { instance_double(\"ActivityLogger\") }\n\n # set subject appropriately since it's a Singleton\n let(:subject) { described_class.instance }\n\n let(:user) { create(:user) }\n\n let(:config) { { days: [2, 5, 10] } }\n\n let(:condition) { create(:condition, config: { days: [2, 5, 10] }) }\n let(:timing) { :on }\n\n let(:dec_1) { Time.zone.local(2018, 12, 1) }\n\n let(:users) do\n [create(:user, first_name: 'u1'),\n create(:user, first_name: 'u2')]\n end\n\n\n describe '.condition_response' do\n\n it 'gets the config from the condition' do\n # stubbed methods:\n allow(subject).to receive(:entities_to_check)\n .and_return([])\n\n allow(subject).to receive(:send_alert_this_day?)\n .and_return(true)\n\n allow(subject).to receive(:send_email)\n .with(anything, mock_log)\n\n # expected results:\n expect(described_class).to receive(:get_config)\n\n # actual test:\n Timecop.freeze(dec_1) do\n subject.condition_response(condition, mock_log)\n end\n end\n\n it 'gets the timing from the condition' do\n # stubbed methods:\n allow(subject).to receive(:entities_to_check)\n .and_return([])\n\n allow(subject).to receive(:send_alert_this_day?)\n .and_return(true)\n\n allow(subject).to receive(:send_email)\n .with(anything, mock_log)\n\n # expected results:\n expect(described_class).to receive(:get_timing)\n\n # actual test:\n Timecop.freeze(dec_1) do\n subject.condition_response(condition, mock_log)\n end\n end\n\n\n it 'calls process_entities' do\n\n # stubbed methods:\n allow(subject).to receive(:entities_to_check)\n .and_return(users)\n\n # expected results:\n expect(subject).to receive(:process_entities)\n 
.and_return(true)\n\n # actual test:\n Timecop.freeze(dec_1) do\n subject.condition_response(condition, mock_log)\n end\n\n end\n\n end\n\n\n describe 'process_entities' do\n\n it 'loops through entities_to_check and calls take_action on each' do\n\n # stub this method\n allow(subject).to receive(:take_action).and_return(true)\n\n expect(subject).to receive(:take_action).exactly(users.size).times\n\n # actual test:\n Timecop.freeze(dec_1) do\n subject.process_entities(users, mock_log)\n end\n end\n end\n\n\n describe 'take_action' do\n\n let(:entity) { create(:member_with_membership_app) }\n\n it 'calls send_email for the entity and log if send_alert_this_day? is true' do\n\n # stubbed methods:\n allow(subject).to receive(:send_alert_this_day?)\n .with(timing, config, anything)\n .and_return(true)\n\n # expected results:\n expect(subject).to receive(:send_alert_this_day?)\n .with(timing, config, anything)\n .once\n expect(subject).to receive(:send_email)\n .with(anything, mock_log)\n .once\n\n # actual test:\n Timecop.freeze(dec_1) do\n subject.timing = timing\n subject.config = config\n subject.take_action(entity, mock_log)\n end\n end\n\n it 'does nothing when send_alert_this_day? is false for a user' do\n\n # stubbed methods:\n allow(subject).to receive(:send_alert_this_day?)\n .with(anything, config, user)\n .and_return(false)\n\n # expected results:\n expect(subject).to receive(:send_alert_this_day?)\n .with(anything, config, anything)\n .once\n\n expect(subject).to receive(:send_email).never\n\n # actual test:\n Timecop.freeze(dec_1) do\n subject.timing = timing\n subject.config = config\n subject.take_action(entity, mock_log)\n end # Timecop\n\n end # it 'does nothing when send_alert_this_day? 
is false for a user'\n\n end\n\n\n it '.entities_to_check raises NoMethodError (subclasses should implement)' do\n expect {subject.entities_to_check }.to raise_exception NoMethodError\n end\n\n\n it '.mailer_class raises NoMethodError (subclasses should implement)' do\n expect {subject.mailer_class }.to raise_exception NoMethodError\n end\n\n\n it '.mailer_args raises NoMethodError (subclasses should implement)' do\n expect {subject.mailer_args(create(:user)) }.to raise_exception NoMethodError\n end\n\n\n describe '.send_email' do\n\n include_context 'stub email rendering'\n\n\n before(:all) do\n\n # define a method for MemberMailer just for this test\n MemberMailer.class_eval do\n def fake_mailer_method(_user)\n nil\n end\n end\n\n end\n\n after(:all) do\n # remove the method we added\n MemberMailer.undef_method(:fake_mailer_method)\n end\n\n before(:each) do\n Rails.configuration.action_mailer.delivery_method = :mailgun\n ApplicationMailer.mailgun_client.enable_test_mode!\n\n allow(Memberships::MembershipActions).to receive(:for_user)\n .and_return(true)\n end\n\n after(:each) { ApplicationMailer.mailgun_client.disable_test_mode! 
}\n\n let(:entity) { build(:member) }\n\n\n it 'sends alert email to user and logs a message' do\n expect(MemberMailer.fake_mailer_method(user)).to be_truthy\n\n # stubbed methods:\n allow(subject).to receive(:mailer_class)\n .and_return(MemberMailer)\n allow(subject).to receive(:mailer_args)\n .and_return([entity])\n allow(subject).to receive(:mailer_method).and_return(:test_email)\n\n allow(subject).to receive(:success_str).with(entity)\n .and_return('succeeded with entity')\n\n # expected results:\n expect(MemberMailer).to receive(:test_email).with(entity)\n .and_call_original\n\n expect(subject).to receive(:log_mail_response)\n\n Timecop.freeze(dec_1)\n subject.send_email(entity, mock_log)\n Timecop.return\n\n email = ActionMailer::Base.deliveries.last\n expect(email).to deliver_to(entity.email)\n end\n\n\n it 'does not send email if an error is raised or mail has errors' do\n subject.create_alert_logger(mock_log)\n\n expect(MemberMailer.fake_mailer_method(user)).to be_truthy\n\n # stubbed methods:\n allow(subject).to receive(:mailer_class)\n .and_return(MemberMailer)\n allow(subject).to receive(:mailer_args)\n .and_return([entity])\n allow(subject).to receive(:mailer_method).and_return(:test_email)\n\n allow(subject).to receive(:failure_str).with(entity)\n .and_return('failed with entity')\n\n allow_any_instance_of(Mail::Message).to receive(:deliver)\n .and_raise(Net::ProtocolError)\n\n # expected results:\n expect(MemberMailer).to receive(:test_email).with(entity)\n .and_call_original\n expect(mock_log).to receive(:error).with(/EmailAlert email ATTEMPT FAILED failed with entity\\. 
Net::ProtocolError Also see for possible info/)\n\n Timecop.freeze(dec_1)\n subject.send_email(entity, mock_log)\n Timecop.return\n\n expect(ActionMailer::Base.deliveries.size).to eq 0\n end\n\n end\n\n\n describe '.mail_message' do\n\n let(:entity) { create(:company) }\n\n it 'calls mailer_args to get the arguments' do\n\n # stubbed methods:\n allow(subject).to receive(:mailer_method).and_return(:test_email)\n allow(subject).to receive(:mailer_class).and_return(MemberMailer)\n\n expect(subject).to receive(:mailer_args).with(entity)\n\n subject.mail_message(entity)\n end\n\n it 'calls mailer_class to get the mailer class' do\n # stubbed methods:\n allow(subject).to receive(:mailer_method).and_return(:test_email)\n allow(subject).to receive(:mailer_class).and_return(MemberMailer)\n allow(subject).to receive(:mailer_args).and_return([entity])\n\n expect(subject).to receive(:mailer_class)\n\n subject.mail_message(entity)\n end\n\n it 'sends the mailer_method to the mailer_class with the arguments' do\n # stubbed methods:\n allow(subject).to receive(:mailer_method).and_return(:test_email)\n allow(subject).to receive(:mailer_class).and_return(MemberMailer)\n allow(subject).to receive(:mailer_args).and_return([entity])\n\n expect(MemberMailer).to receive(:test_email).with(entity)\n\n subject.mail_message(entity)\n end\n\n end\n\n\n describe '.send_on_day_number?' do\n\n let(:config) { { days: [1, 3, 5] } }\n\n it 'true if config[:days].include? 
day_number' do\n expect(subject.send_on_day_number?(3, config)).to be_truthy\n end\n\n it 'false if day_number is not in config[:days]' do\n expect(subject.send_on_day_number?(0, config)).to be_falsey\n end\n\n it 'false if config does not have :days as a key' do\n expect(subject.send_on_day_number?(3, { blorf: 'blorf' })).to be_falsey\n end\n\n end\n\n\n describe '.log_mail_response' do\n\n let(:entity) { create(:user) }\n\n context 'no mail_response errors (successful)' do\n\n it 'sends log_success to the alert logger' do\n\n subject.create_alert_logger(mock_log)\n\n mail_response_dbl = double(\"Mail::Message\")\n allow(mail_response_dbl).to receive(:errors).and_return([])\n\n expect_any_instance_of(AlertLogger).to receive(:log_success)\n\n subject.log_mail_response(mock_log, mail_response_dbl, entity)\n\n end\n end\n\n\n context 'with mail_response_errors (failure)' do\n\n before(:all) do\n\n # define a method for MemberMailer just for this test\n MemberMailer.class_eval do\n def fake_mailer_method(_user)\n nil\n end\n end\n\n end\n\n after(:all) do\n # remove the method we added\n MemberMailer.undef_method(:fake_mailer_method)\n end\n\n\n it 'sends log_failure' do\n subject.create_alert_logger(mock_log)\n\n mail_response_dbl = double(\"Mail::Message\")\n allow(mail_response_dbl).to receive(:errors).and_return([3])\n\n expect_any_instance_of(AlertLogger).to receive(:log_failure)\n\n subject.log_mail_response(mock_log, mail_response_dbl, entity)\n end\n\n end\n end\n\n\n it '.success_str raises NoMethodError (should be defined by subclasses)' do\n expect{subject.success_str([])}.to raise_exception NoMethodError\n end\n\n\n it '.failure_str raises NoMethodError (should be defined by subclasses)' do\n expect{subject.failure_str([])}.to raise_exception NoMethodError\n end\n\n\n it '.send_alert_this_day?(timing, config, user) raises NoMethodError (should be defined by subclasses)' do\n config = {}\n timing = 'blorf' # doesn't matter what this is\n expect 
{subject.send_alert_this_day?(timing, config, user) }.to raise_exception NoMethodError\n end\n\n it '.mailer_method raises NoMethodError (should be defined by subclasses)' do\n expect {subject.mailer_method }.to raise_exception NoMethodError\n end\n\nend\n"}}},{"rowIdx":1860,"cells":{"text":{"kind":"string","value":"\n\n\ncordova.commandProxy.add(\"EchoPlugin\",{\n echo:function(successCallback,errorCallback,strInput) {\n var res = EchoRuntimeComponent.EchoPluginRT.echo(strInput);\n\n if(res.indexOf(\"Error\") == 0) {\n errorCallback(res);\n }\n else {\n successCallback(res);\n }\n }\n});"}}},{"rowIdx":1861,"cells":{"text":{"kind":"string","value":"package org.leveloneproject.central.kms.domain.keys\n\nimport java.util.UUID\n\nimport org.leveloneproject.central.kms.domain._\nimport scala.concurrent.Future\n\ntrait KeyStore {\n\n def create(key: Key): Future[Either[KmsError, Key]]\n\n def getById(id: UUID): Future[Option[Key]]\n}\n"}}},{"rowIdx":1862,"cells":{"text":{"kind":"string","value":"package services\n\nimport (\n \"fmt\"\n \"io\"\n \"log\"\n \"net/http\"\n \"os\"\n\tutils \"github.com/kuruvi-bits/transform/utils\"\n)\n\nfunc Resize(message utils.Message) {\n dirPath := fmt.Sprintf(\"%s/%s\", utils.RESIZED_VOL, message.AlbumName)\n filePath := fmt.Sprintf(\"%s/%s\", dirPath, message.PhotoName)\n utils.CreateDirIfNotExist(dirPath)\n\n url := utils.GetResizeURL(message)\n\n response, e := http.Get(url)\n if e != nil {\n log.Fatal(e)\n }\n defer response.Body.Close()\n\n //open a file for writing\n file, err := os.Create(filePath)\n if err != nil {\n log.Fatal(err)\n }\n defer file.Close()\n\n // Use io.Copy to just dump the response body to the file. This supports huge files\n _, err = io.Copy(file, response.Body)\n if err != nil {\n log.Fatal(err)\n }\n\n fmt.Println(\"Success!\")\n}\n"}}},{"rowIdx":1863,"cells":{"text":{"kind":"string","value":"/**\n * Copyright 2014 Yahoo! Inc. 
Licensed under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with the\n * License. You may obtain a copy of the License at\n * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law\n * or agreed to in writing, software distributed under the License is\n * distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. See the License for the specific language\n * governing permissions and limitations under the License. See accompanying\n * LICENSE file.\n */\npackage com.yahoo.sql4d.indexeragent.meta;\n\nimport com.google.common.collect.ImmutableMap;\nimport static com.yahoo.sql4d.indexeragent.Agent.*;\nimport static com.yahoo.sql4d.indexeragent.sql.SqlMeta.*;\nimport com.yahoo.sql4d.indexeragent.meta.beans.DataSource;\nimport com.yahoo.sql4d.indexeragent.meta.beans.StatusTrail;\nimport java.util.List;\nimport java.util.Map;\nimport javax.persistence.EntityManager;\nimport javax.persistence.EntityManagerFactory;\nimport javax.persistence.Persistence;\nimport org.apache.commons.lang.exception.ExceptionUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * Handles database interaction specifically for Indexer Agent.\n * @author srikalyan\n */\npublic class DBHandler {\n private static final Logger log = LoggerFactory.getLogger(DBHandler.class);\n private final EntityManagerFactory emFactory;\n \n enum Action { ADD, UPDATE, DELETE }\n \n public DBHandler() {\n String host = getHost();\n int port = getPort();\n String id = getId();\n String password = getPassword();\n String dbName = getDbName();\n String dbType = getDbType();\n \n String connectUrl, dialect, driver;\n \n switch(dbType) {\n case \"mysql\":\n connectUrl = String.format(\"jdbc:mysql://%s:%d/%s?autoReconnectForPools=true\", host, port, dbName);\n driver = \"com.mysql.jdbc.Driver\";\n dialect = \"org.hibernate.dialect.MySQLDialect\";\n break;\n case \"derby\" 
:\n default:\n connectUrl = String.format(\"jdbc:derby://%s:%d/%s;create=true\", host, port, dbName);\n driver = \"org.apache.derby.jdbc.ClientDriver\";\n dialect = \"org.hibernate.dialect.DerbyDialect\";\n }\n \n Map configOverride = ImmutableMap.of(\n \"javax.persistence.jdbc.url\", connectUrl,\n \"javax.persistence.jdbc.user\", id,\n \"javax.persistence.jdbc.password\", password,\n \"hibernate.dialect\", dialect,\n \"javax.persistence.jdbc.driver\", driver);\n log.info(\"Overriding database configuration : {}\", configOverride);\n emFactory = Persistence.createEntityManagerFactory(\"indexerAgent\", configOverride);\n }\n \n private EntityManager getEntityManager() {\n return emFactory.createEntityManager();\n }\n \n private void addUpdateDeleteEntity(Object entity, Action action) {\n EntityManager em = getEntityManager();\n try {\n em.getTransaction().begin();\n switch (action) {\n case ADD:\n em.persist(entity);\n break;\n case UPDATE:\n em.merge(entity);\n break;\n case DELETE:\n em.remove(entity);\n break;\n }\n } catch(RuntimeException e) { \n log.error(\"Something wrong persisting/merging/removing entity {}, so rolling back . 
Exception is {}\", entity, ExceptionUtils.getStackTrace(e));\n em.getTransaction().rollback();\n } finally {\n if (em.getTransaction().isActive()) {\n em.getTransaction().commit();\n } \n em.close();\n }\n }\n\n public void addDataSource(DataSource ds) {\n addUpdateDeleteEntity(ds, Action.ADD);\n }\n \n public void updateDataSource(DataSource ds) {\n addUpdateDeleteEntity(ds, Action.UPDATE);\n }\n \n public void removeDataSource(DataSource ds) {\n addUpdateDeleteEntity(ds, Action.DELETE);\n }\n\n public void addStatusTrail(StatusTrail st) {\n addUpdateDeleteEntity(st, Action.ADD);\n }\n\n public void updateStatusTrail(StatusTrail st) {\n addUpdateDeleteEntity(st, Action.UPDATE);\n }\n \n public void removeStatusTrail(StatusTrail st) {\n addUpdateDeleteEntity(st, Action.DELETE);\n }\n\n public List getAllDataSources() {\n EntityManager em = getEntityManager();\n try { \n return em.createQuery(\"SELECT ds FROM DataSource ds\", DataSource.class).getResultList(); \n } finally {\n em.close();\n }\n }\n\n public DataSource getDataSource(String tableName) {\n EntityManager em = getEntityManager();\n try { \n List resultList = em.createQuery(\"SELECT ds FROM DataSource ds WHERE ds.name = :name\", DataSource.class).setParameter(\"name\", tableName).getResultList(); \n return resultList.isEmpty()?null:resultList.get(0);\n } finally {\n em.close();\n }\n }\n \n public DataSource getDataSource(int id) {\n EntityManager em = getEntityManager();\n try { \n return em.find(DataSource.class, id);\n } finally {\n em.close();\n }\n }\n /**\n * Tasks whose status:not_done and givenUp:zero\n * @param ds\n * @return \n */\n public List getIncompleteTasks(DataSource ds) {\n EntityManager em = getEntityManager();\n try { \n return em.createQuery(\"SELECT st FROM StatusTrail st WHERE st.dataSourceId = :dataSourceId \"\n + \"AND st.status = 'not_done' AND st.givenUp = 0 ORDER BY st.id DESC\", \n StatusTrail.class).\n setParameter(\"dataSourceId\", ds.getId()).getResultList(); \n } finally 
{\n em.close();\n }\n }\n\n /**\n * \n * @return \n */\n public List getAllIncompleteTasks() {\n EntityManager em = getEntityManager();\n try { \n return em.createQuery(\"SELECT st FROM StatusTrail st WHERE \"\n + \" st.status = 'not_done' AND st.givenUp = 0 ORDER BY st.id DESC\", \n StatusTrail.class).getResultList(); \n } finally {\n em.close();\n }\n }\n\n /**\n * \n * @return \n */\n public List getAllInprogressTasks() {\n EntityManager em = getEntityManager();\n try { \n return em.createQuery(\"SELECT st FROM StatusTrail st WHERE \"\n + \" st.status = 'in_progress' AND st.givenUp = 0\", \n StatusTrail.class).getResultList(); \n } finally {\n em.close();\n }\n }\n\n /**\n * \n * @return \n */\n public long getInprogressTasksCount() {\n EntityManager em = getEntityManager();\n try { \n return (long)em.createQuery(\"SELECT COUNT(st.id) FROM StatusTrail st WHERE \"\n + \" st.status = 'in_progress' AND st.givenUp = 0\").getSingleResult(); \n } finally {\n em.close();\n }\n }\n\n /**\n * Change the status of a task.\n * @param st\n * @param success \n */\n public void markTask(StatusTrail st, boolean success) { \n st.setStatus(success ? JobStatus.done : JobStatus.not_done);\n st.setAttemptsDone(st.getAttemptsDone() + 1);\n st.setGivenUp(st.getAttemptsDone() >= getMaxTaskAttempts() ? 
1 : 0);\n updateStatusTrail(st);\n }\n\n public void shutdown() {\n log.info(\"Shutting down and cleaning up database connections..\");\n emFactory.close();\n }\n}"}}},{"rowIdx":1864,"cells":{"text":{"kind":"string","value":"drop('price_intervals');\n\nCapsule::schema()->create('price_intervals', function ($table) {\n $table->increments('id');\n $table->date('start_date');\n $table->date('end_date');\n $table->double('price');\n $table->timestamps();\n});\n"}}},{"rowIdx":1865,"cells":{"text":{"kind":"string","value":"import ecdsa\nimport json\nimport redis\nfrom typing import NamedTuple, Union\nimport binascii\nfrom binascii import unhexlify\n\nfrom luracoin import errors\nfrom luracoin.exceptions import TransactionNotValid\nfrom luracoin.wallet import pubkey_to_address\nfrom luracoin.config import Config\nfrom luracoin.helpers import (\n mining_reward,\n sha256d,\n bytes_to_signing_key,\n little_endian_to_int,\n)\n\n\nclass Transaction:\n def __init__(\n self,\n chain: int = 0,\n nonce: int = 0,\n fee: int = 0,\n value: int = 0,\n to_address: str = None,\n unlock_sig: bytes = None,\n ) -> None:\n self.chain = chain\n self.nonce = nonce\n self.fee = fee\n self.value = value\n self.to_address = to_address\n self.unlock_sig = unlock_sig\n\n @property\n def is_coinbase(self) -> bool:\n return self.unlock_sig == Config.COINBASE_UNLOCK_SIGNATURE\n\n def sign(self, private_key) -> \"Transaction\":\n signature = sign_transaction(\n private_key=private_key,\n transaction_serialized=self.serialize(to_sign=True).hex(),\n )\n self.unlock_sig = signature\n return self\n\n def json(self) -> dict:\n result = {\n \"id\": self.id,\n \"chain\": self.chain,\n \"nonce\": self.nonce,\n \"fee\": self.fee,\n \"value\": self.value,\n \"to_address\": self.to_address,\n \"unlock_sig\": None,\n }\n if self.unlock_sig:\n result[\"unlock_sig\"] = self.unlock_sig.hex()\n return result\n\n def serialize(self, to_sign=False) -> bytes:\n chain = self.chain.to_bytes(1, byteorder=\"little\", 
signed=False)\n nonce = self.nonce.to_bytes(4, byteorder=\"little\", signed=False)\n fee = self.fee.to_bytes(4, byteorder=\"little\", signed=False)\n value = self.value.to_bytes(8, byteorder=\"little\", signed=False)\n to_address = str.encode(self.to_address)\n\n if self.unlock_sig:\n unlock_sig = self.unlock_sig\n\n serialized = chain + nonce + fee + value + to_address\n\n if not to_sign and self.unlock_sig:\n serialized += unlock_sig\n\n return serialized\n\n def deserialize(self, serialized_bytes: bytes):\n self.chain = int.from_bytes(serialized_bytes[0:1], byteorder=\"little\")\n self.nonce = int.from_bytes(serialized_bytes[1:5], byteorder=\"little\")\n self.fee = int.from_bytes(serialized_bytes[5:9], byteorder=\"little\")\n self.value = int.from_bytes(serialized_bytes[9:17], byteorder=\"little\")\n self.to_address = serialized_bytes[17:51].decode(\"utf-8\")\n if len(serialized_bytes) > 51:\n self.unlock_sig = serialized_bytes[51:]\n\n @property\n def id(self) -> str:\n \"\"\"\n The ID will be the hash SHA256 of all the txins and txouts.\n \"\"\"\n msg = self.serialize().hex().encode()\n tx_id = sha256d(msg)\n return tx_id\n\n def make_msg(self) -> str:\n \"\"\"\n TODO: Improve the message.\n bitcoin.stackexchange.com/questions/37093/what-goes-in-to-the-message-of-a-transaction-signature\n \"\"\"\n return self.id\n\n def validate_fields(self, raise_exception=False) -> bool:\n \"\"\"\n Checks that the transaction has the correct fields.\n \"\"\"\n if self.chain < 0 or self.chain > 256:\n if raise_exception:\n raise TransactionNotValid(errors.TRANSACTION_FIELD_CHAIN)\n return False\n\n if self.nonce < 0 or self.nonce > 4_294_967_295:\n if raise_exception:\n raise TransactionNotValid(errors.TRANSACTION_FIELD_NONCE)\n return False\n\n if self.fee < 0 or self.fee > 4_294_967_295:\n if raise_exception:\n raise TransactionNotValid(errors.TRANSACTION_FIELD_FEE)\n return False\n\n if self.value <= 0 or self.value > 18_446_744_073_709_551_615:\n if raise_exception:\n 
raise TransactionNotValid(errors.TRANSACTION_FIELD_VALUE)\n return False\n\n if not self.to_address or len(self.to_address) != 34:\n if raise_exception:\n raise TransactionNotValid(errors.TRANSACTION_FIELD_TO_ADDRESS)\n return False\n\n if not self.unlock_sig or len(self.unlock_sig) != 128:\n if raise_exception:\n raise TransactionNotValid(errors.TRANSACTION_FIELD_SIGNATURE)\n return False\n\n if (\n self.unlock_sig == Config.COINBASE_UNLOCK_SIGNATURE\n and self.to_address == Config.STAKING_ADDRESS\n ):\n if raise_exception:\n raise TransactionNotValid(errors.TRANSACTION_INVALID_STAKING)\n return False\n\n return True\n\n def validate(self, raise_exception=False) -> bool:\n \"\"\"\n Validate a transaction. For a transaction to be valid it has to follow\n these conditions:\n \"\"\"\n if not self.validate_fields(raise_exception=raise_exception):\n return False\n\n if (\n self.unlock_sig != Config.COINBASE_UNLOCK_SIGNATURE\n and not is_valid_unlocking_script(\n unlocking_script=self.unlock_sig,\n transaction_serialized=self.serialize(to_sign=True).hex(),\n )\n ):\n if raise_exception:\n raise TransactionNotValid(errors.TRANSACTION_INVALID_SIGNATURE)\n return False\n\n return True\n\n def to_transaction_pool(self) -> None:\n redis_client = redis.Redis(\n host=Config.REDIS_HOST, port=Config.REDIS_PORT, db=Config.REDIS_DB\n )\n\n redis_client.set(self.id, self.serialize())\n\n def save(self, block_height: int) -> None:\n \"\"\"\n Add a transaction to the chainstate. Inside the chainstate database,\n the following key/value pairs are stored:\n\n 'c' + 32-byte transaction hash -> unspent transaction output record for\n that transaction. 
These records are only present for transactions that\n have at least one unspent output left.\n\n Each record stores:\n The version of the transaction.\n Whether the transaction was a coinbase or not.\n Which height block contains the transaction.\n Which outputs of that transaction are unspent.\n The scriptPubKey and amount for those unspent outputs.\n\n [TX VERSION][COINBASE][HEIGHT][NUM OUTPUTS][∞][OUTPUT_LEN][OUTPUT]\n ^ ^ ^ ^ ^\n 4 bytes 1 byte 4 bytes VARINT VARINT\n\n 'B' -> 32-byte block hash: the block hash up to which the database\n represents the unspent transaction outputs\n \"\"\"\n pass\n\n\ndef build_message(outpoint, pub_key: str) -> str:\n \"\"\"\n TODO: https://bitcoin.stackexchange.com/questions/37093/what-goes-in-to-the-message-of-a-transaction-signature\n \"\"\"\n return sha256d(str(outpoint.txid) + str(outpoint.txout_idx) + pub_key)\n\n\ndef build_script_sig(signature: str, public_key: str) -> str:\n \"\"\"\n SIGNATUREPUBLIC_KEY\n \"\"\"\n return signature + public_key\n\n\ndef verify_signature(message: str, public_key: str, signature: str) -> bool:\n vk = ecdsa.VerifyingKey.from_string(public_key, curve=ecdsa.SECP256k1)\n return vk.verify(signature, message)\n\n\ndef deserialize_unlocking_script(unlocking_script: bytes) -> dict:\n unlocking_script = unlocking_script.hex()\n pub_key = unlocking_script[:128]\n signature = unlocking_script[128:]\n\n return {\n \"signature\": signature,\n \"public_key\": pub_key,\n \"address\": pubkey_to_address(pub_key.encode()),\n }\n\n\ndef is_valid_unlocking_script(\n unlocking_script: str, transaction_serialized: str\n) -> bool:\n # TODO: This functions allows to spend all outpoints since we are\n # verifying the signature not the signature + matching public key.\n\n try:\n unlocking_script = deserialize_unlocking_script(unlocking_script)\n except binascii.Error:\n return False\n\n message = transaction_serialized.encode()\n\n try:\n is_valid = verify_signature(\n message=message,\n 
public_key=bytes.fromhex(unlocking_script[\"public_key\"]),\n signature=bytes.fromhex(unlocking_script[\"signature\"]),\n )\n except ecdsa.keys.BadSignatureError:\n is_valid = False\n except AssertionError:\n is_valid = False\n\n return is_valid\n\n\ndef sign_transaction(private_key: bytes, transaction_serialized: str) -> bytes:\n private_key = bytes_to_signing_key(private_key=private_key)\n vk = private_key.get_verifying_key()\n public_key = vk.to_string()\n\n signature = private_key.sign(transaction_serialized.encode())\n\n return public_key + signature\n"}}},{"rowIdx":1866,"cells":{"text":{"kind":"string","value":"reload(\"Persa\")\n\nusing Base.Test\nusing DecisionTree\nusing DatasetsCF\n\n# write your own tests here\n#@test 1 == 2\n###\nreload(\"COFILS\")\n\ndataset = DatasetsCF.MovieLens()\n\nholdout = Persa.HoldOut(dataset, 0.9)\n\n(ds_train, ds_test) = Persa.get(holdout)\n\nmodel = COFILS.Cofils(ds_train, 10)\nPersa.train!(model, ds_train)\n\nprint(Persa.aval(model, ds_test))\n"}}},{"rowIdx":1867,"cells":{"text":{"kind":"string","value":"pub static TEXT: &'static str = \"{% macro asset_url(filename) %}\n\\\"/assets/{{ filename }}\\\"\n{% endmacro asset_url %}\";\n"}}},{"rowIdx":1868,"cells":{"text":{"kind":"string","value":"using System;\nusing System.Runtime.Serialization;\n\nnamespace DomainBlocks.Persistence\n{\n [Serializable]\n public class StreamDeletedException : Exception\n {\n public string StreamName { get; }\n\n public StreamDeletedException(string streamName)\n {\n StreamName = streamName;\n }\n\n public StreamDeletedException(string streamName, string message) : base(message)\n {\n StreamName = streamName;\n }\n\n public StreamDeletedException(string streamName, string message, Exception inner) : base(message, inner)\n {\n StreamName = streamName;\n }\n\n protected StreamDeletedException(\n SerializationInfo info,\n StreamingContext context) : base(info, context)\n {\n if (info == null) throw new ArgumentNullException(nameof(info));\n\n 
info.AddValue(nameof(StreamName), StreamName);\n\n base.GetObjectData(info, context);\n }\n }\n}"}}},{"rowIdx":1869,"cells":{"text":{"kind":"string","value":"import {bindable} from 'aurelia-framework';\nimport {inject} from 'aurelia-framework';\nimport moment from 'moment';\nimport {GameService} from '../services/gameService';\n@inject(GameService)\nexport class GameListItemCustomElement {\n constructor(GameService){\n this.gameService = GameService;\n }\n @bindable game;\n get gameDate(){\n //TODO Localization\n return moment(`${this.game.date}`).format(\"MMM Do YY\");\n }\n}\n"}}},{"rowIdx":1870,"cells":{"text":{"kind":"string","value":"#!/usr/bin/env ruby\n\nIO.foreach(\"2.2 Ruby Day 2.md\") do |block|\n puts block if block =~ /(.*)代码块(.*)/\nend\n"}}},{"rowIdx":1871,"cells":{"text":{"kind":"string","value":"package net.jp2p.jxse.services;\r\n\r\nimport net.jp2p.jxta.factory.IJxtaComponents.JxtaComponents;\r\nimport net.jxta.impl.loader.JxtaLoaderModuleManager;\r\nimport net.jxta.impl.modulemanager.JxtaModuleBuilder;\r\nimport net.jxta.module.IModuleBuilder;\r\nimport net.jxta.peergroup.core.Module;\r\n\r\npublic class Component{\r\n\r\n\tprivate static JxtaLoaderModuleManager manager;\r\n\t\r\n\tprivate boolean canBuild;\r\n\t\r\n\tpublic Component() {\r\n\t\tmanager = JxtaLoaderModuleManager.getRoot( Component.class, true );\t\r\n\t\tthis.canBuild = false;\r\n\t}\r\n\r\n\tpublic void activate(){ /* DO NOTHING */ }\r\n\t\r\n\tpublic void deactivate(){ /* DO NOTHING */ }\r\n\t\r\n\tprotected final boolean canBuild() {\r\n\t\treturn canBuild;\r\n\t}\r\n\r\n\tpublic void registerBuilder(IModuleBuilder builder) {\r\n\t\tmanager.registerBuilder( builder);\r\n\t\tif( builder instanceof JxtaModuleBuilder )\r\n\t\t\tthis.canBuild = true;\r\n }\r\n\r\n public void unregisterBuilder( IModuleBuilder builder ) {\r\n\t manager.unregisterBuilder( builder );\r\n }\r\n\r\n \r\n public static final boolean canBuild( JxtaComponents jxtaComponent ) {\r\n \t//PlatformDescriptor 
descriptor = new PlatformDescriptor();\r\n \treturn true;//manager.canBuild(descriptor);\r\n\t}\r\n}"}}},{"rowIdx":1872,"cells":{"text":{"kind":"string","value":"%%%-------------------------------------------------------------------\n%%% @author Michal Stanisz\n%%% @copyright (C) 2021 ACK CYFRONET AGH\n%%% This software is released under the MIT license\n%%% cited in 'LICENSE.txt'.\n%%% @end\n%%%-------------------------------------------------------------------\n%%% @doc\n%%% Module responsible for managing QoS status persistent model.\n%%% For more details consult `qos_status` module doc.\n%%% @end\n%%%-------------------------------------------------------------------\n-module(qos_status_model).\n-author(\"Michal Stanisz\").\n\n-include(\"modules/datastore/qos.hrl\").\n-include(\"modules/datastore/datastore_models.hrl\").\n-include(\"modules/datastore/datastore_runner.hrl\").\n-include_lib(\"ctool/include/errors.hrl\").\n-include_lib(\"ctool/include/logging.hrl\").\n\n%% API\n-export([create/4, update/3, get/2, delete/2]).\n\n%% datastore_model callbacks\n-export([get_record_struct/1, get_record_version/0]).\n\n-type doc() :: datastore_doc:doc(record()).\n-type diff() :: datastore_doc:diff(record()).\n-type id() :: datastore_doc:key().\n-type record() :: #qos_status{}.\n-type dir_type() :: ?QOS_STATUS_TRAVERSE_CHILD_DIR | ?QOS_STATUS_TRAVERSE_START_DIR.\n\n-export_type([diff/0]).\n\n-define(CTX, (qos_status:get_ctx())).\n\n%%%===================================================================\n%%% API\n%%%===================================================================\n\n-spec create(od_space:id(), traverse:id(), file_meta:uuid(), dir_type()) -> \n {ok, doc()}.\ncreate(SpaceId, TraverseId, DirUuid, DirType) ->\n Id = generate_status_doc_id(TraverseId, DirUuid),\n datastore_model:create(?CTX, #document{key = Id, scope = SpaceId,\n value = #qos_status{is_start_dir = DirType == ?QOS_STATUS_TRAVERSE_START_DIR}\n }).\n\n\n-spec update(traverse:id(), 
file_meta:uuid(), diff()) -> {ok, doc()} | {error, term()}.\nupdate(TraverseId, Uuid, Diff) ->\n Id = generate_status_doc_id(TraverseId, Uuid),\n datastore_model:update(?CTX, Id, Diff).\n\n\n-spec get(traverse:id(), file_meta:uuid()) -> {ok, doc()} | {error, term()}.\nget(TraverseId, Uuid) ->\n Id = generate_status_doc_id(TraverseId, Uuid),\n datastore_model:get(?CTX, Id).\n\n\n-spec delete(traverse:id(), file_meta:uuid()) -> ok | {error, term()}.\ndelete(TraverseId, Uuid)->\n Id = generate_status_doc_id(TraverseId, Uuid),\n datastore_model:delete(?CTX, Id).\n\n%%%===================================================================\n%%% datastore_model callbacks\n%%%===================================================================\n\n-spec get_record_version() -> datastore_model:record_version().\nget_record_version() ->\n 1.\n\n\n-spec get_record_struct(datastore_model:record_version()) ->\n datastore_model:record_struct().\nget_record_struct(1) ->\n {record, [\n {previous_batch_last_filename, binary},\n {current_batch_last_filename, binary},\n {files_list, [string]},\n {child_dirs_count, integer},\n {is_last_batch, boolean},\n {is_start_dir, boolean}\n ]}.\n\n%%%===================================================================\n%%% Internal functions\n%%%===================================================================\n\n%% @private\n-spec generate_status_doc_id(traverse:id(), file_meta:uuid()) -> id().\ngenerate_status_doc_id(TraverseId, DirUuid) ->\n datastore_key:adjacent_from_digest([DirUuid, TraverseId], DirUuid).\n\n"}}},{"rowIdx":1873,"cells":{"text":{"kind":"string","value":"---\nauthor: mikeparker104\nms.author: miparker\nms.date: 06/02/2020\nms.service: notification-hubs\nms.topic: include\nms.openlocfilehash: 5e75c5d5510f596eb7911cae0310e60b6bef67bf\nms.sourcegitcommit: 5cace04239f5efef4c1eed78144191a8b7d7fee8\nms.translationtype: MT\nms.contentlocale: pl-PL\nms.lasthandoff: 07/08/2020\nms.locfileid: \"86146452\"\n---\n### Wysyłanie powiadomienia 
testowego\n\n1. Otwórz nową kartę w programie [Poster](https://www.postman.com/downloads/).\n\n1. Ustaw żądanie na **wpis**, a następnie wprowadź następujący adres:\n\n ```xml\n https://.azurewebsites.net/api/notifications/requests\n ```\n\n1. Jeśli wybrano opcję ukończenia [uwierzytelniania klientów przy użyciu klucza interfejsu API](#authenticate-clients-using-an-api-key-optional) , należy skonfigurować nagłówki żądania, aby zawierały wartość **apikey** .\n\n | Klucz | Wartość |\n | ------------------------------ | ------------------------------ |\n | apikey | |\n\n1. Wybierz opcję **RAW** dla **treści**, a następnie wybierz pozycję **JSON** z listy opcje formatu, a następnie Dołącz niepewną zawartość **JSON** :\n\n ```json\n {\n \"text\": \"Message from Postman!\",\n \"action\": \"action_a\"\n }\n ```\n\n1. Wybierz przycisk **kod** , który znajduje się poniżej przycisku **Zapisz** w prawym górnym rogu okna. Żądanie powinno wyglądać podobnie do poniższego przykładu w przypadku wyświetlania **kodu HTML** (w zależności od tego, czy został dołączony nagłówek **apikey** ):\n\n ```html\n POST /api/notifications/requests HTTP/1.1\n Host: https://.azurewebsites.net\n apikey: \n Content-Type: application/json\n\n {\n \"text\": \"Message from backend service\",\n \"action\": \"action_a\"\n }\n ```\n\n1. Uruchom aplikację **PushDemo** na jednej lub obu platformach docelowych (**Android** i **iOS**).\n\n > [!NOTE]\n > W przypadku testowania w systemie **Android** upewnij się, że nie uruchomiono **debugowania**lub jeśli aplikacja została wdrożona przez uruchomienie aplikacji, Wymuś zamknięcie aplikacji i jej ponowne uruchomienie przy użyciu programu uruchamiającego.\n\n1. W aplikacji **PushDemo** naciśnij przycisk **zarejestruj** .\n\n1. Z powrotem w programie **[Poster](https://www.postman.com/downloads)** Zamknij okno **Generuj fragmenty kodu** (jeśli jeszcze tego nie zrobiono), a następnie kliknij przycisk **Wyślij** .\n\n1. 
Sprawdź, czy w programie **[Poster](https://www.postman.com/downloads)** znajduje się odpowiedź **200 OK** i czy alert pojawia się w aplikacji z **odebraną akcją Action**. \n\n1. Zamknij aplikację **PushDemo** , a następnie ponownie kliknij przycisk **Wyślij** **[.](https://www.postman.com/downloads)**\n\n1. Sprawdź, czy ponownie otrzymujesz odpowiedź **200 OK** w **[ogłoszeniu](https://www.postman.com/downloads)** . Sprawdź, czy w obszarze powiadomień dla aplikacji **PushDemo** jest wyświetlana informacja o poprawnym komunikacie.\n\n1. Naciśnij pozycję powiadomienie, aby upewnić się, że aplikacja zostanie otwarta i zostanie wyświetlona **Akcja akcja akcji odebrana** .\n\n1. Z powrotem w programie **[Poster](https://www.postman.com/downloads)** zmodyfikuj poprzednią treść żądania, aby wysłać powiadomienie dyskretne, określając *action_b* zamiast *action_a* dla wartości **akcji** .\n\n ```json\n {\n \"action\": \"action_b\",\n \"silent\": true\n }\n ```\n\n1. Gdy aplikacja jest nadal otwarta, kliknij przycisk **Wyślij** w programie **[Poster](https://www.postman.com/downloads)**.\n\n1. Sprawdź, czy otrzymujesz odpowiedź na **200 OK** w programie **[Poster](https://www.postman.com/downloads)** i czy alert pojawia się w aplikacji pokazującej **odebraną akcję ActionB** zamiast akcji **Action**.\n\n1. Zamknij aplikację **PushDemo** , a następnie ponownie kliknij przycisk **Wyślij** **[.](https://www.postman.com/downloads)**\n\n1. Sprawdź, czy otrzymujesz odpowiedź na **200 OK** w programie **[Poster](https://www.postman.com/downloads)** i czy powiadomienie dyskretne nie jest wyświetlane w obszarze powiadomień.\n"}}},{"rowIdx":1874,"cells":{"text":{"kind":"string","value":"/**\n * System Extensions\n *\n * Copyright (C) 2014-2017 Peter \"SaberUK\" Powell \n *\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except\n * in compliance with the License. 
You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0.html\n *\n * Unless required by applicable law or agreed to in writing, software distributed under the License\n * is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\n * or implied. See the License for the specific language governing permissions and limitations under\n * the License.\n */\n\nusing System;\n\nusing SystemExt.Log;\nusing SystemExt.Terminal;\n\nnamespace SystemExt.Demo\n{\n\n /// \n /// Demo for .\n /// \n public static class Log\n {\n\n /// \n /// Entry point for the demo.\n /// \n /// \n /// Command line arguments.\n /// \n /// \n /// The code to terminate the application with on exit.\n /// \n public static int EntryPoint(string[] args)\n {\n return new ApplicationChooser()\n .AddEntryPoint(LogComponents, \"Iterate through various component filters and write a message\")\n .AddEntryPoint(LogLevels, \"Iterate through all log levels and write a message\")\n .Run(args);\n }\n\n /// \n /// A demo which iterates over a series of tokens and logs them to a stream.\n /// \n /// \n /// Command line arguments.\n /// \n /// \n /// The code to terminate the application with on exit.\n /// \n private static int LogComponents(string[] arg)\n {\n // Initialize manager and STDOUT logger.\n var manager = new LogManager();\n var logger = new StreamLogger(Console.OpenStandardOutput());\n\n // Iterate over various log tokens.\n foreach (var logToken in new[] { \"*\", \"INVALID DEMO2\", \"DEMO1 -DEMO2\", \"* -DEMO2 -* INVALID\" })\n {\n manager.AddLogger(logToken, LogLevel.Verbose, logger);\n Console.WriteLine(\"Component filter set to {0}\", logToken);\n manager.Write(LogLevel.Verbose, \"DEMO1\", \"Logging with the DEMO1 component!\");\n manager.Write(LogLevel.Verbose, \"DEMO2\", \"Logging with the DEMO2 component!\");\n manager.Write(LogLevel.Verbose, manager, \"Logging with the LogManager component!\");\n }\n\n 
Console.WriteLine(\"Press any key to exit.\");\n Console.ReadKey();\n return 0;\n }\n\n /// \n /// A demo which iterates over log levels and writes messages at each.\n /// \n /// \n /// Command line arguments.\n /// \n /// \n /// The code to terminate the application with on exit.\n /// \n private static int LogLevels(string[] arg)\n {\n // Initialize manager and STDOUT logger.\n var manager = new LogManager();\n var logger = new StreamLogger(Console.OpenStandardOutput());\n\n // Iterate through the log levels.\n for (var level = LogLevel.None; level <= LogLevel.Verbose; level++)\n {\n // Change the log level.\n Console.WriteLine(\"Setting log level to {0:G}.\", level);\n manager.AddLogger(\"DEMO\", level, logger);\n\n // Write messages to the logger at every level.\n manager.Write(LogLevel.Verbose, \"DEMO\", \"Verbose!\");\n manager.Write(LogLevel.Information, \"DEMO\", \"Information!\");\n manager.Write(LogLevel.Warning, \"DEMO\", \"Warning!\");\n manager.Write(LogLevel.Error, \"DEMO\", \"Error!\");\n manager.Write(LogLevel.Critical, \"DEMO\", \"Critical!\");\n }\n\n Console.WriteLine(\"Press any key to exit.\");\n Console.ReadKey();\n return 0;\n }\n }\n}\n"}}},{"rowIdx":1875,"cells":{"text":{"kind":"string","value":"# Gamification\n\n> Climbing is demanding, let's make it more fun with game mechanics!\n\nSee p91:\n- p21 for the self-assessment\n- p91 for the technical clues\n\n\n# Table of Contents\n\n- [Physical Skills](#physical-skills)\n- [Technical Skills](#technical-skills)\n- [Mental Skills](#mental-skills)\n- [Organizational Skills](#organizational-skills)\n- [Overall Skills](#overall-skills)\n\n"}}},{"rowIdx":1876,"cells":{"text":{"kind":"string","value":"---\nlayout: post\ntitle: Distributed software testing\nauthor: Daniel Mewes\nauthor_github: danielmewes\n--- \n\n# About me\n\nA word about me first: My name is Daniel Mewes, and I just came over to\nCalifornia to work at RethinkDB as an intern for the oncoming months. 
After\nhaving been an undergraduate student of computer science at Saarland\nUniversity, Germany for the last two years, I am exited to work on an\ninfluential real-world project at RethinkDB now. Why RethinkDB? Not only does\nRethinkDB develop an exciting and novel piece of database technology, RethinkDB\nalso provides the great \"startup kind\" of work experience.\n\n# Software testing\n\nIn complex software systems like database management systems, different\ncomponents have to work together. These components can interact in complex\nways, yielding a virtually infinite number of possible states that the overall\nsystem can reach. This has consequences for software testing. As bugs in the\ncode might only show up in a small fraction of the possible states,\ncomprehensive testing of the system is essential. Encapsulation of code and\ndata into objects can reduce the number of states that must be considered for\nany single piece of code. However an extremely large number of states can still\nremain, especially when considering parallel systems. Reliability requirements\nfor database management systems on the other hand are stringent. Losing or\ncorrupting data due to bugs in the program cannot be tolerated here.\n\n\nAmong other measures, we at RethinkDB ensure the reliability of our software by\nrunning extensive tests on a daily basis. The problem with these tests is that\nthey take a lot of time to complete. We recently reached time requirements of\nmore than 24 hours on a decent machine for a single test run. So clearly a\nsingle machine is not enough anymore to run the tests. For our daily test runs,\nwe want to get results quickly. Buying more machines is pricey, especially as\nthose machines would be idle during the times at which no tests are run. It\nalso is not very flexible.\n\n# Tapping into the cloud\n\nCloud computing provides a more flexible and less pricey way to circumvent the\nlimitations of limited local hardware resources. 
We decided to use Amazon's\nElastic Compute Cloud ([Amazon EC2][]). If you need the computing power of ten\nsystems, you can get that from EC2 in a matter of minutes. If you need the\npower of a hundred machines, you can get that in a matter of minutes, too.\nBasically, Amazon's EC2 provides you with as much computing power as you need,\nat just the time that you need it. EC2 allows to dynamically allocate and\ndeallocate virtual compute nodes, which are billed on an hourly basis. Each\nnode can be used like a normal computer. The nodes run Linux (Windows nodes are\nalso available) and are accessible through SSH. So EC2 looked like a promising\nplatform to make our tests finish faster.\n\n[Amazon EC2]: http://aws.amazon.com/ec2/\n\n![Distributed Software Testing](/assets/images/posts/2010-12-09-distributed-software-testing-1.png)\n\n_EC2 console showing a few nodes_\n\nOur existing test suite already split up the work into independent test\nscripts. What was missing for utilizing EC2 was an automated mechanism to start\nand setup a number of EC2 nodes and dispatch the individual tests to these\nnodes to run in parallel. Setting up a node especially involves the step of\ninstalling a current build of RethinkDB together with a number of dependencies\non the node's file system. I wrote a Python script to fulfill exactly these\ntasks. Our main concern was to improve the overall performance of the testing\nprocess as much as possible.\n\nIn more detail, our new distributed testing tool works in the following steps:\n\n * Allocate a number of nodes in Amazon's EC2.\n * Once all nodes are up and booted, install the current build of RethinkDB on\n each of them. 
As the bandwidth of the Internet connection in our office is\n much lower than what is available to the EC2 nodes, we use SFTP to install\n RethinkDB on only one of the nodes and then let that node distribute it to\n all remaining ones.\n * We can now start running tests on the nodes: \n * Pick a test from the list of all individual tests to be run.\n * Find a node which is not currently busy running another test. If no node\n is available, wait until a node becomes free.\n * Initiate the test on the free node. To do this, we use a wrapper script\n which we invoke and immediately background on the remote node. The\n wrapper script takes care of running the actual test and redirecting its\n output and result into specific files, which we can later retrieve\n asynchronously.\n * After repeating step 3 for all tests in the list, wait for all nodes to\n finish their current work.\n * Collect the results of all tests from the different nodes. This works by\n reading from the files in which our wrapper script has stored the tests'\n results.\n * Finally, terminate the allocated nodes in EC2.\n\nTo communicate with the compute nodes, I opted for the use of [Paramiko][], an\nimplementation of SSH2 for Python. Having direct access to the SSH2 protocol\nfrom a Python script makes running commands remotely as well as fetching and\ninstalling files from/into the remote systems very convenient. For allocating\nand terminating EC2 nodes, we use [Boto][], which provides an interface for\naccessing Amazon's AWS API from within Python programs.\n\n[Paramiko]: http://www.lag.net/paramiko/\n[Boto]: http://boto.s3.amazonaws.com/index.html\n\nThe results are convincing: Instead of 26 hours on a (fast) local machine,\nrunning all of our tests takes only 4 hours when distributed across ten nodes\nin EC2. By using still more nodes, the time for testing can be lowered even\nfurther. This is very useful. 
Say we just made an important change to our code\nand want to verify that everything works as it is supposed to. With local test\nruns, this would mean waiting at least a day, even longer if our testing\nmachine is occupied with an earlier test run. If one of the test detects a\nproblem with the change and we fix it, it takes another day at least until we\ncan see if the fix even worked and had no other side effects. Thanks to cloud\ncomputing and our distributed testing system, we can now initiate an arbitrary\nnumber of test runs on demand, each of which finishes in a matter of mere\nhours.\n"}}},{"rowIdx":1877,"cells":{"text":{"kind":"string","value":"# AWS User Group Kochi\n\nOfficial Website of AWS User Group Kochi community\n\n\n### Powered by\n\n- GitHub\n- Gatsby\n- Netlify\n"}}},{"rowIdx":1878,"cells":{"text":{"kind":"string","value":"import { Injectable } from '@angular/core';\nimport { Observable } from 'rxjs/Observable';\nimport { HttpClient} from '@angular/common/http'\n\nexport interface Charm {\n id: number,\n slug: string,\n name: string,\n ranks: CharmRank[]\n}\n\nexport interface CharmRank {\n name: string,\n level: number,\n rarity: number,\n skills: SkillRank[],\n crafting: CharmRankCrafting\n}\n\nexport interface SkillRank {\n id: number,\n slug: string,\n level: number,\n description: string,\n skill: number,\n skillName: string,\n modifiers: SkillRankModifiers\n}\n\nexport interface SkillRankModifiers {\n affinity: number,\n attack: number,\n damageFire: number,\n damageWater: number,\n damageIce: number,\n damageThunder: number,\n damageDragon: number,\n defense: number,\n health: number,\n sharpnessBonus: number,\n resistAll: number,\n resistFire: number,\n resistWater: number,\n resistIce: number,\n resistThunder: number,\n resistDragon: number\n}\n\nexport interface CharmRankCrafting {\n craftable: boolean,\n materials: CraftingCost[]\n}\n\nexport interface CraftingCost {\n quantity: number,\n item: Item\n}\n\nexport interface Item {\n 
id: number,\n name: string,\n description: string,\n rarity: number,\n carryLimit: number,\n value: number\n}\n\n\n@Injectable({\n providedIn: 'root'\n})\nexport class CharmService {\n charms: Charm[] = [];\n\n constructor( private http: HttpClient) { }\n\n getCharm(id: number): Observable {\n return this.http.get('https://mhw-db.com/charms/' + id)\n }\n getAllCharm(): Observable {\n console.log(\"test\");\n var temp = this.http.get('https://mhw-db.com/charms');\n console.log(temp);\n return temp;\n }\n}\n"}}},{"rowIdx":1879,"cells":{"text":{"kind":"string","value":"GiftOption);\n\n $dateTime = DateTimeImmutable::createFromFormat('Y-m-d H:i:s', (string) $element->CreatedAt);\n $createdAt = !empty($dateTime) ? $dateTime : null;\n\n $dateTime = DateTimeImmutable::createFromFormat('Y-m-d H:i:s', (string) $element->UpdatedAt);\n $updatedAt = !empty($dateTime) ? $dateTime : null;\n\n $dateTime = DateTimeImmutable::createFromFormat('Y-m-d H:i:s', (string) $element->AddressUpdatedAt);\n $addressUpdatedAt = !empty($dateTime) ? $dateTime : null;\n\n $addressBilling = AddressFactory::make($element->AddressBilling);\n\n $addressShipping = AddressFactory::make($element->AddressShipping);\n\n $dateTime = DateTimeImmutable::createFromFormat('Y-m-d H:i:s', (string) $element->PromisedShippingTime);\n $promisedShippingTime = !empty($dateTime) ? 
$dateTime : null;\n\n $statuses = [];\n foreach ($element->Statuses->Status as $status) {\n array_push($statuses, (string) $status);\n }\n\n return Order::fromData(\n (int) $element->OrderId,\n (int) $element->OrderNumber,\n (string) $element->CustomerFirstName,\n (string) $element->CustomerLastName,\n (string) $element->PaymentMethod,\n (string) $element->Remarks,\n (string) $element->DeliveryInfo,\n (float) $element->Price,\n $giftOption,\n (string) $element->GiftMessage,\n (string) $element->VoucherCode,\n $createdAt,\n $updatedAt,\n $addressUpdatedAt,\n $addressBilling,\n $addressShipping,\n (string) $element->NationalRegistrationNumber,\n (int) $element->ItemsCount,\n $promisedShippingTime,\n (string) $element->ExtraAttributes,\n $statuses\n );\n }\n}\n"}}},{"rowIdx":1880,"cells":{"text":{"kind":"string","value":"require 'rails_helper'\n\ndescribe 'GET /locations/:location_id/contacts' do\n context 'when location has contacts' do\n before :all do\n @loc = create(:location)\n @first_contact = @loc.contacts.\n create!(attributes_for(:contact_with_extra_whitespace))\n end\n\n before :each do\n get api_location_contacts_url(@loc, subdomain: ENV['API_SUBDOMAIN'])\n end\n\n after(:all) do\n Organization.find_each(&:destroy)\n end\n\n it 'returns a 200 status' do\n expect(response).to have_http_status(200)\n end\n\n it 'includes the id attribute in the serialization' do\n expect(json.first['id']).to eq(@first_contact.id)\n end\n\n it 'includes the name attribute in the serialization' do\n expect(json.first['name']).to eq(@first_contact.name)\n end\n\n it 'includes the title attribute in the serialization' do\n expect(json.first['title']).to eq(@first_contact.title)\n end\n\n it 'includes the email attribute in the serialization' do\n expect(json.first['email']).to eq(@first_contact.email)\n end\n\n it 'includes the fax attribute in the serialization' do\n expect(json.first['fax']).to eq(@first_contact.fax)\n end\n\n it 'includes the phone attribute in the 
serialization' do\n expect(json.first['phone']).to eq(@first_contact.phone)\n end\n\n it 'includes the extension attribute in the serialization' do\n expect(json.first['extension']).to eq(@first_contact.extension)\n end\n end\n\n context \"when location doesn't have contacts\" do\n before :all do\n @loc = create(:location)\n end\n\n before :each do\n get api_location_contacts_url(@loc, subdomain: ENV['API_SUBDOMAIN'])\n end\n\n after(:all) do\n Organization.find_each(&:destroy)\n end\n\n it 'returns an empty array' do\n expect(json).to eq([])\n end\n\n it 'returns a 200 status' do\n expect(response).to have_http_status(200)\n end\n end\nend\n"}}},{"rowIdx":1881,"cells":{"text":{"kind":"string","value":"\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n */\n\nnamespace Runroom\\UserBundle\\Repository;\n\nuse Doctrine\\ORM\\EntityManagerInterface;\nuse Doctrine\\ORM\\EntityRepository;\nuse Runroom\\UserBundle\\Model\\UserInterface;\n\nfinal class UserRepository implements UserRepositoryInterface\n{\n private EntityManagerInterface $entityManager;\n\n /** @phpstan-var class-string */\n private string $class;\n\n /** @phpstan-param class-string $class */\n public function __construct(EntityManagerInterface $entityManager, string $class)\n {\n $this->entityManager = $entityManager;\n $this->class = $class;\n }\n\n public function loadUserByIdentifier(string $identifier): ?UserInterface\n {\n return $this->getRepository()->findOneBy(['email' => $identifier]);\n }\n\n public function create(): UserInterface\n {\n return new $this->class();\n }\n\n public function save(UserInterface $user): void\n {\n $this->entityManager->persist($user);\n $this->entityManager->flush();\n }\n\n /** @phpstan-return EntityRepository */\n private function getRepository(): EntityRepository\n {\n return $this->entityManager->getRepository($this->class);\n 
}\n}\n"}}},{"rowIdx":1882,"cells":{"text":{"kind":"string","value":"json.id entry.id\njson.feed format_text(@titles[entry.feed_id] || entry.feed.title)\njson.title format_text(entry.title)\njson.author format_text(entry.author)\njson.published entry.published.iso8601\njson.content text_format(entry.content)\n"}}},{"rowIdx":1883,"cells":{"text":{"kind":"string","value":"package api\n\nimport (\n\t\"path\"\n\t\"time\"\n)\n\n// Experiment describes an experiment and its tasks.\ntype Experiment struct {\n\t// Identity\n\tID string `json:\"id\"`\n\tName string `json:\"name,omitempty\"`\n\n\t// Ownership\n\tOwner Identity `json:\"owner\"`\n\tAuthor Identity `json:\"author\"`\n\tUser Identity `json:\"user\"` // TODO: Deprecated.\n\n\tDescription string `json:\"description,omitempty\"`\n\tNodes []ExperimentNode `json:\"nodes\"`\n\tCreated time.Time `json:\"created\"`\n}\n\n// DisplayID returns the most human-friendly name available for an experiment\n// while guaranteeing that it's unique and non-empty.\nfunc (e *Experiment) DisplayID() string {\n\tif e.Name != \"\" {\n\t\treturn path.Join(e.User.Name, e.Name)\n\t}\n\treturn e.ID\n}\n\n// ExperimentSpec describes a set of tasks with optional dependencies.\n// This set represents a (potentially disconnected) directed acyclic graph.\ntype ExperimentSpec struct {\n\t// (optional) Organization on behalf of whom this resource is created. The\n\t// user issuing the request must be a member of the organization. If omitted,\n\t// the resource will be owned by the requestor.\n\tOrganization string `json:\"org,omitempty\"`\n\n\t// (optional) Text description of the experiment.\n\tDescription string `json:\"description,omitempty\"`\n\n\t// (required) Tasks to create. 
Tasks may be defined in any order, though all\n\t// dependencies must be internally resolvable within the experiment.\n\tTasks []ExperimentTaskSpec `json:\"tasks\"`\n\n\t// (optional) A token representing the user to which the object should be attributed.\n\t// If omitted attribution will be given to the user issuing the request.\n\tAuthorToken string `json:\"author_token,omitempty\"`\n\n\t// (optional) Settings for the Comet.ml integration, if it should be used for this experiment.\n\tComet *ExperimentCometSpec `json:\"comet,omitempty\"`\n}\n\n// ExperimentNode describes a task along with its links within an experiment.\ntype ExperimentNode struct {\n\tName string `json:\"name,omitempty\"`\n\tTaskID string `json:\"task_id\"`\n\tResultID string `json:\"result_id\"`\n\tStatus TaskStatus `json:\"status\"`\n\tCometURL string `json:\"cometUrl,omitempty\"`\n\n\t// Identifiers of tasks dependent on this node within the containing experiment.\n\tChildTasks []string `json:\"child_task_ids\"`\n\n\t// Identifiers of task on which this node depends within the containing experiment.\n\tParentTasks []string `json:\"parent_task_ids\"`\n}\n\n// DisplayID returns the most human-friendly name available for an experiment\n// node while guaranteeing that it's unique within the context of its experiment.\nfunc (n *ExperimentNode) DisplayID() string {\n\tif n.Name != \"\" {\n\t\treturn n.Name\n\t}\n\treturn n.TaskID\n}\n\n// ExperimentTaskSpec describes a task spec with optional dependencies on other\n// tasks within an experiment. Tasks refer to each other by the Name field.\ntype ExperimentTaskSpec struct {\n\t// (optional) Name of the task node, which need only be defined if\n\t// dependencies reference it.\n\tName string `json:\"name,omitempty\"`\n\n\t// (required) Specification describing the task to run.\n\tSpec TaskSpec `json:\"spec\"`\n\n\t// (optional) Tasks on which this task depends. 
Mounts will be applied, in\n\t// the order defined here, after existing mounts in the task spec.\n\tDependsOn []TaskDependency `json:\"depends_on,omitempty\"`\n}\n\n// TaskDependency describes a single \"edge\" in a task dependency graph.\ntype TaskDependency struct {\n\t// (required) Name of the task on which the referencing task depends.\n\tParentName string `json:\"parent_name\"`\n\n\t// (optional) Path in the child task to which parent results will be mounted.\n\t// If absent, this is treated as an order-only dependency.\n\tContainerPath string `json:\"container_path,omitempty\"`\n}\n\ntype ExperimentCometSpec struct {\n\t// (required) Whether or not to enable the integration for this experiment.\n\tEnable bool `json:\"enable\"`\n\n\t// (optional) The name of the experiment (shown in the Comet.ml interface)\n\tExperimentName string `json:\"experiment,omitempty\"`\n\n\t// (optional) The name of the Comet.ml project for this experiment.\n\tProjectName string `json:\"project,omitempty\"`\n\n\t// (optional) The name of the Comet.ml workspace for this experiment.\n\tWorkspaceName string `json:\"workspace,omitempty\"`\n}\n\n// ExperimentPatchSpec describes a patch to apply to an experiment's editable\n// fields. Only one field may be set in a single request.\ntype ExperimentPatchSpec struct {\n\t// (optional) Unqualified name to assign to the experiment. 
It is considered\n\t// a collision error if another experiment has the same creator and name.\n\tName *string `json:\"name,omitempty\"`\n\n\t// (optional) Description to assign to the experiment or empty string to\n\t// delete an existing description.\n\tDescription *string `json:\"description,omitempty\"`\n}\n"}}},{"rowIdx":1884,"cells":{"text":{"kind":"string","value":"#!/bin/bash\n# ftrc.sh\n# Simple wrapper to use kernel ftrace facility.\n\ntrap 'echo 0 > ${PFX}/tracing_on ; popd > /dev/null' INT QUIT\n\nname=$(basename $0)\nPFX=/sys/kernel/debug/tracing\nTRACE_INTERVAL=5\n\nif [ `id -u` -ne 0 ]; then\n\techo \"$name: need to be root.\"\n\texit 1\nfi\n\nif [ $# -ne 1 ]; then\n\techo \"Usage: $name ftrace-interval-in-sec\"\n\texit 1\nfi\nTRACE_INTERVAL=$1\n\npushd . >/dev/null\ncd ${PFX}\n\necho \"Select tracer from the list:\"\ncat ${PFX}/available_tracers\nread tracer\necho \"tracer = $tracer\"\n#TODO- validity check\necho \"${tracer}\" > ${PFX}/current_tracer\n\necho -n \"[current_tracer] Current Tracer is: \"\ncat ${PFX}/current_tracer\necho \"[trace_options] Current Trace Options are: \"\ncat ${PFX}/trace_options\necho\n\nif [ ${tracer} == \"function_graph\" ]; then\n\techo \"[set_graph_function] Current function(s) traced are: \"\n\tcat /sys/kernel/debug/tracing/set_graph_function\n\techo \"Type in your own functions (space-separated); [Enter] keeps default: \"\n\tread graph_funcs\n\tif [ -n \"${graph_funcs}\" ]; then\n\t\tfor func in ${graph_funcs}\n\t\tdo\n\t\t\techo \"function: $func\"\n\t\t\techo \"$func\" >> /sys/kernel/debug/tracing/set_graph_function\n\t\tdone\n\t\techo\n\t\techo \"New graph-traced functions are:\"\n\t\tcat /sys/kernel/debug/tracing/set_graph_function\n\tfi\nfi\n\n\necho -n \"Confirm Trace options above and START trace? 
[Y/n]: \"\nread reply\nif [[ $reply == \"n\" ]] || [[ $reply == \"N\" ]]; then\n echo \"$name: aborting now...\"\n exit 1\nfi\necho\necho \"Will now ftrace for $TRACE_INTERVAL seconds...\"\necho \"To manually Stop, ^C\"\necho\necho \"Starting trace now...\"\necho 1 > ${PFX}/tracing_on\n\nsleep $TRACE_INTERVAL\necho 0 > ${PFX}/tracing_on\n\n#tail -f ${PFX}/trace >> /tmp/ftrace_log.txt\ncat ${PFX}/trace > /tmp/ftrace_log.txt\npopd > /dev/null\n\n"}}},{"rowIdx":1885,"cells":{"text":{"kind":"string","value":"# `Faker().breakingBad`\n\n[Dictionary file](../src/main/resources/locales/en/breaking_bad.yml)\n\nAvailable Functions: \n```kotlin\nFaker().breakingBad.character() // => Walter White\n\nFaker().breakingBad.episode() // => Pilot\n```\n"}}},{"rowIdx":1886,"cells":{"text":{"kind":"string","value":"(function (window) {\n\t// 'use strict';//目前驾驭不了严格模式有空尽量看一看\n\t// Your starting point. Enjoy the ride!\n\t//ajax原理\n\t// var xhr = new XMLHttpRequest()\n\t// \txhr.open('get','http://localhost:8080/todos/getDataAll')\n\t// \txhr.send()\n\t// \txhr.onreadystatechange = function(){\n\t// \t\tif(xhr.readyState === 4 && xhr.status === 200){\n\t// \t\t\tconsole.log(xhr.responseText)\n\t// \t\t}\n\t// }\n\t//模版引擎通过原理获取模版引擎内部的内容去覆盖,想要修饰的内容。也就是说模版引擎解决了内容的问题,往哪里放随你咯。\n\taxios.defaults.baseURL = \"http://localhost:8080/todos/\"\n\tgetListDetail()\n\tfunction getListDetail() {\n\t\taxios({\n\t\t\turl: 'getDataAll'\n\t\t}).then(res => {\n\t\t\tconst { data, meta } = res.data\n\t\t\tif (meta.code === 200) {\n\t\t\t\t//渲染页面\n\t\t\t\t// console.log(data)\n\t\t\t\t//判断hash变化在渲染页面之前控制数据\n\t\t\t\t//通过window.location可以获取有关网页url相关的信息\n\t\t\t\tconst url = window.location.hash\n\t\t\t\tconst active = data.filter(item => { return item.isFinish === '0' })\n\t\t\t\tconst completed = data.filter(item => { return item.isFinish === '1' })\n\t\t\t\tswitch (url) {\n\t\t\t\t\tcase \"\":\n\t\t\t\t\tcase '#/': renderPage(data, total = data, url)\n\t\t\t\t\t\tbreak\n\t\t\t\t\tcase '#/active': 
renderPage(active, data, url)\n\t\t\t\t\t\tbreak\n\t\t\t\t\tcase '#/completed': renderPage(completed, data, url)\n\t\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n\t//使用total判断footer的隐藏,但是#/的情况下也需要穿入参数判断,所以给其默认值为temporarily。\n\tfunction renderPage(temporarily, total, url) {\n\t\tconst todos = document.querySelector('.todoapp');\n\t\tconst noFinish = total.filter(item => { return item.isFinish === '0' }).length\n\t\tconst isFinish = total.filter(item => { return item.isFinish === '1' }).length\n\t\tconst html = template('tpl-todos', { list: temporarily, total, noFinish, isFinish })\n\t\ttodos.innerHTML = html\n\t\t//在页面加载之后运行添加一个todo函数是不行的因为它获取的是页面原有的元素,而并不是由模版引擎渲染之后的页面元素。\n\t\taddTodo()\n\t\tdelTodo()\n\t\tmodify()\n\t\tshowEdit(temporarily)\n\t\tdelCompleted(temporarily)\n\t\tselectAll(temporarily)\n\t\t// changeAll(data)\n\t\tfooterChange(url)\n\t}\n\t//添加一个todo\n\tfunction addTodo() {\n\t\t//js中事件是元素的一个属性\n\t\t// document.querySelector('.new-todo').onkeyup= function (e){\n\t\t// \t console.log(e)\n\t\t// }\n\t\tconst addTodo = document.querySelector('.new-todo')\n\t\taddTodo.addEventListener('keyup', (e) => {\n\t\t\t//此处因为使用箭头函数所以this指向window,我们用元素本身代替也没问题的\n\t\t\tif (e.keyCode === 13 && addTodo.value.trim() !== '') {\n\t\t\t\tlet data = {\n\t\t\t\t\tcontent: addTodo.value.trim(), // 必须携带,新增 todo 的内容\n\t\t\t\t\tisFinish: 0 // 必须携带,新增 todo 的状态\n\t\t\t\t}\n\t\t\t\taxios.post('addTodo', data).then(res => {\n\t\t\t\t\tconst { meta } = res.data\n\t\t\t\t\tif (meta.code === 201) {\n\t\t\t\t\t\tgetListDetail()\n\t\t\t\t\t}\n\n\t\t\t\t})\n\t\t\t}\n\t\t})\n\t}\n\t//footer角标切换\n\tfunction footerChange(url) {\n\t\tconst arr = document.querySelectorAll('.filters li > a ')\n\t\tif (arr.length === 0) return\n\t\tarr.forEach(item => { item.classList.remove('selected') })\n\t\tswitch (url) {\n\t\t\tcase '':\n\t\t\tcase '#/': arr[0].classList.add('selected')\n\t\t\t\tbreak\n\t\t\tcase '#/active': arr[1].classList.add('selected')\n\t\t\t\tbreak\n\t\t\tcase \"#/completed\": 
arr[2].classList.add('selected')\n\t\t}\n\t}\n\t//删除一个todo\n\tfunction delTodo() {\n\t\tconst delTodo = document.querySelectorAll('.destroy')\n\t\t// console.log(delTodo)\n\t\tdelTodo.forEach(function (item) {\n\t\t\titem.addEventListener('click', function (e) {\n\t\t\t\tconst id = this.dataset.id\n\t\t\t\tif (confirm('确定要删除?')) {\n\t\t\t\t\taxios.delete(`delTodo?id=${id}`).then(res => {\n\t\t\t\t\t\tconst { meta } = res.data\n\t\t\t\t\t\tif (meta.code === 202) {\n\t\t\t\t\t\t\tgetListDetail()\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t})\n\t\t})\n\t}\n\t//修改单条状态\n\tfunction modify() {\n\t\tconst toggle = document.querySelectorAll('.toggle')\n\t\ttoggle.forEach(item => {\n\t\t\titem.addEventListener('change', function () {\n\t\t\t\tconst data = {\n\t\t\t\t\tid: this.dataset.id,\n\t\t\t\t\tisFinish: this.checked ? '1' : '0'\n\t\t\t\t}\n\t\t\t\taxios.put('changeStatu', data).then(res => {\n\t\t\t\t\tconst { meta } = res.data\n\t\t\t\t\tif (meta.code === 203) {\n\t\t\t\t\t\tgetListDetail()\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t})\n\t\t})\n\t}\n\t//编辑一条todo\n\tfunction showEdit(data) {\n\t\tconst lis = document.querySelectorAll('.todo-list li')\n\t\t//显示编辑栏\n\t\tlis.forEach((item, index) => {\n\t\t\titem.addEventListener('dblclick', function () {\n\t\t\t\tlis.forEach(item => {\n\t\t\t\t\titem.classList.remove('editing')\n\t\t\t\t})\n\t\t\t\tthis.classList.add('editing')\n\t\t\t\tedit(index, data[index], item)\n\t\t\t})\n\t\t})\n\t}\n\t//为了拿到删除的todo我们通过回调函数的方式传递参数\n\t//编辑操作\n\tfunction edit(index, data, todo) {\n\t\tconst edit = document.querySelectorAll('.edit')[index]\n\t\tedit.focus()\n\t\tedit.value = data.content\n\t\tconst id = todo.dataset.id\n\t\tedit.addEventListener('keyup', function (e) {\n\t\t\tif (e.keyCode === 13) {\n\t\t\t\tconst value = {\n\t\t\t\t\tcontent: this.value,\n\t\t\t\t\tid\n\t\t\t\t}\n\t\t\t\t//如果修改后为空,删除此todo\n\t\t\t\tif (!this.value) {\n\t\t\t\t\taxios.delete(`delTodo?id=${id}`).then(res => {\n\t\t\t\t\t\tconst { meta } = 
res.data\n\t\t\t\t\t\tif (meta.code === 202) {\n\t\t\t\t\t\t\tgetListDetail()\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\t//修改后数据相同取消编辑样式\n\t\t\t\tif (this.value === data.content) {\n\t\t\t\t\ttodo.classList.remove('editing')\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\t//和原数据不同时发起修改请求\n\t\t\t\taxios.put('changeContent', value).then(res => {\n\t\t\t\t\tconst { meta } = res.data\n\t\t\t\t\tif (meta.code === 203) {\n\t\t\t\t\t\tgetListDetail()\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t}\n\t\t})\n\t}\n\t//删除所有已经完成的todo\n\tfunction delCompleted(data) {\n\t\tconst completed = document.querySelector('.clear-completed')\n\t\tif (!completed) return\n\t\tconst arr = []\n\t\tdata.filter(item => {\n\t\t\tif (item.isFinish === '1') {\n\t\t\t\tarr.push(item.id)\n\t\t\t}\n\t\t})\n\t\tcompleted.addEventListener('click', function (e) {\n\t\t\taxios.delete(`/delAll?id=${arr.toString()}`).then(res => {\n\t\t\t\tconst { meta } = res.data\n\t\t\t\tif (meta.code === 202) {\n\t\t\t\t\tgetListDetail()\n\t\t\t\t}\n\t\t\t})\n\t\t})\n\t}\n\t//全选按钮\n\tfunction selectAll(data) {\n\t\tconst toggle_all = document.querySelector('.toggle-all')\n\t\ttoggle_all.addEventListener('click', function (e) {\n\t\t\t// console.log(this.checked)\n\t\t\tconst noFinish = data.filter(item => { return item.isFinish === \"0\" }).length\n\t\t\tconst isFinish = data.filter(item => { return item.isFinish === \"1\" }).length\n\t\t\tif (isFinish === data.length) {\n\t\t\t\tgetSelAll(false)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif (noFinish <= data.length) {\n\t\t\t\tgetSelAll(true)\n\t\t\t\treturn\n\t\t\t}\n\t\t})\n\t}\n\t//请求函数\n\tfunction getSelAll(bool) {\n\t\taxios.get(`changeStatusAll?isFinish=${bool}`).then(res => {\n\t\t\tconst { meta } = res.data\n\t\t\tif (meta.code === 203) {\n\t\t\t\tgetListDetail()\n\t\t\t}\n\t\t})\n\t}\n\t//通过change事件改变全选按钮\n\t// function changeAll(data){\n\t// \tconst toggle_all = document.querySelector('#toggle-all')\n\t// \ttoggle_all.addEventListener('change',function(){\n\t// \t\t// 
const bool = this.checked\n\t// \t\t console.log(this.checked)\n\t// \t\taxios.get(`changeStatusAll?isFinish=false`).then(res => {\n\t// \t\t\tconst { meta } = res.data\n\t// \t\t\tif (meta.code === 203) {\n\t// \t\t\t\tgetListDetail()\n\t// \t\t\t}\n\t// \t\t})\n\t// \t})\n\t// }\n\t//监听一个hashchange改变事件 给window添加一个hashchange事件\n\twindow.addEventListener('hashchange', (e) => {\n\t\tgetListDetail()\n\t})\n})(window);\n"}}},{"rowIdx":1887,"cells":{"text":{"kind":"string","value":"package com.github.antonpopoff.colorwheel.extensions\n\nimport android.os.Build\nimport android.os.Parcel\n\ninternal fun Parcel.writeBooleanCompat(value: Boolean) {\n if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {\n this.writeBoolean(value)\n } else {\n this.writeInt(if (value) 1 else 0)\n }\n}\n\ninternal fun Parcel.readBooleanCompat(): Boolean {\n return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {\n this.readBoolean()\n } else {\n this.readInt() == 1\n }\n}\n"}}},{"rowIdx":1888,"cells":{"text":{"kind":"string","value":"/*!\n * CanJS - 2.3.27\n * http://canjs.com/\n * Copyright (c) 2016 Bitovi\n * Thu, 15 Sep 2016 21:14:18 GMT\n * Licensed MIT\n */\n\n/*can@2.3.27#construct/super/super*/\nsteal('can/util', 'can/construct', function (can, Construct) {\n var isFunction = can.isFunction, fnTest = /xyz/.test(function () {\n return this.xyz;\n }) ? 
/\\b_super\\b/ : /.*/, getset = [\n 'get',\n 'set'\n ], getSuper = function (base, name, fn) {\n return function () {\n var tmp = this._super, ret;\n this._super = base[name];\n ret = fn.apply(this, arguments);\n this._super = tmp;\n return ret;\n };\n };\n can.Construct._defineProperty = function (addTo, base, name, descriptor) {\n var _super = Object.getOwnPropertyDescriptor(base, name);\n if (_super) {\n can.each(getset, function (method) {\n if (isFunction(_super[method]) && isFunction(descriptor[method])) {\n descriptor[method] = getSuper(_super, method, descriptor[method]);\n } else if (!isFunction(descriptor[method])) {\n descriptor[method] = _super[method];\n }\n });\n }\n Object.defineProperty(addTo, name, descriptor);\n };\n can.Construct._overwrite = function (addTo, base, name, val) {\n addTo[name] = isFunction(val) && isFunction(base[name]) && fnTest.test(val) ? getSuper(base, name, val) : val;\n };\n return can;\n});"}}},{"rowIdx":1889,"cells":{"text":{"kind":"string","value":"from django.conf import settings\nfrom django.contrib.auth.mixins import PermissionRequiredMixin\nfrom django.shortcuts import get_object_or_404\nfrom django.views.generic import DetailView\n\nfrom django_filters.views import FilterView\nfrom django_tables2.views import SingleTableView\n\nfrom sidekick.filters import (\n LogicalSystemFilterSet, RoutingTypeFilterSet,\n NetworkServiceTypeFilterSet, NetworkServiceFilterSet,\n NetworkServiceGroupFilterSet,\n)\n\nfrom sidekick.tables import (\n IPPrefixTable,\n LogicalSystemTable, RoutingTypeTable,\n NetworkServiceTypeTable, NetworkServiceTable,\n NetworkServiceGroupTable,\n)\n\nfrom sidekick.models import (\n LogicalSystem, RoutingType,\n NetworkServiceType,\n NetworkService,\n NetworkServiceGroup,\n)\n\nfrom sidekick.utils import (\n get_all_ip_prefixes,\n get_graphite_service_graph,\n)\n\n\n# IP Prefix Index\nclass IPPrefixIndexView(PermissionRequiredMixin, SingleTableView):\n permission_required = 'sidekick.view_ipprefix'\n model 
= NetworkService\n context_object_name = 'ns'\n template_name = 'sidekick/networkservice/ipprefix_index.html'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n prefixes = []\n for member_id, data in get_all_ip_prefixes().items():\n for prefix in data['prefixes']:\n prefixes.append({\n 'prefix': prefix,\n 'member': data['member'],\n })\n table = IPPrefixTable(prefixes)\n context['table'] = table\n\n return context\n\n\n# Logical System Index\nclass LogicalSystemIndexView(PermissionRequiredMixin, FilterView, SingleTableView):\n permission_required = 'sidekick.view_logicalsystem'\n model = LogicalSystem\n table_class = LogicalSystemTable\n filterset_class = LogicalSystemFilterSet\n template_name = 'sidekick/networkservice/logicalsystem_index.html'\n\n\n# Logical System Details\nclass LogicalSystemDetailView(PermissionRequiredMixin, DetailView):\n permission_required = 'sidekick.view_logicalsystem'\n model = LogicalSystem\n template_name = 'sidekick/networkservice/logicalsystem.html'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n logical_system = get_object_or_404(LogicalSystem, slug=self.kwargs['slug'])\n context['logical_system'] = logical_system\n\n table = NetworkServiceTable(NetworkService.objects.filter(\n network_service_devices__network_service_l3__logical_system=logical_system.id))\n context['table'] = table\n\n return context\n\n\n# Routing Type Index\nclass RoutingTypeIndexView(PermissionRequiredMixin, FilterView, SingleTableView):\n permission_required = 'sidekick.view_routingtype'\n model = RoutingType\n table_class = RoutingTypeTable\n filterset_class = RoutingTypeFilterSet\n template_name = 'sidekick/networkservice/routingtype_index.html'\n\n\n# Routing Type Details\nclass RoutingTypeDetailView(PermissionRequiredMixin, DetailView):\n permission_required = 'sidekick.view_routingtype'\n model = RoutingType\n template_name = 
'sidekick/networkservice/routingtype.html'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n routing_type = get_object_or_404(RoutingType, slug=self.kwargs['slug'])\n context['routing_type'] = routing_type\n\n table = NetworkServiceTable(NetworkService.objects.filter(\n network_service_devices__network_service_l3__routing_type=routing_type.id))\n context['table'] = table\n\n return context\n\n\n# Network Service Type Index\nclass NetworkServiceTypeIndexView(PermissionRequiredMixin, FilterView, SingleTableView):\n permission_required = 'sidekick.view_networkservicetype'\n model = NetworkServiceType\n table_class = NetworkServiceTypeTable\n filterset_class = NetworkServiceTypeFilterSet\n template_name = 'sidekick/networkservice/networkservicetype_index.html'\n\n\n# Network Service Type Details\nclass NetworkServiceTypeDetailView(PermissionRequiredMixin, DetailView):\n permission_required = 'sidekick.view_networkservicetype'\n model = NetworkServiceType\n template_name = 'sidekick/networkservice/networkservicetype.html'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n nst = get_object_or_404(NetworkServiceType, slug=self.kwargs['slug'])\n context['nst'] = nst\n\n table = NetworkServiceTable(NetworkService.objects.filter(\n network_service_type=nst.id))\n context['table'] = table\n\n return context\n\n\n# Network Service Index\nclass NetworkServiceIndexView(PermissionRequiredMixin, FilterView, SingleTableView):\n permission_required = 'sidekick.view_networkservice'\n model = NetworkService\n table_class = NetworkServiceTable\n filterset_class = NetworkServiceFilterSet\n template_name = 'sidekick/networkservice/networkservice_index.html'\n\n\n# Network Service Details\nclass NetworkServiceDetailView(PermissionRequiredMixin, DetailView):\n permission_required = 'sidekick.view_networkservice'\n model = NetworkService\n context_object_name = 'ns'\n template_name = 
'sidekick/networkservice/networkservice.html'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n ns = NetworkService.objects.get(pk=self.kwargs['pk'])\n\n graphite_render_host = settings.PLUGINS_CONFIG['sidekick'].get('graphite_render_host', None)\n graph_data = get_graphite_service_graph(ns, graphite_render_host)\n context['graph_data'] = graph_data\n\n return context\n\n\n# Network Service Group Index\nclass NetworkServiceGroupIndexView(PermissionRequiredMixin, FilterView, SingleTableView):\n permission_required = 'sidekick.view_networkservicegroup'\n model = NetworkServiceGroup\n table_class = NetworkServiceGroupTable\n filterset_class = NetworkServiceGroupFilterSet\n template_name = 'sidekick/networkservice/networkservicegroup_index.html'\n\n\n# Network Service Group Details\nclass NetworkServiceGroupDetailView(PermissionRequiredMixin, DetailView):\n permission_required = 'sidekick.view_networkservicegroup'\n model = NetworkServiceGroup\n context_object_name = 'nsg'\n template_name = 'sidekick/networkservice/networkservicegroup.html'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n nsg = get_object_or_404(NetworkServiceGroup, pk=self.kwargs['pk'])\n context['nsg'] = nsg\n\n table = NetworkServiceTable(NetworkService.objects.filter(\n pk__in=nsg.network_services.all()))\n context['table'] = table\n\n return context\n"}}},{"rowIdx":1890,"cells":{"text":{"kind":"string","value":"# zergtel-android\nPort of ZTVDC to android\nDeprecated - practically no features implemented at the moment, and probably indefintely.\nSee [https://github.com/s-zeng/ZTVDC](https://github.com/s-zeng/ZTVDC) instead\n"}}},{"rowIdx":1891,"cells":{"text":{"kind":"string","value":"import { Component, OnInit } from '@angular/core';\n\nimport { Product } from '../../../model/beans/product/product.model';\nimport { ProductService } from '../../../model/services/product/product.service';\n\n@Component({\n 
selector: 'bp-landing-page-jewelery-component',\n templateUrl: './jewelery.component.html'\n})\n\nexport class LandingPageJeweleryComponent implements OnInit {\n public pretrad: string;\n public urlRest: string;\n public p1: Product;\n public p2: Product;\n\n constructor (\n private productService: ProductService\n ) {\n this.pretrad = 'MODULES.LANDING-PAGE.JEWELERY.';\n\n this.urlRest = process.env.API_URL.slice(0, -1);\n\n this.p1 = new Product();\n this.p2 = new Product();\n }\n\n public ngOnInit(): void {\n this.productService.getByReference('P1', Product).then(\n (response) => {\n this.p1 = response;\n },\n (error) => {\n console.error(error);\n }\n );\n this.productService.getByReference('P2', Product).then(\n (response) => {\n this.p2 = response;\n },\n (error) => {\n console.error(error);\n }\n );\n }\n}"}}},{"rowIdx":1892,"cells":{"text":{"kind":"string","value":"//=========================================================================\n// Copyright (C) 2012 The Elastos Open Source Project\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n//=========================================================================\n\n#include \"elastos/droid/server/pm/CLauncherAppsImpl.h\"\n#include \"Elastos.Droid.Net.h\"\n#include \"Elastos.Droid.Provider.h\"\n#include \"elastos/droid/os/Binder.h\"\n#include \"elastos/droid/os/UserHandle.h\"\n#include \"elastos/droid/app/AppGlobals.h\"\n#include \n\n#include \nusing Elastos::Core::AutoLock;\nusing 
Elastos::Droid::App::AppGlobals;\nusing Elastos::Droid::Content::CIntent;\nusing Elastos::Droid::Content::Pm::IPackageInfo;\nusing Elastos::Droid::Content::Pm::IIPackageManager;\nusing Elastos::Droid::Content::Pm::IApplicationInfo;\nusing Elastos::Droid::Content::Pm::IActivityInfo;\nusing Elastos::Droid::Content::Pm::IUserInfo;\nusing Elastos::Droid::Content::Pm::EIID_IILauncherApps;\nusing Elastos::Droid::Content::Pm::IPackageItemInfo;\nusing Elastos::Droid::Content::Pm::IComponentInfo;\nusing Elastos::Droid::Net::IUriHelper;\nusing Elastos::Droid::Net::CUriHelper;\nusing Elastos::Droid::Net::IUri;\nusing Elastos::Droid::Os::Binder;\nusing Elastos::Droid::Os::UserHandle;\nusing Elastos::Droid::Os::CUserHandle;\nusing Elastos::Droid::Os::EIID_IBinder;\nusing Elastos::Droid::Provider::ISettings;\nusing Elastos::Utility::Logging::Logger;\nusing Elastos::Utility::IArrayList;\nusing Elastos::Utility::CArrayList;\nusing Elastos::Utility::IIterator;\n\nnamespace Elastos {\nnamespace Droid {\nnamespace Server {\nnamespace Pm {\n\n//==============================================================================\n// CLauncherAppsImpl::MyPackageMonitor\n//==============================================================================\n\nBoolean CLauncherAppsImpl::MyPackageMonitor::IsEnabledProfileOf(\n /* [in] */ IUserHandle* user,\n /* [in] */ IUserHandle* listeningUser,\n /* [in] */ const String& debugMsg)\n{\n Int32 id, lisId;\n user->GetIdentifier(&id);\n listeningUser->GetIdentifier(&lisId);\n if (id == lisId) {\n if (DEBUG) Logger::D(TAG, \"Delivering msg to same user %s\", debugMsg.string());\n return TRUE;\n }\n Int64 ident = Binder::ClearCallingIdentity();\n // try {\n AutoPtr userInfo, listeningUserInfo;\n if (FAILED(mHost->mUm->GetUserInfo(id, (IUserInfo**)&userInfo))) {\n Binder::RestoreCallingIdentity(ident);\n return FALSE;\n }\n if (FAILED(mHost->mUm->GetUserInfo(lisId, (IUserInfo**)&listeningUserInfo))) {\n Binder::RestoreCallingIdentity(ident);\n return 
FALSE;\n }\n Int32 groupId, lisGroupId;\n Boolean isEnabled;\n if (userInfo == NULL || listeningUserInfo == NULL\n || (userInfo->GetProfileGroupId(&groupId), groupId == IUserInfo::NO_PROFILE_GROUP_ID)\n || (listeningUserInfo->GetProfileGroupId(&lisGroupId), groupId != lisGroupId)\n || (userInfo->IsEnabled(&isEnabled), !isEnabled)) {\n if (DEBUG) {\n Logger::D(TAG, \"Not delivering msg from %p to %p:%s\", user, listeningUser, debugMsg.string());\n }\n Binder::RestoreCallingIdentity(ident);\n return FALSE;\n }\n else {\n if (DEBUG) {\n Logger::D(TAG, \"Delivering msg from %p to %p:%s\", user, listeningUser, debugMsg.string());\n }\n Binder::RestoreCallingIdentity(ident);\n return TRUE;\n }\n // } finally {\n // Binder.restoreCallingIdentity(ident);\n // }\n}\n\nECode CLauncherAppsImpl::MyPackageMonitor::OnPackageAdded(\n /* [in] */ const String& packageName,\n /* [in] */ Int32 uid)\n{\n Int32 id;\n GetChangingUserId(&id);\n AutoPtr user;\n CUserHandle::New(id, (IUserHandle**)&user);\n Int32 n;\n mHost->mListeners->BeginBroadcast(&n);\n for (Int32 i = 0; i < n; i++) {\n AutoPtr item;\n mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);\n AutoPtr listener = IOnAppsChangedListener::Probe(item);\n AutoPtr cookie;\n mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);\n AutoPtr listeningUser = IUserHandle::Probe(cookie);\n if (!IsEnabledProfileOf(user, listeningUser, String(\"onPackageAdded\"))) continue;\n // try {\n if (FAILED(listener->OnPackageAdded(user, packageName))) {\n Logger::D(TAG, \"Callback failed \");\n }\n // } catch (RemoteException re) {\n // Slog.d(TAG, \"Callback failed \", re);\n // }\n }\n mHost->mListeners->FinishBroadcast();\n\n return PackageMonitor::OnPackageAdded(packageName, uid);\n}\n\nECode CLauncherAppsImpl::MyPackageMonitor::OnPackageRemoved(\n /* [in] */ const String& packageName,\n /* [in] */ Int32 uid)\n{\n Int32 id;\n GetChangingUserId(&id);\n AutoPtr user;\n CUserHandle::New(id, (IUserHandle**)&user);\n Int32 n;\n 
mHost->mListeners->BeginBroadcast(&n);\n for (Int32 i = 0; i < n; i++) {\n AutoPtr item;\n mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);\n AutoPtr listener = IOnAppsChangedListener::Probe(item);\n AutoPtr cookie;\n mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);\n AutoPtr listeningUser = IUserHandle::Probe(cookie);\n if (!IsEnabledProfileOf(user, listeningUser, String(\"onPackageRemoved\"))) continue;\n // try {\n if (FAILED(listener->OnPackageRemoved(user, packageName))) {\n Logger::D(TAG, \"Callback failed \");\n }\n // } catch (RemoteException re) {\n // Slog.d(TAG, \"Callback failed \", re);\n // }\n }\n mHost->mListeners->FinishBroadcast();\n\n return PackageMonitor::OnPackageRemoved(packageName, uid);\n}\n\nECode CLauncherAppsImpl::MyPackageMonitor::OnPackageModified(\n /* [in] */ const String& packageName)\n{\n Int32 id;\n GetChangingUserId(&id);\n AutoPtr user;\n CUserHandle::New(id, (IUserHandle**)&user);\n Int32 n;\n mHost->mListeners->BeginBroadcast(&n);\n for (Int32 i = 0; i < n; i++) {\n AutoPtr item;\n mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);\n AutoPtr listener = IOnAppsChangedListener::Probe(item);\n AutoPtr cookie;\n mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);\n AutoPtr listeningUser = IUserHandle::Probe(cookie);\n if (!IsEnabledProfileOf(user, listeningUser, String(\"onPackageModified\"))) continue;\n // try {\n if (FAILED(listener->OnPackageChanged(user, packageName))) {\n Logger::D(TAG, \"Callback failed \");\n }\n // } catch (RemoteException re) {\n // Slog.d(TAG, \"Callback failed \", re);\n // }\n }\n mHost->mListeners->FinishBroadcast();\n\n return PackageMonitor::OnPackageModified(packageName);\n}\n\nECode CLauncherAppsImpl::MyPackageMonitor::OnPackagesAvailable(\n /* [in] */ ArrayOf* packages)\n{\n Int32 id;\n GetChangingUserId(&id);\n AutoPtr user;\n CUserHandle::New(id, (IUserHandle**)&user);\n Int32 n;\n mHost->mListeners->BeginBroadcast(&n);\n for (Int32 i = 0; i < 
n; i++) {\n AutoPtr item;\n mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);\n AutoPtr listener = IOnAppsChangedListener::Probe(item);\n AutoPtr cookie;\n mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);\n AutoPtr listeningUser = IUserHandle::Probe(cookie);\n if (!IsEnabledProfileOf(user, listeningUser, String(\"onPackagesAvailable\"))) continue;\n // try {\n Boolean isReplacing;\n IsReplacing(&isReplacing);\n if (FAILED(listener->OnPackagesAvailable(user, packages, isReplacing))) {\n Logger::D(TAG, \"Callback failed \");\n }\n // } catch (RemoteException re) {\n // Slog.d(TAG, \"Callback failed \", re);\n // }\n }\n mHost->mListeners->FinishBroadcast();\n\n return PackageMonitor::OnPackagesAvailable(packages);\n}\n\nECode CLauncherAppsImpl::MyPackageMonitor::OnPackagesUnavailable(\n /* [in] */ ArrayOf* packages)\n{\n Int32 id;\n GetChangingUserId(&id);\n AutoPtr user;\n CUserHandle::New(id, (IUserHandle**)&user);\n Int32 n;\n mHost->mListeners->BeginBroadcast(&n);\n for (Int32 i = 0; i < n; i++) {\n AutoPtr item;\n mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);\n AutoPtr listener = IOnAppsChangedListener::Probe(item);\n AutoPtr cookie;\n mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);\n AutoPtr listeningUser = IUserHandle::Probe(cookie);\n if (!IsEnabledProfileOf(user, listeningUser, String(\"onPackagesUnavailable\"))) continue;\n // try {\n Boolean isReplacing;\n IsReplacing(&isReplacing);\n if (FAILED(listener->OnPackagesUnavailable(user, packages, isReplacing))) {\n Logger::D(TAG, \"Callback failed \");\n }\n // } catch (RemoteException re) {\n // Slog.d(TAG, \"Callback failed \", re);\n // }\n }\n mHost->mListeners->FinishBroadcast();\n\n return PackageMonitor::OnPackagesUnavailable(packages);\n}\n\n\n//==============================================================================\n// 
CLauncherAppsImpl::PackageCallbackList\n//==============================================================================\n\nECode CLauncherAppsImpl::PackageCallbackList::OnCallbackDied(\n /* [in] */ IInterface* callback,\n /* [in] */ IInterface* cookie)\n{\n mHost->CheckCallbackCount();\n return NOERROR;\n}\n\n\n//==============================================================================\n// CLauncherAppsImpl\n//==============================================================================\n\nconst Boolean CLauncherAppsImpl::DEBUG;\nconst String CLauncherAppsImpl::TAG(\"CLauncherAppsImpl\");\n\nCLauncherAppsImpl::CLauncherAppsImpl()\n{\n mListeners = new PackageCallbackList(this);\n mPackageMonitor = new MyPackageMonitor(this);\n}\n\nCAR_INTERFACE_IMPL_2(CLauncherAppsImpl, Object, IILauncherApps, IBinder)\n\nCAR_OBJECT_IMPL(CLauncherAppsImpl)\n\nECode CLauncherAppsImpl::constructor(\n /* [in] */ IContext* ctx)\n{\n mContext = ctx;\n mContext->GetPackageManager((IPackageManager**)&mPm);\n AutoPtr service;\n mContext->GetSystemService(IContext::USER_SERVICE, (IInterface**)&service);\n mUm = IUserManager::Probe(service);\n return NOERROR;\n}\n\nECode CLauncherAppsImpl::AddOnAppsChangedListener(\n /* [in] */ IOnAppsChangedListener* listener)\n{\n { AutoLock syncLock(mListenersLock);\n if (DEBUG) {\n Logger::D(TAG, \"Adding listener from %p\", Binder::GetCallingUserHandle().Get());\n }\n Int32 count;\n if (mListeners->GetRegisteredCallbackCount(&count), count == 0) {\n if (DEBUG) {\n Logger::D(TAG, \"Starting package monitoring\");\n }\n StartWatchingPackageBroadcasts();\n }\n Boolean result;\n FAIL_RETURN(mListeners->Unregister(listener, &result))\n AutoPtr handle = Binder::GetCallingUserHandle();\n FAIL_RETURN(mListeners->Register(listener, handle, &result))\n }\n return NOERROR;\n}\n\nECode CLauncherAppsImpl::RemoveOnAppsChangedListener(\n /* [in] */ IOnAppsChangedListener* listener)\n{\n { AutoLock syncLock(mListenersLock);\n if (DEBUG) {\n Logger::D(TAG, 
\"Removing listener from %p\", Binder::GetCallingUserHandle().Get());\n }\n Boolean result;\n FAIL_RETURN(mListeners->Unregister(listener, &result))\n Int32 count;\n if (mListeners->GetRegisteredCallbackCount(&count), count == 0) {\n StopWatchingPackageBroadcasts();\n }\n }\n return NOERROR;\n}\n\nvoid CLauncherAppsImpl::StartWatchingPackageBroadcasts()\n{\n mPackageMonitor->Register(mContext, NULL, UserHandle::ALL, TRUE);\n}\n\nvoid CLauncherAppsImpl::StopWatchingPackageBroadcasts()\n{\n if (DEBUG) {\n Logger::D(TAG, \"Stopped watching for packages\");\n }\n mPackageMonitor->Unregister();\n}\n\nvoid CLauncherAppsImpl::CheckCallbackCount()\n{\n { AutoLock syncLock(mListenersLock);\n Int32 count;\n mListeners->GetRegisteredCallbackCount(&count);\n if (DEBUG) {\n Logger::D(TAG, \"Callback count = %d\", count);\n }\n if (count == 0) {\n StopWatchingPackageBroadcasts();\n }\n }\n}\n\nECode CLauncherAppsImpl::EnsureInUserProfiles(\n /* [in] */ IUserHandle* userToCheck,\n /* [in] */ const String& message)\n{\n Int32 callingUserId = UserHandle::GetCallingUserId();\n Int32 targetUserId;\n userToCheck->GetIdentifier(&targetUserId);\n\n if (targetUserId == callingUserId) return NOERROR;\n\n Int64 ident = Binder::ClearCallingIdentity();\n // try {\n AutoPtr callingUserInfo;\n mUm->GetUserInfo(callingUserId, (IUserInfo**)&callingUserInfo);\n AutoPtr targetUserInfo;\n mUm->GetUserInfo(targetUserId, (IUserInfo**)&targetUserInfo);\n Int32 targetId, callingId;\n if (targetUserInfo == NULL\n || (targetUserInfo->GetProfileGroupId(&targetId), targetId == IUserInfo::NO_PROFILE_GROUP_ID)\n || (callingUserInfo->GetProfileGroupId(&callingId), targetId != callingId)) {\n Binder::RestoreCallingIdentity(ident);\n return E_SECURITY_EXCEPTION;\n }\n // } finally {\n // Binder.restoreCallingIdentity(ident);\n // }\n Binder::RestoreCallingIdentity(ident);\n return NOERROR;\n}\n\nBoolean CLauncherAppsImpl::IsUserEnabled(\n /* [in] */ IUserHandle* user)\n{\n Int64 ident = 
Binder::ClearCallingIdentity();\n // try {\n Int32 id;\n user->GetIdentifier(&id);\n AutoPtr targetUserInfo;\n mUm->GetUserInfo(id, (IUserInfo**)&targetUserInfo);\n Binder::RestoreCallingIdentity(ident);\n Boolean isEnabled;\n return targetUserInfo != NULL && (targetUserInfo->IsEnabled(&isEnabled), isEnabled);\n // } finally {\n // Binder.restoreCallingIdentity(ident);\n // }\n}\n\nECode CLauncherAppsImpl::GetLauncherActivities(\n /* [in] */ const String& packageName,\n /* [in] */ IUserHandle* user,\n /* [out] */ IList** list)\n{\n VALIDATE_NOT_NULL(list)\n *list = NULL;\n\n String str = Object::ToString(user);\n FAIL_RETURN(EnsureInUserProfiles(user,\n String(\"Cannot retrieve activities for unrelated profile \") + str))\n if (!IsUserEnabled(user)) {\n return CArrayList::New(list);\n }\n\n AutoPtr mainIntent;\n CIntent::New(IIntent::ACTION_MAIN, NULL, (IIntent**)&mainIntent);\n mainIntent->AddCategory(IIntent::CATEGORY_LAUNCHER);\n mainIntent->SetPackage(packageName);\n Int64 ident = Binder::ClearCallingIdentity();\n // try {\n Int32 id;\n user->GetIdentifier(&id);\n ECode ec = mPm->QueryIntentActivitiesAsUser(mainIntent, 0 /* flags */, id, list);\n Binder::RestoreCallingIdentity(ident);\n return ec;\n // } finally {\n // Binder.restoreCallingIdentity(ident);\n // }\n}\n\nECode CLauncherAppsImpl::ResolveActivity(\n /* [in] */ IIntent* intent,\n /* [in] */ IUserHandle* user,\n /* [out] */ IResolveInfo** info)\n{\n VALIDATE_NOT_NULL(info)\n *info = NULL;\n\n String str = Object::ToString(user);\n FAIL_RETURN(EnsureInUserProfiles(user,\n String(\"Cannot resolve activity for unrelated profile \") + str))\n if (!IsUserEnabled(user)) {\n return NOERROR;\n }\n\n Int64 ident = Binder::ClearCallingIdentity();\n // try {\n Int32 id;\n user->GetIdentifier(&id);\n ECode ec = mPm->ResolveActivityAsUser(intent, 0, id, info);\n Binder::RestoreCallingIdentity(ident);\n return ec;\n // } finally {\n // Binder.restoreCallingIdentity(ident);\n // }\n}\n\nECode 
CLauncherAppsImpl::IsPackageEnabled(\n /* [in] */ const String& packageName,\n /* [in] */ IUserHandle* user,\n /* [out] */ Boolean* result)\n{\n VALIDATE_NOT_NULL(result)\n *result = FALSE;\n\n String str = Object::ToString(user);\n FAIL_RETURN(EnsureInUserProfiles(user,\n String(\"Cannot check package for unrelated profile \") + str))\n if (!IsUserEnabled(user)) {\n return NOERROR;\n }\n\n Int64 ident = Binder::ClearCallingIdentity();\n // try {\n AutoPtr pm = AppGlobals::GetPackageManager();\n Int32 id;\n user->GetIdentifier(&id);\n AutoPtr info;\n ECode ec = pm->GetPackageInfo(packageName, 0, id, (IPackageInfo**)&info);\n if (FAILED(ec)) {\n Binder::RestoreCallingIdentity(ident);\n return ec;\n }\n if (info != NULL) {\n AutoPtr ai;\n info->GetApplicationInfo((IApplicationInfo**)&ai);\n ai->GetEnabled(result);\n }\n Binder::RestoreCallingIdentity(ident);\n return NOERROR;\n // } finally {\n // Binder.restoreCallingIdentity(ident);\n // }\n}\n\nECode CLauncherAppsImpl::IsActivityEnabled(\n /* [in] */ IComponentName* component,\n /* [in] */ IUserHandle* user,\n /* [out] */ Boolean* result)\n{\n VALIDATE_NOT_NULL(result)\n *result = FALSE;\n\n String str = Object::ToString(user);\n FAIL_RETURN(EnsureInUserProfiles(user,\n String(\"Cannot check component for unrelated profile \") + str))\n if (!IsUserEnabled(user)) {\n return NOERROR;\n }\n\n Int64 ident = Binder::ClearCallingIdentity();\n // try {\n AutoPtr pm = AppGlobals::GetPackageManager();\n Int32 id;\n user->GetIdentifier(&id);\n AutoPtr info;\n ECode ec = pm->GetActivityInfo(component, 0, id, (IActivityInfo**)&info);\n if (FAILED(ec)) {\n Binder::RestoreCallingIdentity(ident);\n return ec;\n }\n *result = info != NULL;\n Binder::RestoreCallingIdentity(ident);\n return NOERROR;\n // } finally {\n // Binder.restoreCallingIdentity(ident);\n // }\n}\n\nECode CLauncherAppsImpl::StartActivityAsUser(\n /* [in] */ IComponentName* component,\n /* [in] */ IRect* sourceBounds,\n /* [in] */ IBundle* opts,\n /* [in] */ 
IUserHandle* user)\n{\n String str = Object::ToString(user);\n FAIL_RETURN(EnsureInUserProfiles(user,\n String(\"Cannot start activity for unrelated profile \") + str))\n if (!IsUserEnabled(user)) {\n Logger::E(TAG, \"Cannot start activity for disabled profile %s\", str.string());\n return E_ILLEGAL_STATE_EXCEPTION;\n }\n\n AutoPtr launchIntent;\n CIntent::New(IIntent::ACTION_MAIN, (IIntent**)&launchIntent);\n launchIntent->AddCategory(IIntent::CATEGORY_LAUNCHER);\n launchIntent->SetSourceBounds(sourceBounds);\n launchIntent->AddFlags(IIntent::FLAG_ACTIVITY_NEW_TASK);\n String pkgName;\n component->GetPackageName(&pkgName);\n launchIntent->SetPackage(pkgName);\n\n Int64 ident = Binder::ClearCallingIdentity();\n // try {\n AutoPtr pm = AppGlobals::GetPackageManager();\n Int32 id;\n user->GetIdentifier(&id);\n AutoPtr info;\n ECode ec = pm->GetActivityInfo(component, 0, id, (IActivityInfo**)&info);\n if (FAILED(ec)) {\n Binder::RestoreCallingIdentity(ident);\n return ec;\n }\n\n Boolean exported;\n if (IComponentInfo::Probe(info)->GetExported(&exported), !exported) {\n Logger::E(TAG, \"Cannot launch non-exported components %p\", component);\n Binder::RestoreCallingIdentity(ident);\n return E_SECURITY_EXCEPTION;\n }\n\n // Check that the component actually has Intent.CATEGORY_LAUCNCHER\n // as calling startActivityAsUser ignores the category and just\n // resolves based on the component if present.\n AutoPtr apps;\n ec = mPm->QueryIntentActivitiesAsUser(launchIntent, 0 /* flags */, id, (IList**)&apps);\n if (FAILED(ec)) {\n Binder::RestoreCallingIdentity(ident);\n return ec;\n }\n\n AutoPtr it;\n apps->GetIterator((IIterator**)&it);\n Boolean hasNext;\n String aiPkgName, aiClsName, className;\n component->GetClassName(&className);\n\n while (it->HasNext(&hasNext), hasNext) {\n AutoPtr value;\n it->GetNext((IInterface**)&value);\n AutoPtr ri = IResolveInfo::Probe(value);\n AutoPtr activityInfo;\n ri->GetActivityInfo((IActivityInfo**)&activityInfo);\n 
IPackageItemInfo::Probe(activityInfo)->GetPackageName(&aiPkgName);\n if (aiPkgName.Equals(pkgName)) {\n IPackageItemInfo::Probe(activityInfo)->GetName(&aiClsName);\n if (aiClsName.Equals(className)) {\n // Found an activity with category launcher that matches\n // this component so ok to launch.\n launchIntent->SetComponent(component);\n ec = mContext->StartActivityAsUser(launchIntent, opts, user);\n Binder::RestoreCallingIdentity(ident);\n if (FAILED(ec)) {\n Logger::E(TAG, \"Failed to launch activity [%s], ec=%08x.\", TO_CSTR(component), ec);\n }\n return ec;\n }\n }\n }\n\n Logger::E(TAG, \"Attempt to launch activity [%s] without category Intent.CATEGORY_LAUNCHER\", TO_CSTR(component));\n Binder::RestoreCallingIdentity(ident);\n return E_SECURITY_EXCEPTION;\n // } finally {\n // Binder.restoreCallingIdentity(ident);\n // }\n}\n\nECode CLauncherAppsImpl::ShowAppDetailsAsUser(\n /* [in] */ IComponentName* component,\n /* [in] */ IRect* sourceBounds,\n /* [in] */ IBundle* opts,\n /* [in] */ IUserHandle* user)\n{\n String str = Object::ToString(user);\n FAIL_RETURN(EnsureInUserProfiles(user,\n String(\"Cannot show app details for unrelated profile \") + str))\n if (!IsUserEnabled(user)) {\n Logger::E(TAG, \"Cannot show app details for disabled profile %s\", str.string());\n }\n\n Int64 ident = Binder::ClearCallingIdentity();\n // try {\n String packageName;\n component->GetPackageName(&packageName);\n AutoPtr helper;\n CUriHelper::AcquireSingleton((IUriHelper**)&helper);\n AutoPtr uri;\n helper->FromParts(String(\"package\"), packageName, String(NULL), (IUri**)&uri);\n AutoPtr intent;\n CIntent::New(ISettings::ACTION_APPLICATION_DETAILS_SETTINGS, uri, (IIntent**)&intent);\n intent->SetFlags(IIntent::FLAG_ACTIVITY_NEW_TASK | IIntent::FLAG_ACTIVITY_CLEAR_TASK |\n IIntent::FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);\n intent->SetSourceBounds(sourceBounds);\n Binder::RestoreCallingIdentity(ident);\n ECode ec = mContext->StartActivityAsUser(intent, opts, user);\n return ec;\n 
// } finally {\n // Binder.restoreCallingIdentity(ident);\n // }\n}\n\nECode CLauncherAppsImpl::ToString(\n /* [out] */ String* str)\n{\n VALIDATE_NOT_NULL(str)\n return Object::ToString(str);\n}\n\n} // namespace Pm\n} // namespace Server\n} // namespace Droid\n} // namespace Elastos\n"}}},{"rowIdx":1893,"cells":{"text":{"kind":"string","value":"using FluentValidation;\nusing FluentValidation.TestHelper;\nusing Survi.Prevention.ApiClient.DataTransferObjects;\nusing Survi.Prevention.ServiceLayer.Import.Lane;\nusing Xunit;\n\nnamespace Survi.Prevention.ServiceLayer.Tests.Import.LaneImportation\n{\n public class LaneGenericCodeImportValidatorTests: AbstractValidator\n {\n\t private readonly LaneGenericCodeValidator validator;\n\n\t public LaneGenericCodeImportValidatorTests()\n\t {\t\t \t\t \n\t\t validator = new LaneGenericCodeValidator();\n\t }\n\t [Fact]\n\t public void IdIsValidWhenNotEmpty()\n\t {\n\t\t validator.ShouldNotHaveValidationErrorFor(genCode => genCode.Id, \"IdGenericCode\");\n\t }\n\n\t [Theory]\n\t [InlineData(\"\")]\n\t [InlineData(\" \")]\n\t [InlineData(null)]\n\t public void IdIsNotValidWhenEmpty(string id)\n\t {\n\t\t validator.ShouldHaveValidationErrorFor(genCode => genCode.Id, id);\n\t }\n\n\t\t[Fact]\n\t public void CodeIsValidWhenNotEmpty()\n\t {\n\t\t validator.ShouldNotHaveValidationErrorFor(genCode => genCode.Code, \"1\");\n\t }\n\n\t [Theory]\n\t [InlineData(\"\")]\n\t [InlineData(\" \")]\n\t [InlineData(null)]\n\t [InlineData(\"CodeTooLong\")]\n\t public void CodeIsInvalidWhenNullEmptyOrTooLong(string code)\n\t {\n\t\t validator.ShouldHaveValidationErrorFor(genCode => genCode.Code, code);\n\t }\n\n\t [Fact]\n\t public void DescriptionIsValidWhenNotEmpty()\n\t {\n\t\t validator.ShouldNotHaveValidationErrorFor(genCode => genCode.Description, \"Generic code\");\n\t }\n\n\t [Theory]\n\t [InlineData(null)]\n\t [InlineData(\"TooLongDescriptionToValidate\")]\n\t public void DescriptionIsInvalidWhenNullEmptyOrTooLong(string description)\n\t 
{\n\t\t validator.ShouldHaveValidationErrorFor(genCode => genCode.Description, description);\n\t }\n }\n}\n"}}},{"rowIdx":1894,"cells":{"text":{"kind":"string","value":"\n\n\n# AWS Upload & Transcribe Local Files \n###### Uploads local audio files to Amazon AWS bucket and starts the transcription job\n\n\n\n### _Future Features_\n```\n1) Save file locally after transcription is completed\n2) Format and save the file as a .docx format \n3) Identify and split multiple speakers and format in the response \n4) Accept audio and video files \n```\n\n\n\n#### Setup Environment \n###### Create a .env file in the src directory and add the following keys \n\n###### The language to transcribe the audio in, by default set to english \nLANG=en-US\n\n###### The AWS access key id \nAWS_ACCESS_KEY_ID\n###### The AWS secret access token \nAWS_SECRET_ACCESS_KEY=\n###### The storage bucket name \nAWS_STORAGE_BUCKET=\n###### The region name that the bucket is located in \nAWS_STORAGE_REGION=\n\n### Setup\n\nClone the repository and install the dependencies.\n\n```\nStep 1 - git clone https://github.com/BradleySeymourSAE/transcribe-audio-file.git \nStep 2 - Get AWS authentication credentials and create a .env file with the above keys\nStep 3 - npm install\nStep 4 - npm start\n```\n\n\n

\n#### Version\n__node@12.18.3__
\n__npm@6.14.6__\n
\n## License\n\nMIT\n"}}},{"rowIdx":1895,"cells":{"text":{"kind":"string","value":"namespace WebCore.API.Models\r\n{\r\n public class Note\r\n {\r\n public string Key {get;set;}\r\n public string Subject {get;set;}\r\n public string Body {get;set;}\r\n }\r\n}"}}},{"rowIdx":1896,"cells":{"text":{"kind":"string","value":"package com.entimer.coronatracker.view.splash\n\nimport android.content.Context\nimport android.content.Intent\nimport android.net.ConnectivityManager\nimport android.os.Bundle\nimport android.widget.Toast\nimport androidx.appcompat.app.AppCompatActivity\nimport com.entimer.coronatracker.R\nimport com.entimer.coronatracker.view.main.MainActivity\n\nclass SplashActivity: AppCompatActivity(), SplashContract.View {\n private lateinit var presenter: SplashPresenter\n\n override fun onCreate(savedInstanceState: Bundle?) {\n super.onCreate(savedInstanceState)\n\n if(!checkNetwork()) {\n Toast.makeText(applicationContext, getString(R.string.splashNetworkFailed), Toast.LENGTH_LONG).show()\n finishAffinity()\n }\n else {\n presenter = SplashPresenter(this)\n presenter.initCountryList(applicationContext)\n }\n }\n\n private fun checkNetwork(): Boolean {\n val manager = getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager\n val networdInfo = manager.activeNetworkInfo\n if(networdInfo != null) {\n val type = networdInfo.type\n if(type == ConnectivityManager.TYPE_MOBILE || type == ConnectivityManager.TYPE_WIFI)\n return true\n }\n return false\n }\n\n override fun onInitFinished() {\n val intent = Intent(applicationContext, MainActivity::class.java)\n startActivity(intent)\n finish()\n }\n\n override fun onInitFailed() {\n Toast.makeText(applicationContext, getString(R.string.splashInitFailed), Toast.LENGTH_LONG).show()\n finishAffinity()\n }\n}"}}},{"rowIdx":1897,"cells":{"text":{"kind":"string","value":"package Agua::Ops::Sge;\nuse Moose::Role;\nuse Method::Signatures::Simple;\n\n#### SUN GRID ENGINE METHODS\n\nmethod stopSgeProcess ($port) 
{\n\t$self->logDebug(\"Ops::stopSgeProcess(port)\");\n\t$self->logDebug(\"port\", $port);\n\t#### INPUT FORMAT: netstat -ntulp | grep sge_*\n\t#### tcp 0 0 0.0.0.0:36472 0.0.0.0:* LISTEN 9855/sge_exec\n\tmy $netstat = qq{netstat -ntulp | grep sge | grep $port};\n\t$self->logDebug(\"netstat\", $netstat);\n\tmy $output = $self->runCommand($netstat);\n\tmy ($pid) = $output =~ /^\\s*\\S+\\s+\\S+\\s+\\S+\\s+[^:]+:\\d+\\s+\\S+\\s+\\S+\\s+(\\d+)\\/\\S+\\s*/;\n\t$self->logDebug(\"pid\", $pid) if defined $pid;\n\t$self->logDebug(\"pid NOT DEFINED. No running SGE port\") if not defined $pid;\n\treturn if not defined $pid;\n\t\n\t$self->killProcess($pid);\n}\n\nmethod killProcess ($pid) {\n\t$self->logError(\"pid is empty\") and exit if $pid eq '';\n\tmy $command = \"kill -9 $pid\";\n\t$self->logDebug(\"command\", $command);\n\t$self->runCommand($command);\n}\n\nmethod qmasterRunning ($port) {\n#### VERIFY THAT THE SGE MASTER DAEMON IS LISTENING AT CORRECT PORT\n\t$self->logDebug(\"port\", $port);\n\treturn $self->sgeProcessListening($port, \"sge_qmaster\");\n}\n\nmethod execdRunning ($port) {\n#### VERIFY THAT SGE EXEC DAEMON IS LISTENING AT CORRECT PORT\n\t$self->logDebug(\"port\", $port);\n\treturn $self->sgeProcessListening($port, \"sge_execd\");\n}\n\nmethod sgeProcessListening ($port, $pattern) {\n#### LISTENER VERIFIER. 
LATER: REDO WITH REGEX\n\t$self->logDebug(\"port\", $port);\n\t$self->logDebug(\"pattern\", $pattern) if defined $pattern;\n\t$self->logError(\"Neither port nor pattern are defined\") and exit if not defined $port and not defined $pattern;\n\n\tmy $command = \"netstat -ntulp \";\n\t$command .= \"| grep $port \" if defined $port;\n\t$command .= \"| grep $pattern \" if defined $pattern;\n\n\t#### EXPECTED OUTPUT FORMAT:\n\t####tcp 0 0 0.0.0.0:36361 0.0.0.0:* LISTEN 5920/sge_qmaster\n\t####tcp 0 0 0.0.0.0:36362 0.0.0.0:* LISTEN 4780/sge_execd\n\t\n\tmy ($result) = $self->runCommand($command);\t\n\t$result =~ s/\\s+$//;\n\t$self->logDebug(\"result\", $result);\n\n\treturn $result if defined $result and $result;\n\treturn 0;\n}\n\n\n1;\n"}}},{"rowIdx":1898,"cells":{"text":{"kind":"string","value":"#!/usr/bin/env zsh\n\nbindkey -e\n\n# Black magic to set terminal modes properly\n# See: https://github.com/robbyrussell/oh-my-zsh/blob/3705d47bb3f3229234cba992320eadc97a221caf/lib/key-bindings.zsh#L5\nif (( ${+terminfo[smkx]} )) && (( ${+terminfo[rmkx]} )); then\n function zle-line-init() {\n echoti smkx\n }\n function zle-line-finish() {\n echoti rmkx\n }\n zle -N zle-line-init\n zle -N zle-line-finish\nfi\n\nautoload -U up-line-or-beginning-search\nzle -N up-line-or-beginning-search\nbindkey \"${terminfo[kcuu1]}\" up-line-or-beginning-search\n\nautoload -U down-line-or-beginning-search\nzle -N down-line-or-beginning-search\nbindkey \"${terminfo[kcud1]}\" down-line-or-beginning-search\n\n\nbindkey '^?' 
backward-delete-char\nif [[ \"${terminfo[kdch1]}\" != \"\" ]]; then\n bindkey \"${terminfo[kdch1]}\" delete-char\nelse\n bindkey \"^[[3~\" delete-char\n bindkey \"^[3;5~\" delete-char\n bindkey \"\\e[3~\" delete-char\nfi\n"}}},{"rowIdx":1899,"cells":{"text":{"kind":"string","value":"class SurveyTaker < ActiveRecord::Base\n\n\tdef self.search(search)\n\t\tputs search.class\n\t\twhere(number: search)\n\tend\nend\n"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":18,"numItemsPerPage":100,"numTotalItems":43696,"offset":1800,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1Njk2NzQzNSwic3ViIjoiL2RhdGFzZXRzL1ppaGFvLUxpL0NvZGUiLCJleHAiOjE3NTY5NzEwMzUsImlzcyI6Imh0dHBzOi8vaHVnZ2luZ2ZhY2UuY28ifQ.dO93FpqWxcxVoFv3v91NBjtjiMp9itlJMpxa8G-oIvXbcR1g1MUqX9bFJYA9tUsHl3lLNf88oaOXm2M8s3ZeCw","displayUrls":true},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
text
stringlengths
27
775k
module Rubillow module Models # Common data for responses containing zpid's module Zpidable # @return [String] ZPID of property attr_accessor :zpid protected # @private def extract_zpid(xml) # TODO: clean up this logic if !xml.xpath('//response/zpid').empty? selector = '//response/zpid' elsif !xml.xpath('//result/zpid').empty? selector = '//result/zpid' else selector = '//zpid' end @zpid = xml.xpath(selector).first.text end end end end
mod client; pub mod code; mod game; pub mod shift_code; pub use crate::{ client::Client, code::Code, game::Game, shift_code::ShiftCode, }; /// Library Result Type pub type OrczResult<T> = Result<T, OrczError>; /// Library Error Type #[derive(Debug, thiserror::Error)] pub enum OrczError { /// Reqwest HTTP Error #[error("{0}")] Reqwest(#[from] reqwest::Error), /// Invalid HTTP StatusCode #[error("invalid status '{0}'")] InvalidStatus(reqwest::StatusCode), /// Error Parsing a Table /// /// This is usually a library error; update this lib. #[error("invalid table")] TableParse, /// a tokio task failed #[error("{0}")] TokioJoin(#[from] tokio::task::JoinError), } #[cfg(test)] mod tests { use super::*; #[tokio::test] async fn it_works_bl() { let client = Client::new(); let codes = client.get_shift_codes(Game::Borderlands).await.unwrap(); dbg!(codes); } #[tokio::test] async fn it_works_bl2() { let client = Client::new(); let codes = client.get_shift_codes(Game::Borderlands2).await.unwrap(); dbg!(codes); } #[tokio::test] async fn it_works_blps() { let client = Client::new(); let codes = client .get_shift_codes(Game::BorderlandsPreSequel) .await .unwrap(); dbg!(codes); } #[tokio::test] async fn it_works_bl3() { let client = Client::new(); let codes = client.get_shift_codes(Game::Borderlands3).await.unwrap(); dbg!(codes); } }
## Signs a file param([string] $file = $(throw "Please specify a filename.")) $cert = @(Get-ChildItem cert:\CurrentUser\My -CodeSigningCert)[0] Set-AuthenticodeSignature $file $cert
#![no_std] #![no_main] #![feature(naked_functions)] #![feature(alloc_error_handler)] #![feature(llvm_asm)] #![feature(asm)] #![feature(global_asm)] mod hal; #[cfg(not(test))] use core::alloc::Layout; #[cfg(not(test))] use core::panic::PanicInfo; use linked_list_allocator::LockedHeap; use rustsbi::{print, println}; use riscv::register::{ mcause::{self, Exception, Interrupt, Trap}, medeleg, mepc, mhartid, mideleg, mie, mip, misa::{self, MXL}, mstatus::{self, MPP}, mtval, mtvec::{self, TrapMode}, }; #[global_allocator] static ALLOCATOR: LockedHeap = LockedHeap::empty(); #[cfg(not(test))] #[panic_handler] fn panic(info: &PanicInfo) -> ! { let hart_id = mhartid::read(); // 输出的信息大概是“[rustsbi-panic] hart 0 panicked at ...” println!("[rustsbi-panic] hart {} {}", hart_id, info); println!("[rustsbi-panic] system shutdown scheduled due to RustSBI panic"); use rustsbi::Reset; hal::Reset.system_reset( rustsbi::reset::RESET_TYPE_SHUTDOWN, rustsbi::reset::RESET_REASON_SYSTEM_FAILURE ); loop { } } #[cfg(not(test))] #[alloc_error_handler] fn oom(_layout: Layout) -> ! { loop {} } lazy_static::lazy_static! { // 最大的硬件线程编号;只在启动时写入,跨核软中断发生时读取 pub static ref MAX_HART_ID: spin::Mutex<usize> = spin::Mutex::new(compiled_max_hartid()); } // #[export_name = "_mp_hook"] pub extern "C" fn mp_hook() -> bool { let hartid = mhartid::read(); if hartid == 0 { true } else { use riscv::asm::wfi; use hal::Clint; unsafe { let mut clint = Clint::new(0x200_0000 as *mut u8); // Clear IPI clint.clear_soft(hartid); // Start listening for software interrupts mie::set_msoft(); loop { wfi(); if mip::read().msoft() { break; } } // Stop listening for software interrupts mie::clear_msoft(); // Clear IPI clint.clear_soft(hartid); } false } } #[export_name = "_start"] #[link_section = ".text.entry"] // this is stable #[naked] // extern "C" for Rust ABI is by now unsupported for naked functions unsafe extern "C" fn start() -> ! 
{ asm!( " csrr a2, mhartid lui t0, %hi(_max_hart_id) add t0, t0, %lo(_max_hart_id) bgtu a2, t0, _start_abort la sp, _stack_start lui t0, %hi(_hart_stack_size) add t0, t0, %lo(_hart_stack_size) .ifdef __riscv_mul mul t0, a2, t0 .else beqz a2, 2f // Jump if single-hart mv t1, a2 mv t2, t0 1: add t0, t0, t2 addi t1, t1, -1 bnez t1, 1b 2: .endif sub sp, sp, t0 csrw mscratch, zero j main _start_abort: wfi j _start_abort ", options(noreturn)) } #[export_name = "main"] fn main() -> ! { // Ref: https://github.com/qemu/qemu/blob/aeb07b5f6e69ce93afea71027325e3e7a22d2149/hw/riscv/boot.c#L243 let dtb_pa = unsafe { let dtb_pa: usize; llvm_asm!("":"={a1}"(dtb_pa)); dtb_pa }; if mp_hook() { // init } /* setup trap */ extern "C" { fn _start_trap(); } unsafe { mtvec::write(_start_trap as usize, TrapMode::Direct); } /* main function start */ extern "C" { static mut _sheap: u8; static _heap_size: u8; } if mhartid::read() == 0 { let sheap = unsafe { &mut _sheap } as *mut _ as usize; let heap_size = unsafe { &_heap_size } as *const u8 as usize; unsafe { ALLOCATOR.lock().init(sheap, heap_size); } // 其实这些参数不用提供,直接通过pac库生成 let serial = hal::Ns16550a::new(0x10000000, 0, 11_059_200, 115200); // use through macro use rustsbi::legacy_stdio::init_legacy_stdio_embedded_hal; init_legacy_stdio_embedded_hal(serial); let clint = hal::Clint::new(0x2000000 as *mut u8); use rustsbi::init_ipi; init_ipi(clint); // todo: do not create two instances let clint = hal::Clint::new(0x2000000 as *mut u8); use rustsbi::init_timer; init_timer(clint); use rustsbi::init_reset; init_reset(hal::Reset); } // 把S的中断全部委托给S层 unsafe { mideleg::set_sext(); mideleg::set_stimer(); mideleg::set_ssoft(); medeleg::set_instruction_misaligned(); medeleg::set_breakpoint(); medeleg::set_user_env_call(); medeleg::set_instruction_page_fault(); medeleg::set_load_page_fault(); medeleg::set_store_page_fault(); medeleg::set_instruction_fault(); medeleg::set_load_fault(); medeleg::set_store_fault(); mie::set_mext(); // 不打开mie::set_mtimer 
mie::set_msoft(); } if mhartid::read() == 0 { println!("[rustsbi] RustSBI version {}", rustsbi::VERSION); println!("{}", rustsbi::LOGO); println!("[rustsbi] Platform: QEMU (Version {})", env!("CARGO_PKG_VERSION")); let isa = misa::read(); if let Some(isa) = isa { let mxl_str = match isa.mxl() { MXL::XLEN32 => "RV32", MXL::XLEN64 => "RV64", MXL::XLEN128 => "RV128", }; print!("[rustsbi] misa: {}", mxl_str); for ext in 'A'..='Z' { if isa.has_extension(ext) { print!("{}", ext); } } println!(""); } println!("[rustsbi] mideleg: {:#x}", mideleg::read().bits()); println!("[rustsbi] medeleg: {:#x}", medeleg::read().bits()); let mut guard = MAX_HART_ID.lock(); *guard = unsafe { count_harts(dtb_pa) }; drop(guard); println!("[rustsbi] Kernel entry: 0x80200000"); } unsafe { mepc::write(s_mode_start as usize); mstatus::set_mpp(MPP::Supervisor); rustsbi::enter_privileged(mhartid::read(), dtb_pa) } } #[naked] #[link_section = ".text"] // must add link section for all naked functions unsafe extern "C" fn s_mode_start() -> ! 
{ asm!(" 1: auipc ra, %pcrel_hi(1f) ld ra, %pcrel_lo(1b)(ra) jr ra .align 3 1: .dword 0x80200000 ", options(noreturn)) } unsafe fn count_harts(dtb_pa: usize) -> usize { use device_tree::{DeviceTree, Node}; const DEVICE_TREE_MAGIC: u32 = 0xD00DFEED; // 遍历“cpu_map”结构 // 这个结构的子结构是“处理核簇”(cluster) // 每个“处理核簇”的子结构分别表示一个处理器核 fn enumerate_cpu_map(cpu_map_node: &Node) -> usize { let mut tot = 0; for cluster_node in cpu_map_node.children.iter() { let name = &cluster_node.name; let count = cluster_node.children.iter().count(); // 会输出:Hart count: cluster0 with 2 cores // 在justfile的“threads := "2"”处更改 println!("[rustsbi-dtb] Hart count: {} with {} cores", name, count); tot += count; } tot } #[repr(C)] struct DtbHeader { magic: u32, size: u32 } let header = &*(dtb_pa as *const DtbHeader); // from_be 是大小端序的转换(from big endian) let magic = u32::from_be(header.magic); if magic == DEVICE_TREE_MAGIC { let size = u32::from_be(header.size); // 拷贝数据,加载并遍历 let data = core::slice::from_raw_parts(dtb_pa as *const u8, size as usize); if let Ok(dt) = DeviceTree::load(data) { if let Some(cpu_map) = dt.find("/cpus/cpu-map") { return enumerate_cpu_map(cpu_map) } } } // 如果DTB的结构不对(读不到/cpus/cpu-map),返回默认的8个核 let ans = compiled_max_hartid(); println!("[rustsbi-dtb] Could not read '/cpus/cpu-map' from 'dtb_pa' device tree root; assuming {} cores", ans); ans } #[inline] fn compiled_max_hartid() -> usize { let ans; unsafe { asm!(" lui {ans}, %hi(_max_hart_id) add {ans}, {ans}, %lo(_max_hart_id) ", ans = out(reg) ans) }; ans } global_asm!( " .equ REGBYTES, 8 .macro STORE reg, offset sd \\reg, \\offset*REGBYTES(sp) .endm .macro LOAD reg, offset ld \\reg, \\offset*REGBYTES(sp) .endm .section .text .global _start_trap .p2align 2 _start_trap: csrrw sp, mscratch, sp bnez sp, 1f /* from M level, load sp */ csrrw sp, mscratch, zero 1: addi sp, sp, -16 * REGBYTES STORE ra, 0 STORE t0, 1 STORE t1, 2 STORE t2, 3 STORE t3, 4 STORE t4, 5 STORE t5, 6 STORE t6, 7 STORE a0, 8 STORE a1, 9 STORE a2, 10 STORE a3, 11 
STORE a4, 12 STORE a5, 13 STORE a6, 14 STORE a7, 15 mv a0, sp call _start_trap_rust LOAD ra, 0 LOAD t0, 1 LOAD t1, 2 LOAD t2, 3 LOAD t3, 4 LOAD t4, 5 LOAD t5, 6 LOAD t6, 7 LOAD a0, 8 LOAD a1, 9 LOAD a2, 10 LOAD a3, 11 LOAD a4, 12 LOAD a5, 13 LOAD a6, 14 LOAD a7, 15 addi sp, sp, 16 * REGBYTES csrrw sp, mscratch, sp mret " ); // #[doc(hidden)] // #[export_name = "_mp_hook"] // pub extern "Rust" fn _mp_hook() -> bool { // match mhartid::read() { // 0 => true, // _ => loop { // unsafe { riscv::asm::wfi() } // }, // } // } #[allow(unused)] #[derive(Debug)] struct TrapFrame { ra: usize, t0: usize, t1: usize, t2: usize, t3: usize, t4: usize, t5: usize, t6: usize, a0: usize, a1: usize, a2: usize, a3: usize, a4: usize, a5: usize, a6: usize, a7: usize, } #[export_name = "_start_trap_rust"] extern "C" fn start_trap_rust(trap_frame: &mut TrapFrame) { let cause = mcause::read().cause(); match cause { Trap::Exception(Exception::SupervisorEnvCall) => { let params = [trap_frame.a0, trap_frame.a1, trap_frame.a2, trap_frame.a3]; // Call RustSBI procedure let ans = rustsbi::ecall(trap_frame.a7, trap_frame.a6, params); // Return the return value to TrapFrame trap_frame.a0 = ans.error; trap_frame.a1 = ans.value; // Skip ecall instruction mepc::write(mepc::read().wrapping_add(4)); } Trap::Interrupt(Interrupt::MachineSoft) => { // 机器软件中断返回给S层 unsafe { mip::set_ssoft(); mie::clear_msoft(); } } Trap::Interrupt(Interrupt::MachineTimer) => { // 机器时间中断返回给S层 unsafe { mip::set_stimer(); mie::clear_mtimer(); } } Trap::Exception(Exception::IllegalInstruction) => { #[inline] unsafe fn get_vaddr_u32(vaddr: usize) -> u32 { let mut ans: u32; llvm_asm!(" li t0, (1 << 17) mv t1, $1 csrrs t0, mstatus, t0 lwu t1, 0(t1) csrw mstatus, t0 mv $0, t1 " :"=r"(ans) :"r"(vaddr) :"t0", "t1"); ans } let vaddr = mepc::read(); let ins = unsafe { get_vaddr_u32(vaddr) }; if ins & 0xFFFFF07F == 0xC0102073 { // rdtime let rd = ((ins >> 7) & 0b1_1111) as u8; // todo: one instance only let clint = 
hal::Clint::new(0x2000000 as *mut u8); let time_usize = clint.get_mtime() as usize; match rd { 10 => trap_frame.a0 = time_usize, 11 => trap_frame.a1 = time_usize, 12 => trap_frame.a2 = time_usize, 13 => trap_frame.a3 = time_usize, 14 => trap_frame.a4 = time_usize, 15 => trap_frame.a5 = time_usize, 16 => trap_frame.a6 = time_usize, 17 => trap_frame.a7 = time_usize, 5 => trap_frame.t0 = time_usize, 6 => trap_frame.t1 = time_usize, 7 => trap_frame.t2 = time_usize, 28 => trap_frame.t3 = time_usize, 29 => trap_frame.t4 = time_usize, 30 => trap_frame.t5 = time_usize, 31 => trap_frame.t6 = time_usize, _ => panic!("invalid target"), } mepc::write(mepc::read().wrapping_add(4)); // 跳过指令 } else { #[cfg(target_pointer_width = "64")] panic!("invalid instruction, mepc: {:016x?}, instruction: {:016x?}", mepc::read(), ins); #[cfg(target_pointer_width = "32")] panic!("invalid instruction, mepc: {:08x?}, instruction: {:08x?}", mepc::read(), ins); } } #[cfg(target_pointer_width = "64")] cause => panic!( "Unhandled exception! mcause: {:?}, mepc: {:016x?}, mtval: {:016x?}, trap frame: {:p}, {:x?}", cause, mepc::read(), mtval::read(), &trap_frame as *const _, trap_frame ), #[cfg(target_pointer_width = "32")] cause => panic!( "Unhandled exception! mcause: {:?}, mepc: {:08x?}, mtval: {:08x?}, trap frame: {:x?}", cause, mepc::read(), mtval::read(), trap_frame ), } }
set -v sudo apt-get update sudo apt-get install -y git linux-image-extra-`uname -r` sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 36A1D7869245C8950F966E92D8576A8BA88D21E9 sudo sh -c "echo deb http://get.docker.io/ubuntu docker main > /etc/apt/sources.list.d/docker.list" sudo apt-get update sudo apt-get install -y lxc-docker
//$ class TopWindow { public: virtual void State(int reason); private: TopWindowFrame *frame; void SyncRect(); void SyncFrameRect(const Rect& r); void DestroyFrame(); friend class Ctrl; public: void GripResize(); //$ };
package provider import ( "context" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "os" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) var testAccProvider *schema.Provider var testAccProviderFactories = map[string]func() (*schema.Provider, error){ "runscope": func() (*schema.Provider, error) { return Provider(), nil }, } const testAccBucketNamePrefix = "terraform-runscope-testacc" func init() { testAccProvider = Provider() testAccProviderFactories = map[string]func() (*schema.Provider, error){ "runscope": func() (*schema.Provider, error) { return Provider(), nil }, } } func TestMain(m *testing.M) { resource.TestMain(m) } func TestProvider(t *testing.T) { if err := Provider().InternalValidate(); err != nil { t.Fatalf("err: %s", err) } } func TestProviderImpl(t *testing.T) { var _ = Provider() } func testAccPreCheck(t *testing.T) { ctx := context.TODO() if v := os.Getenv("RUNSCOPE_ACCESS_TOKEN"); v == "" { t.Fatal("RUNSCOPE_ACCESS_TOKEN must be set for acceptance tests") } if v := os.Getenv("RUNSCOPE_TEAM_ID"); v == "" { t.Fatal("RUNSCOPE_TEAM_ID must be set for acceptance tests") } diags := testAccProvider.Configure(ctx, terraform.NewResourceConfigRaw(nil)) if diags.HasError() { t.Fatal(diags[0].Summary) } return } func testAccRandomBucketName() string { return acctest.RandomWithPrefix("terraform-runscope-testacc") }
package me.aleiv.core.paper.tablist;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

import com.google.gson.Gson;
import com.google.gson.JsonObject;

import org.bukkit.Bukkit;
import org.bukkit.entity.Player;

import lombok.Getter;
import me.aleiv.core.paper.Core;
import me.aleiv.core.paper.teams.objects.Team;
import me.aleiv.core.paper.utilities.PlayerDBUtil;
import net.md_5.bungee.api.ChatColor;

/**
 * Tablist generator for the Dedsafio event: renders up to 80 tab slots either
 * as per-player score lines ("ffa" dataset) or as team blocks (team name +
 * points followed by one line per member). Player names are resolved through
 * Bukkit first, then a Redis-backed cache, then asynchronously via playerdb.co.
 */
public class DedsafioTablistGenerator extends TablistGenerator {

    // UUID -> display name cache, shared across instances; pre-warmed from the
    // Redis hash "uuids:names" in the constructor. The literal string "null"
    // is used as a sentinel for names that could not be resolved.
    private @Getter static ConcurrentHashMap<UUID, String> cachedNames = new ConcurrentHashMap<>();
    private static final Gson gson = new Gson();
    // Private-use glyph rendered by the resource pack as a star icon.
    private static final String STAR = Character.toString('\uEAA6');
    // Format templates: team header, team member line, and ffa (solo) line.
    private static final String teamTag = ChatColor.of("#59e4fc") + "Team %s " + ChatColor.WHITE + "%d" + ChatColor.RESET + STAR;
    private static final String teamMemberTag = ChatColor.of("#fef1aa") + "%s";
    private static final String ffaTag = "%d" + STAR + " " + ChatColor.of("#fef1aa") + "%s";

    /** Orders teams by points, highest first. */
    class SortByPoints implements Comparator<Team> {
        @Override
        public int compare(Team a, Team b) {
            return b.getPoints() - a.getPoints();
        }
    }

    /**
     * Builds the generator and eagerly loads the whole "uuids:names" Redis
     * hash into {@link #cachedNames} (same logic as {@link #recacheNames()},
     * but using the plugin reference since Core.getInstance() may not be set
     * yet during construction — TODO confirm).
     */
    public DedsafioTablistGenerator(Core plugin) {
        super(plugin);
        plugin.getTeamManager().getRedisSyncConnection().hgetall("uuids:names").entrySet().forEach(entry -> {
            var name = gson.fromJson(entry.getValue(), JsonObject.class);
            if (name != null) {
                var actualName = name.get("name");
                if (actualName != null && !actualName.isJsonNull()) {
                    cachedNames.put(UUID.fromString(entry.getKey()), actualName.getAsString());
                    return;
                }
            }
            // Malformed or missing entry: record the "null" sentinel.
            cachedNames.put(UUID.fromString(entry.getKey()), "null");
        });
    }

    /** Re-reads the entire Redis name hash into the local cache. */
    public static void recacheNames() {
        Core.getInstance().getTeamManager().getRedisSyncConnection().hgetall("uuids:names").entrySet()
                .forEach(entry -> {
                    var name = gson.fromJson(entry.getValue(), JsonObject.class);
                    if (name != null) {
                        var actualName = name.get("name");
                        if (actualName != null && !actualName.isJsonNull()) {
                            cachedNames.put(UUID.fromString(entry.getKey()), actualName.getAsString());
                            return;
                        }
                    }
                    cachedNames.put(UUID.fromString(entry.getKey()), "null");
                });
    }

    /** Same static header and footer for every player. */
    @Override
    public String[] generateHeaderFooter(Player paramPlayer) {
        return List.of("§7§lDEDSAFIO", "§7§lDEDSAFIO").toArray(new String[0]);
    }

    /**
     * Produces the 80 tab entries. In "ffa" mode each team is a single player
     * and gets one "points + name" line; otherwise each team gets a header
     * line followed by its members, with `i` tracking the next free slot
     * across both the outer and inner loops.
     */
    @Override
    public TabEntry[] generateBars(Player paramPlayer) {
        var array = new TabEntry[80];
        int i = 0;
        // Obtain all the entries
        var entries = new ArrayList<Team>(plugin.getTeamManager().getTeamsMap().values());
        // Sort them by points
        Collections.sort(entries, new SortByPoints());
        // Handle the ffa case
        if (plugin.getTeamManager().getDataset().equalsIgnoreCase("ffa")) {
            var iter = entries.iterator();
            while (iter.hasNext() && i < 80) {
                var team = iter.next();
                var id = team.getMembers().get(0);
                var name = getNameForId(id);
                // Truncate to at most 12 characters so columns stay aligned.
                var shortName = name.substring(0, Math.min(12, name.length()));
                var entry = new TabEntry(String.format(ffaTag, team.getPoints(), shortName));
                array[i] = entry;
                i++;
            }
        } else {
            // Handle team case
            var iter = entries.iterator();
            while (iter.hasNext() && i < 80) {
                var next = iter.next();
                array[i] = new TabEntry(String.format(teamTag, next.getTeamName(), next.getPoints()));
                for (var member : next.getMembers()) {
                    i++;
                    if (i < 80) {
                        array[i] = new TabEntry(String.format(teamMemberTag, getNameForId(member)));
                    } else {
                        break;
                    }
                }
                // Advance past the last member line written above.
                i++;
            }
        }
        // Fill the remaining slots with blank entries.
        for (; i < 80; i++) {
            array[i] = new TabEntry(" ");
        }
        return array;
    }

    /**
     * Resolves a display name: Bukkit's offline-player record first, then the
     * local cache; as a last resort kicks off an async playerdb.co lookup and
     * returns the "null" sentinel for this tick.
     */
    private String getNameForId(UUID id) {
        var player = Bukkit.getOfflinePlayer(id);
        if (player.getName() != null) {
            return player.getName();
        }
        var cachedName = cachedNames.get(id);
        if (cachedName != null) {
            return cachedName;
        }
        // If the player is not cached, we need to get it from playerdb.co
        cacheName(id);
        return "null";
    }

    /** Asynchronously fetches the name and persists it locally and in Redis. */
    private static void cacheName(UUID uuid) {
        PlayerDBUtil.getNameFromIdAsync(uuid).thenAccept(name -> {
            if (name != null) {
                cachedNames.put(uuid, name);
                writeNameOntoRedisCache(uuid, name);
            }
        });
    }

    /** Stores {name, timeStamp} as JSON under the shared Redis hash. */
    private static void writeNameOntoRedisCache(UUID uuid, String name) {
        var json = new JsonObject();
        json.addProperty("name", name);
        json.addProperty("timeStamp", System.currentTimeMillis());
        Core.getInstance().getTeamManager().getRedisSyncConnection().hset("uuids:names", uuid.toString(), gson.toJson(json));
    }

    /** Writes a joining player's name to Redis unless already cached. */
    public static void writeIfNotPresent(Player player) {
        if (!cachedNames.containsKey(player.getUniqueId())) {
            writeNameOntoRedisCache(player.getUniqueId(), player.getName());
        }
    }
}
package com.emaginalabs.wecodeproperties

import org.scalacheck.Gen
import org.scalatest.prop.PropertyChecks
import org.scalatest.{FlatSpec, Matchers}

import scala.util.{Failure, Success, Try}

/**
 * Teaching spec exploring ScalaCheck/ScalaTest property testing: logging of
 * generated values, execution counts, shrinking, and what happens with overly
 * restrictive or hand-rolled generators. Several tests intentionally wrap a
 * failing property in Try — the failure itself is the lesson.
 */
class PlayingWithLibrarySpec extends FlatSpec with PropertyChecks with Matchers {

  "Playing with the library" should "add logs to know how it executes" in {
    forAll() { (a: Int, b: Int) =>
      // Print each generated pair to observe how forAll drives the property.
      println(s"Just executed the test with values [a: $a, b: $b]")
      a + b shouldBe b + a
    }
  }

  it should "allow us to modify the number of executions" in {
    var numOfExecutions = 0;
    // minSuccessful raises the default run count to exactly 500 successes.
    forAll(minSuccessful(500)) { (a: Int) =>
      numOfExecutions += 1
      succeed
    }
    numOfExecutions shouldBe 500
  }

  it should "make a test fail in order to see how shrink works" in {
    // Deliberately failing property: shrinking should report minimal a/b.
    val result = Try {
      forAll() { (a: Int, b: Int) =>
        println(s"Just executed the test with values [a: $a, b: $b]")
        if (a > 5 || b > 7) {
          fail("Just fail")
        }
      }
    }
    result match {
      case Failure(exception) => print(exception.getMessage)
      case Success(_) => fail("The property should fail but it didn't")
    }
  }

  it should "fail when a generator is too restrictive" in {
    // filter discards almost every generated Int (only 4 survives), so
    // ScalaCheck gives up after too many discarded values.
    val result = Try {
      forAll(
        Gen
          .choose(Int.MinValue, Int.MaxValue)
          .filter(a => { a > 3 && a < 5 })) { (a: Int) =>
        succeed
      }
    }
    result match {
      case Failure(exception) => print(exception.getMessage)
      case Success(_) => fail("The property should fail because generator is to restritive")
    }
  }

  /**
   * Hand-rolled range generator mapping an arbitrary Int into [|a|, |b|).
   * NOTE(review): Math.abs(Int.MinValue) is still negative, and the upper
   * bound is exclusive; the `x` binding is unused — quirks appear intentional
   * for the exercise, confirm before reusing this elsewhere.
   */
  def numericRangeGenerator(a: Int, b: Int): Gen[Int] =
    for {
      generated <- Gen.chooseNum(Int.MinValue, Int.MaxValue)
      absA = Math.abs(a)
      absB = Math.abs(b)
      i = Math.abs(generated) % (absB - absA)
      number = i + absA
      x = number
    } yield (number)

  it should "fail when a generator is limited but not restrictive" in {
    forAll(numericRangeGenerator(3, 5)) { (a: Int) =>
      a should be >= 3
      a should be <= 5
    }
  }
}
//
//  IRILaunchRouterName.h
//  IRiskSDK
//
//  Created by owen on 2020/8/13.
//  Copyright © 2020 owen. All rights reserved.
//

#import <Foundation/Foundation.h>

// Router-name constant for the "inspect" launch route. Declared nullable and
// outside the NS_ASSUME_NONNULL region on purpose — TODO confirm whether the
// definition may legitimately be nil at runtime.
FOUNDATION_EXTERN NSString * _Nullable const IRROUTERNAME_INSPECT;

NS_ASSUME_NONNULL_BEGIN

// Namespace-style holder for launch-router name constants; carries no
// instance behavior of its own.
@interface IRILaunchRouterName : NSObject

@end

NS_ASSUME_NONNULL_END
package com.tencent.bk.devops.plugin.utils

import java.util.Locale

/**
 * Utilities for probing the machine environment the plugin runs on.
 */
object MachineEnvUtils {

    /**
     * Detects the host operating system from the `os.name` system property.
     *
     * @return one of [OSType.MAC_OS], [OSType.WINDOWS], [OSType.LINUX] or
     *         [OSType.OTHER].
     */
    fun getOS(): String {
        // Normalize with an explicit English locale so matching is stable
        // regardless of the JVM default locale (e.g. Turkish dotless-i).
        val osName = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH)
        return when {
            // `contains` replaces the noisier `indexOf(...) >= 0` checks.
            osName.contains("mac") || osName.contains("darwin") -> OSType.MAC_OS
            osName.contains("win") -> OSType.WINDOWS
            // "nux" matches "Linux"/"GNU/Linux" style names.
            osName.contains("nux") -> OSType.LINUX
            else -> OSType.OTHER
        }
    }

    /** String constants naming the supported OS families. */
    object OSType {
        const val WINDOWS = "WINDOWS"
        const val LINUX = "LINUX"
        const val MAC_OS = "MAC_OS"
        const val OTHER = "OTHER"
    }
}
<?php
/**
 * [PHPFOX_HEADER]
 */

defined('PHPFOX') or exit('NO DICE!');

/**
 * AJAX endpoints for the RSS module: toggling feed flags, persisting drag
 * ordering, and rendering the user's RSS log block. The toggle/order handlers
 * intentionally return no payload — success is implied by the HTTP response.
 *
 * @copyright [PHPFOX_COPYRIGHT]
 * @author Raymond Benc
 * @package Module_Rss
 * @version $Id: ajax.class.php 704 2009-06-21 18:50:42Z Raymond_Benc $
 */
class Rss_Component_Ajax_Ajax extends Phpfox_Ajax
{
	// Toggles a feed's "active" flag. Empty body is deliberate: the service
	// call does all the work and no client response is needed.
	public function updateFeedActivity()
	{
		if (Phpfox::getService('rss.process')->updateActivity($this->get('id'), $this->get('active')))
		{
		}
	}

	// Toggles a feed's site-wide visibility flag (fire-and-forget, as above).
	public function updateSiteWide()
	{
		if (Phpfox::getService('rss.process')->updateSiteWide($this->get('id'), $this->get('active')))
		{
		}
	}

	// Persists the new feed ordering sent as 'val' (fire-and-forget).
	public function ordering()
	{
		if (Phpfox::getService('rss.process')->updateOrder($this->get('val')))
		{
		}
	}

	// Persists the new feed-group ordering (fire-and-forget).
	public function groupOrdering()
	{
		if (Phpfox::getService('rss.group.process')->updateOrder($this->get('val')))
		{
		}
	}

	// Renders the RSS log block for the currently logged-in user; isUser(true)
	// aborts the request if the visitor is not authenticated.
	public function log()
	{
		Phpfox::isUser(true);

		Phpfox::getBlock('rss.log', array(
				'rss' => array(
					'table' => 'rss_log_user',
					'field' => 'user_id',
					'key' => Phpfox::getUserId()
				)
			)
		);
	}
}

?>
Один из красивейших портов на сегодняшний день. Поддерживает 3D Модели, текстуры высокого разрешения, прыжки, обзор с помощью мыши, навороченные спецэффекты, игру через интернет или по локальной сети и многое другое. В общем, рекомендуется всем, кто хочет совместить динамичный геймплей классического Doomа и достаточно современную графику. Скачанные файлы с расширением **pk3** не пытаться распаковывать, а кидать непосредственно в папку **addons** внутри папки  **snowberry** ZIP-файлы с дополнительными текстурами - **распаковывать** туда же. [Линукс-версии.](http://dengine.net/linux)  
/** * @file btree.h * Definition of a B-tree class which can be used as a generic dictionary * (insert-only). Designed to take advantage of caching to be faster than * standard balanced binary search trees. */ #pragma once #include <vector> #include <iostream> #include <string> #include <sstream> /** * BTree class. Provides interfaces for inserting and finding elements in * B-tree. * * @author Matt Joras * @date Winter 2013 */ template <class K, class V> class BTree { private: /** * A fancy key-value pair which acts as elements in the BTree. * Can be compared with <, >, ==. Additionally they can be compared against * a K with <, > and == based on its key. * */ struct DataPair { K key; V value; /** * Constructs a DataPair from the given key and value. * @param key The key of the pair. * @param value The value of the pair. */ DataPair(K key, V value) : key(key), value(value) { } /** * Less than operator for a DataPair. The object is less than another * if its key is less than the other's key. * @param rhs The right hand of the < operator. * @return true if the object's key is less than rhs' key, false * otherwise. */ inline bool operator<(const DataPair& rhs) const { return this->key < rhs.key; } /** * Less than operator for a DataPair and a K. * @param rhs The right hand side (K) of the < operator. * @return true if the object's key is less than rhs, false otherwise. */ inline bool operator<(const K& rhs) const { return this->key < rhs; } /** * Less than operator for a K and a DataPair. * @param lhs The left hand side (K) of the < operator. * @param rhs The right hand side (DataPair) of the < operator. * @return true if lhs is less than rhs's key, false otherwise. */ inline friend bool operator<(const K& lhs, const DataPair& rhs) { return lhs < rhs.key; } /** * Greater than operator for a DataPair. DataPair is greater than another * if its key is greater than the other's key. * @param rhs The right hand of the > operator. 
* @return true if the object's key is greater than rhs's key, false otherwise. */ inline bool operator>(const DataPair& rhs) const { return this->key > rhs.key; } /** * Greater than operator for a K and a DataPair. * @param lhs The left hand side (K) of the > operator. * @param rhs The right hand side (DataPair) of the > operator. * @return true if lhs is greater than rhs's key, false otherwise. */ inline friend bool operator>(const K& lhs, const DataPair& rhs) { return lhs > rhs.key; } /** * Greater than operator for a DataPair and a K. * @param rhs The right hand side (K) of the > operator. * @return true if the object's key is greater than rhs, false otherwise. */ inline bool operator>(const K& rhs) const { return this->key > rhs; } /** * Equality operator for a DataPair. One is equal to another * if its key is equal to the other's key. * @param rhs The right hand of the == operator. * @return true if the object's key is greater than rhs's key, false otherwise. */ inline bool operator==(const DataPair& rhs) const { return this->key == rhs.key; } /** * Equality operator for a DataPair and a K. * @param rhs The right hand side (K) of the == operator. * @return true if the object's key is equal to rhs, false otherwise. */ inline bool operator==(const K& rhs) const { return this->key == rhs; } /** * Equality operator for a K and a DataPair. * @param lhs The left hand side (K) of the == operator. * @param rhs The right hand side (DataPair) of the == operator. * @return true if lhs is equal to rhs's key, false otherwise. */ inline friend bool operator==(const K& lhs, const DataPair& rhs) { return lhs == rhs.key; } }; /** * A class for the basic node structure of the BTree. A node contains * two vectors, one with DataPairs representing the data, and one of * BTreeNode*s, representing the node's children. */ struct BTreeNode { bool is_leaf; std::vector<DataPair> elements; std::vector<BTreeNode*> children; /** * Constructs a BTreeNode. 
The vectors will reserve to avoid * reallocations. */ BTreeNode(bool is_leaf, unsigned int order) : is_leaf(is_leaf) { elements.reserve(order + 1); children.reserve(order + 2); } /** * Constructs a BTreeNode based on another. Only copies over * the elements and is_leaf information. */ BTreeNode(const BTreeNode& other) : is_leaf(other.is_leaf), elements(other.elements) { } /** * Printing operator for a BTreeNode. E.g. a node containing 4, 5, 6 * would look like: * <pre> * | 4 | 5 | 6 | * * * * * * </pre> * The stars below the bars represent non-null child pointers. Null * child pointers are represented by an "N". If there are no children * then "no children" is displayed instead. * @param out The ostream to be written to. * @param n The node to be printed. * @return The modified ostream. */ inline friend std::ostream& operator<<(std::ostream& out, const BTreeNode& n) { std::string node_str; node_str.reserve(2 * (4 * n.elements.size() + 1)); for (auto& elem : n.elements) { std::stringstream temp; temp << elem.key; node_str += "| "; node_str += temp.str(); node_str += " "; } if (!n.elements.empty()) { node_str += "|"; } node_str += "\n"; for (auto& child : n.children) { if (child == nullptr) { node_str += "N "; } else { node_str += "* "; } } if (n.children.empty()) { node_str += "no children"; } out << node_str; return out; } }; unsigned int order; BTreeNode* root; public: /** * Constructs a default, order 64 BTree. */ BTree(); /** * Constructs a BTree with the specified order. The minimum order allowed * is order 3. * @param order The order of the constructed BTree. */ BTree(unsigned int order); /** * Constructs a BTree as a deep copy of another. * @param other The BTree to copy. */ BTree(const BTree& other); /** * Performs checks to make sure the BTree is valid. Specifically * it will check to make sure that an in-order traversal of the tree * will result in a sorted sequence of keys. Also verifies that each * BTree node doesn't have more nodes than its order. 
* @return true if it satisfies the conditions, false otherwise. */ bool is_valid(unsigned int order = 64) const; /** * Destroys a BTree. */ ~BTree(); /** * Assignment operator for a BTree. * @param rhs The BTree to assign into this one. * @return The copied BTree. */ const BTree& operator=(const BTree& rhs); /** * Clears the BTree of all data. */ void clear(); /** * Inserts a key and value into the BTree. If the key is already in the * tree do nothing. * @param key The key to insert. * @param value The value to insert. */ void insert(const K& key, const V& value); /** * Finds the value associated with a given key. * @param key The key to look up. * @return The value (if found), the default V if not. */ V find(const K& key) const; private: /** * Private recursive version of the insert function. * @param subroot A reference of a pointer to the current BTreeNode. * @param pair The DataPair to be inserted. */ void insert(BTreeNode* subroot, const DataPair& pair); /** * Private recursive version of the find function. * @param subroot A reference of a pointer to the current BTreeNode. * @param key The key we are looking up. * @return The value (if found), the default V if not. */ V find(const BTreeNode* subroot, const K& key) const; /** * Splits a child node of a BTreeNode. Called if the child became too * large. Modifies the parent such that children[child_idx] contains * half as many elements as before, and similarly for * children[child_idx + 1] (which is a new BTreeNode*). * @param parent The parent whose child we are trying to split. * @param child_idx The index of the child in its parent's children * vector. */ void split_child(BTreeNode* parent, size_t child_idx); /** * Private recursive version of the clear function. * @param subroot A pointer to the current node being cleared. */ void clear(BTreeNode* subroot); /** * Private recursive version of the copy function. * @param subroot A pointer to the current node being copied. 
*/ BTreeNode* copy(const BTreeNode* subroot); /** * Private recursive version of the is_valid function. * @param subroot A pointer to the current node being checked for * validity. * @return true if the node's subtree is valid, false otherwise. */ bool is_valid(const BTreeNode* subroot, std::vector<DataPair>& data, unsigned int order) const; }; /** * Generalized function for finding the insertion index of a given element * into a given sorted vector. * @param elements A sorted vector of some type. * @param val A value which represents something to be inserted into the vector. * Must either be the same type as T, or one that can compare to it. E.g. for * the elements of a BTreeNode we might pass in either a DataPair value or a * K value (the key). * @return The index at which val could be inserted into elements to maintain * the sorted order of elements. If val occurs in elements, then this returns * the index of val in elements. */ template <class T, class C> size_t insertion_idx(const std::vector<T>& elements, const C& val) { /* TODO Your code goes here! */ for(unsigned i = 0; i < elements.size(); i++){ if(elements[i] > val || elements[i] == val){ return i; } } return elements.size(); // return 5; } #include "btree_given.cpp" #include "btree.cpp"
# encoding: UTF-8 # Copyright (c) 2015 VMware, Inc. All Rights Reserved. require 'spec_helper' require 'vagrant-guests-photon/guest' describe VagrantPlugins::GuestPhoton::Guest do include_context 'machine' it 'should be detected with Photon' do expect(communicate).to receive(:test).with("grep 'VMware Photon' /etc/photon-release") guest.detect?(machine) end end
package world.gregs.game.playground.spatial.quadtree

import java.awt.Point
import java.awt.Rectangle

/**
 * A spatial index over 2D points: leaves hold up to [capacity] points and
 * subdivide when full.
 */
interface QuadTree {

    /**
     * The capacity of a leaf before division
     */
    val capacity: Int

    /**
     * Inserts a point into the tree
     * @return whether the insert succeeded (presumably false when [point]
     * lies outside the tree's bounds — confirm against implementations)
     */
    fun insert(point: Point): Boolean

    /**
     * Queries an [area] for points
     * @param results accumulator the matches are collected into
     * @return the collected points (implementations appear to return
     * [results] itself — confirm)
     */
    fun query(area: Rectangle, results: MutableList<Point>): List<Point>
}
import { Component, Input } from "@angular/core";
import { CompassForm } from "../compass-form";
import { CompassControl } from "../compass-control";

/**
 * Renders a CompassForm's controls, excluding any whose key appears in
 * `ignoreControls`.
 */
@Component({
  selector: "compass-form",
  templateUrl: "./compass-form.component.html",
  styleUrls: ["./compass-form.component.scss"]
})
export class CompassFormComponent<T> {
  /** The form model whose controls are rendered. */
  @Input() compassForm: CompassForm<T>;

  /** Keys of controls that should not be rendered. */
  @Input() ignoreControls: string[] = [];

  /** Controls to render: every control whose key is not ignored. */
  getControls(): CompassControl<T, any>[] {
    // Array.includes is the direct idiom for membership; the previous
    // some(p => p === key) form said the same thing with more machinery.
    return this.compassForm.controlsArray.filter(
      x => !this.ignoreControls.includes(x.key)
    );
  }

  /** Inline style for a control, derived from its current snapshot. */
  getStyle(control: CompassControl<T, any>) {
    return {
      width: control.snapshot.width,
      "flex-basis": control.snapshot.width,
      display: control.snapshot.display ? "block" : "none"
    };
  }
}
// Aggregates the IAM module's pieces (routes and store) into the single
// object shape the module registry consumes.
import routes from '@/modules/iam/iam-routes';
import store from '@/modules/iam/iam-store';

export default {
  routes,
  store,
};
import ValueComponent from './ValueComponent';
import CheckboxInput from './CheckboxInput';

/**
 * Renders a boolean model value as a checkbox. Toggling is only permitted
 * while the surrounding context is editable; read-only mode is flagged with
 * the `sm-readonly` class.
 */
export default class BooleanComponent extends ValueComponent {
  getActionHandlers() {
    return {
      toggleValue: this._toggleValue,
    };
  }

  render($$) {
    const model = this.props.model;
    const value = model.getValue();
    let el = $$('div').addClass('sc-boolean');
    if (!this.context.editable) {
      // BUGFIX: was `el.addclass(...)` (lowercase c) — a TypeError at
      // runtime whenever the component rendered in read-only mode.
      el.addClass('sm-readonly');
    }
    el.append($$(CheckboxInput, { value, disabled: this.props.disabled }));
    return el;
  }

  /** Flips the model's boolean value; no-op when the context is read-only. */
  _toggleValue() {
    if (this.context.editable) {
      const model = this.props.model;
      this.props.model.setValue(!model.getValue());
    }
  }
}
<?php
/**
 * Created by PhpStorm.
 * User: jon
 * Date: 2018/10/6
 * Time: 4:44 PM
 */

namespace app\common\model;

/**
 * Model for "complete" records, each tied one-to-one to a Theraise record
 * via theraise_id.
 */
class Complete extends BaseModel
{
    // Relation: this record's associated Theraise (local theraise_id -> Theraise.id).
    public function Theraise()
    {
        return $this->hasOne('Theraise', 'id', 'theraise_id');
    }

    // Creates a new record from $data and returns the created model.
    public static function PostByAdd($data)
    {
        $res = self::create($data);
        return $res;
    }

    // Updates the record matching $data['theraise_id'] with $data.
    // NOTE(review): uses the data()->update() chain — presumably ThinkPHP 5
    // style; confirm framework version before changing.
    public static function PostByUpdate($data)
    {
        $res = self::where('theraise_id', $data['theraise_id'])->data($data)->update();
        return $res;
    }

    // Paginated listing with the Theraise relation eager-loaded;
    // $data['limit'] is the page size, $data['page'] the extra query params.
    public static function GetByList($data)
    {
        $res = self::with('Theraise')->paginate($data['limit'], false, ['query' => $data['page']]);
        return $res;
    }

    // Fetches one record by primary key with its Theraise relation.
    public static function GetByFind($id)
    {
        $res = self::with('Theraise')->where('id', $id)->find();
        return $res;
    }
}
import Document, {
  Head,
  Main,
  NextScript,
  DocumentContext,
  DocumentInitialProps
} from "next/document";
import React from "react";
import { ServerStyleSheets } from "@material-ui/core";
import {
  RenderPage,
  NextComponentType,
  AppContextType,
  AppInitialProps,
  AppPropsType
} from "next/dist/next-server/lib/utils";
import { NextRouter } from "next/router";

/**
 * Custom Next.js Document that collects Material-UI's server-side styles so
 * the first paint matches the client render.
 */
export default class MyDocument extends Document {
  render() {
    return (
      <html lang="en">
        <Head />
        <body>
          <Main />
          <NextScript />
        </body>
      </html>
    );
  }
}

// Wrap the app render in ServerStyleSheets.collect, then append the gathered
// style element to the default initial props. The renderPage override must be
// installed BEFORE Document.getInitialProps runs.
MyDocument.getInitialProps = async (
  ctx: DocumentContext
): Promise<DocumentInitialProps> => {
  const sheets: ServerStyleSheets = new ServerStyleSheets();
  const originalRenderPage: RenderPage = ctx.renderPage;

  ctx.renderPage = () => {
    return originalRenderPage({
      // Higher-order enhancement: wrap App so every server render funnels
      // through sheets.collect and its styles are captured.
      enhanceApp: (
        App: NextComponentType<
          AppContextType<NextRouter>,
          AppInitialProps,
          AppPropsType<NextRouter, {}>
        >
      ) => (props: React.PropsWithChildren<AppPropsType<NextRouter, {}>>) => {
        return sheets.collect(<App {...props} />);
      }
    });
  };

  const initialProps: DocumentInitialProps = await Document.getInitialProps(
    ctx
  );

  return {
    ...initialProps,
    // Keep Next's own styles and append the collected MUI style element.
    styles: [
      ...React.Children.toArray(initialProps.styles),
      sheets.getStyleElement()
    ]
  };
};
<!-- section start --> <!-- attr: { id:'', class:'slide-title', showInPresentation:true, hasScriptWrapper:true } --> # Defensive Programming, Assertions and Exceptions <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic01.png" style="top:60%; left:62%; width:38.41%; z-index:-1; border: 1px solid white; border-radius: 5px;" /> --> <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic02.png" style="top:15%; left:2%; width:17.08%; z-index:-1" /> --> <article class="signature"> <p class="signature-course">High-Quality Code - Part II</p> <p class="signature-initiative">Telerik Software Academy</p> <a href="http://academy.telerik.com " class="signature-link">http://academy.telerik.com </a> </article> <!-- section start --> # Defensive programming “Programming today is a race between software engineers striving to build bigger and better idiot-proof programs, and the Universe trying to produce bigger and better idiots. So far, the Universe is winning.” \- Rick Cook, The Wizardry Compiled <!-- section start --> <!-- attr: { id:'', showInPresentation:true, hasScriptWrapper:true } --> # Table of Contents - What is Defensive Programming? 
- Assertions and **Debug.Assert(…)** - Exceptions Handling Principles - Error Handling Strategies <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic03.png" style="top:39.67%; left:67.37%; width:36.14%; z-index:-1" /> --> <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic04.png" style="top:49.92%; left:22.72%; width:21.23%; z-index:-1" /> --> <!-- section start --> <!-- attr: { id:'', class:'slide-section', showInPresentation:true, hasScriptWrapper:true } --> <!-- # Defensive Programming --> <!-- ## Using Assertions and Exceptions Correctly --> <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic05.png" style="top:42.96%; left:8.07%; width:39%; z-index:-1; border: 1px solid white; border-radius: 5px;" /> --> <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic06.png" style="top:43.20%; left:55.79%; width:45%; z-index:-1; border: 1px solid white; border-radius: 5px;" /> --> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> # What is Defensive Programming? - Similar to defensive driving – you are never sure what other drivers will do - **Expect incorrect input** and handle it correctly - Think not only about the usual execution flow, but consider also **unusual** situations! <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic07.png" style="top:64%; left:30%; width:40%; z-index:-1; border: 1px solid white; border-radius: 5px;" /> --> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> # Protecting from <br/> Invalid Input - “Garbage in &rarr; garbage out” – **Wrong!** - Garbage in &rarr; nothing out / exception out / error message out / no garbage allowed in - Check the values of all data from external sources (from user, file, internet, DB, etc.) 
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic08.png" style="top:63%; left:2%; width:52.77%; z-index:-1; border: 1px solid black; border-radius: 5px;" /> --> <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic09.png" style="top:76%; left:50%; width:50.37%; z-index:-1; border: 1px solid black; border-radius: 5px;" /> --> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Protecting from <br/> Invalid Input --> - Check the values of all **routine input parameters** - Decide how to handle **bad inputs** - Return neutral value - Substitute with valid data - Throw an exception - Display error message, log it, etc. - The best form of defensive coding is not inserting error at first place <!-- section start --> <!-- attr: { id:'', class:'slide-section', showInPresentation:true, hasScriptWrapper:true } --> <!-- # Assertions --> <!-- ## Checking Preconditions and Postconditions --> <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic10.png" style="top:45%; left:20%; width:30.85%; z-index:-1" /> --> <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic11.png" style="top:45%; left:60%; width:25.56%; z-index:-1" /> --> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> # Assertions - **Assertion** – a statement placed in the code that **must always be true** at that moment - Assertions are used during development - Removed in release builds - Assertions check for bugs in code ```cs public double GetAverageStudentGrade() { Debug.Assert(studentGrades.Count > 0, "Student grades are not initialized!"); return studentGrades.Average(); } ``` <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic12.png" style="top:30%; left:85%; width:24.13%; z-index:-1; border: 1px solid white; border-radius:5px;" /> --> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Assertions --> - Use assertions for conditions that **should never occur** in 
practice - Failed assertion indicates a **fatal error** in the program (usually unrecoverable) - Use assertions to **document assumptions** made in code (preconditions & postconditions) ```cs private Student GetRegisteredStudent(int id) { Debug.Assert(id > 0); Student student = registeredStudents[id]; Debug.Assert(student.IsRegistered); } ``` <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Assertions --> - Failed assertion indicates a **fatal error** in the program (usually unrecoverable) - Avoid putting executable code in assertions ```cs Debug.Assert(PerformAction(), "Could not perform action"); ``` - Won’t be compiled in production. Better use: ```cs bool actionPerformed = PerformAction(); Debug.Assert(actionPerformed, "Could not perform action"); ``` - Assertions should fail loud - It is fatal error &rarr; total crash <!-- attr: { class:'slide-section demo', showInPresentation:true, hasScriptWrapper:true } --> # Assertions ## [Demo]() <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic13.png" style="top:18%; left:0%; width:40%; z-index:-1" /> --> <!-- section start --> <!-- attr: { id:'', class:'slide-section', showInPresentation:true, hasScriptWrapper:true } --> <!-- # Exceptions --> ## Best Practices for Exception Handling <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic14.png" style="top:55%; left:30%; width:40%; z-index:-1" /> --> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> # Exceptions - **Exceptions** provide a way to inform the caller about an error or exceptional events - Can be caught and processed by the callers - Methods can **throw** exceptions: ```cs public void ReadInput(string input) { if (input == null) { throw new ArgumentNullException("input"); } … } ``` <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Exceptions --> - Use **try-catch** statement to handle exceptions: - You can use multiple **catch** blocks to specify handlers for 
different exceptions - Not handled exceptions propagate to the caller ```cs void PlayNextTurn() { try { readInput(input); … } catch (ArgumentException e) { Console.WriteLine("Invalid argument!"); } } ``` <div class="fragment balloon" style="top:55%; left:40.55%; width:35.26%">Exception thrown here</div> <div class="fragment balloon" style="top:70%; left:43.20%; width:52.01%">The code here will not be executed</div> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Exceptions --> - Use **finally** block to execute code even if exception occurs (not supported in C++): - Perfect place to perform cleanup for any resources allocated in the **try** block ```cs void PlayNextTurn() { try { … } finally { Console.WriteLine("Hello from finally!"); } } ``` <div class="fragment balloon" style="top:60%; left:22.04%; width:37.91%">Exceptions can be eventually thrown here</div> <div class="fragment balloon" style="top:78%; left:27.33%; width:49.37%">The code here is always executed</div> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Exceptions --> - Use exceptions to notify the other parts of the program about errors - Errors that should not be ignored - Throw an exception only for conditions that are **truly exceptional** - Should I throw an exception when I check for user name and password? 
&rarr; better return false - Don’t use exceptions as control flow mechanisms <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Exceptions --> - Throw exceptions at the right **level of abstraction** ```cs class Employee { // Bad … public TaxId { get { throw new NullReferenceException(…); } } ``` ```cs class Employee { // Better … public TaxId { get { throw new EmployeeDataNotAvailable(…); } } ``` <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Exceptions --> - Use **descriptive error messages** - Incorrect example: ```cs throw new Exception("Error!"); ``` - _Example_: ```cs throw new ArgumentException("The speed should be a number " + "between " + MIN_SPEED + " and " + MAX_SPEED + "."); ``` - Avoid **empty catch blocks** ```cs try { … } catch (Exception ex) { } ``` <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Exceptions --> - Always include the exception **cause** when throwing a new exception ```cs try { WithdrawMoney(account, amount); } catch (DatabaseException dbex) { throw new WithdrawException(String.Format( "Can not withdraw the amount {0} from acoount {1}", amount, account), dbex); } ``` <div class="fragment balloon" style="top:70%; left:45.84%; width:47.60%">We chain the original exception (the source of the problem)</div> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Exceptions --> - Catch only exceptions that you are capable to process correctly - Do not catch all exceptions! - Incorrect example: ```cs try { ReadSomeFile(); } catch { Console.WriteLine("File not found!"); } ``` - What about **OutOfMemoryException**? <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> <!-- # Exceptions --> - Have an exception handling strategy for all unexpected / unhandled exceptions: - Consider logging (e.g. 
Log4Net) - Display to the end users only messages that they could understand <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic15.png" style="top:55%; left:3.74%; width:48.48%; z-index:-1; border: 1px solid black; border-radius:5px;" /> --> <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic16.png" style="top:55%; left:61.26%; width:44.55%; z-index:-1; border: 1px solid black; border-radius:5px;" /> --> <!-- attr: { class:'slide-section demo', showInPresentation:true, hasScriptWrapper:true } --> # Exceptions <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic17.png" style="top:42%; left:33%; width:34.38%; z-index:-1; border: 1px solid black; border-radius:5px;" /> --> <!-- section start --> <!-- attr: { id:'', class:'slide-section', showInPresentation:true, hasScriptWrapper:true } --> <!-- # Error Handling Strategies --> ## Assertions vs. Exceptions vs. Other Techniques <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic18.png" style="top:57%; left:72.02%; width:23.20%; z-index:-1" /> --> <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic19.png" style="top:57%; left:46.09%; width:17.63%; z-index:-1" /> --> <!-- <img class="slide-image" showInPresentation="true" src="imgs\pic20.png" style="top:57%; left:15.21%; width:23.25%; z-index:-1" /> --> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> # Error Handling Techniques - How to handle **errors that you expect** to occur? - Depends on the situation: - Throw an **exception** (in OOP) - The most typical action you can do - Return a neutral value, e.g. **-1** in **IndexOf(…)** - Substitute the next piece of valid data (e.g. 
file) - Return the same answer as the previous time - Substitute the closest legal value - Return an error code (in old languages / APIs) - Display an error message in the UI - Call method / Log a warning message to a file - Crash / shutdown / reboot <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> # Assertions vs. Exceptions - **Exceptions** are announcements about error condition or unusual event - Inform the caller about error or exceptional event - Can be caught and application can continue working - **Assertions** are fatal errors - Assertions always indicate bugs in the code - Can not be caught and processed - Application can’t continue in case of failed assertion - When in doubt &rarr; throw an exception <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> # Assertions in C# - Assertions in C# are rarely used - In C# prefer throwing an **exception** when the input data / internal object state are invalid - Exceptions are used in C# and Java instead of **preconditions checking** - Prefer using **unit testing** for testing the code instead of **postconditions checking** - Assertions are popular in C / C++ - Where exceptions & unit testing are not popular - In JS there are no built-in assertion mechanism <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> # Error Handling Strategy - Choose your **error handling strategy** and follow it consistently - Assertions / exceptions / error codes / other - In C#, .NET and OOP prefer using **exceptions** - Assertions are rarely used, only as additional checks for fatal error - Throw an exception for incorrect input / incorrect object state / invalid operation - In JavaScript use exceptions: **try-catch-finally** - In non-OOP languages use error codes <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> # Robustness vs. Correctness - How will you handle error while calculating single pixel color in a computer game? - How will you handle error in financial software? 
Can you afford to lose money?
- **Correctness** == never returning wrong result
  - Try to achieve correctness as a primary goal
- **Robustness** == always trying to do something that will allow the software to keep running
  - Use as last resort, for non-critical errors

<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Assertions vs. Exceptions
```cs
public string Substring(string str, int startIndex, int length)
{
  if (str == null)
  {
    throw new NullReferenceException("Str is null.");
  }
  if (startIndex >= str.Length)
  {
    throw new ArgumentException(
      "Invalid startIndex:" + startIndex);
  }
  if (startIndex + length > str.Length)
  {
    throw new ArgumentException("Invalid length:" + length);
  }
  …
  Debug.Assert(result.Length == length);
}
```
<div class="fragment balloon" style="top:34.62%; left:66.12%; width:29.09%">Check the input and preconditions</div>
<div class="fragment balloon" style="top:75%; left:20.28%; width:46.72%">Perform the method main logic</div>
<div class="fragment balloon" style="top:85%; left:55%; width:24.68%">Check the postconditions</div>

<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Error Barricades
- Barricade your program to stop the damage caused by incorrect data
- Consider same approach for class design
  - Public methods &rarr; validate the data
  - Private methods &rarr; assume the data is safe
  - Consider using exceptions for public methods and assertions for private
- **public methods / functions**
- **private methods / functions**
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic21.png" style="top:20%; left:100%; width:13.46%; z-index:-1" /> -->
<!-- <img class="slide-image" showInPresentation="true" src="imgs\pic22.png" style="top:40%; left:100%; width:10.50%; z-index:-1" /> -->

<!-- attr: { showInPresentation:true, hasScriptWrapper:true } -->
# Being Defensive About Defensive Programming
- Too much defensive programming is not good
  - Strive for balance
- How much defensive programming to leave
in production code? - Remove the code that results in hard crashes - Leave in code that checks for important errors - Log errors for your technical support personnel - See that the error messages you show are user-friendly <!-- attr: { class:'slide-section', showInPresentation:true, hasScriptWrapper:true } --> <!-- # HQC-Part 2: Defensive Programming ## Questions? --> <!-- attr: { showInPresentation:true, hasScriptWrapper:true } --> # Free Trainings @ Telerik Academy - C# Programming @ Telerik Academy - [HQC-Part II course](http://academy.telerik.com/student-courses/programming/high-quality-code-part-2/about) - Telerik Software Academy - [telerikacademy.com](https://telerikacademy.com) - Telerik Academy @ Facebook - [facebook.com/TelerikAcademy](facebook.com/TelerikAcademy) - Telerik Software Academy Forums - [forums.academy.telerik.com](forums.academy.telerik.com)
# Fan-out demo: one producer fills a Channel, three concurrent readers
# drain it and print the square of each value.

# Producer: enqueue 0..10, then close the channel so readers can detect
# exhaustion.
my $channel = Channel.new();
$channel.send($_) for 0..10;
$channel.close;

my @readers;
for 1..3 {
    # Each `start` block runs on the thread pool and yields a Promise.
    push @readers, start {
        while 1 {
            # .poll is non-blocking: it returns the next value, or Nil when
            # the channel is currently empty.
            my $value = $channel.poll;
            # Nil matches Any, so this exits once the channel is drained.
            # NOTE(review): this is only reliable because the channel is
            # closed *before* the readers start; on a still-open channel
            # .poll also returns Nil for a momentarily-empty queue.
            last if $value === Any;
            say "$value² = " ~ $value * $value;
        }
    };
}
# Block until all three reader promises are kept.
await @readers;
import 'database.dart';

/// Default visit length (minutes) when no explicit end time is given.
const DEFAULT_DURATION_MINUTES = 30;

/// A visit record with a location and a start/end time window, persisted in
/// the `visit` table.
class Visit {
  /// Sentinel used where a non-null Visit is required but no data exists.
  static final empty =
      Visit(code: '', name: '', address: '', startDate: DateTime.now());

  /// Database table backing this model.
  static final table = 'visit';

  /// Row id; 0 means "not yet persisted" (see [createOrUpdate]).
  int id;
  final DateTime createDate;
  final String code;
  final String name;
  final String address;
  final String? latitude;
  final String? longitude;
  final DateTime startDate;
  final DateTime endDate;

  Visit(
      {int? id,
      required this.code,
      required this.name,
      required this.address,
      this.latitude,
      this.longitude,
      required this.startDate,
      DateTime? endDate,
      DateTime? created})
      : this.id = id ?? 0,
        this.createDate = created ?? DateTime.now(),
        // Default the end of the visit to start + DEFAULT_DURATION_MINUTES.
        this.endDate = endDate ??
            startDate.add(Duration(minutes: DEFAULT_DURATION_MINUTES));

  /// Create a visit instance from a database field map.
  Visit.fromDb(Map<String, dynamic> db)
      : this.id = db['id'] as int,
        this.code = db['code'] as String,
        this.name = db['name'] as String,
        this.address = db['address'] as String,
        this.latitude = db['latitude'] as String?,
        this.longitude = db['longitude'] as String?,
        this.createDate =
            DateTime.fromMillisecondsSinceEpoch(db['create_date'] as int),
        this.startDate =
            DateTime.fromMillisecondsSinceEpoch(db['start_date'] as int),
        this.endDate =
            DateTime.fromMillisecondsSinceEpoch(db['end_date'] as int);

  /// Create a new immutable Visit from permitted fields.
  Visit from({DateTime? endDate}) {
    return Visit(
        id: this.id,
        code: this.code,
        name: this.name,
        address: this.address,
        longitude: this.longitude,
        latitude: this.latitude,
        startDate: this.startDate,
        endDate: endDate ?? this.endDate,
        created: this.createDate);
  }

  /// Create database field map from instance. An id of 0 maps to NULL so the
  /// database assigns a fresh key on insert.
  Map<String, Object?> toDb() {
    return {
      'id': this.id == 0 ? null : this.id,
      'code': this.code,
      'name': this.name,
      'address': this.address,
      'latitude': this.latitude,
      'longitude': this.longitude,
      'start_date': this.startDate.millisecondsSinceEpoch,
      'end_date': this.endDate.millisecondsSinceEpoch,
      'create_date': this.createDate.millisecondsSinceEpoch
    };
  }

  /// Create or update a visit row:
  /// - If the id is 0 (unsaved), insert and remember the new id; otherwise
  /// - Update the existing row.
  createOrUpdate() async {
    var database = await DbManager().database;
    if (this.id == 0) {
      this.id = await database.insert(table, this.toDb());
    } else {
      // BUG FIX: the update previously had no WHERE clause and therefore
      // overwrote every row in the table; restrict it to this visit's id.
      await database.update(table, this.toDb(),
          where: 'id = ?', whereArgs: [this.id]);
    }
  }

  /// Delete the current visit from the database. Dispose of this instance
  /// afterwards.
  delete() async {
    var database = await DbManager().database;
    await database.delete(table, where: 'id=?', whereArgs: [this.id]);
  }

  /// Get a page of visits ordered by most recent start date.
  static Future<List<Visit>> list(int start, {int limit = 20}) async {
    var database = await DbManager().database;
    var rows = await database.query(table,
        columns: [
          'id',
          'code',
          'name',
          'address',
          'latitude',
          'longitude',
          'start_date',
          'end_date',
          'create_date'
        ],
        orderBy: 'start_date desc',
        offset: start,
        limit: limit);
    return rows.map((r) => Visit.fromDb(r)).toList();
  }
}
package leetcode

/**
 * https://leetcode.com/problems/check-if-numbers-are-ascending-in-a-sentence/
 */
class Problem2042 {

    /**
     * Returns true when every whitespace-separated token of [s] that parses
     * as an integer is strictly greater than the previous numeric token.
     *
     * The virtual leading 0 preserves the original contract: the first
     * number in the sentence must itself be strictly greater than 0.
     */
    fun areNumbersAscending(s: String): Boolean {
        val numbers = s.split(" ").mapNotNull(String::toIntOrNull)
        return (listOf(0) + numbers)
            .zipWithNext()
            .all { (previous, next) -> previous < next }
    }
}
# Linux Server Configuration

- IP: 34.235.63.160
- SSH Port: 2200
- App URL: 34.235.63.160/catalog

### Installed software

- psycopg2
- psycopg2-binary
- python
- apache2
- postgresql
- libapache2-mod-wsgi

### Configuration

https://github.com/ladytrell/LinuxServerConfig

1. Created user grader
   a. Created ssh keypair
   b. Added to sudo list
2. Configured ssh to port 2200
   a. Changed port in /etc/ssh/sshd_config
3. Configured ufw
   a. Blocked all incoming traffic
   b. Allowed ports 2200, 80, and 123
4. Installed and configured Apache2
   a. Configured to run WSGI script
5. Installed and configured WSGI for python app
   a. Wrote script to call catalog app
6. Installed and configured PostgreSQL
   a. Created users ubuntu and catalog
   b. Created catalog app

### App Location

1. /usr/local/www/catalog/
   a. catalog.db
   b. catalog.wsgi
   c. lotofitems.py
   d. catalogDB_Model.py
   e. client_secrets.google.json
   f. catalogDB_Model.pyc
   g. fb_client_secrets.json
   h. catalog.py
   i. static
      i. responsive.css
      ii. styles.css
   j. templates
      i. catalog.html
      ii. item.html
      iii. category.html
2. /etc/apache2/sites-available/
   a. 000-default.conf

### Referenced Sites

https://modwsgi.readthedocs.io/en/develop/configuration-directives/WSGIScriptAlias.html
https://modwsgi.readthedocs.io/en/develop/user-guides/quick-configuration-guide.html
http://flask.pocoo.org/docs/1.0/deploying/mod_wsgi/
https://realpython.com/flask-by-example-part-2-postgres-sqlalchemy-and-alembic/
https://www.digitalocean.com/community/tutorials/how-to-deploy-a-flask-application-on-an-ubuntu-vps
https://docs.sqlalchemy.org/en/latest/core/connections.html
https://www.postgresql.org/docs/9.5/database-roles.html
https://overiq.com/sqlalchemy-101/installing-sqlalchemy-and-connecting-to-database/
https://tutorials.ubuntu.com/tutorial/install-and-configure-apache#2
https://serverfault.com/questions/265410/ubuntu-server-message-says-packages-can-be-updated-but-apt-get-does-not-update

License
----
MIT

**Free Software**
import mongoose, { Schema } from "mongoose";
import Bot from "../types/Bot";

// Mongoose schema for a trading-bot document. Required fields mirror the
// `Bot` type parameter; optional fields (`exchangeConnectionId`, `endDate`)
// may be absent while a bot is running.
const schema: Schema<Bot> = new Schema({
  // Reference to the exchange-connection document used by this bot.
  // NOTE(review): no `ref` is declared, so population is not configured here.
  exchangeConnectionId: { type: mongoose.Schema.Types.ObjectId },
  startBalance: { type: Number, required: true },
  currentBalance: { type: Number, required: true },
  startDate: { type: Date, required: true },
  // Unset while the bot has not ended.
  endDate: { type: Date },
  status: { type: String, enum: ['online', 'offline', 'ended'], required: true },
  // Free-form strategy configuration; intentionally schemaless.
  strategy: { type: mongoose.Schema.Types.Mixed, required: true },
  type: { type: String, enum: ['TEST', 'LIVE'], required: true },
  userId: { type: mongoose.Schema.Types.ObjectId, required: true },
  quoteCurrency: { type: String, required: true }
});

// JSON output: expose `_id` as a plain string `id`, include virtuals, and
// drop the `__v` version key.
schema.set('toJSON', {
  virtuals: true,
  versionKey: false,
  transform: (doc, ret) => {
    ret.id = ret._id.toString();
    delete ret._id;
  }
});

const model = mongoose.model('bot', schema);

export default model;
--- featuredpath: "/book2/main/page01.jpg" featured: "" preview: "/book2/preview/page01.jpg" title: "Book 2, Page 1" categories: ["book2"] type: "post" linktitle: "" date: "2018-03-23T22:01:03-05:00" author: "Maria Rice" featuredalt: "" description2: [] --- # First colored Morphic page ever! Welcome back from the intermission! Hope you like the new character and the new coloring style. I managed to finish coloring page 2 and now I'm coloring pages 3 and 4 (got all the other drawing for those pages done and only the coloring is left). I'm pleased with my progress so far, but now Spring Break is over and I have to buckle down for the second half of the semester. I expect the school/work pace will pick up from here on out. No worries, though! I expect I'll keep up with the update schedule. I only need four more pages to ensure that the schedule is covered until classes are over for the summer. I think I can push that. **In the meantime---enjoy the colors!** And thanks for reading! Return next week to find out who Vix is talking to.
import 'package:anvil/src/build/build_data.dart';
import 'package:anvil/src/config.dart';
import 'package:anvil/src/content/page.dart';
import 'package:anvil/src/content/section.dart';
import 'build_page.dart';

/// Render [section] and, recursively, all of its children.
///
/// If the section has an index page, it is rendered first with the section's
/// children exposed to the template as `children` (everything), `pages`
/// (pages only) and `sections` (subsections only).
void buildSection(Config config, BuildData buildData, Section section) {
  if (section.index != null) {
    final children = section.children.map((e) => e.toMap()).toList();
    final pages = section.children
        .whereType<Page>()
        .map((content) => content.toMap())
        .toList();
    final sections = section.children
        .whereType<Section>()
        .map((content) => content.toMap())
        .toList();
    buildPage(
      config,
      buildData,
      section.index!,
      extraData: <String, Object?>{
        'children': children,
        'pages': pages,
        'sections': sections,
      },
    );
  }

  // Recurse into children: subsections via buildSection, pages via buildPage.
  // (A previous try/catch around this loop only rethrew the caught error —
  // a no-op — and has been removed.)
  for (final child in section.children) {
    child.when(
      config,
      buildData,
      section: buildSection,
      page: buildPage,
    );
  }
}
#![allow(non_snake_case, non_upper_case_globals)]
#![allow(non_camel_case_types)]
//! MCU debug component
//!
//! Used by: stm32l412, stm32l4x1, stm32l4x2, stm32l4x3
//!
//! NOTE(review): this register map follows an SVD-generated layout (empty
//! R/W/RW value submodules are generator placeholders); prefer regenerating
//! over hand-editing.

use crate::{RORegister, RWRegister};
#[cfg(not(feature = "nosync"))]
use core::marker::PhantomData;

/// DBGMCU_IDCODE
pub mod IDCODE {

    /// Device identifier
    pub mod DEV_ID {
        /// Offset (0 bits)
        pub const offset: u32 = 0;
        /// Mask (12 bits: 0xfff << 0)
        pub const mask: u32 = 0xfff << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Revision identifier
    pub mod REV_ID {
        /// Offset (16 bits)
        pub const offset: u32 = 16;
        /// Mask (16 bits: 0xffff << 16)
        pub const mask: u32 = 0xffff << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}

/// Debug MCU configuration register
pub mod CR {

    /// Debug Sleep mode
    pub mod DBG_SLEEP {
        /// Offset (0 bits)
        pub const offset: u32 = 0;
        /// Mask (1 bit: 1 << 0)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Debug Stop mode
    pub mod DBG_STOP {
        /// Offset (1 bits)
        pub const offset: u32 = 1;
        /// Mask (1 bit: 1 << 1)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Debug Standby mode
    pub mod DBG_STANDBY {
        /// Offset (2 bits)
        pub const offset: u32 = 2;
        /// Mask (1 bit: 1 << 2)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Trace pin assignment control
    pub mod TRACE_IOEN {
        /// Offset (5 bits)
        pub const offset: u32 = 5;
        /// Mask (1 bit: 1 << 5)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Trace pin assignment control
    pub mod TRACE_MODE {
        /// Offset (6 bits)
        pub const offset: u32 = 6;
        /// Mask (2 bits: 0b11 << 6)
        pub const mask: u32 = 0b11 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}

/// Debug MCU APB1 freeze register1
pub mod APB1FZR1 {

    /// TIM2 counter stopped when core is halted
    pub mod DBG_TIM2_STOP {
        /// Offset (0 bits)
        pub const offset: u32 = 0;
        /// Mask (1 bit: 1 << 0)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// TIM6 counter stopped when core is halted
    pub mod DBG_TIM6_STOP {
        /// Offset (4 bits)
        pub const offset: u32 = 4;
        /// Mask (1 bit: 1 << 4)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// TIM7 counter stopped when core is halted
    pub mod DBG_TIM7_STOP {
        /// Offset (5 bits)
        pub const offset: u32 = 5;
        /// Mask (1 bit: 1 << 5)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// RTC counter stopped when core is halted
    pub mod DBG_RTC_STOP {
        /// Offset (10 bits)
        pub const offset: u32 = 10;
        /// Mask (1 bit: 1 << 10)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Window watchdog counter stopped when core is halted
    pub mod DBG_WWDG_STOP {
        /// Offset (11 bits)
        pub const offset: u32 = 11;
        /// Mask (1 bit: 1 << 11)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Independent watchdog counter stopped when core is halted
    pub mod DBG_IWDG_STOP {
        /// Offset (12 bits)
        pub const offset: u32 = 12;
        /// Mask (1 bit: 1 << 12)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// I2C1 SMBUS timeout counter stopped when core is halted
    pub mod DBG_I2C1_STOP {
        /// Offset (21 bits)
        pub const offset: u32 = 21;
        /// Mask (1 bit: 1 << 21)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// I2C2 SMBUS timeout counter stopped when core is halted
    pub mod DBG_I2C2_STOP {
        /// Offset (22 bits)
        pub const offset: u32 = 22;
        /// Mask (1 bit: 1 << 22)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// I2C3 SMBUS timeout counter stopped when core is halted
    pub mod DBG_I2C3_STOP {
        /// Offset (23 bits)
        pub const offset: u32 = 23;
        /// Mask (1 bit: 1 << 23)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// bxCAN stopped when core is halted
    pub mod DBG_CAN_STOP {
        /// Offset (25 bits)
        pub const offset: u32 = 25;
        /// Mask (1 bit: 1 << 25)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// LPTIM1 counter stopped when core is halted
    pub mod DBG_LPTIM1_STOP {
        /// Offset (31 bits)
        pub const offset: u32 = 31;
        /// Mask (1 bit: 1 << 31)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}

/// Debug MCU APB1 freeze register 2
pub mod APB1FZR2 {

    /// LPTIM2 counter stopped when core is halted
    pub mod DBG_LPTIM2_STOP {
        /// Offset (5 bits)
        pub const offset: u32 = 5;
        /// Mask (1 bit: 1 << 5)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}

/// Debug MCU APB2 freeze register
pub mod APB2FZR {

    /// TIM1 counter stopped when core is halted
    pub mod DBG_TIM1_STOP {
        /// Offset (11 bits)
        pub const offset: u32 = 11;
        /// Mask (1 bit: 1 << 11)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// TIM15 counter stopped when core is halted
    pub mod DBG_TIM15_STOP {
        /// Offset (16 bits)
        pub const offset: u32 = 16;
        /// Mask (1 bit: 1 << 16)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// TIM16 counter stopped when core is halted
    pub mod DBG_TIM16_STOP {
        /// Offset (17 bits)
        pub const offset: u32 = 17;
        /// Mask (1 bit: 1 << 17)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}

/// Memory-mapped register layout; field order and types define the offsets.
#[repr(C)]
pub struct RegisterBlock {
    /// DBGMCU_IDCODE
    pub IDCODE: RORegister<u32>,

    /// Debug MCU configuration register
    pub CR: RWRegister<u32>,

    /// Debug MCU APB1 freeze register1
    pub APB1FZR1: RWRegister<u32>,

    /// Debug MCU APB1 freeze register 2
    pub APB1FZR2: RWRegister<u32>,

    /// Debug MCU APB2 freeze register
    pub APB2FZR: RWRegister<u32>,
}

/// Reset values for each register, one `u32` per field of `RegisterBlock`.
pub struct ResetValues {
    pub IDCODE: u32,
    pub CR: u32,
    pub APB1FZR1: u32,
    pub APB1FZR2: u32,
    pub APB2FZR: u32,
}

/// Handle to the peripheral; derefs to the `RegisterBlock` at `addr`.
#[cfg(not(feature = "nosync"))]
pub struct Instance {
    pub(crate) addr: u32,
    pub(crate) _marker: PhantomData<*const RegisterBlock>,
}

#[cfg(not(feature = "nosync"))]
impl ::core::ops::Deref for Instance {
    type Target = RegisterBlock;
    #[inline(always)]
    fn deref(&self) -> &RegisterBlock {
        // Safety: `addr` is expected to hold the peripheral base address at
        // which a `RegisterBlock` is memory-mapped (set by crate internals).
        unsafe { &*(self.addr as *const _) }
    }
}

#[cfg(feature = "rtic")]
unsafe impl Send for Instance {}
#!/bin/bash
# Regression check for RooUnfoldExample: capture its output, post-process it
# with ref/cleanup.sh, and diff it against the stored reference copy.
#
# Exit status is that of `diff`: 0 when output matches the reference.

# Fail fast on command errors, unset variables, or pipeline failures rather
# than diffing a partial/stale output file.
set -euo pipefail

outfile=RooUnfoldExample.cxx.ref

# Capture the example's output.
RooUnfoldExample > "$outfile"

# Normalize the capture before comparing (see ref/cleanup.sh for details).
bash ref/cleanup.sh "$outfile"

diff "$outfile" "ref/$outfile"
using System.Threading;
using MediatR;
using NetCoreKit.Samples.TodoAPI.Domain;

namespace NetCoreKit.Samples.TodoAPI.v1.Services
{
    /// <summary>
    /// Handles <see cref="ProjectCreated"/> notifications published through
    /// MediatR. Currently a placeholder: it completes without doing any work.
    /// </summary>
    public class EventSubscriber : INotificationHandler<ProjectCreated>
    {
        public System.Threading.Tasks.Task Handle(ProjectCreated @event, CancellationToken cancellationToken)
        {
            // do something with @event
            //...

            // Nothing asynchronous happens yet, so return a completed task
            // directly instead of allocating one via Task.FromResult and
            // paying for an async state machine.
            return System.Threading.Tasks.Task.CompletedTask;
        }
    }
}
## 内存信息收集

从Node v12开始,可以收集Appium的内存使用信息来分析问题。 这对于分析内存泄漏问题非常有帮助。

### 创建dump文件

为了在任意时间创建dump文件,执行`node`进程时增加如下命令行参数,这会执行appium.js脚本:

```
--heapsnapshot-signal=<signal>
```

这里的 `signal` 可以是一个有效的自定义信号,例如 `SIGUSR2`。然后你就可以

```
kill -SIGUSR2 <nodePID>
```

dump文件会被存放在Appium主脚本执行路径下。文件扩展名为 `.heapsnapshot`,文件可以在Chrome Inspector中加载来进行分析。

### dump文件分析

详细信息请查看[Rising Stack article](https://blog.risingstack.com/finding-a-memory-leak-in-node-js/)。
<?php
/**
 * InterKassa driver for the Omnipay PHP payment processing library
 *
 * @link https://github.com/hiqdev/omnipay-interkassa
 * @package omnipay-interkassa
 * @license MIT
 * @copyright Copyright (c) 2015-2017, HiQDev (http://hiqdev.com/)
 */

namespace Omnipay\InterKassa\Message;

/**
 * InterKassa Abstract Request.
 *
 * Holds parameters shared by all InterKassa SCI requests (checkout id,
 * sign algorithm/keys, return/cancel/notify methods) and implements the
 * request signature calculation.
 */
abstract class AbstractRequest extends \Omnipay\Common\Message\AbstractRequest
{
    /**
     * {@inheritdoc}
     */
    protected $zeroAmountAllowed = false;

    /**
     * @var string InterKassa SCI endpoint
     */
    protected $endpoint = 'https://sci.interkassa.com/';

    /**
     * Get the unified purse (alias of the checkout id).
     *
     * @return string merchant purse
     */
    public function getPurse()
    {
        return $this->getCheckoutId();
    }

    /**
     * Set the unified purse (alias of the checkout id).
     *
     * @param $value
     * @return self
     */
    public function setPurse($value)
    {
        return $this->setCheckoutId($value);
    }

    /**
     * Get the merchant purse.
     *
     * @return string merchant purse
     */
    public function getCheckoutId()
    {
        return $this->getParameter('checkoutId');
    }

    /**
     * Set the merchant purse.
     *
     * @param string $purse merchant purse
     *
     * @return self
     */
    public function setCheckoutId($purse)
    {
        return $this->setParameter('checkoutId', $purse);
    }

    /**
     * Get the sign algorithm, normalized to lower case.
     *
     * @return string sign algorithm
     */
    public function getSignAlgorithm()
    {
        // Cast before strtolower(): passing null to strtolower() is
        // deprecated as of PHP 8.1 and the parameter may be unset.
        return strtolower((string) $this->getParameter('signAlgorithm'));
    }

    /**
     * Set the sign algorithm.
     *
     * @param string $value sign algorithm
     *
     * @return self
     */
    public function setSignAlgorithm($value)
    {
        return $this->setParameter('signAlgorithm', $value);
    }

    /**
     * Get the sign key.
     *
     * @return string sign key
     */
    public function getSignKey()
    {
        return $this->getParameter('signKey');
    }

    /**
     * Set the sign key.
     *
     * @param string $value sign key
     *
     * @return self
     */
    public function setSignKey($value)
    {
        return $this->setParameter('signKey', $value);
    }

    /**
     * Get the test key.
     *
     * @return string test key
     */
    public function getTestKey()
    {
        return $this->getParameter('testKey');
    }

    /**
     * Set the test key.
     *
     * @param string $value test key
     *
     * @return self
     */
    public function setTestKey($value)
    {
        return $this->setParameter('testKey', $value);
    }

    /**
     * Get the method for success return.
     *
     * @return mixed
     */
    public function getReturnMethod()
    {
        return $this->getParameter('returnMethod');
    }

    /**
     * Sets the method for success return.
     *
     * @param $returnMethod
     * @return self
     */
    public function setReturnMethod($returnMethod)
    {
        return $this->setParameter('returnMethod', $returnMethod);
    }

    /**
     * Get the method for canceled payment return.
     *
     * @return mixed
     */
    public function getCancelMethod()
    {
        return $this->getParameter('cancelMethod');
    }

    /**
     * Sets the method for canceled payment return.
     *
     * @param $cancelMethod
     * @return self
     */
    public function setCancelMethod($cancelMethod)
    {
        return $this->setParameter('cancelMethod', $cancelMethod);
    }

    /**
     * Get the method for request notify.
     *
     * @return mixed
     */
    public function getNotifyMethod()
    {
        return $this->getParameter('notifyMethod');
    }

    /**
     * Sets the method for request notify.
     *
     * @param $notifyMethod
     * @return self
     */
    public function setNotifyMethod($notifyMethod)
    {
        return $this->setParameter('notifyMethod', $notifyMethod);
    }

    /**
     * Calculates the InterKassa signature for the $data.
     *
     * The `ik_sign` field itself is excluded, remaining fields are sorted
     * by key (as strings), the sign key is appended, and the colon-joined
     * values are hashed with the configured algorithm and base64-encoded.
     *
     * @param array $data request data to sign
     * @param string $signKey sign key or test key
     * @return string base64-encoded signature
     */
    public function calculateSign($data, $signKey)
    {
        unset($data['ik_sign']);
        ksort($data, SORT_STRING);
        array_push($data, $signKey);
        $signAlgorithm = $this->getSignAlgorithm();
        $signString = implode(':', $data);

        return base64_encode(hash($signAlgorithm, $signString, true));
    }
}
#!/usr/bin/env ruby # frozen_string_literal: true require File.expand_path("../config/boot.rb", __dir__) require File.expand_path("../config/environment.rb", __dir__) require File.expand_path("../app/extensions/extensions.rb", __dir__) def do_report(year, do_labels = false) warn("Doing #{year.inspect}...") query = Query.lookup(:Observation, :all, date: year) report = ObservationReport::Symbiota.new(query: query).render report.sub!(/^[^\n]*\n/, "") unless do_labels puts report warn(" #{query.num_results} observations\n") sleep 60 end do_report(%w[1000 1999], :do_labels) (2000..2019).each do |year| do_report([year.to_s, year.to_s]) end exit 0
/*
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
 * Changes may cause incorrect behavior and will be lost if the code is
 * regenerated.
 */

// TODO: Include PageTemplateModels here too?? Probably

// NOTE(review): `BaseResource` is referenced below but neither declared nor
// imported in this chunk — presumably provided elsewhere in the package;
// confirm before editing.

/**
 * @class
 * Initializes a new instance of the Sku class.
 * @constructor
 * SKU details
 *
 * @member {string} name SKU name to specify whether the key vault is a
 * standard vault or a premium vault. Possible values include: 'standard',
 * 'premium'
 *
 */
export interface Sku {
  name: string;
}

/**
 * @class
 * Initializes a new instance of the AccessPolicyEntry class.
 * @constructor
 * An identity that has access to the key vault. All identities in the array
 * must use the same tenant ID as the key vault's tenant ID.
 *
 * @member {uuid} tenantId The Azure Active Directory tenant ID that should be
 * used for authenticating requests to the key vault.
 *
 * @member {uuid} objectId The object ID of a user, service principal or
 * security group in the Azure Active Directory tenant for the vault. The
 * object ID must be unique for the list of access policies.
 *
 * @member {uuid} [applicationId] Application ID of the client making request
 * on behalf of a principal
 *
 * @member {object} permissions Permissions the identity has for keys, secrets
 * and certificates.
 *
 * @member {array} [permissions.keys] Permissions to keys
 *
 * @member {array} [permissions.secrets] Permissions to secrets
 *
 * @member {array} [permissions.certificates] Permissions to certificates
 *
 */
export interface AccessPolicyEntry {
  tenantId: string;
  objectId: string;
  applicationId?: string;
  permissions: Permissions;
}

/**
 * @class
 * Initializes a new instance of the Permissions class.
 * @constructor
 * Permissions the identity has for keys, secrets and certificates.
 *
 * @member {array} [keys] Permissions to keys
 *
 * @member {array} [secrets] Permissions to secrets
 *
 * @member {array} [certificates] Permissions to certificates
 *
 */
export interface Permissions {
  keys?: string[];
  secrets?: string[];
  certificates?: string[];
}

/**
 * @class
 * Initializes a new instance of the VaultProperties class.
 * @constructor
 * Properties of the vault
 *
 * @member {string} [vaultUri] The URI of the vault for performing operations
 * on keys and secrets.
 *
 * @member {uuid} tenantId The Azure Active Directory tenant ID that should be
 * used for authenticating requests to the key vault.
 *
 * @member {object} sku SKU details
 *
 * @member {string} [sku.name] SKU name to specify whether the key vault is a
 * standard vault or a premium vault. Possible values include: 'standard',
 * 'premium'
 *
 * @member {array} accessPolicies An array of 0 to 16 identities that have
 * access to the key vault. All identities in the array must use the same
 * tenant ID as the key vault's tenant ID.
 *
 * @member {boolean} [enabledForDeployment] Property to specify whether Azure
 * Virtual Machines are permitted to retrieve certificates stored as secrets
 * from the key vault.
 *
 * @member {boolean} [enabledForDiskEncryption] Property to specify whether
 * Azure Disk Encryption is permitted to retrieve secrets from the vault and
 * unwrap keys.
 *
 * @member {boolean} [enabledForTemplateDeployment] Property to specify
 * whether Azure Resource Manager is permitted to retrieve secrets from the
 * key vault.
 *
 */
export interface VaultProperties {
  vaultUri?: string;
  tenantId: string;
  sku: Sku;
  accessPolicies: AccessPolicyEntry[];
  enabledForDeployment?: boolean;
  enabledForDiskEncryption?: boolean;
  enabledForTemplateDeployment?: boolean;
}

/**
 * @class
 * Initializes a new instance of the VaultCreateOrUpdateParameters class.
 * @constructor
 * Parameters for creating or updating a vault
 *
 * @member {string} location The supported Azure location where the key vault
 * should be created.
 *
 * @member {object} [tags] The tags that will be assigned to the key vault.
 *
 * @member {object} properties Properties of the vault
 *
 * @member {string} [properties.vaultUri] The URI of the vault for performing
 * operations on keys and secrets.
 *
 * @member {uuid} [properties.tenantId] The Azure Active Directory tenant ID
 * that should be used for authenticating requests to the key vault.
 *
 * @member {object} [properties.sku] SKU details
 *
 * @member {string} [properties.sku.name] SKU name to specify whether the key
 * vault is a standard vault or a premium vault. Possible values include:
 * 'standard', 'premium'
 *
 * @member {array} [properties.accessPolicies] An array of 0 to 16 identities
 * that have access to the key vault. All identities in the array must use
 * the same tenant ID as the key vault's tenant ID.
 *
 * @member {boolean} [properties.enabledForDeployment] Property to specify
 * whether Azure Virtual Machines are permitted to retrieve certificates
 * stored as secrets from the key vault.
 *
 * @member {boolean} [properties.enabledForDiskEncryption] Property to specify
 * whether Azure Disk Encryption is permitted to retrieve secrets from the
 * vault and unwrap keys.
 *
 * @member {boolean} [properties.enabledForTemplateDeployment] Property to
 * specify whether Azure Resource Manager is permitted to retrieve secrets
 * from the key vault.
 *
 */
export interface VaultCreateOrUpdateParameters extends BaseResource {
  location: string;
  tags?: { [propertyName: string]: string };
  properties: VaultProperties;
}

/**
 * @class
 * Initializes a new instance of the Resource class.
 * @constructor
 * Key Vault resource
 *
 * @member {string} [id] The Azure Resource Manager resource ID for the key
 * vault.
 *
 * @member {string} name The name of the key vault.
 *
 * @member {string} [type] The resource type of the key vault.
 *
 * @member {string} location The supported Azure location where the key vault
 * should be created.
 *
 * @member {object} [tags] The tags that will be assigned to the key vault.
 *
 */
export interface Resource extends BaseResource {
  id?: string;
  name: string;
  type?: string;
  location: string;
  tags?: { [propertyName: string]: string };
}

/**
 * @class
 * Initializes a new instance of the Vault class.
 * @constructor
 * Resource information with extended details.
 *
 * @member {object} properties Properties of the vault
 *
 * @member {string} [properties.vaultUri] The URI of the vault for performing
 * operations on keys and secrets.
 *
 * @member {uuid} [properties.tenantId] The Azure Active Directory tenant ID
 * that should be used for authenticating requests to the key vault.
 *
 * @member {object} [properties.sku] SKU details
 *
 * @member {string} [properties.sku.name] SKU name to specify whether the key
 * vault is a standard vault or a premium vault. Possible values include:
 * 'standard', 'premium'
 *
 * @member {array} [properties.accessPolicies] An array of 0 to 16 identities
 * that have access to the key vault. All identities in the array must use
 * the same tenant ID as the key vault's tenant ID.
 *
 * @member {boolean} [properties.enabledForDeployment] Property to specify
 * whether Azure Virtual Machines are permitted to retrieve certificates
 * stored as secrets from the key vault.
 *
 * @member {boolean} [properties.enabledForDiskEncryption] Property to specify
 * whether Azure Disk Encryption is permitted to retrieve secrets from the
 * vault and unwrap keys.
 *
 * @member {boolean} [properties.enabledForTemplateDeployment] Property to
 * specify whether Azure Resource Manager is permitted to retrieve secrets
 * from the key vault.
 *
 */
export interface Vault extends Resource {
  properties: VaultProperties;
}
/// A named category that articles can be grouped under.
class ArticleCategory {
  /// Display name of the category.
  String name;

  /// Creates a category with the given [name].
  ArticleCategory({required this.name});
}
import {ICache} from './ICache';

/**
 * In-memory least-recently-used cache.
 *
 * `list` keeps entries ordered by recency (most recent at index 0);
 * `hash` provides O(1) value lookup by key. Capacity is `size`.
 */
export class LRUMemCache<T> implements ICache<T> {
    list: { key: string, value: T }[] = [];
    hash: { [key: string]: T } = {};

    constructor(private size: number) {
    }

    /**
     * Returns the cached value for `key` (or undefined when absent) and
     * refreshes the entry's recency.
     */
    get(key: string): Promise<T> {
        // Use `in`, not truthiness: cached falsy values (0, '', false)
        // must still have their recency refreshed.
        if (key in this.hash) {
            const index = this.list.findIndex(i => i.key === key);
            const item = this.list.splice(index, 1)[0];
            this.list.unshift(item);
        }
        return Promise.resolve(this.hash[key]);
    }

    /**
     * Inserts or updates `key`, evicting the least recently used entry
     * when the cache is at capacity.
     */
    set(key: string, value: T): Promise<void> {
        if (key in this.hash) {
            // Updating an existing key: remove its old list entry first so
            // the key never appears twice (previously a duplicate entry was
            // pushed, and a later eviction could delete the live value).
            const index = this.list.findIndex(i => i.key === key);
            this.list.splice(index, 1);
        } else if (this.list.length >= this.size) {
            // Evict the least recently used entry (tail of the list).
            const evicted = this.list.pop();
            if (evicted) {
                delete this.hash[evicted.key];
            }
        }
        this.list.unshift({key: key, value: value});
        this.hash[key] = value;
        return Promise.resolve();
    }
}
<?php
namespace Traits;

/**
 * Renders a minimal centered HTML error page for common HTTP status codes.
 */
Trait Errors{

    /**
     * Prints an error page for the given HTTP status code.
     * Unknown status codes produce no output, exactly as before.
     *
     * @param int $status HTTP status code
     */
    public function error($status){
        $reasonPhrases = [
            404 => 'Not Found',
            403 => 'Forbidden',
            401 => 'Unauthorized',
            400 => 'Bad Request',
            408 => 'Request Timeout',
            501 => 'Not Implemented',
            502 => 'Bad Gateway',
            503 => 'Service Unavailable',
        ];
        // Strict comparison preserves the original === semantics.
        foreach ($reasonPhrases as $code => $phrase) {
            if ($status === $code) {
                $this->errorFormat($status, $phrase);
                break;
            }
        }
    }

    /**
     * Echoes the status code and reason phrase as a centered paragraph.
     *
     * @param int    $status  HTTP status code
     * @param string $message Reason phrase to display
     */
    private function errorFormat($status , $message){
        echo '<p style="text-align: center; margin-top: 350px; font-size: larger">'.$status.'<br>'.$message.'</p>';
    }
}
#!/bin/bash
# Builds a minimal 32-bit Debian chroot, packs it into tarballs, and imports
# the result as a local Docker base image.
#
# Based on
# https://github.com/docker-32bit/debian/blob/i386/build-image.sh
# and
# https://github.com/docker/docker/blob/master/contrib/mkimage.sh
# Other resources:
# https://l3net.wordpress.com/2013/09/21/how-to-build-a-debian-livecd/
# https://www.opengeeks.me/2015/04/build-your-hybrid-debian-distro-with-xorriso/
# https://www.reversengineered.com/2014/05/17/building-and-booting-debian-live-over-the-network/

# Creating a chroot requires root privileges.
if [ "$(id -u)" != "0" ]; then
    echo "This script must be run as root" 1>&2
    exit 1
fi

T_START=$(date +'%s')

# Make functions in the files below available for use
# (create_debian, exec_chroot, create_tgz, import_local_image come from here).
. chroot/chroot_functions.sh
. chroot/image_functions.sh

# Positional arguments supplied by the caller:
OWNER=$1    # Docker Hub owner/organization for the image name
DISTRO=$2   # distribution name (used in the image name)
SUITE=$3    # Debian suite to bootstrap
TGZ1=$4     # short tarball path (ends up as a copy of TGZ2)
TGZ2=$5     # long tarball path (also imported into Docker)
UNAME=$6    # user that should own the generated tarballs

# Settings
ARCH=i386
DIR_CHROOT="/var/chroot/$SUITE/min"
APT_MIRROR='http://httpredir.debian.org/debian'
DOCKER_IMAGE="$OWNER/32bit-$DISTRO-$SUITE-min"

echo '-----------------'
echo 'Build parameters:'
echo "Architecture: $ARCH"
echo "Suite: $SUITE"
echo "Chroot directory: $DIR_CHROOT"
echo "Apt-get mirror: $APT_MIRROR"
echo "Docker image: $DOCKER_IMAGE"
echo '---------------------------'

# CHROOT OPERATIONS
create_debian $OWNER $SUITE $DIR_CHROOT

DIR_ROOT=$(dirname $PWD)
# NOTE(review): DIR_USR_LOCAL_BIN is never used below -- cp_user_local_bin
# recomputes its own path; confirm whether this variable can be dropped.
DIR_USR_LOCAL_BIN=$DIR_ROOT/usr_local_bin

# Copies every helper script into the chroot's /usr/local/bin, then marks
# the named script executable. (Note: the cp copies all scripts each call;
# only the chmod is specific to $SCRIPT_TO_COPY.)
cp_user_local_bin () {
    SCRIPT_TO_COPY=$1
    DIR_ROOT=$(dirname $PWD)
    cp $DIR_ROOT/min/usr_local_bin/* $DIR_CHROOT/usr/local/bin
    chmod a+x $DIR_CHROOT/usr/local/bin/$SCRIPT_TO_COPY
}

cp_user_local_bin 'aptget'
cp_user_local_bin 'finalize-root'
cp_user_local_bin 'finalize-user'
cp_user_local_bin 'min-root'
cp_user_local_bin 'min-user'
cp_user_local_bin 'check-min'

# Run the in-chroot provisioning script as root inside the chroot.
exec_chroot $DIR_CHROOT /usr/local/bin/min-root

T_END=$(date +'%s')
T_ELAPSED=$(($T_END-$T_START))
echo '-------------'
echo 'Time elapsed:'
echo "$(($T_ELAPSED / 60)) minutes and $(($T_ELAPSED % 60)) seconds"

# CHROOT -> TGZ
TGZ_SHORT=$TGZ1
TGZ_LONG=$TGZ2
create_tgz $TGZ_LONG $DIR_CHROOT
# Replace any stale short tarball with a fresh copy of the long one.
rm $TGZ_SHORT
cp $TGZ_LONG $TGZ_SHORT

# OUTPUT FILES: change ownership to user
chown $UNAME:users $TGZ_SHORT
chown $UNAME:users $TGZ_LONG
chown $UNAME:users $TGZ_LONG.md5sum

T_END=$(date +'%s')
T_ELAPSED=$(($T_END-$T_START))
echo '-------------'
echo 'Time elapsed:'
echo "$(($T_ELAPSED / 60)) minutes and $(($T_ELAPSED % 60)) seconds"

# TGZ -> IMAGE
import_local_image $TGZ_LONG $DOCKER_IMAGE

T_END=$(date +'%s')
T_ELAPSED=$(($T_END-$T_START))
echo '-------------'
echo 'Time elapsed:'
echo "$(($T_ELAPSED / 60)) minutes and $(($T_ELAPSED % 60)) seconds"
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

namespace viva{

/// <summary>
/// Feeds nearby sphere and capsule colliders into a Cloth component so the
/// cloth collides with items that enter this object's trigger volume.
/// Collider counts are capped at maxColliders per collider type.
/// </summary>
public class itemSphereClothInteraction : MonoBehaviour {

    public Cloth cloth;
    [Range(1,4)]
    [SerializeField]
    private int maxColliders = 3;
    [SerializeField]
    private float minimumRadius = 0.04f;

    private Set<SphereCollider> sphereColliders = new Set<SphereCollider>();
    private Set<CapsuleCollider> capsuleColliders = new Set<CapsuleCollider>();

    private void OnTriggerEnter( Collider collider ){
        var newSphere = collider.GetComponentInChildren<SphereCollider>();
        // Bug fix: the radius cutoff previously hard-coded 0.04f, ignoring
        // the serialized minimumRadius field set in the inspector.
        if( newSphere && !newSphere.isTrigger && newSphere.radius > minimumRadius && sphereColliders.Count < maxColliders ){
            sphereColliders.Add( newSphere );
            UpdateSphereArray();
        }
        var newCapsule = collider.GetComponentInChildren<CapsuleCollider>();
        if( newCapsule && capsuleColliders.Count < maxColliders ){
            capsuleColliders.Add( newCapsule );
            UpdateCapsuleArray();
        }
    }

    private void OnTriggerExit( Collider collider ){
        var newSphere = collider.GetComponentInChildren<SphereCollider>();
        if( newSphere && !newSphere.isTrigger ){
            sphereColliders.Remove( newSphere );
            UpdateSphereArray();
        }
        var newCapsule = collider.GetComponentInChildren<CapsuleCollider>();
        if( newCapsule ){
            capsuleColliders.Remove( newCapsule );
            UpdateCapsuleArray();
        }
    }

    // Pushes the tracked sphere colliders into the Cloth component.
    private void UpdateSphereArray(){
        var list = new List<ClothSphereColliderPair>();
        foreach( var sphere in sphereColliders.objects ){
            list.Add( new ClothSphereColliderPair( sphere ) );
        }
        cloth.sphereColliders = list.ToArray();
    }

    // Prunes destroyed (null) capsules, then pushes the remainder into the
    // Cloth component.
    private void UpdateCapsuleArray(){
        for( int i=capsuleColliders.Count; i-->0; ){
            if( capsuleColliders.objects[i] == null ){
                capsuleColliders.objects.RemoveAt(i);
            }
        }
        cloth.capsuleColliders = capsuleColliders.objects.ToArray();
    }
}

}
#ifndef __DHT11_H__
#define __DHT11_H__

#include "stm32f10x_gpio.h"

/* GPIO binding for one DHT11 temperature/humidity sensor. */
typedef struct
{
	GPIO_TypeDef*DATA_GPIO;  /* GPIO port the sensor's data line is wired to */
	uint16_t DATA_Pin;       /* pin mask within DATA_GPIO */
}DHT11;

/* One reading as reported by the sensor: integer and decimal parts of
 * humidity and temperature, plus the checksum byte from the sensor frame. */
typedef struct
{
	uint8_t HumidityInteger;     /* integer part of the humidity reading */
	uint8_t HumidityDecimal;     /* fractional part of the humidity reading */
	uint8_t TemperatureInteger;  /* integer part of the temperature reading */
	uint8_t TemperatureDecimal;  /* fractional part of the temperature reading */
	uint8_t Check;               /* checksum byte received from the sensor */
}DHT11_Data;

/* Prepares the GPIO described by dht11 for sensor communication
 * (implementation in the corresponding .c file). */
void DHT11_Init(DHT11*dht11);

/* Performs one read from the sensor into data. */
void DHT11_Get(DHT11*dht11,DHT11_Data*data);

#endif
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
import 'package:flutter_news_app/EventsTabs.dart';
import 'package:flutter_news_app/NewsTabs.dart';
import 'package:flutter_news_app/PodcastPage.dart';
import 'package:flutter_news_app/page_view.dart';
import 'package:flutter_news_app/util.dart';

/// Podcast browser screen: one tab per podcast category, plus the app-wide
/// bottom navigation bar.
class PodcastTabs extends StatefulWidget {
  @override
  PodcastPageState createState() => new PodcastPageState();
}

class PodcastPageState extends State<PodcastTabs> {
  Util newUtil = new Util();
  static String _podCastApi;
  String _urlStringPodCast =
      "http://api.digitalpodcast.com/v2r/search/?format=json&appid=";
  String _keyword = "&keywords=";
  String _search;
  int _currentIndex = 3;

  // Category display names, in tab order.
  static const List<String> _tabTitles = [
    "Music",
    "Business",
    "Educational",
    "Comedy",
    "News & Politics",
    "Science & Medicine",
    "Sports",
    "Technology & Gadgets",
    "Television",
    "Film & Entertainment",
    "Charity & Causes",
    "Religion & Spirituality",
    "Arts",
  ];

  // Search keyword sent to the podcast API for each tab, same order as
  // [_tabTitles].
  static const List<String> _searchTerms = [
    "music",
    "business",
    "educational",
    "comedy",
    "news & politics",
    "science & medicine",
    "sports",
    "technology & gadgets",
    "television",
    "film & entertainment",
    "charity & causes",
    "religion & spirituality",
    "arts",
  ];

  @override
  void initState() {
    super.initState();
    _podCastApi = newUtil.podCastApi;
  }

  /// Builds the full search URL for one category keyword.
  String _categoryUrl(String term) {
    return _urlStringPodCast + _podCastApi + _keyword + term;
  }

  @override
  Widget build(BuildContext context) {
    timeDilation = 1.0;
    final Color purple = Color.fromRGBO(128, 0, 128, 50.0);
    return MaterialApp(
      debugShowCheckedModeBanner: false,
      home: DefaultTabController(
        length: 13,
        child: Scaffold(
          appBar: AppBar(
            backgroundColor: purple,
            leading: new IconButton(
              icon: new Icon(Icons.arrow_back),
              onPressed: () {
                Navigator.pop(context, true);
              },
            ),
            bottom: new TabBar(
              isScrollable: true,
              indicatorColor: purple,
              tabs: _tabTitles.map((title) => new Tab(text: title)).toList(),
              labelStyle: TextStyle(
                fontSize: 20.0,
                fontFamily: 'RobotoMono',
              ),
            ),
            title: Text(
              "Podcast",
              style: new TextStyle(
                fontWeight: FontWeight.bold,
                fontFamily: 'Raleway',
                fontSize: 22.0,
                color: Colors.white,
              ),
            ),
          ),
          // One HomePage per category, matching the tabs above.
          body: TabBarView(
            children: _searchTerms
                .map<Widget>((term) => new HomePage(url: _categoryUrl(term)))
                .toList(),
          ),
          bottomNavigationBar: BottomNavigationBar(
            currentIndex: _currentIndex,
            onTap: (newIndex) => setState(() {
              _currentIndex = newIndex;
              // Home/News/Events push their own screens; index 3 (Podcast)
              // is this screen, so no navigation is needed for it.
              switch (_currentIndex) {
                case 0:
                  print("In the intropage");
                  Navigator.push(
                    context,
                    MaterialPageRoute(builder: (context) => IntroPageView()),
                  );
                  break;
                case 1:
                  print("In the newstabs");
                  Navigator.of(context, rootNavigator: true).push(
                    new CupertinoPageRoute<bool>(
                      fullscreenDialog: false,
                      builder: (BuildContext context) => new NewsTabs(
                        country: 'us',
                      ),
                    ),
                  );
                  break;
                case 2:
                  print("In the eventstabs");
                  Navigator.of(context, rootNavigator: true).push(
                    new CupertinoPageRoute<bool>(
                      fullscreenDialog: false,
                      builder: (BuildContext context) => new EventsTabs(),
                    ),
                  );
                  break;
              }
              print(_currentIndex);
            }),
            items: [
              BottomNavigationBarItem(
                icon: new Icon(Icons.home),
                title: new Text('Home'),
                backgroundColor: purple,
              ),
              BottomNavigationBarItem(
                icon: new Icon(Icons.book),
                title: new Text('News'),
                backgroundColor: purple,
              ),
              BottomNavigationBarItem(
                icon: new Icon(Icons.event),
                title: new Text('Events'),
                backgroundColor: purple,
              ),
              BottomNavigationBarItem(
                icon: Icon(Icons.headset),
                title: Text('Podcast'),
                backgroundColor: purple,
              ),
            ],
          ),
        ),
      ),
    );
  }
}

/// Maps a tab display name to the keyword sent to the podcast search API.
/// Returns null for unrecognised names (same as the original switch, which
/// left its result variable unassigned).
String searchKeyword(String searchCode) {
  const Map<String, String> keywords = {
    'Music': "music",
    'Business': "business",
    'Educational': "educational",
    'Comedy': "comedy",
    'News & Politics': "news & politics",
    'Science & Medicine': "science & medicine",
    'Sports': "sports",
    'Technology & Gadgets': "technology & gadgets",
    'Television': "television",
    'Film & Entertainment': "film & entertainment",
    'Charity & Causes': "charity & causes",
    'Religion & Spirituality': "religion & spirituality",
    'Arts': "arts",
  };
  return keywords[searchCode];
}
//
//  RunsNetworkMonitor.h
//  OU_iPad
//
//  Created by runs on 2017/10/12.
//  Copyright © 2017年 Olacio. All rights reserved.
//

#import <Foundation/Foundation.h>
#import "Reachability.h"

// Notification name posted when network reachability changes; the
// notification object is an NSNumber wrapping a NetworkStatus value.
FOUNDATION_EXTERN NSString * const RunsNetworkMonitorDidChangeMessage; //object NSNumber(NetworkStatus)

// Callback invoked with the new NetworkStatus on reachability changes.
typedef void(^RunsNetworkChangeCallback)(NetworkStatus status);

// Class-method facade over Reachability for querying and monitoring the
// device's network state.
@interface RunsNetworkMonitor : NSObject

// Whether the network is currently reachable at all.
+ (BOOL)isReachable;
// Whether the network is reachable via cellular (WWAN).
+ (BOOL)isReachableViaWWAN;
// Whether the network is reachable via Wi-Fi.
+ (BOOL)isReachableViaWiFi;
// Returns reachability; `isShow` presumably controls whether a user-facing
// tip is displayed when unreachable -- confirm against the implementation.
+ (BOOL)NetworkIsReachableWithShowTips:(BOOL)isShow;
// Starts monitoring, invoking the matching block as reachability changes.
+ (void)NetWorkMonitorWithReachableBlock:(NetworkReachable)reachable unreachableBlock:(NetworkUnreachable)unreachable;
@end
import { all, fork, call, delay, takeLatest, put, actionChannel, throttle, } from 'redux-saga/effects'; import { http } from './httpHelper'; import { actionTypes } from '../reducers/actionTypes'; import { Dictionary } from '../typings/Dictionary'; import { AxiosResponse } from 'axios'; import { JsonResult, ListResult, PostModel, CategoryModel, TagModel, ImageModel, } from '../typings/dto'; import { BaseAction } from '../typings/BaseAction'; function loadMyPostsApi(query) { const { page, limit, keyword } = query; return http().get( `/me/posts?page=${page}&limit=${limit}&keyword=${encodeURIComponent( keyword, )}`, ); } function* loadMyPosts(action: BaseAction) { try { const { page, limit, keyword } = action.data; const result = yield call(loadMyPostsApi, { page: page || '1', limit: limit || 10, keyword: keyword, }); const resultData = result.data as JsonResult<ListResult<PostModel>>; const { success, data, message } = resultData; if (!success) { throw new Error(message); } yield put<BaseAction>({ type: actionTypes.LOAD_MY_POSTS_DONE, data: { ...data, page: page || 1, }, }); } catch (e) { // console.error(e); yield put<BaseAction>({ type: actionTypes.LOAD_MY_POSTS_FAIL, error: e, message: e.message, }); } } function* watchLoadMyPosts() { yield takeLatest(actionTypes.LOAD_MY_POSTS_CALL, loadMyPosts); } function writePostApi(formData) { return http().post('/me/post', formData); } function* writePost(action) { try { const result = yield call(writePostApi, action.data); const resultData = result.data as JsonResult<PostModel>; const { success, data, message } = resultData; if (success) { yield put<BaseAction>({ type: actionTypes.WRITE_POST_DONE, data: data, }); } else { yield put<BaseAction>({ type: actionTypes.WRITE_POST_FAIL, error: new Error(message), message: message, }); } } catch (e) { yield put<BaseAction>({ type: actionTypes.WRITE_POST_FAIL, error: e, message: e.message, }); } } function* watchWritePost() { yield takeLatest(actionTypes.WRITE_POST_CALL, writePost); 
} function loadCategoriesApi(query) { const { limit, keyword, page } = query; return http().get( `/me/categories?page=${page}&limit=${limit}&keyword=${encodeURIComponent( keyword, )}`, ); } function* loadCategories(action: BaseAction) { try { const { limit, keyword, page } = action.data; // console.debug('[DEBUG]: category ==> ', action.data); const result: AxiosResponse< JsonResult<ListResult<CategoryModel>> > = yield call(loadCategoriesApi, { page: page || 1, limit: limit || 10, keyword: keyword || '', }); const { success, data, message } = result.data; // console.debug('[DEBUG]: categories ==> ', data); if (!success) { throw new Error(message); } yield put<BaseAction>({ type: actionTypes.LOAD_MY_CATEGORIES_DONE, data: { ...data, page: page || 1, }, }); } catch (e) { console.error(e); yield put<BaseAction>({ type: actionTypes.LOAD_MY_CATEGORIES_FAIL, error: e, message: e.message, }); } } function* watchLoadCategories() { yield takeLatest(actionTypes.LOAD_MY_CATEGORIES_CALL, loadCategories); } function loadTagsApi() { return http().get('/me/tags'); } function* loadTags(action) { try { const result: AxiosResponse< JsonResult<ListResult<TagModel>> > = yield call(loadTagsApi); // const resultData = result.data as IJsonResult<IListResult<ITagModel>>; const { success, data, message } = result.data; if (success) { yield put<BaseAction>({ type: actionTypes.LOAD_MY_TAGS_DONE, data: data, }); } else { yield put<BaseAction>({ type: actionTypes.LOAD_MY_TAGS_FAIL, error: new Error(message), message: message, }); } } catch (e) { yield put<BaseAction>({ type: actionTypes.LOAD_MY_TAGS_FAIL, error: e, message: e.message, }); } } function* watchLoadTags() { yield takeLatest(actionTypes.LOAD_MY_TAGS_CALL, loadTags); } function editPostApi(id, data) { return http().patch(`/me/post/${id}`, data); } function* editPost(action) { try { const result = yield call(editPostApi, action.id, action.data); const resultData = result.data as JsonResult<PostModel>; const { success, data, message } 
= resultData; if (success) { yield put<BaseAction>({ type: actionTypes.EDIT_POST_DONE, data: data, }); } else { yield put<BaseAction>({ type: actionTypes.EDIT_POST_FAIL, error: new Error(message), message: message, }); } } catch (e) { yield put<BaseAction>({ type: actionTypes.EDIT_POST_FAIL, error: e, message: e.message, }); } } function* watchEditPost() { yield takeLatest(actionTypes.EDIT_POST_CALL, editPost); } /** * 글을 삭제합니다. * * @param {number} id 글 식별자 Post.Id * */ function deletePostApi(id) { return http().delete(`/me/post/${id}`); } function* deletePost(action) { try { const result = yield call(deletePostApi, action.data); const resultData = result.data as JsonResult<number>; const { success, data, message } = resultData; if (success) { yield put<BaseAction>({ type: actionTypes.DELETE_POST_DONE, data: { id: data }, }); } else { yield put<BaseAction>({ type: actionTypes.DELETE_POST_FAIL, error: new Error(message), message: message, }); } } catch (e) { // console.error(e); yield put<BaseAction>({ type: actionTypes.DELETE_POST_FAIL, error: e, message: e.message, }); } } function* watchDeletePost() { yield takeLatest(actionTypes.DELETE_POST_CALL, deletePost); } function loadMyPostApi(query) { const { id } = query; return http().get(`/me/post/${id}`); } function* loadMyPost(action) { try { const { id } = action.data; const result: AxiosResponse<JsonResult<PostModel>> = yield call( loadMyPostApi, { id }, ); const { success, data, message } = result.data; if (!success) { yield put<BaseAction>({ type: actionTypes.LOAD_MY_POST_FAIL, error: new Error(message), message: message, }); } yield put<BaseAction>({ type: actionTypes.LOAD_MY_POST_DONE, data: { post: data, }, }); } catch (e) { // console.error(e); yield put<BaseAction>({ type: actionTypes.LOAD_MY_POST_FAIL, error: e, message: e.message, }); } } function* watchLoadMyPost() { yield takeLatest(actionTypes.LOAD_MY_POST_CALL, loadMyPost); } function* writeNewPost(action) { try { yield put<BaseAction>({ type: 
actionTypes.WRITE_NEW_POST_DONE, }); } catch (e) { yield put<BaseAction>({ type: actionTypes.WRITE_NEW_POST_FAIL, error: e, }); } } function* watchWriteNewPost() { yield takeLatest(actionTypes.WRITE_NEW_POST_CALL, writeNewPost); } function uploadMyMediaFilesApi(data) { return http().post('/me/media', data); } function* uploadMyMediaFiles(action) { try { // console.log('==========> form data:', action.data); const result = yield call(uploadMyMediaFilesApi, action.data); const resultData = result.data as JsonResult<ListResult<ImageModel>>; const { success, data, message } = resultData; if (success) { yield put<BaseAction>({ type: actionTypes.UPLOAD_MY_MEDIA_FILES_DONE, data: { ...data, }, }); } else { yield put<BaseAction>({ type: actionTypes.UPLOAD_MY_MEDIA_FILES_FAIL, error: new Error(message), message: message, }); } } catch (e) { // console.error(e); yield put<BaseAction>({ type: actionTypes.UPLOAD_MY_MEDIA_FILES_FAIL, error: e, message: e.message, }); } } function* watchUploadMyMediaFiles() { yield takeLatest( actionTypes.UPLOAD_MY_MEDIA_FILES_CALL, uploadMyMediaFiles, ); } function loadMediaFilesApi(query) { const { page, limit, keyword } = query; return http().get( `/me/media/?page=${page}&limit=${limit}&keyword=${encodeURIComponent( keyword, )}`, ); } function* loadMediaFiles(action) { try { const { page, limit, keyword } = action.data; const result = yield call(loadMediaFilesApi, { page: page || 1, limit: limit || 10, keyword: keyword || '', }); const resultData = result.data as JsonResult<ListResult<ImageModel>>; const { success, data, message } = resultData; if (success) { yield put<BaseAction>({ type: actionTypes.LOAD_MY_MEDIA_FILES_DONE, data: { ...data, page: page || 1, }, }); } else { yield put<BaseAction>({ type: actionTypes.LOAD_MY_MEDIA_FILES_FAIL, error: new Error(message), message: message, }); } } catch (e) { console.error(e); yield put<BaseAction>({ type: actionTypes.LOAD_MY_MEDIA_FILES_FAIL, error: e, message: e.message, }); } } function* 
watchLoadMediaFiles() { yield takeLatest(actionTypes.LOAD_MY_MEDIA_FILES_CALL, loadMediaFiles); } function deleteMediaFileApi(id) { return http().delete(`/me/media/${id}`); } function* deleteMediaFile(action) { try { const { id } = action.data; const result: AxiosResponse<JsonResult<number>> = yield call( deleteMediaFileApi, id, ); const { success, data, message } = result.data; if (success) { yield put<BaseAction>({ type: actionTypes.DELETE_MY_MEDIA_FILES_DONE, data: { id: data, }, }); } else { yield put<BaseAction>({ type: actionTypes.DELETE_MY_MEDIA_FILES_FAIL, error: new Error(message), message: message, }); } } catch (e) { console.error(e); yield put<BaseAction>({ type: actionTypes.DELETE_MY_MEDIA_FILES_FAIL, error: e, message: e.message, }); } } function* watchDeleteMediaFile() { yield takeLatest(actionTypes.DELETE_MY_MEDIA_FILES_CALL, deleteMediaFile); } function editCategoryApi(formData) { if (!!formData.id) { return http().patch(`/me/category/${formData.id}`, formData); } else { return http().post('/me/category', formData); } } function* editCategory(action) { try { const result = yield call(editCategoryApi, action.data); const resultData = result.data as JsonResult<CategoryModel>; const { success, data, message } = resultData; if (success) { yield put<BaseAction>({ type: actionTypes.EDIT_MY_CATEGORY_DONE, data: { category: data, }, }); } else { yield put<BaseAction>({ type: actionTypes.EDIT_MY_CATEGORY_FAIL, error: new Error(message), message: message, }); } } catch (e) { // console.error(e); yield put<BaseAction>({ type: actionTypes.EDIT_MY_CATEGORY_FAIL, error: e, message: e.message, }); } } function* wacthEditCategory() { yield takeLatest(actionTypes.EDIT_MY_CATEGORY_CALL, editCategory); } function deleteCategoryApi(id) { return http().delete(`/me/category/${id}`); } function* deleteCategory(action) { try { const { id } = action.data; const result: AxiosResponse<JsonResult<number>> = yield call( deleteCategoryApi, id, ); const { success, data, message 
} = result.data; if (!success) { throw new Error(message); } yield put<BaseAction>({ type: actionTypes.DELETE_MY_CATEGORY_DONE, data: { id: data, }, }); } catch (e) { // console.error(e); yield put<BaseAction>({ type: actionTypes.DELETE_MY_CATEGORY_FAIL, error: e, message: e.message, }); } } function* watchDeleteCategory() { yield takeLatest(actionTypes.DELETE_MY_CATEGORY_CALL, deleteCategory); } function loadLikedPostsApi(query) { const { limit, keyword, page } = query; return http().get( `/me/liked?&page=${page}&limit=${limit}&keyword=${encodeURIComponent( keyword, )}`, ); } function* loadLikedPosts(action) { try { const { limit, keyword, page } = action.data; const result = yield call(loadLikedPostsApi, { page: page || 1, limit: limit || 10, keyword: keyword || '', }); const resultData = result.data as JsonResult<ListResult<PostModel>>; const { success, data, message } = resultData; if (success) { yield put<BaseAction>({ type: actionTypes.LOAD_LIKED_POSTS_DONE, data: { ...data, keyword: keyword, page: page || 1, }, }); } else { yield put<BaseAction>({ type: actionTypes.LOAD_LIKED_POSTS_FAIL, error: new Error(message), message: message, }); } } catch (e) { // console.error(e); yield put<BaseAction>({ type: actionTypes.LOAD_LIKED_POSTS_FAIL, error: e, message: e.message, }); } } function* watchLoadLikedPosts() { yield takeLatest(actionTypes.LOAD_LIKED_POSTS_CALL, loadLikedPosts); } function loadStatGeneralApi(query) { return http().get('/me/stat/general'); } function* loadStatGeneral(action) { try { const result = yield call(loadStatGeneralApi, action.data); const resultData = result.data as JsonResult<Dictionary<any>>; const { success, data, message } = resultData; if (success) { yield put<BaseAction>({ type: actionTypes.LOAD_STAT_GENERAL_DONE, data: data, }); } else { yield put<BaseAction>({ type: actionTypes.LOAD_STAT_GENERAL_FAIL, error: new Error(message), message: message, }); } } catch (e) { // console.error(e); yield put<BaseAction>({ type: 
actionTypes.LOAD_STAT_GENERAL_FAIL, error: e, message: e.message, }); } } function* watchLoadStatGeneral() { yield takeLatest(actionTypes.LOAD_STAT_GENERAL_CALL, loadStatGeneral); } function loadStatReadApi(query) { return http().get('/me/stat/postread'); } function* loadStatRead(action) { try { const result = yield call(loadStatReadApi, action.data); const resultData = result.data as JsonResult<Dictionary<any>>; const { success, data, message } = resultData; if (success) { yield put<BaseAction>({ type: actionTypes.LOAD_STAT_READ_DONE, data: data, }); } else { yield put<BaseAction>({ type: actionTypes.LOAD_STAT_READ_FAIL, error: new Error(message), message: message, }); } } catch (e) { // console.error(e); yield put<BaseAction>({ type: actionTypes.LOAD_STAT_READ_FAIL, error: e, message: e.message, }); } } function* watchLoadStatRead() { yield takeLatest(actionTypes.LOAD_STAT_READ_CALL, loadStatRead); } export default function* postSaga() { yield all([ fork(watchLoadMyPosts), fork(watchLoadMyPost), fork(watchWritePost), fork(watchEditPost), fork(watchDeletePost), fork(watchLoadCategories), fork(watchLoadTags), fork(watchWriteNewPost), fork(watchUploadMyMediaFiles), fork(watchLoadMediaFiles), fork(watchDeleteMediaFile), fork(wacthEditCategory), fork(watchDeleteCategory), fork(watchLoadLikedPosts), fork(watchLoadStatGeneral), fork(watchLoadStatRead), ]); }
# Omnipay: Instamojo **[Instamojo](https://www.instamojo.com/) driver for the Omnipay PHP payment processing library** [Omnipay](https://github.com/thephpleague/omnipay) is a framework agnostic, multi-gateway payment processing library for PHP 5.3+. This package implements [Instamojo Payments API v1.1](https://docs.instamojo.com/docs/payments-api). ## Installation Omnipay is installed via [Composer](http://getcomposer.org/). To install, simply run: ``` composer require gentor/omnipay-instamojo ``` ## Purchase ```php use Omnipay\Omnipay; // Setup payment gateway $gateway = Omnipay::create('Instamojo'); $gateway->setApiKey('abc123'); $gateway->setAuthToken('abc123'); // Send purchase request $response = $gateway->purchase( [ 'amount' => '10.00', 'purpose' => 'Instamojo Payment' ] )->send(); // Process response if ($response->isSuccessful() && $response->isRedirect()) { // Redirect to offsite payment gateway // print_r($response->getData()); // echo $response->getTransactionStatus(); $response->redirect(); } else { // Request failed echo $response->getMessage(); } ``` ## Complete Purchase ```php // Send complete purchase request $response = $gateway->completePurchase( [ 'transactionReference' => $_GET['payment_id'], ] )->send(); // Process response if ($response->isSuccessful()) { // Request was successful print_r($response->getData()); echo $response->getTransactionStatus(); } else { // Request failed echo $response->getMessage(); } ``` ## Refund ```php // Send refund request $response = $gateway->refund( [ 'transactionReference' => $payment_id, ] )->send(); // Process response if ($response->isSuccessful()) { // Request was successful print_r($response->getData()); echo $response->getTransactionStatus(); } else { // Request failed echo $response->getMessage(); } ``` ## Fetch Payment Request ```php // Send fetch payment request $response = $gateway->fetchPaymentRequest( [ 'transactionReference' => $payment_request_id, ] )->send(); // Process response if 
($response->isSuccessful()) { // Request was successful print_r($response->getData()); echo $response->getTransactionStatus(); } else { // Request failed echo $response->getMessage(); } ``` ## Webhook ```php use Omnipay\Omnipay; // Setup payment gateway $gateway = Omnipay::create('Instamojo'); $gateway->setSalt('abc123'); // Payment notification request $response = $gateway->acceptNotification()->send(); // Process response if ($response->isSuccessful()) { // Request was successful print_r($response->getData()); echo $response->getTransactionReference(); echo $response->getTransactionStatus(); } else { // Request failed echo $response->getMessage(); } ``` ## [Instamojo API v1.1 Documentation](https://docs.instamojo.com/docs/payments-api)
using Microsoft.Extensions.Configuration;

namespace Kubernetes.Configuration.Extensions.Configmap
{
    /// <summary>
    /// Configuration source that describes how to read settings from Kubernetes
    /// configmaps. Acts as a factory for <see cref="ConfigmapConfigurationProvider"/>,
    /// which performs the actual lookup.
    /// </summary>
    public class ConfigmapConfigurationSource : IConfigurationSource
    {
        /// <summary>Kubernetes namespace to search. NOTE(review): semantics of null (default/current namespace?) are decided by the provider — confirm there.</summary>
        public string? Namespace { get; set; }

        /// <summary>Label selector used to filter which configmaps are read.</summary>
        public string? LabelSelector { get; set; }

        /// <summary>Separator used when flattening configmap keys into configuration keys.</summary>
        public string? Separator { get; set; }

        /// <summary>Whether the provider should reload configuration when the configmap changes.</summary>
        public bool ReloadOnChange { get; set; }

        /// <summary>Builds the provider with all settings of this source.</summary>
        public IConfigurationProvider Build(IConfigurationBuilder builder)
        {
            return new ConfigmapConfigurationProvider(Namespace, LabelSelector, Separator, ReloadOnChange);
        }
    }
}
# frozen_string_literal: true

# Application user. Authenticates via Google OAuth (devise/omniauth) and
# carries roles (rolify) that are either global or scoped to an Organization.
class User < ApplicationRecord
  has_many :authentication_tokens, dependent: :destroy

  # before_add_role validates roles before they are attached (see below).
  rolify before_add: :before_add_role, strict: true

  validates :email, presence: true
  validates :email, uniqueness: true, allow_blank: true

  devise :trackable, :token_authenticatable, :omniauthable, omniauth_providers: [:google_oauth2]

  # Highest role level this user has in the given organization, considering
  # both global roles and the explicit role in that organization.
  def role_level_in(organization)
    levels = roles.global.map(&:level)
    levels << local_role_level_in(organization)
    levels.max
  end

  def role_in(organization)
    # Returns only an explicit role in the passed organization, not including global roles
    roles.find_by resource_id: organization.id
  end

  # Admin either of the given organization or globally.
  def administrator?(organization = nil)
    is_admin_of?(organization) || global_administrator?
  end

  # Whether the user has at least one global (non organization-scoped) role.
  def global_role?
    roles.global.present?
  end

  # First global role, or nil when the user has none.
  def global_role
    roles.global.first
  end

  # Organizations visible to this user: all of them for global-role holders,
  # otherwise only those where the user holds an explicit role.
  def organizations
    return Organization.all if global_role?
    membership_organizations
  end

  def global_administrator?
    is_global_admin? || is_super_admin?
  end

  def membership_organizations
    # All organizations in which this user has an explicit role, not including global roles
    Organization.where(id: roles.pluck(:resource_id))
  end

  # Whether the user holds an explicit role in the organization.
  def member_of?(organization)
    roles.pluck(:resource_id).include?(organization.id)
  end

  # True when every role the user has is in the read-only set.
  def read_only?
    (roles.pluck(:name).map(&:to_sym) - Role::READ_ONLY_ROLES).empty?
  end

  private

  # Rejects unknown roles and duplicate roles for the same resource.
  def before_add_role(role)
    raise ActiveRecord::Rollback if Role::LOCAL_ROLES[role.symbol].nil? && Role::GLOBAL_ROLES[role.symbol].nil?
    raise ActiveRecord::Rollback if roles.pluck(:resource_id).include?(role.resource_id)
  end

  def local_role_level_in(organization)
    # Role level in explicit organization, excluding global roles
    role = role_in organization
    return Role::MINIMAL_ROLE_LEVEL if role.nil?
    role.level
  end

  class << self
    # Resolves an omniauth callback to a User: updates an existing user,
    # bootstraps the very first user as super_admin, otherwise returns an
    # unsaved empty user.
    def from_omniauth(auth)
      user = get_user_from_auth auth
      return update_user_from_omniauth user, auth if user
      return create_first_user auth if first_user?
      empty_user
    end

    # Verifies a Google ID token against Google's tokeninfo endpoint and
    # looks up the user by the email embedded in the token.
    def from_id_token(id_token)
      client = OAuth2::Client.new(Rails.configuration.google_client_id, Rails.configuration.google_client_secret)
      response = client.request(:get, Rails.configuration.google_token_info_url, params: { id_token: id_token }).parsed
      User.find_by(email: response['email'])
    end

    private

    def update_user_from_omniauth(user, auth)
      user.update auth_params auth
      user
    end

    def empty_user
      User.new
    end

    # The first user ever created becomes the super admin.
    def create_first_user(auth)
      user = User.new auth_params auth
      user.save
      user.add_role :super_admin
      user
    end

    def get_user_from_auth(auth)
      User.find_by email: auth['info']['email']
    end

    def first_user?
      User.count.zero?
    end

    # Maps an omniauth hash onto User attributes.
    def auth_params(auth)
      { uid: auth['uid'], name: auth['info']['name'], email: auth['info']['email'], provider: auth.provider, image: auth['info']['image'] }
    end
  end
end
import React from 'react'; import classes from './Spinner.module.css'; const Spinner = (props) => { const style = { backgroundColor: `var(--${props.variant})`, }; return ( <div className={classes.Spinner}> <div className={classes.Bounce1} style={style}></div> <div className={classes.Bounce2} style={style}></div> </div> ); }; export default Spinner;
<?php
/**
 * Created by PhpStorm.
 * User: KustovVA
 * Date: 25.06.2015
 * Time: 18:40
 */

/** @var \common\models\Store $store */
?>
<!-- Info panel: an "Info" icon revealing a popup menu of store actions.
     NOTE(review): only "View Profile" is a link; the other items appear to be
     wired up elsewhere (JS) — confirm before converting them to links. -->
<div class="info-panel f-right">
    <span class="info-link" title="Info"></span>
    <div class="info-popup">
        <div class="info-item font-edit-write">Add Note</div>
        <a href="#" class="info-item font-user">View Profile</a>
        <div class="info-item font-bar-chart">View Dashboard</div>
        <div class="info-item font-letter-mail">Email</div>
        <div class="info-item font-link-broken">Disconnect</div>
    </div>
</div>
# Specs for Coactive::Interface defaults: a context variable's default value
# may be supplied as a literal, via a method, or via a proc — all three are
# expected to resolve to the same value on a fresh interface.
describe Coactive::Interface do
  context 'default' do
    let :interface_class do
      Variables::DefaultInterface
    end

    it 'sets default value' do
      interface = interface_class.new
      expect(interface.context.in).to eq('default value')
    end

    it 'sets default value by method' do
      interface = interface_class.new
      expect(interface.context.in_method).to eq('default value')
    end

    it 'sets default value by proc' do
      interface = interface_class.new
      expect(interface.context.in_proc).to eq('default value')
    end
  end
end
## v0.1.6 * Further Opal 1.4 compatibility ## v0.1.5 * Opal 1.4 compatibility
/** Michał Wójcik 2021 */
/**
 * L-System implemented in JavaScript using HTML5 Canvas and
 * turtle-graphics-js [https://www.npmjs.com/package/turtle-graphics-js]
 *
 * The program reads its parameters from text inputs on the page and draws
 * after the "draw" button is pressed.
 *
 * Rule syntax:
 *   Sign:Replacement;Sign:Replacement;
 *   (any number of rules; each sign is a single character)
 *
 * Operations:
 *  - F - move forward and draw
 *  - f - move forward (without drawing)
 *  - + - turn right
 *  - - - turn left
 *  - [ - push position onto the stack
 *  - ] - pop position off the stack
 *  - C - pick a new random color
 *  - L - increase the drawn line length
 */

var stack = [];
var turtle = new Turtle(document.getElementById("canvas"));
turtle.pen.color = "#000";
turtle.pen.width = 2;
turtle.moveTo(400, 300);
// NOTE(review): initLoc is captured but never read; kept for compatibility.
var initLoc = JSON.parse(JSON.stringify(turtle.loc));

/** Attributes */
var lineLength = 1;
var rotation = 90;
var axiom = "";
var rules = "";
var iterations = 5;

/** DOM references */
var lengthInput = document.getElementById("length");
var rotationInput = document.getElementById("rotation");
var iterationsInput = document.getElementById("iterations");
var axiomInput = document.getElementById("axiom");
var rulesInput = document.getElementById("rules");
var drawButton = document.getElementById("drawButton");

var inter = null;

drawButton.addEventListener("click", () => {
  clearAll();
  drawLSystem();
});

/** Functions */

// Expands the axiom by the rules and animates drawing one sign every 6 ms.
const drawLSystem = function () {
  getAttributes();
  let ruleObjectsArray = interpretRules(rules);
  axiom = applyRules(axiom, ruleObjectsArray, iterations);
  var i = 0;
  inter = setInterval(() => {
    // BUG FIX: was `i > axiom.length`, which processed one extra (empty) char.
    if (i >= axiom.length) {
      clearInterval(inter);
      return;
    }
    drawSign(axiom.charAt(i));
    i++;
  }, 6);
};

// Reads and validates the form inputs, keeping the previous value when an
// input is empty or not a number.
const getAttributes = function () {
  // BUG FIX: an <input>'s .value is always a string (never undefined), so the
  // old `!== undefined` guards always passed and assigned raw strings; the
  // rotation fallback also referenced a misspelled `rotaion` (ReferenceError).
  const parsedLength = parseFloat(lengthInput.value);
  lineLength = Number.isFinite(parsedLength) ? parsedLength : lineLength;

  const parsedRotation = parseFloat(rotationInput.value);
  rotation = Number.isFinite(parsedRotation) ? parsedRotation : rotation;

  const parsedIterations = parseInt(iterationsInput.value, 10);
  iterations = Number.isFinite(parsedIterations) ? parsedIterations : iterations;

  axiom = axiomInput.value.length > 0 ? axiomInput.value : axiom;
  rules = rulesInput.value.length > 0 ? rulesInput.value : rules;
};

/** Rule structure:
 * {
 *   sign: string;
 *   value: string;
 * }
 */
const interpretRules = function (rules) {
  // BUG FIX: a trailing ";" used to yield a rule with an empty sign, which
  // would insert its value between every character during expansion. Empty
  // or incomplete fragments are now filtered out.
  return rules
    .split(";")
    .map((ruleString) => {
      const arr = ruleString.split(":");
      return { sign: arr[0], value: arr[1] };
    })
    .filter((rule) => rule.sign && rule.value !== undefined);
};

// Expands the axiom `iterations` times, applying all rules simultaneously
// within each iteration.
const applyRules = function (axiom, ruleObjectsArray, iterations) {
  // BUG FIX: L-system productions must be applied in parallel per iteration.
  // The previous sequential replaceAll() calls let later rules rewrite the
  // output of earlier rules within the same pass. Each sign is a single
  // character (per the documented rule syntax), so a per-character map works.
  const ruleMap = new Map(ruleObjectsArray.map((r) => [r.sign, r.value]));
  let result = axiom;
  for (let i = 0; i < iterations; i++) {
    let next = "";
    for (const ch of result) {
      next += ruleMap.has(ch) ? ruleMap.get(ch) : ch;
    }
    result = next;
  }
  return result;
};

// Executes one sign of the expanded axiom on the turtle.
const drawSign = function (sign) {
  switch (sign) {
    case "F":
      turtle.penDown();
    // fallthrough: "F" draws while moving, "f" only moves
    case "f":
      turtle.forward(lineLength);
      turtle.penUp();
      break;
    case "+":
      turtle.right(rotation);
      break;
    case "-":
      turtle.left(rotation);
      break;
    case "[":
      // NOTE(review): only the position is stacked; the comment header
      // mentions rotation as well — confirm whether angle should be saved too.
      stack.push(JSON.parse(JSON.stringify(turtle.loc)));
      break;
    case "]": {
      // BUG FIX: guard against popping an empty stack (unbalanced brackets),
      // which previously set turtle.loc to undefined.
      const loc = stack.pop();
      if (loc !== undefined) {
        turtle.loc = loc;
      }
      break;
    }
    case "C":
      turtle.pen.color = getRandomColor();
      break;
    case "L":
      lineLength++;
      break;
    default:
      break;
  }
};

// Random 6-digit hex color.
const getRandomColor = function () {
  // BUG FIX: toString(16) can yield fewer than 6 digits (e.g. for small
  // values), producing invalid colors like "#abc1"; pad with leading zeros.
  return `#${Math.floor(Math.random() * 16777215).toString(16).padStart(6, "0")}`;
};

// Resets animation, stack, turtle state and wipes the canvas.
function clearAll() {
  clearInterval(inter);
  stack = [];
  turtle.moveTo(400, 300);
  turtle.angle = 0;
  turtle.pen.color = "#000";
  turtle.ctx.clearRect(
    0,
    0,
    canvas.width || canvas.style.width,
    canvas.height || canvas.style.height
  );
}
// Package output assembles and merges cost report structures.
package output

import (
	"encoding/json"
	"time"

	"github.com/shopspring/decimal"
)

// ReportInput pairs a parsed report Root with free-form metadata about it.
type ReportInput struct {
	Metadata map[string]string
	Root     Root
}

// Load unmarshals a JSON-encoded report into a Root.
func Load(data []byte) (Root, error) {
	var out Root
	err := json.Unmarshal(data, &out)
	return out, err
}

// Combine merges several report inputs into a single Root: projects are
// concatenated, summaries are merged via MergeSummaries, and the hourly and
// monthly totals are summed. A total stays nil only when no input supplied it,
// preserving the "unknown cost" distinction from "zero cost".
//
// NOTE(review): opts is currently unused in this function — confirm whether
// it should influence the merge.
func Combine(currency string, inputs []ReportInput, opts Options) Root {
	var combined Root

	var totalHourlyCost *decimal.Decimal
	var totalMonthlyCost *decimal.Decimal

	projects := make([]Project, 0)
	summaries := make([]*Summary, 0, len(inputs))

	for _, input := range inputs {
		projects = append(projects, input.Root.Projects...)
		summaries = append(summaries, input.Root.Summary)

		if input.Root.TotalHourlyCost != nil {
			// Lazily initialize so the total remains nil when never set.
			if totalHourlyCost == nil {
				totalHourlyCost = decimalPtr(decimal.Zero)
			}
			totalHourlyCost = decimalPtr(totalHourlyCost.Add(*input.Root.TotalHourlyCost))
		}
		if input.Root.TotalMonthlyCost != nil {
			if totalMonthlyCost == nil {
				totalMonthlyCost = decimalPtr(decimal.Zero)
			}
			totalMonthlyCost = decimalPtr(totalMonthlyCost.Add(*input.Root.TotalMonthlyCost))
		}
	}

	combined.Version = outputVersion
	combined.Currency = currency
	combined.Projects = projects
	combined.TotalHourlyCost = totalHourlyCost
	combined.TotalMonthlyCost = totalMonthlyCost
	combined.TimeGenerated = time.Now()
	combined.Summary = MergeSummaries(summaries)

	return combined
}
import produce from 'immer'; import { categoriesActionTypes, categoryState, SELECT_CATEGORY, } from './types'; const INITIAL_STATE: categoryState = { category: '', }; export default function optionReducer ( state = INITIAL_STATE, action: categoriesActionTypes, ): categoryState { return produce(state, draft => { switch (action.type) { case SELECT_CATEGORY: { draft.category = action.payload.category break; } default: } }); };
#ifndef _IOTEX_ABI_READ_CONTRACT_H_
#define _IOTEX_ABI_READ_CONTRACT_H_

/*
 * Accessors for reading order fields out of ABI-encoded contract call data.
 * Each function takes the raw input buffer and its length.
 */

#include <stddef.h> /* BUG FIX: size_t lives here; <stdint.h> does not guarantee it */
#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

/* Returns the order's start field from the ABI-encoded input. */
uint64_t abi_get_order_start(const char *input, size_t len);

/* Returns the order's duration field from the ABI-encoded input. */
uint32_t abi_get_order_duration(const char *input, size_t len);

/* Returns the order's endpoint field from the ABI-encoded input.
 * NOTE(review): ownership/lifetime of the returned string is defined by the
 * implementation — confirm whether the caller must free it. */
const char *abi_get_order_endpoint(const char *input, size_t len);

/* Returns the order's token field from the ABI-encoded input.
 * NOTE(review): same ownership caveat as abi_get_order_endpoint. */
const char *abi_get_order_token(const char *input, size_t len);

#ifdef __cplusplus
}
#endif

#endif /* _IOTEX_ABI_READ_CONTRACT_H_ */
<?php

namespace App\Http\Controllers;

use App\Models\request_status;
use Illuminate\Support\Facades\DB;
use Illuminate\Http\Request;

/**
 * CRUD-style endpoints for friend-request rows ("request_status").
 * NOTE: "reciever" is the column's spelling in the schema and is kept as-is.
 */
class request_statusController extends Controller
{
    /**
     * List every friend-request row.
     */
    public function index(){
        $requestor = request_status::all();

        return response()->json([
            'success' => true,
            'message' => 'Data Request',
            'data' => $requestor
        ], 200);
    }

    /**
     * List requests addressed to the given receiver.
     */
    public function show_requests(Request $request){
        $reciever = DB::table('request_status')
            ->where('reciever', $request->reciever)
            ->get();

        // BUG FIX: get() returns a Collection, which is always truthy, so the
        // 400 branch was unreachable. Check for emptiness instead.
        if ($reciever->isNotEmpty()) {
            return response()->json([
                'success' => true,
                'message' => 'Hasil penelusuran',
                'data' => $reciever
            ], 200);
        }

        return response()->json([
            'success' => false,
            'message' => 'Gagal, tidak ada penelusuran',
            'data' => $reciever
        ], 400);
    }

    /**
     * List requests issued by the given requestor.
     */
    public function show_friends(Request $request){
        $requestor = DB::table('request_status')
            ->where('requestor', $request->requestor)
            ->get();

        // BUG FIX: same always-truthy Collection check as show_requests.
        if ($requestor->isNotEmpty()) {
            return response()->json([
                'success' => true,
                'message' => 'Hasil penelusuran',
                'data' => $requestor
            ], 200);
        }

        return response()->json([
            'success' => false,
            'message' => 'Gagal, tidak ada penelusuran',
            'data' => $requestor
        ], 400);
    }

    /**
     * Create a new request unless a (requestor, reciever) pair already exists.
     */
    public function created(Request $request){
        $cek = DB::table('request_status')
            ->where('requestor', $request->requestor)
            ->where('reciever', $request->reciever)
            ->count();

        $requestor = new request_status;
        $requestor->requestor = $request->requestor;
        $requestor->reciever = $request->reciever;
        $requestor->status = $request->status;

        if ($cek >= 1) {
            return response()->json([
                'success' => false,
                'message' => 'Data Gagal Ditambahkan',
                'data' => $requestor
            ], 400);
        }

        $requestor->save();

        return response()->json([
            'success' => true,
            'message' => 'Data Berhasil Ditambahkan',
            'data' => $requestor
        ], 200);
    }

    /**
     * Update the status of a pending request for the given receiver.
     */
    public function update(Request $request, $reciever){
        $cek = DB::table('request_status')
            ->where('requestor', $request->requestor)
            ->where('reciever', $reciever)
            ->where('status', '=', 'pending')
            ->count();

        if ($cek >= 1) {
            // The previous ->select('request_status_id') call was a no-op on
            // an update query and has been removed.
            DB::table('request_status')
                ->where('requestor', $request->requestor)
                ->where('reciever', $reciever)
                ->update(['status' => $request->status]);

            return response()->json([
                'success' => true,
                'message' => 'Data Berhasil Diubah'
            ], 200);
        }

        return response()->json([
            'success' => false,
            'message' => 'Data Gagal Diubah'
        ], 400);
    }

    /**
     * Delete a request row by primary key.
     */
    public function delete($id){
        $requestor = request_status::find($id);

        // BUG FIX: find() returns null for unknown ids; calling delete() on
        // null raised an error. Report the missing row instead.
        if ($requestor === null) {
            return response()->json([
                'success' => false,
                'message' => 'Post Not Found',
            ], 404);
        }

        $requestor->delete();

        return response()->json([
            'success' => true,
            'message' => 'Post Deleted',
        ], 200);
    }
}
/// Provides data structures for storing component data.
library component_data;

import 'dart:async';
import 'dart:collection';

import 'package:observable/observable.dart';
import 'package:quiver/core.dart';

part 'src/component_data/linked_hash_map_store.dart';

/// Registers [ComponentTypeStore]s for component types and notifies listeners
/// when the registered store for a type changes.
class TypeStoreRegistry {
  // Backing map: one store per component type.
  final Map<Type, ComponentTypeStore> _typesStores = {};

  // Emits a TypeStoreRegistryChangeRecord whenever a store is added, replaced
  // or removed.
  final ChangeNotifier<TypeStoreRegistryChangeRecord> _changeNotifier = new ChangeNotifier();

  /// A synchronous stream of the changes made to this [TypeStoreRegistry].
  ///
  /// A change is triggered when a [ComponentTypeStore] is added, removed or
  /// changed.
  Stream<List<TypeStoreRegistryChangeRecord>> get changes => _changeNotifier.changes;

  /// The [ComponentTypeStore]s registered with this [TypeStoreRegistry].
  Iterable<ComponentTypeStore> get stores => _typesStores.values;

  /// The types for which [ComponentTypeStore]s are registered with this
  /// [TypeStoreRegistry].
  Iterable<Type> get types => _typesStores.keys;

  /// Whether or not this [TypeStoreRegistry] contains a [ComponentTypeStore]
  /// for the [type].
  bool hasStore(Type type) => _typesStores.containsKey(type);

  /// Returns the [ComponentTypeStore] registered for the [type] or `null` if
  /// no [ComponentTypeStore] is currently registered for the [type].
  ComponentTypeStore<T> getStore<T>(Type type) => _typesStores[type] as ComponentTypeStore<T>;

  /// Registers the [store] for type [type].
  ///
  /// If another [ComponentTypeStore] was already registered for the [type],
  /// then this other store is replaced with the [store].
  void add<T>(Type type, ComponentTypeStore<T> store) {
    final oldStore = _typesStores[type] as ComponentTypeStore<T>;

    _typesStores[type] = store;

    // Emit an insert record for a new type, an update record for a replaced
    // store; changes are delivered synchronously.
    if (oldStore == null) {
      _changeNotifier
        ..notifyChange(new TypeStoreRegistryChangeRecord<T>.insert(type, store))
        ..deliverChanges();
    } else {
      _changeNotifier
        ..notifyChange(new TypeStoreRegistryChangeRecord<T>(type, oldStore, store))
        ..deliverChanges();
    }
  }

  /// Removes the [ComponentTypeStore] associated with the [type] from this
  /// [TypeStoreRegistry].
  ///
  /// Returns the removed store, or `null` if no store was registered for the
  /// [type] (in which case no change is emitted).
  ComponentTypeStore<T> remove<T>(Type type) {
    final store = _typesStores[type] as ComponentTypeStore<T>;

    if (store != null) {
      _typesStores.remove(type);
      _changeNotifier
        ..notifyChange(new TypeStoreRegistryChangeRecord<T>.remove(type, store))
        ..deliverChanges();

      return store;
    } else {
      return null;
    }
  }
}

/// Stores component values of type [T] and associates them with entity IDs.
abstract class ComponentTypeStore<T> {
  /// Instantiates a new [ComponentTypeStore] using the default implementation,
  /// [LinkedHashMapStore].
  factory ComponentTypeStore() = LinkedHashMapStore<T>;

  /// A synchronous stream of the changes made to this [ComponentTypeStore].
  ///
  /// A change is triggered when a component value is added, when a component
  /// value is removed, or when a component value is updated.
  ///
  /// See also [ComponentTypeStoreChangeRecord].
  Stream<List<ComponentTypeStoreChangeRecord<T>>> get changes;

  /// The number of component values currently stored in this
  /// [ComponentTypeStore].
  int get length;

  /// Whether this [ComponentTypeStore] is currently empty.
  bool get isEmpty;

  /// Whether there is currently at least 1 component value in this
  /// [ComponentTypeStore].
  bool get isNotEmpty;

  /// The component values currently stored in this [ComponentTypeStore].
  Iterable<T> get components;

  /// The entity IDs for which a component value is currently stored in this
  /// [ComponentTypeStore].
  Iterable<int> get entityIds;

  /// Returns a [ComponentStoreIterator] over this [ComponentTypeStore].
  ComponentStoreIterator<T> get iterator;

  /// Executes the given function [f] for each ([entityId], [component]) pair
  /// stored in this [ComponentTypeStore].
  void forEach(void f(int entityId, T component));

  /// Whether or not this [ComponentTypeStore] contains a component value for
  /// the [entityId].
  bool containsComponentFor(int entityId);

  /// Removes the component value associated with the [entityId] from this
  /// [ComponentTypeStore].
  ///
  /// Does nothing if this [ComponentTypeStore] does not contain a component
  /// value for the [entityId].
  ///
  /// Returns the component value if this [ComponentTypeStore] did contain a
  /// component value for the [entityId], or `null` otherwise.
  T remove(int entityId);

  /// Returns the value associated with the [entityId] or `null` if this
  /// [ComponentTypeStore] does not currently contain a value for the
  /// [entityId].
  T operator [](int entityId);

  /// Associated the given [component] value with the [entityId] and stores it
  /// in this [ComponentTypeStore].
  void operator []=(int entityId, T component);
}

/// An iterator over a [ComponentTypeStore].
///
/// Extends an ordinary [Iterator] by also exposing the [currentEntityId] that
/// is associated with the [current] component value.
abstract class ComponentStoreIterator<T> extends Iterator<T> {
  int get currentEntityId;
}

/// A [ChangeRecord] that denotes adding, removing, or updating a
/// [ComponentTypeStore].
class ComponentTypeStoreChangeRecord<T> implements ChangeRecord {
  /// The entity id for which a component changed.
  final int entityId;

  /// The previous component value associated with this key.
  ///
  /// Is always `null` if [isInsert].
  final T oldValue;

  /// The new component value associated with this key.
  ///
  /// Is always `null` if [isRemove].
  final T newValue;

  /// True if this component value was inserted.
  final bool isInsert;

  /// True if this component value was removed.
  final bool isRemove;

  /// Create an update record of [entityId] from [oldValue] to [newValue].
  const ComponentTypeStoreChangeRecord(this.entityId, this.oldValue, this.newValue)
      : isInsert = false,
        isRemove = false;

  /// Create an insert record of [entityId] and [newValue].
  const ComponentTypeStoreChangeRecord.insert(this.entityId, this.newValue)
      : isInsert = true,
        isRemove = false,
        oldValue = null;

  /// Create a remove record of [entityId] with a former [oldValue].
  const ComponentTypeStoreChangeRecord.remove(this.entityId, this.oldValue)
      : isInsert = false,
        isRemove = true,
        newValue = null;

  /// Apply this change record to the [componentStore].
  void apply(ComponentTypeStore<T> componentStore) {
    if (isRemove) {
      componentStore.remove(entityId);
    } else {
      componentStore[entityId] = newValue;
    }
  }

  // Two records are equal when all five fields match.
  bool operator ==(Object o) =>
      identical(this, o) ||
      o is ComponentTypeStoreChangeRecord<T> &&
          entityId == o.entityId &&
          oldValue == o.oldValue &&
          newValue == o.newValue &&
          isInsert == o.isInsert &&
          isRemove == o.isRemove;

  int get hashCode => hashObjects([
        entityId,
        oldValue,
        newValue,
        isInsert,
        isRemove,
      ]);
}

/// A [ChangeRecord] that denotes adding, removing, or replacing the store
/// registered for a component type in a [TypeStoreRegistry].
class TypeStoreRegistryChangeRecord<T> extends ChangeRecord {
  /// The component type for which the store changed.
  final Type type;

  /// The previous store associated with the [type].
  ///
  /// Is always `null` if [isInsert].
  final ComponentTypeStore<T> oldValue;

  /// The new value associated with the [type].
  ///
  /// Is always `null` if [isRemove].
  final ComponentTypeStore<T> newValue;

  /// Whether or not this change concerns an insertion.
  final bool isInsert;

  /// Whether or not this change concerns a removal.
  final bool isRemove;

  /// Create an update record for [type] from [oldValue] to [newValue].
  const TypeStoreRegistryChangeRecord(this.type, this.oldValue, this.newValue)
      : isInsert = false,
        isRemove = false;

  /// Create an insert record for [type] and [newValue].
  const TypeStoreRegistryChangeRecord.insert(this.type, this.newValue)
      : isInsert = true,
        isRemove = false,
        oldValue = null;

  /// Create a remove record for [type] with a former [oldValue].
  const TypeStoreRegistryChangeRecord.remove(this.type, this.oldValue)
      : isInsert = false,
        isRemove = true,
        newValue = null;

  /// Apply this change record to the [typeStoreRegistry].
  void apply(TypeStoreRegistry typeStoreRegistry) {
    if (isRemove) {
      typeStoreRegistry.remove(type);
    } else {
      typeStoreRegistry.add(type, newValue);
    }
  }

  // Two records are equal when all five fields match.
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is TypeStoreRegistryChangeRecord<T> &&
          type == other.type &&
          oldValue == other.oldValue &&
          newValue == other.newValue &&
          isInsert == other.isInsert &&
          isRemove == other.isRemove;

  int get hashCode => hashObjects([
        type,
        oldValue,
        newValue,
        isInsert,
        isRemove,
      ]);
}
# Base class for table-row items in the GLPlugin monitoring framework.
package Monitoring::GLPlugin::TableItem;
our @ISA = qw(Monitoring::GLPlugin::Item);

use strict;

# Constructor: copies all named parameters into the blessed hash and, when the
# subclass defines a finish() hook, calls it for post-construction setup.
sub new {
  my ($class, %params) = @_;
  my $self = {};
  bless $self, $class;
  foreach (keys %params) {
    $self->{$_} = $params{$_};
  }
  if ($self->can("finish")) {
    $self->finish(%params);
  }
  return $self;
}

# Default no-op check, intentionally empty (see comment below).
sub check {
  my ($self) = @_;
  # some tableitems are not checkable, they are only used to enhance other
  # items (e.g. sensorthresholds enhance sensors)
  # normal tableitems should have their own check-method
}

1;

__END__
require 'rails_helper'
require 'email_spec/rspec'
require 'timecop'
require 'shared_context/stub_email_rendering'

# Specs for the EmailAlert singleton: condition handling, per-entity alert
# dispatch, mail construction/logging, and the NoMethodError contract that
# subclasses are expected to fulfil.
RSpec.describe EmailAlert, type: :model do
  let(:mock_log) { instance_double("ActivityLogger") }

  # set subject appropriately since it's a Singleton
  let(:subject) { described_class.instance }

  let(:user) { create(:user) }
  let(:config) { { days: [2, 5, 10] } }
  let(:condition) { create(:condition, config: { days: [2, 5, 10] }) }
  let(:timing) { :on }
  let(:dec_1) { Time.zone.local(2018, 12, 1) }

  let(:users) do
    [create(:user, first_name: 'u1'), create(:user, first_name: 'u2')]
  end

  describe '.condition_response' do
    it 'gets the config from the condition' do
      # stubbed methods:
      allow(subject).to receive(:entities_to_check)
                            .and_return([])
      allow(subject).to receive(:send_alert_this_day?)
                            .and_return(true)
      allow(subject).to receive(:send_email)
                            .with(anything, mock_log)

      # expected results:
      expect(described_class).to receive(:get_config)

      # actual test:
      Timecop.freeze(dec_1) do
        subject.condition_response(condition, mock_log)
      end
    end

    it 'gets the timing from the condition' do
      # stubbed methods:
      allow(subject).to receive(:entities_to_check)
                            .and_return([])
      allow(subject).to receive(:send_alert_this_day?)
                            .and_return(true)
      allow(subject).to receive(:send_email)
                            .with(anything, mock_log)

      # expected results:
      expect(described_class).to receive(:get_timing)

      # actual test:
      Timecop.freeze(dec_1) do
        subject.condition_response(condition, mock_log)
      end
    end

    it 'calls process_entities' do
      # stubbed methods:
      allow(subject).to receive(:entities_to_check)
                            .and_return(users)

      # expected results:
      expect(subject).to receive(:process_entities)
                             .and_return(true)

      # actual test:
      Timecop.freeze(dec_1) do
        subject.condition_response(condition, mock_log)
      end
    end
  end

  describe 'process_entities' do
    it 'loops through entities_to_check and calls take_action on each' do
      # stub this method
      allow(subject).to receive(:take_action).and_return(true)

      expect(subject).to receive(:take_action).exactly(users.size).times

      # actual test:
      Timecop.freeze(dec_1) do
        subject.process_entities(users, mock_log)
      end
    end
  end

  describe 'take_action' do
    let(:entity) { create(:member_with_membership_app) }

    it 'calls send_email for the entity and log if send_alert_this_day? is true' do
      # stubbed methods:
      allow(subject).to receive(:send_alert_this_day?)
                            .with(timing, config, anything)
                            .and_return(true)

      # expected results:
      expect(subject).to receive(:send_alert_this_day?)
                             .with(timing, config, anything)
                             .once
      expect(subject).to receive(:send_email)
                             .with(anything, mock_log)
                             .once

      # actual test:
      Timecop.freeze(dec_1) do
        subject.timing = timing
        subject.config = config
        subject.take_action(entity, mock_log)
      end
    end

    it 'does nothing when send_alert_this_day? is false for a user' do
      # stubbed methods:
      allow(subject).to receive(:send_alert_this_day?)
                            .with(anything, config, user)
                            .and_return(false)

      # expected results:
      expect(subject).to receive(:send_alert_this_day?)
                             .with(anything, config, anything)
                             .once
      expect(subject).to receive(:send_email).never

      # actual test:
      Timecop.freeze(dec_1) do
        subject.timing = timing
        subject.config = config
        subject.take_action(entity, mock_log)
      end # Timecop
    end # it 'does nothing when send_alert_this_day? is false for a user'
  end

  it '.entities_to_check raises NoMethodError (subclasses should implement)' do
    expect {subject.entities_to_check }.to raise_exception NoMethodError
  end

  it '.mailer_class raises NoMethodError (subclasses should implement)' do
    expect {subject.mailer_class }.to raise_exception NoMethodError
  end

  it '.mailer_args raises NoMethodError (subclasses should implement)' do
    expect {subject.mailer_args(create(:user)) }.to raise_exception NoMethodError
  end

  describe '.send_email' do
    include_context 'stub email rendering'

    before(:all) do
      # define a method for MemberMailer just for this test
      MemberMailer.class_eval do
        def fake_mailer_method(_user)
          nil
        end
      end
    end

    after(:all) do
      # remove the method we added
      MemberMailer.undef_method(:fake_mailer_method)
    end

    before(:each) do
      Rails.configuration.action_mailer.delivery_method = :mailgun
      ApplicationMailer.mailgun_client.enable_test_mode!
      allow(Memberships::MembershipActions).to receive(:for_user)
                                                   .and_return(true)
    end

    after(:each) {
      ApplicationMailer.mailgun_client.disable_test_mode!
    }

    let(:entity) { build(:member) }

    it 'sends alert email to user and logs a message' do
      expect(MemberMailer.fake_mailer_method(user)).to be_truthy

      # stubbed methods:
      allow(subject).to receive(:mailer_class)
                            .and_return(MemberMailer)
      allow(subject).to receive(:mailer_args)
                            .and_return([entity])
      allow(subject).to receive(:mailer_method).and_return(:test_email)
      allow(subject).to receive(:success_str).with(entity)
                            .and_return('succeeded with entity')

      # expected results:
      expect(MemberMailer).to receive(:test_email).with(entity)
                                  .and_call_original
      expect(subject).to receive(:log_mail_response)

      Timecop.freeze(dec_1)
      subject.send_email(entity, mock_log)
      Timecop.return

      email = ActionMailer::Base.deliveries.last
      expect(email).to deliver_to(entity.email)
    end

    it 'does not send email if an error is raised or mail has errors' do
      subject.create_alert_logger(mock_log)

      expect(MemberMailer.fake_mailer_method(user)).to be_truthy

      # stubbed methods:
      allow(subject).to receive(:mailer_class)
                            .and_return(MemberMailer)
      allow(subject).to receive(:mailer_args)
                            .and_return([entity])
      allow(subject).to receive(:mailer_method).and_return(:test_email)
      allow(subject).to receive(:failure_str).with(entity)
                            .and_return('failed with entity')
      allow_any_instance_of(Mail::Message).to receive(:deliver)
                                                  .and_raise(Net::ProtocolError)

      # expected results:
      expect(MemberMailer).to receive(:test_email).with(entity)
                                  .and_call_original
      expect(mock_log).to receive(:error).with(/EmailAlert email ATTEMPT FAILED failed with entity\.
Net::ProtocolError Also see for possible info/)

      Timecop.freeze(dec_1)
      subject.send_email(entity, mock_log)
      Timecop.return

      expect(ActionMailer::Base.deliveries.size).to eq 0
    end
  end

  describe '.mail_message' do
    let(:entity) { create(:company) }

    it 'calls mailer_args to get the arguments' do
      # stubbed methods:
      allow(subject).to receive(:mailer_method).and_return(:test_email)
      allow(subject).to receive(:mailer_class).and_return(MemberMailer)

      expect(subject).to receive(:mailer_args).with(entity)

      subject.mail_message(entity)
    end

    it 'calls mailer_class to get the mailer class' do
      # stubbed methods:
      allow(subject).to receive(:mailer_method).and_return(:test_email)
      allow(subject).to receive(:mailer_class).and_return(MemberMailer)
      allow(subject).to receive(:mailer_args).and_return([entity])

      expect(subject).to receive(:mailer_class)

      subject.mail_message(entity)
    end

    it 'sends the mailer_method to the mailer_class with the arguments' do
      # stubbed methods:
      allow(subject).to receive(:mailer_method).and_return(:test_email)
      allow(subject).to receive(:mailer_class).and_return(MemberMailer)
      allow(subject).to receive(:mailer_args).and_return([entity])

      expect(MemberMailer).to receive(:test_email).with(entity)

      subject.mail_message(entity)
    end
  end

  describe '.send_on_day_number?' do
    let(:config) { { days: [1, 3, 5] } }

    it 'true if config[:days].include? day_number' do
      expect(subject.send_on_day_number?(3, config)).to be_truthy
    end

    it 'false if day_number is not in config[:days]' do
      expect(subject.send_on_day_number?(0, config)).to be_falsey
    end

    it 'false if config does not have :days as a key' do
      expect(subject.send_on_day_number?(3, { blorf: 'blorf' })).to be_falsey
    end
  end

  describe '.log_mail_response' do
    let(:entity) { create(:user) }

    context 'no mail_response errors (successful)' do
      it 'sends log_success to the alert logger' do
        subject.create_alert_logger(mock_log)

        mail_response_dbl = double("Mail::Message")
        allow(mail_response_dbl).to receive(:errors).and_return([])

        expect_any_instance_of(AlertLogger).to receive(:log_success)

        subject.log_mail_response(mock_log, mail_response_dbl, entity)
      end
    end

    context 'with mail_response_errors (failure)' do
      before(:all) do
        # define a method for MemberMailer just for this test
        MemberMailer.class_eval do
          def fake_mailer_method(_user)
            nil
          end
        end
      end

      after(:all) do
        # remove the method we added
        MemberMailer.undef_method(:fake_mailer_method)
      end

      it 'sends log_failure' do
        subject.create_alert_logger(mock_log)

        mail_response_dbl = double("Mail::Message")
        allow(mail_response_dbl).to receive(:errors).and_return([3])

        expect_any_instance_of(AlertLogger).to receive(:log_failure)

        subject.log_mail_response(mock_log, mail_response_dbl, entity)
      end
    end
  end

  it '.success_str raises NoMethodError (should be defined by subclasses)' do
    expect{subject.success_str([])}.to raise_exception NoMethodError
  end

  it '.failure_str raises NoMethodError (should be defined by subclasses)' do
    expect{subject.failure_str([])}.to raise_exception NoMethodError
  end

  it '.send_alert_this_day?(timing, config, user) raises NoMethodError (should be defined by subclasses)' do
    config = {}
    timing = 'blorf' # doesn't matter what this is
    expect {subject.send_alert_this_day?(timing, config, user) }.to raise_exception NoMethodError
  end

  it '.mailer_method raises NoMethodError (should be defined by subclasses)' do
    expect {subject.mailer_method }.to raise_exception NoMethodError
  end
end
// Windows command proxy for the EchoPlugin: forwards the input string to the
// native EchoRuntimeComponent and routes the result to the success or error
// callback.
cordova.commandProxy.add("EchoPlugin", {
  echo: function (successCallback, errorCallback, strInput) {
    var result = EchoRuntimeComponent.EchoPluginRT.echo(strInput);

    // The native side signals failure by prefixing its result with "Error".
    if (result.indexOf("Error") === 0) {
      errorCallback(result);
    } else {
      successCallback(result);
    }
  },
});
package org.leveloneproject.central.kms.domain.keys

import java.util.UUID

import org.leveloneproject.central.kms.domain._

import scala.concurrent.Future

/** Asynchronous persistence interface for Key records. */
trait KeyStore {

  /** Persists the given key, yielding either a KmsError or the stored key. */
  def create(key: Key): Future[Either[KmsError, Key]]

  /** Looks up a key by id; `None` when no key exists for the id. */
  def getById(id: UUID): Future[Option[Key]]
}
package services import ( "fmt" "io" "log" "net/http" "os" utils "github.com/kuruvi-bits/transform/utils" ) func Resize(message utils.Message) { dirPath := fmt.Sprintf("%s/%s", utils.RESIZED_VOL, message.AlbumName) filePath := fmt.Sprintf("%s/%s", dirPath, message.PhotoName) utils.CreateDirIfNotExist(dirPath) url := utils.GetResizeURL(message) response, e := http.Get(url) if e != nil { log.Fatal(e) } defer response.Body.Close() //open a file for writing file, err := os.Create(filePath) if err != nil { log.Fatal(err) } defer file.Close() // Use io.Copy to just dump the response body to the file. This supports huge files _, err = io.Copy(file, response.Body) if err != nil { log.Fatal(err) } fmt.Println("Success!") }
/** * Copyright 2014 Yahoo! Inc. Licensed under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law * or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the specific language * governing permissions and limitations under the License. See accompanying * LICENSE file. */ package com.yahoo.sql4d.indexeragent.meta; import com.google.common.collect.ImmutableMap; import static com.yahoo.sql4d.indexeragent.Agent.*; import static com.yahoo.sql4d.indexeragent.sql.SqlMeta.*; import com.yahoo.sql4d.indexeragent.meta.beans.DataSource; import com.yahoo.sql4d.indexeragent.meta.beans.StatusTrail; import java.util.List; import java.util.Map; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import org.apache.commons.lang.exception.ExceptionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Handles database interaction specifically for Indexer Agent. 
* @author srikalyan */ public class DBHandler { private static final Logger log = LoggerFactory.getLogger(DBHandler.class); private final EntityManagerFactory emFactory; enum Action { ADD, UPDATE, DELETE } public DBHandler() { String host = getHost(); int port = getPort(); String id = getId(); String password = getPassword(); String dbName = getDbName(); String dbType = getDbType(); String connectUrl, dialect, driver; switch(dbType) { case "mysql": connectUrl = String.format("jdbc:mysql://%s:%d/%s?autoReconnectForPools=true", host, port, dbName); driver = "com.mysql.jdbc.Driver"; dialect = "org.hibernate.dialect.MySQLDialect"; break; case "derby" : default: connectUrl = String.format("jdbc:derby://%s:%d/%s;create=true", host, port, dbName); driver = "org.apache.derby.jdbc.ClientDriver"; dialect = "org.hibernate.dialect.DerbyDialect"; } Map<String, String> configOverride = ImmutableMap.of( "javax.persistence.jdbc.url", connectUrl, "javax.persistence.jdbc.user", id, "javax.persistence.jdbc.password", password, "hibernate.dialect", dialect, "javax.persistence.jdbc.driver", driver); log.info("Overriding database configuration : {}", configOverride); emFactory = Persistence.createEntityManagerFactory("indexerAgent", configOverride); } private EntityManager getEntityManager() { return emFactory.createEntityManager(); } private void addUpdateDeleteEntity(Object entity, Action action) { EntityManager em = getEntityManager(); try { em.getTransaction().begin(); switch (action) { case ADD: em.persist(entity); break; case UPDATE: em.merge(entity); break; case DELETE: em.remove(entity); break; } } catch(RuntimeException e) { log.error("Something wrong persisting/merging/removing entity {}, so rolling back . 
Exception is {}", entity, ExceptionUtils.getStackTrace(e)); em.getTransaction().rollback(); } finally { if (em.getTransaction().isActive()) { em.getTransaction().commit(); } em.close(); } } public void addDataSource(DataSource ds) { addUpdateDeleteEntity(ds, Action.ADD); } public void updateDataSource(DataSource ds) { addUpdateDeleteEntity(ds, Action.UPDATE); } public void removeDataSource(DataSource ds) { addUpdateDeleteEntity(ds, Action.DELETE); } public void addStatusTrail(StatusTrail st) { addUpdateDeleteEntity(st, Action.ADD); } public void updateStatusTrail(StatusTrail st) { addUpdateDeleteEntity(st, Action.UPDATE); } public void removeStatusTrail(StatusTrail st) { addUpdateDeleteEntity(st, Action.DELETE); } public List<DataSource> getAllDataSources() { EntityManager em = getEntityManager(); try { return em.createQuery("SELECT ds FROM DataSource ds", DataSource.class).getResultList(); } finally { em.close(); } } public DataSource getDataSource(String tableName) { EntityManager em = getEntityManager(); try { List<DataSource> resultList = em.createQuery("SELECT ds FROM DataSource ds WHERE ds.name = :name", DataSource.class).setParameter("name", tableName).getResultList(); return resultList.isEmpty()?null:resultList.get(0); } finally { em.close(); } } public DataSource getDataSource(int id) { EntityManager em = getEntityManager(); try { return em.find(DataSource.class, id); } finally { em.close(); } } /** * Tasks whose status:not_done and givenUp:zero * @param ds * @return */ public List<StatusTrail> getIncompleteTasks(DataSource ds) { EntityManager em = getEntityManager(); try { return em.createQuery("SELECT st FROM StatusTrail st WHERE st.dataSourceId = :dataSourceId " + "AND st.status = 'not_done' AND st.givenUp = 0 ORDER BY st.id DESC", StatusTrail.class). 
setParameter("dataSourceId", ds.getId()).getResultList(); } finally { em.close(); } } /** * * @return */ public List<StatusTrail> getAllIncompleteTasks() { EntityManager em = getEntityManager(); try { return em.createQuery("SELECT st FROM StatusTrail st WHERE " + " st.status = 'not_done' AND st.givenUp = 0 ORDER BY st.id DESC", StatusTrail.class).getResultList(); } finally { em.close(); } } /** * * @return */ public List<StatusTrail> getAllInprogressTasks() { EntityManager em = getEntityManager(); try { return em.createQuery("SELECT st FROM StatusTrail st WHERE " + " st.status = 'in_progress' AND st.givenUp = 0", StatusTrail.class).getResultList(); } finally { em.close(); } } /** * * @return */ public long getInprogressTasksCount() { EntityManager em = getEntityManager(); try { return (long)em.createQuery("SELECT COUNT(st.id) FROM StatusTrail st WHERE " + " st.status = 'in_progress' AND st.givenUp = 0").getSingleResult(); } finally { em.close(); } } /** * Change the status of a task. * @param st * @param success */ public void markTask(StatusTrail st, boolean success) { st.setStatus(success ? JobStatus.done : JobStatus.not_done); st.setAttemptsDone(st.getAttemptsDone() + 1); st.setGivenUp(st.getAttemptsDone() >= getMaxTaskAttempts() ? 1 : 0); updateStatusTrail(st); } public void shutdown() { log.info("Shutting down and cleaning up database connections.."); emFactory.close(); } }
<?php

require "config.php";

use Illuminate\Database\Capsule\Manager as Capsule;

// Recreate the price_intervals table from scratch.
// dropIfExists() (instead of drop()) keeps this migration idempotent: it no
// longer throws on a fresh database where the table does not exist yet.
Capsule::schema()->dropIfExists('price_intervals');

Capsule::schema()->create('price_intervals', function ($table) {
    $table->increments('id');
    $table->date('start_date');
    $table->date('end_date');
    $table->double('price');
    $table->timestamps();
});
import ecdsa
import json
import redis
from typing import NamedTuple, Union
import binascii
from binascii import unhexlify

from luracoin import errors
from luracoin.exceptions import TransactionNotValid
from luracoin.wallet import pubkey_to_address
from luracoin.config import Config
from luracoin.helpers import (
    mining_reward,
    sha256d,
    bytes_to_signing_key,
    little_endian_to_int,
)


class Transaction:
    """
    Account-style transaction.

    Serialized layout (integers little endian, unsigned):
        1 byte chain | 4 bytes nonce | 4 bytes fee | 8 bytes value |
        34 bytes address | unlock_sig (omitted when serializing to sign)
    """

    def __init__(
        self,
        chain: int = 0,
        nonce: int = 0,
        fee: int = 0,
        value: int = 0,
        to_address: str = None,
        unlock_sig: bytes = None,
    ) -> None:
        self.chain = chain
        self.nonce = nonce
        self.fee = fee
        self.value = value
        self.to_address = to_address
        self.unlock_sig = unlock_sig

    @property
    def is_coinbase(self) -> bool:
        # Coinbase transactions carry a well-known sentinel signature.
        return self.unlock_sig == Config.COINBASE_UNLOCK_SIGNATURE

    def sign(self, private_key) -> "Transaction":
        """Sign the signable serialization and attach pubkey+signature."""
        signature = sign_transaction(
            private_key=private_key,
            transaction_serialized=self.serialize(to_sign=True).hex(),
        )
        self.unlock_sig = signature
        return self

    def json(self) -> dict:
        """JSON-friendly dict representation (unlock_sig as hex or None)."""
        result = {
            "id": self.id,
            "chain": self.chain,
            "nonce": self.nonce,
            "fee": self.fee,
            "value": self.value,
            "to_address": self.to_address,
            "unlock_sig": None,
        }
        if self.unlock_sig:
            result["unlock_sig"] = self.unlock_sig.hex()
        return result

    def serialize(self, to_sign=False) -> bytes:
        """
        Serialize the transaction. With to_sign=True the unlock_sig is left
        out: that shorter form is exactly what gets signed and verified.
        """
        chain = self.chain.to_bytes(1, byteorder="little", signed=False)
        nonce = self.nonce.to_bytes(4, byteorder="little", signed=False)
        fee = self.fee.to_bytes(4, byteorder="little", signed=False)
        value = self.value.to_bytes(8, byteorder="little", signed=False)
        to_address = str.encode(self.to_address)

        serialized = chain + nonce + fee + value + to_address
        if not to_sign and self.unlock_sig:
            serialized += self.unlock_sig
        return serialized

    def deserialize(self, serialized_bytes: bytes) -> None:
        """Populate this instance in place from serialize() output."""
        self.chain = int.from_bytes(serialized_bytes[0:1], byteorder="little")
        self.nonce = int.from_bytes(serialized_bytes[1:5], byteorder="little")
        self.fee = int.from_bytes(serialized_bytes[5:9], byteorder="little")
        self.value = int.from_bytes(serialized_bytes[9:17], byteorder="little")
        self.to_address = serialized_bytes[17:51].decode("utf-8")
        if len(serialized_bytes) > 51:
            self.unlock_sig = serialized_bytes[51:]

    @property
    def id(self) -> str:
        """
        The ID will be the hash SHA256 of all the txins and txouts.
        """
        msg = self.serialize().hex().encode()
        tx_id = sha256d(msg)
        return tx_id

    def make_msg(self) -> str:
        """
        TODO: Improve the message.
        bitcoin.stackexchange.com/questions/37093/what-goes-in-to-the-message-of-a-transaction-signature
        """
        return self.id

    def validate_fields(self, raise_exception=False) -> bool:
        """
        Checks that every field fits the width it is serialized into.
        Returns False (or raises TransactionNotValid) on the first bad field.
        """
        # chain is serialized into a single byte, so 255 is the true upper
        # bound. The previous bound (256) passed validation and then made
        # serialize() raise OverflowError.
        if self.chain < 0 or self.chain > 255:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_CHAIN)
            return False

        # nonce and fee each occupy 4 bytes.
        if self.nonce < 0 or self.nonce > 4_294_967_295:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_NONCE)
            return False

        if self.fee < 0 or self.fee > 4_294_967_295:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_FEE)
            return False

        # value occupies 8 bytes and must be strictly positive.
        if self.value <= 0 or self.value > 18_446_744_073_709_551_615:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_VALUE)
            return False

        if not self.to_address or len(self.to_address) != 34:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_TO_ADDRESS)
            return False

        if not self.unlock_sig or len(self.unlock_sig) != 128:
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_FIELD_SIGNATURE)
            return False

        # A coinbase transaction must not pay into the staking address.
        if (
            self.unlock_sig == Config.COINBASE_UNLOCK_SIGNATURE
            and self.to_address == Config.STAKING_ADDRESS
        ):
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_INVALID_STAKING)
            return False

        return True

    def validate(self, raise_exception=False) -> bool:
        """
        Validate a transaction: field widths first, then (for non-coinbase
        transactions) the unlocking signature against the signable payload.
        """
        if not self.validate_fields(raise_exception=raise_exception):
            return False

        if (
            self.unlock_sig != Config.COINBASE_UNLOCK_SIGNATURE
            and not is_valid_unlocking_script(
                unlocking_script=self.unlock_sig,
                transaction_serialized=self.serialize(to_sign=True).hex(),
            )
        ):
            if raise_exception:
                raise TransactionNotValid(errors.TRANSACTION_INVALID_SIGNATURE)
            return False

        return True

    def to_transaction_pool(self) -> None:
        """Publish this transaction into the Redis-backed mempool."""
        redis_client = redis.Redis(
            host=Config.REDIS_HOST, port=Config.REDIS_PORT, db=Config.REDIS_DB
        )
        redis_client.set(self.id, self.serialize())

    def save(self, block_height: int) -> None:
        """
        Add a transaction to the chainstate. Inside the chainstate database,
        the following key/value pairs are stored:

        'c' + 32-byte transaction hash -> unspent transaction output record
        for that transaction. These records are only present for transactions
        that have at least one unspent output left. Each record stores: the
        version of the transaction, whether it was a coinbase, which block
        height contains it, which outputs are unspent, and the scriptPubKey
        and amount for those unspent outputs.

        [TX VERSION][COINBASE][HEIGHT][NUM OUTPUTS][∞][OUTPUT_LEN][OUTPUT]
           4 bytes    1 byte   4 bytes   VARINT        VARINT

        'B' -> 32-byte block hash: the block hash up to which the database
        represents the unspent transaction outputs.
        """
        pass


def build_message(outpoint, pub_key: str) -> str:
    """
    TODO:
    https://bitcoin.stackexchange.com/questions/37093/what-goes-in-to-the-message-of-a-transaction-signature
    """
    return sha256d(str(outpoint.txid) + str(outpoint.txout_idx) + pub_key)


def build_script_sig(signature: str, public_key: str) -> str:
    """
    <VARINT>SIGNATURE<VARINT>PUBLIC_KEY
    """
    return signature + public_key


def verify_signature(message: bytes, public_key: bytes, signature: bytes) -> bool:
    """Verify signature over message with a SECP256k1 public key.

    Annotations corrected from str: callers pass bytes (see
    is_valid_unlocking_script), and ecdsa requires bytes here.
    """
    vk = ecdsa.VerifyingKey.from_string(public_key, curve=ecdsa.SECP256k1)
    return vk.verify(signature, message)


def deserialize_unlocking_script(unlocking_script: bytes) -> dict:
    """Split a raw unlock_sig into hex public key / signature / address."""
    unlocking_script = unlocking_script.hex()

    pub_key = unlocking_script[:128]
    signature = unlocking_script[128:]

    return {
        "signature": signature,
        "public_key": pub_key,
        "address": pubkey_to_address(pub_key.encode()),
    }


def is_valid_unlocking_script(
    unlocking_script: bytes, transaction_serialized: str
) -> bool:
    # TODO: This functions allows to spend all outpoints since we are
    # verifying the signature not the signature + matching public key.
    try:
        unlocking_script = deserialize_unlocking_script(unlocking_script)
    except binascii.Error:
        return False

    message = transaction_serialized.encode()

    try:
        is_valid = verify_signature(
            message=message,
            public_key=bytes.fromhex(unlocking_script["public_key"]),
            signature=bytes.fromhex(unlocking_script["signature"]),
        )
    except ecdsa.keys.BadSignatureError:
        is_valid = False
    except AssertionError:
        is_valid = False

    return is_valid


def sign_transaction(private_key: bytes, transaction_serialized: str) -> bytes:
    """Return 64-byte public key followed by the 64-byte signature."""
    private_key = bytes_to_signing_key(private_key=private_key)
    vk = private_key.get_verifying_key()
    public_key = vk.to_string()
    signature = private_key.sign(transaction_serialized.encode())
    return public_key + signature
# Experiment script: trains and evaluates the COFILS collaborative-filtering
# model on the MovieLens dataset via the Persa framework (Julia 0.x `reload`).
reload("Persa")

using Base.Test
using DecisionTree
using DatasetsCF

# write your own tests here
#@test 1 == 2

###

reload("COFILS")

# Load MovieLens and split it 90% train / 10% test.
dataset = DatasetsCF.MovieLens()
holdout = Persa.HoldOut(dataset, 0.9)
(ds_train, ds_test) = Persa.get(holdout)

# Build a COFILS model with 10 latent features and train it.
model = COFILS.Cofils(ds_train, 10)
Persa.train!(model, ds_train)

# Print the evaluation on the held-out ratings.
print(Persa.aval(model, ds_test))
/// Tera/Jinja-style template fixture: defines an `asset_url` macro that
/// expands a file name into a quoted `/assets/...` path.
///
/// The explicit `'static` lifetime was dropped: it is redundant on a
/// `static` item, whose references are `'static` by definition.
pub static TEXT: &str =
    "{% macro asset_url(filename) %} \"/assets/{{ filename }}\" {% endmacro asset_url %}";
using System;
using System.Runtime.Serialization;

namespace DomainBlocks.Persistence
{
    /// <summary>
    /// Thrown when an operation targets an event stream that has been deleted.
    /// Carries the name of the deleted stream in <see cref="StreamName"/>.
    /// </summary>
    [Serializable]
    public class StreamDeletedException : Exception
    {
        /// <summary>Name of the stream that was found to be deleted.</summary>
        public string StreamName { get; }

        public StreamDeletedException(string streamName)
        {
            StreamName = streamName;
        }

        public StreamDeletedException(string streamName, string message) : base(message)
        {
            StreamName = streamName;
        }

        public StreamDeletedException(string streamName, string message, Exception inner)
            : base(message, inner)
        {
            StreamName = streamName;
        }

        // Deserialization constructor: READS StreamName back out of the
        // serialized data. (The previous implementation wrote to `info` and
        // called base.GetObjectData here — that is the serialization
        // direction — so StreamName was lost on every round trip.)
        protected StreamDeletedException(
            SerializationInfo info,
            StreamingContext context) : base(info, context)
        {
            if (info == null) throw new ArgumentNullException(nameof(info));
            StreamName = info.GetString(nameof(StreamName));
        }

        // Serialization: writes StreamName alongside the base Exception state.
        public override void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            if (info == null) throw new ArgumentNullException(nameof(info));
            info.AddValue(nameof(StreamName), StreamName);
            base.GetObjectData(info, context);
        }
    }
}
import {bindable} from 'aurelia-framework'; import {inject} from 'aurelia-framework'; import moment from 'moment'; import {GameService} from '../services/gameService'; @inject(GameService) export class GameListItemCustomElement { constructor(GameService){ this.gameService = GameService; } @bindable game; get gameDate(){ //TODO Localization return moment(`${this.game.date}`).format("MMM Do YY"); } }
#!/usr/bin/env ruby

# Print every line of the study-notes file that mentions "代码块"
# (Chinese for "code block").
NOTES_FILE = "2.2 Ruby Day 2.md"

IO.foreach(NOTES_FILE) do |line|
  puts line if line.include?("代码块")
end
package net.jp2p.jxse.services;

import net.jp2p.jxta.factory.IJxtaComponents.JxtaComponents;
import net.jxta.impl.loader.JxtaLoaderModuleManager;
import net.jxta.impl.modulemanager.JxtaModuleBuilder;
import net.jxta.module.IModuleBuilder;
import net.jxta.peergroup.core.Module;

/**
 * Service component that exposes the root JXTA module manager and lets
 * module builders be registered with / unregistered from it.
 */
public class Component{

	// NOTE(review): a static field is reassigned in every constructor call,
	// so the last instance constructed wins — confirm the container only ever
	// creates one instance of this component.
	private static JxtaLoaderModuleManager<Module> manager;

	// Set once at least one JxtaModuleBuilder has been registered.
	private boolean canBuild;

	public Component() {
		manager = JxtaLoaderModuleManager.getRoot( Component.class, true );
		this.canBuild = false;
	}

	/** Component lifecycle callback (intentionally empty). */
	public void activate(){ /* DO NOTHING */ }

	/** Component lifecycle callback (intentionally empty). */
	public void deactivate(){ /* DO NOTHING */ }

	/** True once a JxtaModuleBuilder has been registered via registerBuilder. */
	protected final boolean canBuild() {
		return canBuild;
	}

	/**
	 * Registers the builder with the root module manager; JxtaModuleBuilder
	 * instances additionally mark this component as able to build.
	 */
	public void registerBuilder(IModuleBuilder<Module> builder) {
		manager.registerBuilder( builder);
		if( builder instanceof JxtaModuleBuilder )
			this.canBuild = true;
	}

	public void unregisterBuilder( IModuleBuilder<Module> builder ) {
		manager.unregisterBuilder( builder );
	}

	// NOTE(review): stubbed out — always answers true regardless of the
	// component asked about; the descriptor-based check is commented out.
	public static final boolean canBuild( JxtaComponents jxtaComponent ) {
		//PlatformDescriptor descriptor = new PlatformDescriptor();
		return true;//manager.canBuild(descriptor);
	}
}
%%%-------------------------------------------------------------------
%%% @author Michal Stanisz
%%% @copyright (C) 2021 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% Module responsible for managing QoS status persistent model.
%%% For more details consult `qos_status` module doc.
%%% @end
%%%-------------------------------------------------------------------
-module(qos_status_model).
-author("Michal Stanisz").

-include("modules/datastore/qos.hrl").
-include("modules/datastore/datastore_models.hrl").
-include("modules/datastore/datastore_runner.hrl").
-include_lib("ctool/include/errors.hrl").
-include_lib("ctool/include/logging.hrl").

%% API
-export([create/4, update/3, get/2, delete/2]).

%% datastore_model callbacks
-export([get_record_struct/1, get_record_version/0]).

-type doc() :: datastore_doc:doc(record()).
-type diff() :: datastore_doc:diff(record()).
-type id() :: datastore_doc:key().
-type record() :: #qos_status{}.
%% Directory role within a traverse: the dir the traverse started from,
%% or a child dir encountered during it.
-type dir_type() :: ?QOS_STATUS_TRAVERSE_CHILD_DIR | ?QOS_STATUS_TRAVERSE_START_DIR.

-export_type([diff/0]).

-define(CTX, (qos_status:get_ctx())).

%%%===================================================================
%%% API
%%%===================================================================

%% Creates the status document for DirUuid within the given traverse. The
%% key is derived from both ids (see generate_status_doc_id/2), so each
%% (traverse, directory) pair has exactly one document.
-spec create(od_space:id(), traverse:id(), file_meta:uuid(), dir_type()) -> {ok, doc()}.
create(SpaceId, TraverseId, DirUuid, DirType) ->
    Id = generate_status_doc_id(TraverseId, DirUuid),
    datastore_model:create(?CTX, #document{key = Id, scope = SpaceId,
        value = #qos_status{is_start_dir = DirType == ?QOS_STATUS_TRAVERSE_START_DIR}
    }).

%% Applies Diff to the status document of Uuid within the given traverse.
-spec update(traverse:id(), file_meta:uuid(), diff()) -> {ok, doc()} | {error, term()}.
update(TraverseId, Uuid, Diff) ->
    Id = generate_status_doc_id(TraverseId, Uuid),
    datastore_model:update(?CTX, Id, Diff).
%% Fetches the status document for Uuid within the given traverse.
-spec get(traverse:id(), file_meta:uuid()) -> {ok, doc()} | {error, term()}.
get(TraverseId, Uuid) ->
    Id = generate_status_doc_id(TraverseId, Uuid),
    datastore_model:get(?CTX, Id).

%% Removes the status document for Uuid within the given traverse.
-spec delete(traverse:id(), file_meta:uuid()) -> ok | {error, term()}.
delete(TraverseId, Uuid)->
    Id = generate_status_doc_id(TraverseId, Uuid),
    datastore_model:delete(?CTX, Id).

%%%===================================================================
%%% datastore_model callbacks
%%%===================================================================

-spec get_record_version() -> datastore_model:record_version().
get_record_version() ->
    1.

%% Record structure for version 1 of the #qos_status{} model.
-spec get_record_struct(datastore_model:record_version()) ->
    datastore_model:record_struct().
get_record_struct(1) ->
    {record, [
        {previous_batch_last_filename, binary},
        {current_batch_last_filename, binary},
        {files_list, [string]},
        {child_dirs_count, integer},
        {is_last_batch, boolean},
        {is_start_dir, boolean}
    ]}.

%%%===================================================================
%%% Internal functions
%%%===================================================================

%% @private
%% Builds the datastore key from the traverse id and directory uuid;
%% presumably passing DirUuid as the adjacency argument keeps the document
%% co-located with the directory's other documents — see datastore_key docs.
-spec generate_status_doc_id(traverse:id(), file_meta:uuid()) -> id().
generate_status_doc_id(TraverseId, DirUuid) ->
    datastore_key:adjacent_from_digest([DirUuid, TraverseId], DirUuid).
--- author: mikeparker104 ms.author: miparker ms.date: 06/02/2020 ms.service: notification-hubs ms.topic: include ms.openlocfilehash: 5e75c5d5510f596eb7911cae0310e60b6bef67bf ms.sourcegitcommit: 5cace04239f5efef4c1eed78144191a8b7d7fee8 ms.translationtype: MT ms.contentlocale: pl-PL ms.lasthandoff: 07/08/2020 ms.locfileid: "86146452" --- ### <a name="send-a-test-notification"></a>Wysyłanie powiadomienia testowego 1. Otwórz nową kartę w programie [Poster](https://www.postman.com/downloads/). 1. Ustaw żądanie na **wpis**, a następnie wprowadź następujący adres: ```xml https://<app_name>.azurewebsites.net/api/notifications/requests ``` 1. Jeśli wybrano opcję ukończenia [uwierzytelniania klientów przy użyciu klucza interfejsu API](#authenticate-clients-using-an-api-key-optional) , należy skonfigurować nagłówki żądania, aby zawierały wartość **apikey** . | Klucz | Wartość | | ------------------------------ | ------------------------------ | | apikey | <your_api_key> | 1. Wybierz opcję **RAW** dla **treści**, a następnie wybierz pozycję **JSON** z listy opcje formatu, a następnie Dołącz niepewną zawartość **JSON** : ```json { "text": "Message from Postman!", "action": "action_a" } ``` 1. Wybierz przycisk **kod** , który znajduje się poniżej przycisku **Zapisz** w prawym górnym rogu okna. Żądanie powinno wyglądać podobnie do poniższego przykładu w przypadku wyświetlania **kodu HTML** (w zależności od tego, czy został dołączony nagłówek **apikey** ): ```html POST /api/notifications/requests HTTP/1.1 Host: https://<app_name>.azurewebsites.net apikey: <your_api_key> Content-Type: application/json { "text": "Message from backend service", "action": "action_a" } ``` 1. Uruchom aplikację **PushDemo** na jednej lub obu platformach docelowych (**Android** i **iOS**). 
> [!NOTE] > W przypadku testowania w systemie **Android** upewnij się, że nie uruchomiono **debugowania**lub jeśli aplikacja została wdrożona przez uruchomienie aplikacji, Wymuś zamknięcie aplikacji i jej ponowne uruchomienie przy użyciu programu uruchamiającego. 1. W aplikacji **PushDemo** naciśnij przycisk **zarejestruj** . 1. Z powrotem w programie **[Poster](https://www.postman.com/downloads)** Zamknij okno **Generuj fragmenty kodu** (jeśli jeszcze tego nie zrobiono), a następnie kliknij przycisk **Wyślij** . 1. Sprawdź, czy w programie **[Poster](https://www.postman.com/downloads)** znajduje się odpowiedź **200 OK** i czy alert pojawia się w aplikacji z **odebraną akcją Action**. 1. Zamknij aplikację **PushDemo** , a następnie ponownie kliknij przycisk **Wyślij** **[.](https://www.postman.com/downloads)** 1. Sprawdź, czy ponownie otrzymujesz odpowiedź **200 OK** w **[ogłoszeniu](https://www.postman.com/downloads)** . Sprawdź, czy w obszarze powiadomień dla aplikacji **PushDemo** jest wyświetlana informacja o poprawnym komunikacie. 1. Naciśnij pozycję powiadomienie, aby upewnić się, że aplikacja zostanie otwarta i zostanie wyświetlona **Akcja akcja akcji odebrana** . 1. Z powrotem w programie **[Poster](https://www.postman.com/downloads)** zmodyfikuj poprzednią treść żądania, aby wysłać powiadomienie dyskretne, określając *action_b* zamiast *action_a* dla wartości **akcji** . ```json { "action": "action_b", "silent": true } ``` 1. Gdy aplikacja jest nadal otwarta, kliknij przycisk **Wyślij** w programie **[Poster](https://www.postman.com/downloads)**. 1. Sprawdź, czy otrzymujesz odpowiedź na **200 OK** w programie **[Poster](https://www.postman.com/downloads)** i czy alert pojawia się w aplikacji pokazującej **odebraną akcję ActionB** zamiast akcji **Action**. 1. Zamknij aplikację **PushDemo** , a następnie ponownie kliknij przycisk **Wyślij** **[.](https://www.postman.com/downloads)** 1. 
Sprawdź, czy otrzymujesz odpowiedź **200 OK** w programie **[Postman](https://www.postman.com/downloads)** i czy powiadomienie dyskretne nie jest wyświetlane w obszarze powiadomień.
/**
 * System Extensions
 *
 * Copyright (C) 2014-2017 Peter "SaberUK" Powell <[email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0.html
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

using System;

using SystemExt.Log;
using SystemExt.Terminal;

namespace SystemExt.Demo
{

    /// <summary>
    /// Demo for <see cref="SystemExt.Log"/>.
    /// </summary>
    public static class Log
    {

        /// <summary>
        /// Entry point for the <see cref="SystemExt.Log"/> demo.
        /// </summary>
        /// <param name="args">
        /// Command line arguments.
        /// </param>
        /// <returns>
        /// The code to terminate the application with on exit.
        /// </returns>
        public static int EntryPoint(string[] args)
        {
            // ApplicationChooser presents the two sub-demos and runs the one
            // selected on the command line.
            return new ApplicationChooser()
                .AddEntryPoint(LogComponents, "Iterate through various component filters and write a message")
                .AddEntryPoint(LogLevels, "Iterate through all log levels and write a message")
                .Run(args);
        }

        /// <summary>
        /// A demo which iterates over a series of tokens and logs them to a stream.
        /// </summary>
        /// <param name="arg">
        /// Command line arguments.
        /// </param>
        /// <returns>
        /// The code to terminate the application with on exit.
        /// </returns>
        private static int LogComponents(string[] arg)
        {
            // Initialize manager and STDOUT logger.
            var manager = new LogManager();
            var logger = new StreamLogger(Console.OpenStandardOutput());

            // Iterate over various log tokens.
            foreach (var logToken in new[] { "*", "INVALID DEMO2", "DEMO1 -DEMO2", "* -DEMO2 -* INVALID" })
            {
                // Each token is a component filter; writing under different
                // components shows which ones the filter lets through.
                manager.AddLogger(logToken, LogLevel.Verbose, logger);
                Console.WriteLine("Component filter set to {0}", logToken);
                manager.Write(LogLevel.Verbose, "DEMO1", "Logging with the DEMO1 component!");
                manager.Write(LogLevel.Verbose, "DEMO2", "Logging with the DEMO2 component!");
                manager.Write(LogLevel.Verbose, manager, "Logging with the LogManager component!");
            }

            Console.WriteLine("Press any key to exit.");
            Console.ReadKey();
            return 0;
        }

        /// <summary>
        /// A demo which iterates over log levels and writes messages at each.
        /// </summary>
        /// <param name="arg">
        /// Command line arguments.
        /// </param>
        /// <returns>
        /// The code to terminate the application with on exit.
        /// </returns>
        private static int LogLevels(string[] arg)
        {
            // Initialize manager and STDOUT logger.
            var manager = new LogManager();
            var logger = new StreamLogger(Console.OpenStandardOutput());

            // Iterate through the log levels.
            for (var level = LogLevel.None; level <= LogLevel.Verbose; level++)
            {
                // Change the log level.
                Console.WriteLine("Setting log level to {0:G}.", level);
                manager.AddLogger("DEMO", level, logger);

                // Write messages to the logger at every level.
                manager.Write(LogLevel.Verbose, "DEMO", "Verbose!");
                manager.Write(LogLevel.Information, "DEMO", "Information!");
                manager.Write(LogLevel.Warning, "DEMO", "Warning!");
                manager.Write(LogLevel.Error, "DEMO", "Error!");
                manager.Write(LogLevel.Critical, "DEMO", "Critical!");
            }

            Console.WriteLine("Press any key to exit.");
            Console.ReadKey();
            return 0;
        }
    }
}
# Gamification

> Climbing is demanding, let's make it more fun with game mechanics!

See:

- p21 for the self-assessment
- p91 for the technical clues

<!---------------------------------------------------------------------------->

# Table of Contents

- [Physical Skills](#physical-skills)
- [Technical Skills](#technical-skills)
- [Mental Skills](#mental-skills)
- [Organizational Skills](#organizational-skills)
- [Overall Skills](#overall-skills)
---
layout: post
title: Distributed software testing
author: Daniel Mewes
author_github: danielmewes
---

# About me

A word about me first: My name is Daniel Mewes, and I just came over to California to work at RethinkDB as an intern for the coming months. After having been an undergraduate student of computer science at Saarland University, Germany for the last two years, I am excited to work on an influential real-world project at RethinkDB now. Why RethinkDB? Not only does RethinkDB develop an exciting and novel piece of database technology, RethinkDB also provides the great "startup kind" of work experience.

# Software testing

In complex software systems like database management systems, different components have to work together. These components can interact in complex ways, yielding a virtually infinite number of possible states that the overall system can reach. This has consequences for software testing. As bugs in the code might only show up in a small fraction of the possible states, comprehensive testing of the system is essential. Encapsulation of code and data into objects can reduce the number of states that must be considered for any single piece of code. However, an extremely large number of states can still remain, especially when considering parallel systems. Reliability requirements for database management systems on the other hand are stringent. Losing or corrupting data due to bugs in the program cannot be tolerated here.

<!--more-->

Among other measures, we at RethinkDB ensure the reliability of our software by running extensive tests on a daily basis. The problem with these tests is that they take a lot of time to complete. We recently reached time requirements of more than 24 hours on a decent machine for a single test run. So clearly a single machine is not enough anymore to run the tests. For our daily test runs, we want to get results quickly.
Buying more machines is pricey, especially as those machines would be idle during the times at which no tests are run. It also is not very flexible. # Tapping into the cloud Cloud computing provides a more flexible and less pricey way to circumvent the limitations of limited local hardware resources. We decided to use Amazon's Elastic Compute Cloud ([Amazon EC2][]). If you need the computing power of ten systems, you can get that from EC2 in a matter of minutes. If you need the power of a hundred machines, you can get that in a matter of minutes, too. Basically, Amazon's EC2 provides you with as much computing power as you need, at just the time that you need it. EC2 allows to dynamically allocate and deallocate virtual compute nodes, which are billed on an hourly basis. Each node can be used like a normal computer. The nodes run Linux (Windows nodes are also available) and are accessible through SSH. So EC2 looked like a promising platform to make our tests finish faster. [Amazon EC2]: http://aws.amazon.com/ec2/ ![Distributed Software Testing](/assets/images/posts/2010-12-09-distributed-software-testing-1.png) _EC2 console showing a few nodes_ Our existing test suite already split up the work into independent test scripts. What was missing for utilizing EC2 was an automated mechanism to start and setup a number of EC2 nodes and dispatch the individual tests to these nodes to run in parallel. Setting up a node especially involves the step of installing a current build of RethinkDB together with a number of dependencies on the node's file system. I wrote a Python script to fulfill exactly these tasks. Our main concern was to improve the overall performance of the testing process as much as possible. In more detail, our new distributed testing tool works in the following steps: * Allocate a number of nodes in Amazon's EC2. * Once all nodes are up and booted, install the current build of RethinkDB on each of them. 
As the bandwidth of the Internet connection in our office is much lower than what is available to the EC2 nodes, we use SFTP to install RethinkDB on only one of the nodes and then let that node distribute it to all remaining ones. * We can now start running tests on the nodes: * Pick a test from the list of all individual tests to be run. * Find a node which is not currently busy running another test. If no node is available, wait until a node becomes free. * Initiate the test on the free node. To do this, we use a wrapper script which we invoke and immediately background on the remote node. The wrapper script takes care of running the actual test and redirecting its output and result into specific files, which we can later retrieve asynchronously. * After repeating step 3 for all tests in the list, wait for all nodes to finish their current work. * Collect the results of all tests from the different nodes. This works by reading from the files in which our wrapper script has stored the tests' results. * Finally, terminate the allocated nodes in EC2. To communicate with the compute nodes, I opted for the use of [Paramiko][], an implementation of SSH2 for Python. Having direct access to the SSH2 protocol from a Python script makes running commands remotely as well as fetching and installing files from/into the remote systems very convenient. For allocating and terminating EC2 nodes, we use [Boto][], which provides an interface for accessing Amazon's AWS API from within Python programs. [Paramiko]: http://www.lag.net/paramiko/ [Boto]: http://boto.s3.amazonaws.com/index.html The results are convincing: Instead of 26 hours on a (fast) local machine, running all of our tests takes only 4 hours when distributed across ten nodes in EC2. By using still more nodes, the time for testing can be lowered even further. This is very useful. Say we just made an important change to our code and want to verify that everything works as it is supposed to. 
With local test runs, this would mean waiting at least a day, even longer if our testing machine is occupied with an earlier test run. If one of the test detects a problem with the change and we fix it, it takes another day at least until we can see if the fix even worked and had no other side effects. Thanks to cloud computing and our distributed testing system, we can now initiate an arbitrary number of test runs on demand, each of which finishes in a matter of mere hours.
# AWS User Group Kochi Official Website of AWS User Group Kochi community ### Powered by - GitHub - Gatsby - Netlify
import { Injectable } from '@angular/core'; import { Observable } from 'rxjs/Observable'; import { HttpClient} from '@angular/common/http' export interface Charm { id: number, slug: string, name: string, ranks: CharmRank[] } export interface CharmRank { name: string, level: number, rarity: number, skills: SkillRank[], crafting: CharmRankCrafting } export interface SkillRank { id: number, slug: string, level: number, description: string, skill: number, skillName: string, modifiers: SkillRankModifiers } export interface SkillRankModifiers { affinity: number, attack: number, damageFire: number, damageWater: number, damageIce: number, damageThunder: number, damageDragon: number, defense: number, health: number, sharpnessBonus: number, resistAll: number, resistFire: number, resistWater: number, resistIce: number, resistThunder: number, resistDragon: number } export interface CharmRankCrafting { craftable: boolean, materials: CraftingCost[] } export interface CraftingCost { quantity: number, item: Item } export interface Item { id: number, name: string, description: string, rarity: number, carryLimit: number, value: number } @Injectable({ providedIn: 'root' }) export class CharmService { charms: Charm[] = []; constructor( private http: HttpClient) { } getCharm(id: number): Observable<Charm> { return this.http.get<Charm>('https://mhw-db.com/charms/' + id) } getAllCharm(): Observable<Charm[]> { console.log("test"); var temp = this.http.get<Charm[]>('https://mhw-db.com/charms'); console.log(temp); return temp; } }
<?php

declare(strict_types=1);

namespace Linio\SellerCenter\Factory\Xml\Order;

use DateTimeImmutable;
use Linio\SellerCenter\Exception\InvalidXmlStructureException;
use Linio\SellerCenter\Model\Order\Order;
use SimpleXMLElement;

class OrderFactory
{
    /**
     * Child elements every <Order> node must contain. The array order matches
     * the original check order, so the first missing element is the one
     * reported — behavior is unchanged versus the hand-written check chain.
     */
    private const REQUIRED_PROPERTIES = [
        'OrderId',
        'CustomerFirstName',
        'CustomerLastName',
        'OrderNumber',
        'PaymentMethod',
        'Remarks',
        'DeliveryInfo',
        'Price',
        'GiftOption',
        'GiftMessage',
        'VoucherCode',
        'CreatedAt',
        'UpdatedAt',
        'AddressUpdatedAt',
        'AddressBilling',
        'AddressShipping',
        'NationalRegistrationNumber',
        'ItemsCount',
        'PromisedShippingTime',
        'ExtraAttributes',
        'Statuses',
    ];

    /**
     * Builds an Order model from a SimpleXMLElement.
     *
     * @throws InvalidXmlStructureException when any required child element is missing
     */
    public static function make(SimpleXMLElement $element): Order
    {
        foreach (self::REQUIRED_PROPERTIES as $property) {
            if (!property_exists($element, $property)) {
                throw new InvalidXmlStructureException('Order', $property);
            }
        }

        // Any non-empty <GiftOption> value counts as "gift" (original semantics).
        $giftOption = !empty($element->GiftOption);

        // Date parsing never throws: createFromFormat returns false on failure,
        // which the helper maps to null.
        $createdAt = self::parseDateTime((string) $element->CreatedAt);
        $updatedAt = self::parseDateTime((string) $element->UpdatedAt);
        $addressUpdatedAt = self::parseDateTime((string) $element->AddressUpdatedAt);

        $addressBilling = AddressFactory::make($element->AddressBilling);
        $addressShipping = AddressFactory::make($element->AddressShipping);

        $promisedShippingTime = self::parseDateTime((string) $element->PromisedShippingTime);

        $statuses = [];
        foreach ($element->Statuses->Status as $status) {
            $statuses[] = (string) $status;
        }

        return Order::fromData(
            (int) $element->OrderId,
            (int) $element->OrderNumber,
            (string) $element->CustomerFirstName,
            (string) $element->CustomerLastName,
            (string) $element->PaymentMethod,
            (string) $element->Remarks,
            (string) $element->DeliveryInfo,
            (float) $element->Price,
            $giftOption,
            (string) $element->GiftMessage,
            (string) $element->VoucherCode,
            $createdAt,
            $updatedAt,
            $addressUpdatedAt,
            $addressBilling,
            $addressShipping,
            (string) $element->NationalRegistrationNumber,
            (int) $element->ItemsCount,
            $promisedShippingTime,
            (string) $element->ExtraAttributes,
            $statuses
        );
    }

    /**
     * Parses "Y-m-d H:i:s" formatted strings; returns null on failure,
     * matching the original `!empty($dateTime) ? $dateTime : null` idiom.
     */
    private static function parseDateTime(string $value): ?DateTimeImmutable
    {
        $dateTime = DateTimeImmutable::createFromFormat('Y-m-d H:i:s', $value);

        return $dateTime instanceof DateTimeImmutable ? $dateTime : null;
    }
}
require 'rails_helper' describe 'GET /locations/:location_id/contacts' do context 'when location has contacts' do before :all do @loc = create(:location) @first_contact = @loc.contacts. create!(attributes_for(:contact_with_extra_whitespace)) end before :each do get api_location_contacts_url(@loc, subdomain: ENV['API_SUBDOMAIN']) end after(:all) do Organization.find_each(&:destroy) end it 'returns a 200 status' do expect(response).to have_http_status(200) end it 'includes the id attribute in the serialization' do expect(json.first['id']).to eq(@first_contact.id) end it 'includes the name attribute in the serialization' do expect(json.first['name']).to eq(@first_contact.name) end it 'includes the title attribute in the serialization' do expect(json.first['title']).to eq(@first_contact.title) end it 'includes the email attribute in the serialization' do expect(json.first['email']).to eq(@first_contact.email) end it 'includes the fax attribute in the serialization' do expect(json.first['fax']).to eq(@first_contact.fax) end it 'includes the phone attribute in the serialization' do expect(json.first['phone']).to eq(@first_contact.phone) end it 'includes the extension attribute in the serialization' do expect(json.first['extension']).to eq(@first_contact.extension) end end context "when location doesn't have contacts" do before :all do @loc = create(:location) end before :each do get api_location_contacts_url(@loc, subdomain: ENV['API_SUBDOMAIN']) end after(:all) do Organization.find_each(&:destroy) end it 'returns an empty array' do expect(json).to eq([]) end it 'returns a 200 status' do expect(response).to have_http_status(200) end end end
<?php declare(strict_types=1); /* * This file is part of the Runroom package. * * (c) Runroom <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Runroom\UserBundle\Repository; use Doctrine\ORM\EntityManagerInterface; use Doctrine\ORM\EntityRepository; use Runroom\UserBundle\Model\UserInterface; final class UserRepository implements UserRepositoryInterface { private EntityManagerInterface $entityManager; /** @phpstan-var class-string<UserInterface> */ private string $class; /** @phpstan-param class-string<UserInterface> $class */ public function __construct(EntityManagerInterface $entityManager, string $class) { $this->entityManager = $entityManager; $this->class = $class; } public function loadUserByIdentifier(string $identifier): ?UserInterface { return $this->getRepository()->findOneBy(['email' => $identifier]); } public function create(): UserInterface { return new $this->class(); } public function save(UserInterface $user): void { $this->entityManager->persist($user); $this->entityManager->flush(); } /** @phpstan-return EntityRepository<UserInterface> */ private function getRepository(): EntityRepository { return $this->entityManager->getRepository($this->class); } }
json.id entry.id json.feed format_text(@titles[entry.feed_id] || entry.feed.title) json.title format_text(entry.title) json.author format_text(entry.author) json.published entry.published.iso8601 json.content text_format(entry.content)
package api

import (
	"path"
	"time"
)

// Experiment describes an experiment and its tasks.
type Experiment struct {
	// Identity
	ID   string `json:"id"`
	Name string `json:"name,omitempty"`

	// Ownership
	Owner  Identity `json:"owner"`
	Author Identity `json:"author"`
	User   Identity `json:"user"` // TODO: Deprecated.

	Description string           `json:"description,omitempty"`
	Nodes       []ExperimentNode `json:"nodes"`
	Created     time.Time        `json:"created"`
}

// DisplayID returns the most human-friendly name available for an experiment
// while guaranteeing that it's unique and non-empty.
func (e *Experiment) DisplayID() string {
	// "user/name" when a name exists; otherwise fall back to the unique ID.
	if e.Name != "" {
		return path.Join(e.User.Name, e.Name)
	}
	return e.ID
}

// ExperimentSpec describes a set of tasks with optional dependencies.
// This set represents a (potentially disconnected) directed acyclic graph.
type ExperimentSpec struct {
	// (optional) Organization on behalf of whom this resource is created. The
	// user issuing the request must be a member of the organization. If omitted,
	// the resource will be owned by the requestor.
	Organization string `json:"org,omitempty"`

	// (optional) Text description of the experiment.
	Description string `json:"description,omitempty"`

	// (required) Tasks to create. Tasks may be defined in any order, though all
	// dependencies must be internally resolvable within the experiment.
	Tasks []ExperimentTaskSpec `json:"tasks"`

	// (optional) A token representing the user to which the object should be attributed.
	// If omitted attribution will be given to the user issuing the request.
	AuthorToken string `json:"author_token,omitempty"`

	// (optional) Settings for the Comet.ml integration, if it should be used for this experiment.
	Comet *ExperimentCometSpec `json:"comet,omitempty"`
}

// ExperimentNode describes a task along with its links within an experiment.
type ExperimentNode struct {
	Name     string     `json:"name,omitempty"`
	TaskID   string     `json:"task_id"`
	ResultID string     `json:"result_id"`
	Status   TaskStatus `json:"status"`
	// URL of the corresponding Comet.ml page, when the integration is enabled.
	CometURL string `json:"cometUrl,omitempty"`

	// Identifiers of tasks dependent on this node within the containing experiment.
	ChildTasks []string `json:"child_task_ids"`

	// Identifiers of task on which this node depends within the containing experiment.
	ParentTasks []string `json:"parent_task_ids"`
}

// DisplayID returns the most human-friendly name available for an experiment
// node while guaranteeing that it's unique within the context of its experiment.
func (n *ExperimentNode) DisplayID() string {
	if n.Name != "" {
		return n.Name
	}
	return n.TaskID
}

// ExperimentTaskSpec describes a task spec with optional dependencies on other
// tasks within an experiment. Tasks refer to each other by the Name field.
type ExperimentTaskSpec struct {
	// (optional) Name of the task node, which need only be defined if
	// dependencies reference it.
	Name string `json:"name,omitempty"`

	// (required) Specification describing the task to run.
	Spec TaskSpec `json:"spec"`

	// (optional) Tasks on which this task depends. Mounts will be applied, in
	// the order defined here, after existing mounts in the task spec.
	DependsOn []TaskDependency `json:"depends_on,omitempty"`
}

// TaskDependency describes a single "edge" in a task dependency graph.
type TaskDependency struct {
	// (required) Name of the task on which the referencing task depends.
	ParentName string `json:"parent_name"`

	// (optional) Path in the child task to which parent results will be mounted.
	// If absent, this is treated as an order-only dependency.
	ContainerPath string `json:"container_path,omitempty"`
}

// ExperimentCometSpec configures the optional Comet.ml integration.
type ExperimentCometSpec struct {
	// (required) Whether or not to enable the integration for this experiment.
	Enable bool `json:"enable"`

	// (optional) The name of the experiment (shown in the Comet.ml interface)
	ExperimentName string `json:"experiment,omitempty"`

	// (optional) The name of the Comet.ml project for this experiment.
	ProjectName string `json:"project,omitempty"`

	// (optional) The name of the Comet.ml workspace for this experiment.
	WorkspaceName string `json:"workspace,omitempty"`
}

// ExperimentPatchSpec describes a patch to apply to an experiment's editable
// fields. Only one field may be set in a single request.
type ExperimentPatchSpec struct {
	// (optional) Unqualified name to assign to the experiment. It is considered
	// a collision error if another experiment has the same creator and name.
	Name *string `json:"name,omitempty"`

	// (optional) Description to assign to the experiment or empty string to
	// delete an existing description.
	Description *string `json:"description,omitempty"`
}
#!/bin/bash # ftrc.sh # Simple wrapper to use kernel ftrace facility. trap 'echo 0 > ${PFX}/tracing_on ; popd > /dev/null' INT QUIT name=$(basename $0) PFX=/sys/kernel/debug/tracing TRACE_INTERVAL=5 if [ `id -u` -ne 0 ]; then echo "$name: need to be root." exit 1 fi if [ $# -ne 1 ]; then echo "Usage: $name ftrace-interval-in-sec" exit 1 fi TRACE_INTERVAL=$1 pushd . >/dev/null cd ${PFX} echo "Select tracer from the list:" cat ${PFX}/available_tracers read tracer echo "tracer = $tracer" #TODO- validity check echo "${tracer}" > ${PFX}/current_tracer echo -n "[current_tracer] Current Tracer is: " cat ${PFX}/current_tracer echo "[trace_options] Current Trace Options are: " cat ${PFX}/trace_options echo if [ ${tracer} == "function_graph" ]; then echo "[set_graph_function] Current function(s) traced are: " cat /sys/kernel/debug/tracing/set_graph_function echo "Type in your own functions (space-separated); [Enter] keeps default: " read graph_funcs if [ -n "${graph_funcs}" ]; then for func in ${graph_funcs} do echo "function: $func" echo "$func" >> /sys/kernel/debug/tracing/set_graph_function done echo echo "New graph-traced functions are:" cat /sys/kernel/debug/tracing/set_graph_function fi fi echo -n "Confirm Trace options above and START trace? [Y/n]: " read reply if [[ $reply == "n" ]] || [[ $reply == "N" ]]; then echo "$name: aborting now..." exit 1 fi echo echo "Will now ftrace for $TRACE_INTERVAL seconds..." echo "To manually Stop, ^C" echo echo "Starting trace now..." echo 1 > ${PFX}/tracing_on sleep $TRACE_INTERVAL echo 0 > ${PFX}/tracing_on #tail -f ${PFX}/trace >> /tmp/ftrace_log.txt cat ${PFX}/trace > /tmp/ftrace_log.txt popd > /dev/null
# `Faker().breakingBad` [Dictionary file](../src/main/resources/locales/en/breaking_bad.yml) Available Functions: ```kotlin Faker().breakingBad.character() // => Walter White Faker().breakingBad.episode() // => Pilot ```
// TodoMVC-style app backed by a REST API; renders via the `template`
// template engine and re-fetches the full list after every mutation.
(function (window) {
	// 'use strict'; // NOTE(review): original author deferred enabling strict mode.

	// All requests go to the local todos backend.
	axios.defaults.baseURL = "http://localhost:8080/todos/"
	getListDetail()

	// Fetches all todos, filters by the current location hash
	// ("", "#/", "#/active", "#/completed") and renders the matching subset.
	function getListDetail() {
		axios({
			url: 'getDataAll'
		}).then(res => {
			const { data, meta } = res.data
			if (meta.code === 200) {
				// window.location.hash decides which subset is shown.
				const url = window.location.hash
				const active = data.filter(item => {
					return item.isFinish === '0'
				})
				const completed = data.filter(item => {
					return item.isFinish === '1'
				})
				switch (url) {
					case "":
					case '#/':
						// NOTE(review): `total = data` creates/overwrites an implicit
						// global `total`; only its value (data) is actually used here.
						renderPage(data, total = data, url)
						break
					case '#/active':
						renderPage(active, data, url)
						break
					case '#/completed':
						renderPage(completed, data, url)
						break
				}
			}
		})
	}

	// Renders the todo list and re-binds all event handlers.
	// `temporarily` is the subset shown; `total` is the full list and drives
	// the footer counters / footer visibility.
	function renderPage(temporarily, total, url) {
		const todos = document.querySelector('.todoapp');
		const noFinish = total.filter(item => {
			return item.isFinish === '0'
		}).length
		const isFinish = total.filter(item => {
			return item.isFinish === '1'
		}).length
		const html = template('tpl-todos', {
			list: temporarily,
			total,
			noFinish,
			isFinish
		})
		todos.innerHTML = html
		// Handlers must be (re)attached after every render, because
		// innerHTML replacement discards the previously bound elements.
		addTodo()
		delTodo()
		modify()
		showEdit(temporarily)
		delCompleted(temporarily)
		selectAll(temporarily)
		footerChange(url)
	}

	// Creates a new todo when Enter is pressed in the input field.
	function addTodo() {
		const addTodo = document.querySelector('.new-todo')
		// Arrow function: `this` would be window, so the element variable is
		// used instead.
		addTodo.addEventListener('keyup', (e) => {
			// keyCode 13 === Enter (keyCode is deprecated but kept as-is here).
			if (e.keyCode === 13 && addTodo.value.trim() !== '') {
				let data = {
					content: addTodo.value.trim(), // required: the new todo's text
					isFinish: 0 // required: the new todo's status
				}
				axios.post('addTodo', data).then(res => {
					const { meta } = res.data
					if (meta.code === 201) {
						getListDetail()
					}
				})
			}
		})
	}

	// Highlights the footer filter link matching the current hash.
	function footerChange(url) {
		const arr = document.querySelectorAll('.filters li > a ')
		if (arr.length === 0) return
		arr.forEach(item => {
			item.classList.remove('selected')
		})
		switch (url) {
			case '':
			case '#/':
				arr[0].classList.add('selected')
				break
			case '#/active':
				arr[1].classList.add('selected')
				break
			case "#/completed":
				arr[2].classList.add('selected')
		}
	}

	// Deletes a single todo after user confirmation.
	function delTodo() {
		const delTodo = document.querySelectorAll('.destroy')
		delTodo.forEach(function (item) {
			item.addEventListener('click', function (e) {
				const id = this.dataset.id
				if (confirm('确定要删除?')) {
					axios.delete(`delTodo?id=${id}`).then(res => {
						const { meta } = res.data
						if (meta.code === 202) {
							getListDetail()
						}
					})
				}
			})
		})
	}

	// Toggles the finished state of a single todo via its checkbox.
	function modify() {
		const toggle = document.querySelectorAll('.toggle')
		toggle.forEach(item => {
			item.addEventListener('change', function () {
				const data = {
					id: this.dataset.id,
					isFinish: this.checked ? '1' : '0'
				}
				axios.put('changeStatu', data).then(res => {
					const { meta } = res.data
					if (meta.code === 203) {
						getListDetail()
					}
				})
			})
		})
	}

	// Double-click on a todo enters edit mode for that item.
	function showEdit(data) {
		const lis = document.querySelectorAll('.todo-list li')
		lis.forEach((item, index) => {
			item.addEventListener('dblclick', function () {
				// Only one item may be in edit mode at a time.
				lis.forEach(item => {
					item.classList.remove('editing')
				})
				this.classList.add('editing')
				edit(index, data[index], item)
			})
		})
	}

	// Edit-mode handler for one todo: Enter with empty text deletes it,
	// unchanged text just leaves edit mode, changed text is saved.
	function edit(index, data, todo) {
		const edit = document.querySelectorAll('.edit')[index]
		edit.focus()
		edit.value = data.content
		const id = todo.dataset.id
		edit.addEventListener('keyup', function (e) {
			if (e.keyCode === 13) {
				const value = {
					content: this.value,
					id
				}
				// Emptied content means "delete this todo".
				if (!this.value) {
					axios.delete(`delTodo?id=${id}`).then(res => {
						const { meta } = res.data
						if (meta.code === 202) {
							getListDetail()
						}
					})
					return
				}
				// Unchanged content: just leave edit mode, no request.
				if (this.value === data.content) {
					todo.classList.remove('editing')
					return
				}
				// Content changed: persist the update.
				axios.put('changeContent', value).then(res => {
					const { meta } = res.data
					if (meta.code === 203) {
						getListDetail()
					}
				})
			}
		})
	}

	// Deletes all completed todos in one request (ids comma-joined).
	function delCompleted(data) {
		const completed = document.querySelector('.clear-completed')
		if (!completed) return
		const arr = []
		// NOTE(review): filter is used for its side effect here; the returned
		// array is discarded (forEach would express the intent better).
		data.filter(item => {
			if (item.isFinish === '1') {
				arr.push(item.id)
			}
		})
		completed.addEventListener('click', function (e) {
			axios.delete(`/delAll?id=${arr.toString()}`).then(res => {
				const { meta } = res.data
				if (meta.code === 202) {
					getListDetail()
				}
			})
		})
	}

	// Toggle-all checkbox: if everything is finished, mark all unfinished;
	// otherwise mark all finished.
	function selectAll(data) {
		const toggle_all = document.querySelector('.toggle-all')
		toggle_all.addEventListener('click', function (e) {
			const noFinish = data.filter(item => {
				return item.isFinish === "0"
			}).length
			const isFinish = data.filter(item => {
				return item.isFinish === "1"
			}).length
			if (isFinish === data.length) {
				getSelAll(false)
				return
			}
			// NOTE(review): noFinish <= data.length is always true at this
			// point, so this branch effectively handles "not all finished".
			if (noFinish <= data.length) {
				getSelAll(true)
				return
			}
		})
	}

	// Sets the finished state of ALL todos on the server.
	function getSelAll(bool) {
		axios.get(`changeStatusAll?isFinish=${bool}`).then(res => {
			const { meta } = res.data
			if (meta.code === 203) {
				getListDetail()
			}
		})
	}

	// Re-render whenever the hash (filter) changes.
	window.addEventListener('hashchange', (e) => {
		getListDetail()
	})
})(window);
package com.github.antonpopoff.colorwheel.extensions import android.os.Build import android.os.Parcel internal fun Parcel.writeBooleanCompat(value: Boolean) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { this.writeBoolean(value) } else { this.writeInt(if (value) 1 else 0) } } internal fun Parcel.readBooleanCompat(): Boolean { return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { this.readBoolean() } else { this.readInt() == 1 } }
/*! * CanJS - 2.3.27 * http://canjs.com/ * Copyright (c) 2016 Bitovi * Thu, 15 Sep 2016 21:14:18 GMT * Licensed MIT */ /*[email protected]#construct/super/super*/ steal('can/util', 'can/construct', function (can, Construct) { var isFunction = can.isFunction, fnTest = /xyz/.test(function () { return this.xyz; }) ? /\b_super\b/ : /.*/, getset = [ 'get', 'set' ], getSuper = function (base, name, fn) { return function () { var tmp = this._super, ret; this._super = base[name]; ret = fn.apply(this, arguments); this._super = tmp; return ret; }; }; can.Construct._defineProperty = function (addTo, base, name, descriptor) { var _super = Object.getOwnPropertyDescriptor(base, name); if (_super) { can.each(getset, function (method) { if (isFunction(_super[method]) && isFunction(descriptor[method])) { descriptor[method] = getSuper(_super, method, descriptor[method]); } else if (!isFunction(descriptor[method])) { descriptor[method] = _super[method]; } }); } Object.defineProperty(addTo, name, descriptor); }; can.Construct._overwrite = function (addTo, base, name, val) { addTo[name] = isFunction(val) && isFunction(base[name]) && fnTest.test(val) ? getSuper(base, name, val) : val; }; return can; });
from django.conf import settings
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.shortcuts import get_object_or_404
from django.views.generic import DetailView

from django_filters.views import FilterView
from django_tables2.views import SingleTableView

from sidekick.filters import (
    LogicalSystemFilterSet,
    RoutingTypeFilterSet,
    NetworkServiceTypeFilterSet,
    NetworkServiceFilterSet,
    NetworkServiceGroupFilterSet,
)

from sidekick.tables import (
    IPPrefixTable,
    LogicalSystemTable,
    RoutingTypeTable,
    NetworkServiceTypeTable,
    NetworkServiceTable,
    NetworkServiceGroupTable,
)

from sidekick.models import (
    LogicalSystem,
    RoutingType,
    NetworkServiceType,
    NetworkService,
    NetworkServiceGroup,
)

from sidekick.utils import (
    get_all_ip_prefixes,
    get_graphite_service_graph,
)


# IP Prefix Index
class IPPrefixIndexView(PermissionRequiredMixin, SingleTableView):
    """Lists every known IP prefix together with the member it belongs to."""
    permission_required = 'sidekick.view_ipprefix'
    model = NetworkService
    context_object_name = 'ns'
    template_name = 'sidekick/networkservice/ipprefix_index.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)

        # Flatten {member_id: {member, prefixes}} into one row per prefix.
        # The dict key is not needed, so iterate over values only.
        prefixes = []
        for data in get_all_ip_prefixes().values():
            for prefix in data['prefixes']:
                prefixes.append({
                    'prefix': prefix,
                    'member': data['member'],
                })

        context['table'] = IPPrefixTable(prefixes)
        return context


# Logical System Index
class LogicalSystemIndexView(PermissionRequiredMixin, FilterView, SingleTableView):
    """Filterable table of all logical systems."""
    permission_required = 'sidekick.view_logicalsystem'
    model = LogicalSystem
    table_class = LogicalSystemTable
    filterset_class = LogicalSystemFilterSet
    template_name = 'sidekick/networkservice/logicalsystem_index.html'


# Logical System Details
class LogicalSystemDetailView(PermissionRequiredMixin, DetailView):
    """Details for one logical system plus the network services using it."""
    permission_required = 'sidekick.view_logicalsystem'
    model = LogicalSystem
    template_name = 'sidekick/networkservice/logicalsystem.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        logical_system = get_object_or_404(LogicalSystem, slug=self.kwargs['slug'])
        context['logical_system'] = logical_system

        context['table'] = NetworkServiceTable(NetworkService.objects.filter(
            network_service_devices__network_service_l3__logical_system=logical_system.id))
        return context


# Routing Type Index
class RoutingTypeIndexView(PermissionRequiredMixin, FilterView, SingleTableView):
    """Filterable table of all routing types."""
    permission_required = 'sidekick.view_routingtype'
    model = RoutingType
    table_class = RoutingTypeTable
    filterset_class = RoutingTypeFilterSet
    template_name = 'sidekick/networkservice/routingtype_index.html'


# Routing Type Details
class RoutingTypeDetailView(PermissionRequiredMixin, DetailView):
    """Details for one routing type plus the network services using it."""
    permission_required = 'sidekick.view_routingtype'
    model = RoutingType
    template_name = 'sidekick/networkservice/routingtype.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        routing_type = get_object_or_404(RoutingType, slug=self.kwargs['slug'])
        context['routing_type'] = routing_type

        context['table'] = NetworkServiceTable(NetworkService.objects.filter(
            network_service_devices__network_service_l3__routing_type=routing_type.id))
        return context


# Network Service Type Index
class NetworkServiceTypeIndexView(PermissionRequiredMixin, FilterView, SingleTableView):
    """Filterable table of all network service types."""
    permission_required = 'sidekick.view_networkservicetype'
    model = NetworkServiceType
    table_class = NetworkServiceTypeTable
    filterset_class = NetworkServiceTypeFilterSet
    template_name = 'sidekick/networkservice/networkservicetype_index.html'


# Network Service Type Details
class NetworkServiceTypeDetailView(PermissionRequiredMixin, DetailView):
    """Details for one service type plus its network services."""
    permission_required = 'sidekick.view_networkservicetype'
    model = NetworkServiceType
    template_name = 'sidekick/networkservice/networkservicetype.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        nst = get_object_or_404(NetworkServiceType, slug=self.kwargs['slug'])
        context['nst'] = nst

        context['table'] = NetworkServiceTable(NetworkService.objects.filter(
            network_service_type=nst.id))
        return context


# Network Service Index
class NetworkServiceIndexView(PermissionRequiredMixin, FilterView, SingleTableView):
    """Filterable table of all network services."""
    permission_required = 'sidekick.view_networkservice'
    model = NetworkService
    table_class = NetworkServiceTable
    filterset_class = NetworkServiceFilterSet
    template_name = 'sidekick/networkservice/networkservice_index.html'


# Network Service Details
class NetworkServiceDetailView(PermissionRequiredMixin, DetailView):
    """Details for one network service, including its Graphite usage graph."""
    permission_required = 'sidekick.view_networkservice'
    model = NetworkService
    context_object_name = 'ns'
    template_name = 'sidekick/networkservice/networkservice.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Consistency with the other detail views: return 404 instead of
        # raising DoesNotExist (500) when the pk is unknown.
        ns = get_object_or_404(NetworkService, pk=self.kwargs['pk'])

        graphite_render_host = settings.PLUGINS_CONFIG['sidekick'].get('graphite_render_host', None)
        context['graph_data'] = get_graphite_service_graph(ns, graphite_render_host)
        return context


# Network Service Group Index
class NetworkServiceGroupIndexView(PermissionRequiredMixin, FilterView, SingleTableView):
    """Filterable table of all network service groups."""
    permission_required = 'sidekick.view_networkservicegroup'
    model = NetworkServiceGroup
    table_class = NetworkServiceGroupTable
    filterset_class = NetworkServiceGroupFilterSet
    template_name = 'sidekick/networkservice/networkservicegroup_index.html'


# Network Service Group Details
class NetworkServiceGroupDetailView(PermissionRequiredMixin, DetailView):
    """Details for one service group plus its member network services."""
    permission_required = 'sidekick.view_networkservicegroup'
    model = NetworkServiceGroup
    context_object_name = 'nsg'
    template_name = 'sidekick/networkservice/networkservicegroup.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        nsg = get_object_or_404(NetworkServiceGroup, pk=self.kwargs['pk'])
        context['nsg'] = nsg

        context['table'] = NetworkServiceTable(NetworkService.objects.filter(
            pk__in=nsg.network_services.all()))
        return context
# zergtel-android Port of ZTVDC to Android. Deprecated — practically no features are implemented at the moment, and probably indefinitely. See [https://github.com/s-zeng/ZTVDC](https://github.com/s-zeng/ZTVDC) instead.
import { Component, OnInit } from '@angular/core'; import { Product } from '../../../model/beans/product/product.model'; import { ProductService } from '../../../model/services/product/product.service'; @Component({ selector: 'bp-landing-page-jewelery-component', templateUrl: './jewelery.component.html' }) export class LandingPageJeweleryComponent implements OnInit { public pretrad: string; public urlRest: string; public p1: Product; public p2: Product; constructor ( private productService: ProductService ) { this.pretrad = 'MODULES.LANDING-PAGE.JEWELERY.'; this.urlRest = process.env.API_URL.slice(0, -1); this.p1 = new Product(); this.p2 = new Product(); } public ngOnInit(): void { this.productService.getByReference('P1', Product).then( (response) => { this.p1 = response; }, (error) => { console.error(error); } ); this.productService.getByReference('P2', Product).then( (response) => { this.p2 = response; }, (error) => { console.error(error); } ); } }
//========================================================================= // Copyright (C) 2012 The Elastos Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //========================================================================= #include "elastos/droid/server/pm/CLauncherAppsImpl.h" #include "Elastos.Droid.Net.h" #include "Elastos.Droid.Provider.h" #include "elastos/droid/os/Binder.h" #include "elastos/droid/os/UserHandle.h" #include "elastos/droid/app/AppGlobals.h" #include <elastos/utility/logging/Logger.h> #include <elastos/core/AutoLock.h> using Elastos::Core::AutoLock; using Elastos::Droid::App::AppGlobals; using Elastos::Droid::Content::CIntent; using Elastos::Droid::Content::Pm::IPackageInfo; using Elastos::Droid::Content::Pm::IIPackageManager; using Elastos::Droid::Content::Pm::IApplicationInfo; using Elastos::Droid::Content::Pm::IActivityInfo; using Elastos::Droid::Content::Pm::IUserInfo; using Elastos::Droid::Content::Pm::EIID_IILauncherApps; using Elastos::Droid::Content::Pm::IPackageItemInfo; using Elastos::Droid::Content::Pm::IComponentInfo; using Elastos::Droid::Net::IUriHelper; using Elastos::Droid::Net::CUriHelper; using Elastos::Droid::Net::IUri; using Elastos::Droid::Os::Binder; using Elastos::Droid::Os::UserHandle; using Elastos::Droid::Os::CUserHandle; using Elastos::Droid::Os::EIID_IBinder; using Elastos::Droid::Provider::ISettings; using Elastos::Utility::Logging::Logger; using Elastos::Utility::IArrayList; using 
Elastos::Utility::CArrayList;
using Elastos::Utility::IIterator;

namespace Elastos {
namespace Droid {
namespace Server {
namespace Pm {

//==============================================================================
// CLauncherAppsImpl::MyPackageMonitor
//==============================================================================

// Returns TRUE when a package-change event for 'user' should be delivered to a
// listener registered for 'listeningUser': either the same user, or an enabled
// user in the same profile group. Clears the Binder calling identity while
// querying the user manager and restores it on every exit path.
Boolean CLauncherAppsImpl::MyPackageMonitor::IsEnabledProfileOf(
    /* [in] */ IUserHandle* user,
    /* [in] */ IUserHandle* listeningUser,
    /* [in] */ const String& debugMsg)
{
    Int32 id, lisId;
    user->GetIdentifier(&id);
    listeningUser->GetIdentifier(&lisId);
    if (id == lisId) {
        if (DEBUG) Logger::D(TAG, "Delivering msg to same user %s", debugMsg.string());
        return TRUE;
    }
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    AutoPtr<IUserInfo> userInfo, listeningUserInfo;
    if (FAILED(mHost->mUm->GetUserInfo(id, (IUserInfo**)&userInfo))) {
        Binder::RestoreCallingIdentity(ident);
        return FALSE;
    }
    if (FAILED(mHost->mUm->GetUserInfo(lisId, (IUserInfo**)&listeningUserInfo))) {
        Binder::RestoreCallingIdentity(ident);
        return FALSE;
    }
    Int32 groupId, lisGroupId;
    Boolean isEnabled;
    // Deliver only when both users share a real profile group and the source user is enabled.
    if (userInfo == NULL || listeningUserInfo == NULL
        || (userInfo->GetProfileGroupId(&groupId), groupId == IUserInfo::NO_PROFILE_GROUP_ID)
        || (listeningUserInfo->GetProfileGroupId(&lisGroupId), groupId != lisGroupId)
        || (userInfo->IsEnabled(&isEnabled), !isEnabled)) {
        if (DEBUG) {
            Logger::D(TAG, "Not delivering msg from %p to %p:%s", user, listeningUser, debugMsg.string());
        }
        Binder::RestoreCallingIdentity(ident);
        return FALSE;
    }
    else {
        if (DEBUG) {
            Logger::D(TAG, "Delivering msg from %p to %p:%s", user, listeningUser, debugMsg.string());
        }
        Binder::RestoreCallingIdentity(ident);
        return TRUE;
    }
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
}

// Broadcasts "package added" to every registered listener whose listening user
// is an enabled profile of the changing user, then delegates to the base class.
ECode CLauncherAppsImpl::MyPackageMonitor::OnPackageAdded(
    /* [in] */ const String& packageName,
    /* [in] */ Int32 uid)
{
    Int32 id;
    GetChangingUserId(&id);
    AutoPtr<IUserHandle> user;
    CUserHandle::New(id, (IUserHandle**)&user);
    Int32 n;
    mHost->mListeners->BeginBroadcast(&n);
    for (Int32 i = 0; i < n; i++) {
        AutoPtr<IInterface> item;
        mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);
        AutoPtr<IOnAppsChangedListener> listener = IOnAppsChangedListener::Probe(item);
        // The broadcast cookie is the IUserHandle the listener registered with.
        AutoPtr<IInterface> cookie;
        mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);
        AutoPtr<IUserHandle> listeningUser = IUserHandle::Probe(cookie);
        if (!IsEnabledProfileOf(user, listeningUser, String("onPackageAdded"))) continue;
        // try {
        if (FAILED(listener->OnPackageAdded(user, packageName))) {
            Logger::D(TAG, "Callback failed ");
        }
        // } catch (RemoteException re) {
        //     Slog.d(TAG, "Callback failed ", re);
        // }
    }
    mHost->mListeners->FinishBroadcast();
    return PackageMonitor::OnPackageAdded(packageName, uid);
}

// Broadcasts "package removed" to eligible listeners; same pattern as OnPackageAdded.
ECode CLauncherAppsImpl::MyPackageMonitor::OnPackageRemoved(
    /* [in] */ const String& packageName,
    /* [in] */ Int32 uid)
{
    Int32 id;
    GetChangingUserId(&id);
    AutoPtr<IUserHandle> user;
    CUserHandle::New(id, (IUserHandle**)&user);
    Int32 n;
    mHost->mListeners->BeginBroadcast(&n);
    for (Int32 i = 0; i < n; i++) {
        AutoPtr<IInterface> item;
        mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);
        AutoPtr<IOnAppsChangedListener> listener = IOnAppsChangedListener::Probe(item);
        AutoPtr<IInterface> cookie;
        mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);
        AutoPtr<IUserHandle> listeningUser = IUserHandle::Probe(cookie);
        if (!IsEnabledProfileOf(user, listeningUser, String("onPackageRemoved"))) continue;
        // try {
        if (FAILED(listener->OnPackageRemoved(user, packageName))) {
            Logger::D(TAG, "Callback failed ");
        }
        // } catch (RemoteException re) {
        //     Slog.d(TAG, "Callback failed ", re);
        // }
    }
    mHost->mListeners->FinishBroadcast();
    return PackageMonitor::OnPackageRemoved(packageName, uid);
}

// Broadcasts a "modified" event; note it is delivered to listeners via
// OnPackageChanged (the listener-side name for a modification).
ECode CLauncherAppsImpl::MyPackageMonitor::OnPackageModified(
    /* [in] */ const String& packageName)
{
    Int32 id;
    GetChangingUserId(&id);
    AutoPtr<IUserHandle> user;
    CUserHandle::New(id, (IUserHandle**)&user);
    Int32 n;
    mHost->mListeners->BeginBroadcast(&n);
    for (Int32 i = 0; i < n; i++) {
        AutoPtr<IInterface> item;
        mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);
        AutoPtr<IOnAppsChangedListener> listener = IOnAppsChangedListener::Probe(item);
        AutoPtr<IInterface> cookie;
        mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);
        AutoPtr<IUserHandle> listeningUser = IUserHandle::Probe(cookie);
        if (!IsEnabledProfileOf(user, listeningUser, String("onPackageModified"))) continue;
        // try {
        if (FAILED(listener->OnPackageChanged(user, packageName))) {
            Logger::D(TAG, "Callback failed ");
        }
        // } catch (RemoteException re) {
        //     Slog.d(TAG, "Callback failed ", re);
        // }
    }
    mHost->mListeners->FinishBroadcast();
    return PackageMonitor::OnPackageModified(packageName);
}

// Broadcasts "packages available" (e.g. external storage mounted), forwarding
// the monitor's IsReplacing flag to each listener.
ECode CLauncherAppsImpl::MyPackageMonitor::OnPackagesAvailable(
    /* [in] */ ArrayOf<String>* packages)
{
    Int32 id;
    GetChangingUserId(&id);
    AutoPtr<IUserHandle> user;
    CUserHandle::New(id, (IUserHandle**)&user);
    Int32 n;
    mHost->mListeners->BeginBroadcast(&n);
    for (Int32 i = 0; i < n; i++) {
        AutoPtr<IInterface> item;
        mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);
        AutoPtr<IOnAppsChangedListener> listener = IOnAppsChangedListener::Probe(item);
        AutoPtr<IInterface> cookie;
        mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);
        AutoPtr<IUserHandle> listeningUser = IUserHandle::Probe(cookie);
        if (!IsEnabledProfileOf(user, listeningUser, String("onPackagesAvailable"))) continue;
        // try {
        Boolean isReplacing;
        IsReplacing(&isReplacing);
        if (FAILED(listener->OnPackagesAvailable(user, packages, isReplacing))) {
            Logger::D(TAG, "Callback failed ");
        }
        // } catch (RemoteException re) {
        //     Slog.d(TAG, "Callback failed ", re);
        // }
    }
    mHost->mListeners->FinishBroadcast();
    return PackageMonitor::OnPackagesAvailable(packages);
}

// Broadcasts "packages unavailable"; mirror image of OnPackagesAvailable.
ECode CLauncherAppsImpl::MyPackageMonitor::OnPackagesUnavailable(
    /* [in] */ ArrayOf<String>* packages)
{
    Int32 id;
    GetChangingUserId(&id);
    AutoPtr<IUserHandle> user;
    CUserHandle::New(id, (IUserHandle**)&user);
    Int32 n;
    mHost->mListeners->BeginBroadcast(&n);
    for (Int32 i = 0; i < n; i++) {
        AutoPtr<IInterface> item;
        mHost->mListeners->GetBroadcastItem(i, (IInterface**)&item);
        AutoPtr<IOnAppsChangedListener> listener = IOnAppsChangedListener::Probe(item);
        AutoPtr<IInterface> cookie;
        mHost->mListeners->GetBroadcastCookie(i, (IInterface**)&cookie);
        AutoPtr<IUserHandle> listeningUser = IUserHandle::Probe(cookie);
        if (!IsEnabledProfileOf(user, listeningUser, String("onPackagesUnavailable"))) continue;
        // try {
        Boolean isReplacing;
        IsReplacing(&isReplacing);
        if (FAILED(listener->OnPackagesUnavailable(user, packages, isReplacing))) {
            Logger::D(TAG, "Callback failed ");
        }
        // } catch (RemoteException re) {
        //     Slog.d(TAG, "Callback failed ", re);
        // }
    }
    mHost->mListeners->FinishBroadcast();
    return PackageMonitor::OnPackagesUnavailable(packages);
}

//==============================================================================
// CLauncherAppsImpl::PackageCallbackList
//==============================================================================

// When a remote listener dies, re-evaluate whether package monitoring is still needed.
ECode CLauncherAppsImpl::PackageCallbackList::OnCallbackDied(
    /* [in] */ IInterface* callback,
    /* [in] */ IInterface* cookie)
{
    mHost->CheckCallbackCount();
    return NOERROR;
}

//==============================================================================
// CLauncherAppsImpl
//==============================================================================

const Boolean CLauncherAppsImpl::DEBUG;
const String CLauncherAppsImpl::TAG("CLauncherAppsImpl");

CLauncherAppsImpl::CLauncherAppsImpl()
{
    mListeners = new PackageCallbackList(this);
    mPackageMonitor = new MyPackageMonitor(this);
}

CAR_INTERFACE_IMPL_2(CLauncherAppsImpl, Object, IILauncherApps, IBinder)

CAR_OBJECT_IMPL(CLauncherAppsImpl)

// Caches the context, package manager and user manager for later use.
ECode CLauncherAppsImpl::constructor(
    /* [in] */ IContext* ctx)
{
    mContext = ctx;
    mContext->GetPackageManager((IPackageManager**)&mPm);
    AutoPtr<IInterface> service;
    mContext->GetSystemService(IContext::USER_SERVICE, (IInterface**)&service);
    mUm = IUserManager::Probe(service);
    return NOERROR;
}

// Registers a listener keyed by the caller's user handle; starts watching
// package broadcasts on the first registration. Unregister-then-register
// keeps the list free of duplicates.
ECode CLauncherAppsImpl::AddOnAppsChangedListener(
    /* [in] */ IOnAppsChangedListener* listener)
{
    {
        AutoLock syncLock(mListenersLock);
        if (DEBUG) {
            Logger::D(TAG, "Adding listener from %p", Binder::GetCallingUserHandle().Get());
        }
        Int32 count;
        if (mListeners->GetRegisteredCallbackCount(&count), count == 0) {
            if (DEBUG) {
                Logger::D(TAG, "Starting package monitoring");
            }
            StartWatchingPackageBroadcasts();
        }
        Boolean result;
        FAIL_RETURN(mListeners->Unregister(listener, &result))
        AutoPtr<IUserHandle> handle = Binder::GetCallingUserHandle();
        FAIL_RETURN(mListeners->Register(listener, handle, &result))
    }
    return NOERROR;
}

// Unregisters a listener; stops watching package broadcasts when none remain.
ECode CLauncherAppsImpl::RemoveOnAppsChangedListener(
    /* [in] */ IOnAppsChangedListener* listener)
{
    {
        AutoLock syncLock(mListenersLock);
        if (DEBUG) {
            Logger::D(TAG, "Removing listener from %p", Binder::GetCallingUserHandle().Get());
        }
        Boolean result;
        FAIL_RETURN(mListeners->Unregister(listener, &result))
        Int32 count;
        if (mListeners->GetRegisteredCallbackCount(&count), count == 0) {
            StopWatchingPackageBroadcasts();
        }
    }
    return NOERROR;
}

// Registers the package monitor for all users.
void CLauncherAppsImpl::StartWatchingPackageBroadcasts()
{
    mPackageMonitor->Register(mContext, NULL, UserHandle::ALL, TRUE);
}

void CLauncherAppsImpl::StopWatchingPackageBroadcasts()
{
    if (DEBUG) {
        Logger::D(TAG, "Stopped watching for packages");
    }
    mPackageMonitor->Unregister();
}

// Stops package monitoring once the registered-callback count drops to zero.
void CLauncherAppsImpl::CheckCallbackCount()
{
    {
        AutoLock syncLock(mListenersLock);
        Int32 count;
        mListeners->GetRegisteredCallbackCount(&count);
        if (DEBUG) {
            Logger::D(TAG, "Callback count = %d", count);
        }
        if (count == 0) {
            StopWatchingPackageBroadcasts();
        }
    }
}

// Returns NOERROR when userToCheck is the caller's user or in the caller's
// profile group; E_SECURITY_EXCEPTION otherwise.
ECode CLauncherAppsImpl::EnsureInUserProfiles(
    /* [in] */ IUserHandle* userToCheck,
    /* [in] */ const String& message)
{
    Int32 callingUserId = UserHandle::GetCallingUserId();
    Int32 targetUserId;
    userToCheck->GetIdentifier(&targetUserId);
    if (targetUserId == callingUserId) return NOERROR;
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    AutoPtr<IUserInfo> callingUserInfo;
    mUm->GetUserInfo(callingUserId, (IUserInfo**)&callingUserInfo);
    AutoPtr<IUserInfo> targetUserInfo;
    mUm->GetUserInfo(targetUserId, (IUserInfo**)&targetUserInfo);
    Int32 targetId, callingId;
    if (targetUserInfo == NULL
        || (targetUserInfo->GetProfileGroupId(&targetId), targetId == IUserInfo::NO_PROFILE_GROUP_ID)
        || (callingUserInfo->GetProfileGroupId(&callingId), targetId != callingId)) {
        Binder::RestoreCallingIdentity(ident);
        return E_SECURITY_EXCEPTION;
    }
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
    Binder::RestoreCallingIdentity(ident);
    return NOERROR;
}

// TRUE when the given user exists and is enabled; queries under a cleared identity.
Boolean CLauncherAppsImpl::IsUserEnabled(
    /* [in] */ IUserHandle* user)
{
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    Int32 id;
    user->GetIdentifier(&id);
    AutoPtr<IUserInfo> targetUserInfo;
    mUm->GetUserInfo(id, (IUserInfo**)&targetUserInfo);
    Binder::RestoreCallingIdentity(ident);
    Boolean isEnabled;
    return targetUserInfo != NULL && (targetUserInfo->IsEnabled(&isEnabled), isEnabled);
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
}

// Queries MAIN/LAUNCHER activities of a package for the given user; returns an
// empty list (not an error) when the user is disabled.
ECode CLauncherAppsImpl::GetLauncherActivities(
    /* [in] */ const String& packageName,
    /* [in] */ IUserHandle* user,
    /* [out] */ IList** list)
{
    VALIDATE_NOT_NULL(list)
    *list = NULL;
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user, String("Cannot retrieve activities for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        return CArrayList::New(list);
    }
    AutoPtr<IIntent> mainIntent;
    CIntent::New(IIntent::ACTION_MAIN, NULL, (IIntent**)&mainIntent);
    mainIntent->AddCategory(IIntent::CATEGORY_LAUNCHER);
    mainIntent->SetPackage(packageName);
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    Int32 id;
    user->GetIdentifier(&id);
    ECode ec = mPm->QueryIntentActivitiesAsUser(mainIntent, 0 /* flags */, id, list);
    Binder::RestoreCallingIdentity(ident);
    return ec;
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
}

// Resolves an intent to an activity on behalf of the given (related) user.
// *info stays NULL when the user is disabled.
ECode CLauncherAppsImpl::ResolveActivity(
    /* [in] */ IIntent* intent,
    /* [in] */ IUserHandle* user,
    /* [out] */ IResolveInfo** info)
{
    VALIDATE_NOT_NULL(info)
    *info = NULL;
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user, String("Cannot resolve activity for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        return NOERROR;
    }
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    Int32 id;
    user->GetIdentifier(&id);
    ECode ec = mPm->ResolveActivityAsUser(intent, 0, id, info);
    Binder::RestoreCallingIdentity(ident);
    return ec;
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
}

// *result is TRUE when the package exists for the user and its application is enabled.
ECode CLauncherAppsImpl::IsPackageEnabled(
    /* [in] */ const String& packageName,
    /* [in] */ IUserHandle* user,
    /* [out] */ Boolean* result)
{
    VALIDATE_NOT_NULL(result)
    *result = FALSE;
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user, String("Cannot check package for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        return NOERROR;
    }
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    AutoPtr<IIPackageManager> pm = AppGlobals::GetPackageManager();
    Int32 id;
    user->GetIdentifier(&id);
    AutoPtr<IPackageInfo> info;
    ECode ec = pm->GetPackageInfo(packageName, 0, id, (IPackageInfo**)&info);
    if (FAILED(ec)) {
        Binder::RestoreCallingIdentity(ident);
        return ec;
    }
    if (info != NULL) {
        AutoPtr<IApplicationInfo> ai;
        info->GetApplicationInfo((IApplicationInfo**)&ai);
        ai->GetEnabled(result);
    }
    Binder::RestoreCallingIdentity(ident);
    return NOERROR;
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
}

// *result is TRUE when the component's activity info can be retrieved for the user.
ECode CLauncherAppsImpl::IsActivityEnabled(
    /* [in] */ IComponentName* component,
    /* [in] */ IUserHandle* user,
    /* [out] */ Boolean* result)
{
    VALIDATE_NOT_NULL(result)
    *result = FALSE;
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user, String("Cannot check component for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        return NOERROR;
    }
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    AutoPtr<IIPackageManager> pm = AppGlobals::GetPackageManager();
    Int32 id;
    user->GetIdentifier(&id);
    AutoPtr<IActivityInfo> info;
    ECode ec = pm->GetActivityInfo(component, 0, id, (IActivityInfo**)&info);
    if (FAILED(ec)) {
        Binder::RestoreCallingIdentity(ident);
        return ec;
    }
    *result = info != NULL;
    Binder::RestoreCallingIdentity(ident);
    return NOERROR;
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
}

// Launches an exported MAIN/LAUNCHER activity as the given user. Rejects
// non-exported components and components that do not actually resolve under
// CATEGORY_LAUNCHER with E_SECURITY_EXCEPTION.
ECode CLauncherAppsImpl::StartActivityAsUser(
    /* [in] */ IComponentName* component,
    /* [in] */ IRect* sourceBounds,
    /* [in] */ IBundle* opts,
    /* [in] */ IUserHandle* user)
{
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user, String("Cannot start activity for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        Logger::E(TAG, "Cannot start activity for disabled profile %s", str.string());
        return E_ILLEGAL_STATE_EXCEPTION;
    }
    AutoPtr<IIntent> launchIntent;
    CIntent::New(IIntent::ACTION_MAIN, (IIntent**)&launchIntent);
    launchIntent->AddCategory(IIntent::CATEGORY_LAUNCHER);
    launchIntent->SetSourceBounds(sourceBounds);
    launchIntent->AddFlags(IIntent::FLAG_ACTIVITY_NEW_TASK);
    String pkgName;
    component->GetPackageName(&pkgName);
    launchIntent->SetPackage(pkgName);
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    AutoPtr<IIPackageManager> pm = AppGlobals::GetPackageManager();
    Int32 id;
    user->GetIdentifier(&id);
    AutoPtr<IActivityInfo> info;
    ECode ec = pm->GetActivityInfo(component, 0, id, (IActivityInfo**)&info);
    if (FAILED(ec)) {
        Binder::RestoreCallingIdentity(ident);
        return ec;
    }
    Boolean exported;
    if (IComponentInfo::Probe(info)->GetExported(&exported), !exported) {
        Logger::E(TAG, "Cannot launch non-exported components %p", component);
        Binder::RestoreCallingIdentity(ident);
        return E_SECURITY_EXCEPTION;
    }
    // Check that the component actually has Intent.CATEGORY_LAUNCHER
    // as calling startActivityAsUser ignores the category and just
    // resolves based on the component if present.
    AutoPtr<IList> apps;
    ec = mPm->QueryIntentActivitiesAsUser(launchIntent, 0 /* flags */, id, (IList**)&apps);
    if (FAILED(ec)) {
        Binder::RestoreCallingIdentity(ident);
        return ec;
    }
    AutoPtr<IIterator> it;
    apps->GetIterator((IIterator**)&it);
    Boolean hasNext;
    String aiPkgName, aiClsName, className;
    component->GetClassName(&className);
    while (it->HasNext(&hasNext), hasNext) {
        AutoPtr<IInterface> value;
        it->GetNext((IInterface**)&value);
        AutoPtr<IResolveInfo> ri = IResolveInfo::Probe(value);
        AutoPtr<IActivityInfo> activityInfo;
        ri->GetActivityInfo((IActivityInfo**)&activityInfo);
        IPackageItemInfo::Probe(activityInfo)->GetPackageName(&aiPkgName);
        if (aiPkgName.Equals(pkgName)) {
            IPackageItemInfo::Probe(activityInfo)->GetName(&aiClsName);
            if (aiClsName.Equals(className)) {
                // Found an activity with category launcher that matches
                // this component so ok to launch.
                launchIntent->SetComponent(component);
                ec = mContext->StartActivityAsUser(launchIntent, opts, user);
                Binder::RestoreCallingIdentity(ident);
                if (FAILED(ec)) {
                    Logger::E(TAG, "Failed to launch activity [%s], ec=%08x.", TO_CSTR(component), ec);
                }
                return ec;
            }
        }
    }
    Logger::E(TAG, "Attempt to launch activity [%s] without category Intent.CATEGORY_LAUNCHER", TO_CSTR(component));
    Binder::RestoreCallingIdentity(ident);
    return E_SECURITY_EXCEPTION;
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
}

// Opens the system "app details" settings screen for the component's package,
// as the given user. NOTE(review): a disabled profile only logs an error and
// falls through to launching anyway — confirm that is intended.
ECode CLauncherAppsImpl::ShowAppDetailsAsUser(
    /* [in] */ IComponentName* component,
    /* [in] */ IRect* sourceBounds,
    /* [in] */ IBundle* opts,
    /* [in] */ IUserHandle* user)
{
    String str = Object::ToString(user);
    FAIL_RETURN(EnsureInUserProfiles(user, String("Cannot show app details for unrelated profile ") + str))
    if (!IsUserEnabled(user)) {
        Logger::E(TAG, "Cannot show app details for disabled profile %s", str.string());
    }
    Int64 ident = Binder::ClearCallingIdentity();
    // try {
    String packageName;
    component->GetPackageName(&packageName);
    AutoPtr<IUriHelper> helper;
    CUriHelper::AcquireSingleton((IUriHelper**)&helper);
    AutoPtr<IUri> uri;
    helper->FromParts(String("package"), packageName, String(NULL), (IUri**)&uri);
    AutoPtr<IIntent> intent;
    CIntent::New(ISettings::ACTION_APPLICATION_DETAILS_SETTINGS, uri, (IIntent**)&intent);
    intent->SetFlags(IIntent::FLAG_ACTIVITY_NEW_TASK
        | IIntent::FLAG_ACTIVITY_CLEAR_TASK
        | IIntent::FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
    intent->SetSourceBounds(sourceBounds);
    Binder::RestoreCallingIdentity(ident);
    ECode ec = mContext->StartActivityAsUser(intent, opts, user);
    return ec;
    // } finally {
    //     Binder.restoreCallingIdentity(ident);
    // }
}

// NOTE(review): this passes the out-parameter itself to Object::ToString —
// looks suspicious (most peers pass the object); confirm intended overload.
ECode CLauncherAppsImpl::ToString(
    /* [out] */ String* str)
{
    VALIDATE_NOT_NULL(str)
    return Object::ToString(str);
}

} // namespace Pm
} // namespace Server
} // namespace Droid
} // namespace Elastos
using FluentValidation;
using FluentValidation.TestHelper;
using Survi.Prevention.ApiClient.DataTransferObjects;
using Survi.Prevention.ServiceLayer.Import.Lane;
using Xunit;

namespace Survi.Prevention.ServiceLayer.Tests.Import.LaneImportation
{
	/// <summary>
	/// Unit tests for <see cref="LaneGenericCodeValidator"/>: Id, Code and
	/// Description must be non-empty, and Code/Description are length-limited.
	/// Fix: the fixture previously inherited AbstractValidator&lt;LaneGenericCode&gt;
	/// by copy-paste; a test class is not a validator, so the base class is removed.
	/// </summary>
	public class LaneGenericCodeImportValidatorTests
	{
		// System under test; stateless, so one instance serves every test.
		private readonly LaneGenericCodeValidator validator;

		public LaneGenericCodeImportValidatorTests()
		{
			validator = new LaneGenericCodeValidator();
		}

		[Fact]
		public void IdIsValidWhenNotEmpty()
		{
			validator.ShouldNotHaveValidationErrorFor(genCode => genCode.Id, "IdGenericCode");
		}

		[Theory]
		[InlineData("")]
		[InlineData(" ")]
		[InlineData(null)]
		public void IdIsNotValidWhenEmpty(string id)
		{
			validator.ShouldHaveValidationErrorFor(genCode => genCode.Id, id);
		}

		[Fact]
		public void CodeIsValidWhenNotEmpty()
		{
			validator.ShouldNotHaveValidationErrorFor(genCode => genCode.Code, "1");
		}

		[Theory]
		[InlineData("")]
		[InlineData(" ")]
		[InlineData(null)]
		[InlineData("CodeTooLong")]
		public void CodeIsInvalidWhenNullEmptyOrTooLong(string code)
		{
			validator.ShouldHaveValidationErrorFor(genCode => genCode.Code, code);
		}

		[Fact]
		public void DescriptionIsValidWhenNotEmpty()
		{
			validator.ShouldNotHaveValidationErrorFor(genCode => genCode.Description, "Generic code");
		}

		[Theory]
		[InlineData(null)]
		[InlineData("TooLongDescriptionToValidate")]
		public void DescriptionIsInvalidWhenNullEmptyOrTooLong(string description)
		{
			validator.ShouldHaveValidationErrorFor(genCode => genCode.Description, description);
		}
	}
}
# AWS Upload & Transcribe Local Files
###### Uploads local audio files to an Amazon AWS bucket and starts the transcription job
### _Future Features_
```
1) Save the file locally after transcription is completed
2) Format and save the file in .docx format
3) Identify and split multiple speakers and format them in the response
4) Accept audio and video files
```
#### Setup Environment
###### Create a .env file in the src directory and add the following keys
###### The language to transcribe the audio in; by default set to English
LANG=en-US
###### The AWS access key id
AWS_ACCESS_KEY_ID=
###### The AWS secret access token
AWS_SECRET_ACCESS_KEY=
###### The storage bucket name
AWS_STORAGE_BUCKET=
###### The region name that the bucket is located in
AWS_STORAGE_REGION=
### Setup
Clone the repository and install the dependencies.
```
Step 1 - git clone https://github.com/BradleySeymourSAE/transcribe-audio-file.git
Step 2 - Get AWS authentication credentials and create a .env file with the above keys
Step 3 - npm install
Step 4 - npm start
```
#### Version
__[email protected]__
<br>
__[email protected]__
## License
MIT
namespace WebCore.API.Models
{
    /// <summary>
    /// Lightweight transport model for a note: a lookup key plus its content.
    /// </summary>
    public class Note
    {
        /// <summary>Identifier used to look the note up.</summary>
        public string Key { get; set; }

        /// <summary>Short title of the note.</summary>
        public string Subject { get; set; }

        /// <summary>Full note text.</summary>
        public string Body { get; set; }
    }
}
package com.entimer.coronatracker.view.splash

import android.content.Context
import android.content.Intent
import android.net.ConnectivityManager
import android.os.Bundle
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.entimer.coronatracker.R
import com.entimer.coronatracker.view.main.MainActivity

/**
 * Entry screen: verifies connectivity, then asks the presenter to seed the
 * country list before handing off to [MainActivity].
 */
class SplashActivity : AppCompatActivity(), SplashContract.View {
    private lateinit var presenter: SplashPresenter

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        if (isNetworkAvailable()) {
            presenter = SplashPresenter(this)
            presenter.initCountryList(applicationContext)
        } else {
            // No connection: inform the user and close the whole task.
            Toast.makeText(applicationContext, getString(R.string.splashNetworkFailed), Toast.LENGTH_LONG).show()
            finishAffinity()
        }
    }

    /** True when the active network is mobile data or Wi-Fi. */
    private fun isNetworkAvailable(): Boolean {
        val connectivity = getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager
        val activeInfo = connectivity.activeNetworkInfo ?: return false
        return activeInfo.type == ConnectivityManager.TYPE_MOBILE ||
            activeInfo.type == ConnectivityManager.TYPE_WIFI
    }

    /** Presenter callback: data ready, move on to the main screen. */
    override fun onInitFinished() {
        val intent = Intent(applicationContext, MainActivity::class.java)
        startActivity(intent)
        finish()
    }

    /** Presenter callback: seeding failed, report and close the task. */
    override fun onInitFailed() {
        Toast.makeText(applicationContext, getString(R.string.splashInitFailed), Toast.LENGTH_LONG).show()
        finishAffinity()
    }
}
package Agua::Ops::Sge;
use Moose::Role;
use Method::Signatures::Simple;

#### SUN GRID ENGINE METHODS

# Find the SGE daemon listening on $port via netstat and kill it.
# No-op (returns undef) when no matching process is found.
method stopSgeProcess ($port) {
	$self->logDebug("Ops::stopSgeProcess(port)");
	$self->logDebug("port", $port);

	#### INPUT FORMAT: netstat -ntulp | grep sge_*
	#### tcp 0 0 0.0.0.0:36472 0.0.0.0:* LISTEN 9855/sge_exec
	my $netstat = qq{netstat -ntulp | grep sge | grep $port};
	$self->logDebug("netstat", $netstat);
	my $output = $self->runCommand($netstat);
	# Capture the PID from the "PID/program" column of the netstat line.
	my ($pid) = $output =~ /^\s*\S+\s+\S+\s+\S+\s+[^:]+:\d+\s+\S+\s+\S+\s+(\d+)\/\S+\s*/;
	$self->logDebug("pid", $pid) if defined $pid;
	$self->logDebug("pid NOT DEFINED. No running SGE port") if not defined $pid;
	return if not defined $pid;

	$self->killProcess($pid);
}

# Force-kill (SIGKILL) the given process id. Exits the program when $pid is
# the empty string (note: an undef $pid would warn on the eq comparison).
method killProcess ($pid) {
	$self->logError("pid is empty") and exit if $pid eq '';
	my $command = "kill -9 $pid";
	$self->logDebug("command", $command);
	$self->runCommand($command);
}

# True-ish (the matching netstat line) when sge_qmaster listens on $port.
method qmasterRunning ($port) {
	#### VERIFY THAT THE SGE MASTER DAEMON IS LISTENING AT CORRECT PORT
	$self->logDebug("port", $port);
	return $self->sgeProcessListening($port, "sge_qmaster");
}

# True-ish (the matching netstat line) when sge_execd listens on $port.
method execdRunning ($port) {
	#### VERIFY THAT SGE EXEC DAEMON IS LISTENING AT CORRECT PORT
	$self->logDebug("port", $port);
	return $self->sgeProcessListening($port, "sge_execd");
}

# Grep netstat for a listener matching $port and/or $pattern; returns the
# trimmed netstat line on a match, 0 otherwise. Exits when both args are undef.
method sgeProcessListening ($port, $pattern) {
	#### LISTENER VERIFIER. LATER: REDO WITH REGEX
	$self->logDebug("port", $port);
	$self->logDebug("pattern", $pattern) if defined $pattern;
	$self->logError("Neither port nor pattern are defined") and exit if not defined $port and not defined $pattern;

	my $command = "netstat -ntulp ";
	$command .= "| grep $port " if defined $port;
	$command .= "| grep $pattern " if defined $pattern;

	#### EXPECTED OUTPUT FORMAT:
	####tcp 0 0 0.0.0.0:36361 0.0.0.0:* LISTEN 5920/sge_qmaster
	####tcp 0 0 0.0.0.0:36362 0.0.0.0:* LISTEN 4780/sge_execd
	my ($result) = $self->runCommand($command);
	$result =~ s/\s+$//;
	$self->logDebug("result", $result);

	return $result if defined $result and $result;
	return 0;
}

1;
#!/usr/bin/env zsh
# Emacs-style line editing as the base keymap.
bindkey -e

# Black magic to set terminal modes properly
# See: https://github.com/robbyrussell/oh-my-zsh/blob/3705d47bb3f3229234cba992320eadc97a221caf/lib/key-bindings.zsh#L5
# Enter "application mode" (smkx) while the line editor is active so that
# the $terminfo key sequences used below are actually what the terminal sends.
if (( ${+terminfo[smkx]} )) && (( ${+terminfo[rmkx]} )); then
  function zle-line-init() {
    echoti smkx
  }
  function zle-line-finish() {
    echoti rmkx
  }
  zle -N zle-line-init
  zle -N zle-line-finish
fi

# Up/Down arrows: search history for lines starting with what is already typed.
autoload -U up-line-or-beginning-search
zle -N up-line-or-beginning-search
bindkey "${terminfo[kcuu1]}" up-line-or-beginning-search

autoload -U down-line-or-beginning-search
zle -N down-line-or-beginning-search
bindkey "${terminfo[kcud1]}" down-line-or-beginning-search

# Backspace deletes backwards even in vi-ish terminals.
bindkey '^?' backward-delete-char

# Delete key: prefer the terminfo entry, fall back to common raw sequences.
if [[ "${terminfo[kdch1]}" != "" ]]; then
  bindkey "${terminfo[kdch1]}" delete-char
else
  bindkey "^[[3~" delete-char
  bindkey "^[3;5~" delete-char
  bindkey "\e[3~" delete-char
fi
class SurveyTaker < ActiveRecord::Base
  # Returns the relation of survey takers whose +number+ matches +search+.
  # Fix: removed a leftover debug statement (`puts search.class`) that wrote
  # to stdout on every call.
  def self.search(search)
    where(number: search)
  end
end