{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \n@if (IsSectionDefined(\"AddToScripts\"))\n{\n @await RenderSectionAsync(\"AddToScripts\", false)\n}\n"}}},{"rowIdx":2380,"cells":{"text":{"kind":"string","value":"class Orc480AddIndexToWorklog < ActiveRecord::Migration[5.1]\n def up\n remove_index :worklogs, %i[task_id user_id project_id date]\n add_index :worklogs, %i[task_id date], unique: true\n end\n\n def dowm\n remove_index :worklogs, %i[task_id date]\n add_index :worklogs, %i[task_id user_id project_id date], unique: true\n end\nend\n"}}},{"rowIdx":2381,"cells":{"text":{"kind":"string","value":"/* Generated by RuntimeBrowser\n Image: /System/Library/PrivateFrameworks/NeutrinoCore.framework/NeutrinoCore\n */\n\n@interface 
NUVideoPropertiesJob : NURenderJob {\n * _videoProperties;\n}\n\n- (void).cxx_destruct;\n- (bool)prepare:(out id*)arg1;\n- (id)result;\n- (bool)wantsCompleteStage;\n- (bool)wantsRenderStage;\n\n@end\n"}}},{"rowIdx":2382,"cells":{"text":{"kind":"string","value":"var o = {p1:1};\nvar prop = {value:2, writable:true, enumerable:false, configurable:true}\n\nvar x = Object.defineProperty(o, \"p2\", prop);\n\nvar __result1 = x.propertyIsEnumerable(\"p1\");\nvar __expect1 = true;\n\nvar __result2 = x.propertyIsEnumerable(\"p2\");\nvar __expect2 = false;\n"}}},{"rowIdx":2383,"cells":{"text":{"kind":"string","value":"/*\n * The Optimal Kick Hamiltonian split evaluates every interaction in the system\n * at the optimal split time step, thereby using the least amount of force\n * evaluations to evolve the system.\n */\n\n#include \n#include \n#include \n#include \"evolve.h\"\n#include \"evolve_ok.h\"\n\nstruct forces zeroforces = {0, NULL, NULL};\n\n#define IS_ZEROFORCES(F) (((F).n == 0) && ((F).forc == NULL) && ((F).last == NULL))\n\n#define LOG_FORCES(F) \\\n{ \\\n for (UINT i = 0; i < (F).n; i++) { \\\n printf(\"%u\\t%u\\t%f\\n\", (F).forc[i].parti->id, (F).forc[i].partj->id, (F).forc[i].timestep); \\\n } \\\n};\n\nstatic void ok_timestep_cpu(int clevel,struct forces f, DOUBLE dt)\n{\n int dir=SIGN(dt);\n for (UINT i = 0; i < f.n; i++)\n {\n //if (f.forc[i].timestep != HUGE_VAL) ENDRUN(\"timestep??\");\n f.forc[i].timestep = timestep_ij(f.forc[i].parti, f.forc[i].partj,dir);\n }\n diag->tstep[clevel]++;\n diag->tcount[clevel] += f.n;\n}\n\n/*\n * split_ok_forces: split forces into smaller than dt, faster than dt\n */\nstatic void ok_split(FLOAT dt, struct forces f, struct forces *slow, struct forces *fast)\n{\n //LOG(\"dt=%lf f.n=%u\\n\", dt, f.n);\n UINT i = 0;\n struct force *left, *right;\n left = f.forc;\n right = f.last;\n dt=fabs(dt);\n while (1)\n {\n if (i >= f.n) ENDRUN(\"forces split error 1\\n\");\n i++;\n while ((left->timestep < dt) && (lefttimestep >= dt) && 
(lefttimestep < dt) left++;\n slow->n = f.last - left + 1;\n fast->n = left - f.forc;\n if (fast->n == 1)\n {\n fast->n = 0;\n slow->n = f.n;\n }\n if (slow->n > 0)\n {\n slow->forc = f.forc + fast->n;\n slow->last = f.last;//slow->part+slow->n-1;\n }\n if (fast->n > 0)\n {\n fast->forc = f.forc;\n fast->last = f.forc + fast->n - 1;\n }\n if (fast->n + slow->n != f.n)\n ENDRUN(\"forces split error 2: fast->n=%u slow->n=%u f.n=%u\\n\", fast->n, slow->n, f.n);\n //for (i = 0; i < f.n; i++) f.forc[i].level = clevel;\n}\n\nstruct forces ok_main_forces = {0, NULL, NULL};\n\nvoid evolve_ok_init(struct sys s)\n{\n UINT n_forces = s.n * s.n - s.n;\n if (ok_main_forces.forc != NULL) ENDRUN(\"OK (re)allocation error\");\n ok_main_forces.forc = (struct force *) malloc(n_forces * sizeof(struct force));\n ok_main_forces.last = &(ok_main_forces.forc[n_forces - 1]);\n ok_main_forces.n = n_forces;\n\n // initialize pointers of the forces structure\n UINT k = 0;\n for (UINT i = 0; i < s.n; i++)\n {\n for (UINT j = 0; j < s.n; j++)\n {\n if (i != j)\n {\n ok_main_forces.forc[k].parti = &( s.part[i] );\n ok_main_forces.forc[k].partj = &( s.part[j] );\n k++;\n }\n }\n }\n}\n\nvoid evolve_ok_stop()\n{\n if (ok_main_forces.forc != NULL)\n {\n free(ok_main_forces.forc);\n ok_main_forces.forc = NULL;\n }\n}\n\nstatic void ok_kick(int clevel,struct forces f, DOUBLE dt)\n{\n FLOAT dx[3],dr3,dr2,dr,acci;\n FLOAT acc[3];\n\n for (UINT i = 0; i < f.n; i++)\n {\n acc[0] = 0.;\n acc[1] = 0.;\n acc[2] = 0.;\n\n dx[0] = f.forc[i].parti->pos[0] - f.forc[i].partj->pos[0];\n dx[1] = f.forc[i].parti->pos[1] - f.forc[i].partj->pos[1];\n dx[2] = f.forc[i].parti->pos[2] - f.forc[i].partj->pos[2];\n dr2 = dx[0]*dx[0] + dx[1]*dx[1] + dx[2]*dx[2] + eps2;\n\n if (dr2 > 0) {\n dr = sqrt(dr2);\n dr3 = dr*dr2;\n acci = f.forc[i].partj->mass / dr3;\n\n f.forc[i].parti->vel[0] -= dt * dx[0] * acci;\n f.forc[i].parti->vel[1] -= dt * dx[1] * acci;\n f.forc[i].parti->vel[2] -= dt * dx[2] * acci;\n }\n }\n\n 
diag->kstep[clevel]++;\n diag->kcount[clevel] += f.n;\n}\n\nvoid evolve_ok2(int clevel,struct sys s, struct forces f, DOUBLE stime, DOUBLE etime, DOUBLE dt, int calc_timestep)\n{\n if (IS_ZEROFORCES(f) && clevel == 0) { f = ok_main_forces; }\n CHECK_TIMESTEP(etime,stime,dt,clevel);\n // all particles are drifted together\n if (f.n == 0)\n {\n diag->deepsteps++;\n diag->simtime += dt;\n drift(clevel,s, etime, dt);\n return;\n }\n if (calc_timestep) ok_timestep_cpu(clevel,f, dt);\n struct forces slowf = zeroforces, fastf = zeroforces;\n ok_split((FLOAT) dt, f, &slowf, &fastf);\n evolve_ok2(clevel+1,s, fastf, stime, stime+dt/2, dt/2, 0);\n ok_kick(clevel,slowf, dt);\n evolve_ok2(clevel+1,s, fastf, stime+dt/2, etime, dt/2, 1);\n}\n"}}},{"rowIdx":2384,"cells":{"text":{"kind":"string","value":"#!/bin/sh\n#\n# bear.sh - shell wrapper for bear.php \n#\n# $Id: bear.sh 707 2009-07-06 18:31:29Z koriyama@users.sourceforge.jp $\n#\nBEAR_HOME=\"@PEAR-DIR@/BEAR\"\n\nif (test -z \"$PHP_COMMAND\");\nthen\n export PHP_COMMAND=php\nfi\n\nif (test -z \"$PHP_CLASSPATH\");\nthen\n PHP_CLASSPATH=BEAR_HOME/class\n export PHP_CLASSPATH\nfi\n$PHP_COMMAND -d html_errors=off -qC $BEAR_HOME/BEAR/bin/bear.php $*"}}},{"rowIdx":2385,"cells":{"text":{"kind":"string","value":"fields = $data;\n }\n\n public function getCreatedAt(string $format = 'Y-m-d H:i', string $timezome = 'Europe/Moscow')\n {\n $datetime = new \\DateTime($this->fields['created_at']);\n $datetime->setTimezone(new \\DateTimeZone($timezome));\n\n return $datetime->format($format);\n }\n\n public function getId()\n {\n return $this->fields['id'];\n }\n\n public function getDescription()\n {\n return $this->fields['description'];\n }\n\n public function getTextbody()\n {\n return $this->fields['body'];\n }\n\n public function getTitle()\n {\n return $this->fields['title'];\n }\n\n public function __set($key, $value)\n {\n return null;\n }\n\n public function getAuthor()\n {\n return $this->fields['author'];\n 
}\n}\n"}}},{"rowIdx":2386,"cells":{"text":{"kind":"string","value":"#if UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_EDITOR\n/**\n * Autogenerated by Thrift Compiler ()\n *\n * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING\n * @generated\n */\n\nnamespace GetSocialSdk.Core \n{\n /// \n /// to determine which query to run.\n /// \n public enum QueryType\n {\n APP_SESSIONS = 0,\n INVITE_EVENTS = 1,\n INSTALL_EVENTS = 2,\n ACTIVITY_EVENTS = 3,\n INVITE_CYCLE = 4,\n PN_EVENTS = 5,\n CONTENT_SESSIONS = 6,\n REENGAGEMENT = 7,\n KFACTOR = 8,\n COMBINED_INVITE_EVENTS = 9,\n CONVERSION = 10,\n RETENTION = 11,\n INSTALL_DISTRIBUTION = 12,\n USERS = 13,\n UNIQUE_USERS = 14,\n UNIQUE_INVITE_USERS = 15,\n INVITE_EVENTS_PER_1000 = 16,\n ETL_STATUS = 17,\n RETENTION_PER_PERIOD = 18,\n SAVINGS = 19,\n AUDIENCE_COMPARE = 20,\n AUDIENCE_RETENTION = 21,\n USER = 22,\n AUDIENCE_USER = 23,\n AUDIENCE = 24,\n ENGAGEMENT = 25,\n ENGAGEMENT_PER_PERIOD = 26,\n PN_TYPE = 27,\n LOCAL_AUDIENCE_USERS = 28,\n EXPORT_USERS_FACEBOOK = 29,\n LOCAL_AUDIENCE = 30,\n EXPORT_EVENTS = 31,\n EXPORT_USERS = 32,\n AVAILABLE_AUDIENCE = 33,\n APPS_OVERVIEW = 34,\n ENGAGEMENT_DISTRIBUTION = 35,\n BILLING_DAU = 36,\n BILLING_USAGE = 37,\n PURCHASE_EVENTS = 38,\n DAU = 39,\n USERS_PURCHASES = 40,\n PURCHASE_CYCLE = 41,\n PURCHASE_TIMELINE = 42,\n PURCHASE_PAIRS = 43,\n CUSTOM_EVENTS = 44,\n PROMO_CODE_EVENTS = 45,\n USERS_EVENTS = 46,\n USERS_PROPERTIES = 47,\n CUSTOM_EVENTS_PROPERTIES = 48,\n ACTIVITY_ENGAGED_USERS = 49,\n REFERRAL_EVENTS = 50,\n }\n}\n#endif\n"}}},{"rowIdx":2387,"cells":{"text":{"kind":"string","value":"package untitled.goose.framework.model.entities.runtime\n\nimport untitled.goose.framework.model.events.PlayerEvent\n\ntrait Player extends Defined[PlayerDefinition] with History[PlayerEvent] {\n\n /** Compares two players. 
*/\n def ==(obj: Player): Boolean = definition == obj.definition && history == obj.history\n\n override def equals(obj: Any): Boolean = obj match {\n case x: Player => x == this\n case _ => false\n }\n\n override def hashCode(): Int = 17 * definition.hashCode + 23\n\n override def toString: String =\n this.getClass.getSimpleName + \":\" +\n definition.name\n}\n\nobject Player {\n\n private class PlayerDefImpl(val definition: PlayerDefinition) extends Player {\n val history: Seq[PlayerEvent] = List()\n }\n\n case class PlayerImpl(definition: PlayerDefinition, history: Seq[PlayerEvent] = Seq()) extends Player\n\n /** Factory method that creates a new tile from the definition. */\n def apply(playerDefinition: PlayerDefinition): Player = new PlayerDefImpl(playerDefinition)\n\n}\n"}}},{"rowIdx":2388,"cells":{"text":{"kind":"string","value":"/*\nCopyright 2020 Gravitational, Inc.\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n*/\n\npackage framework\n\nimport (\n\t\"github.com/gravitational/robotest/infra\"\n\t\"github.com/gravitational/robotest/lib/loc\"\n\t\"github.com/gravitational/trace\"\n)\n\n// TestState represents the state of the test between boostrapping a cluster\n// and teardown.\n// The state is updated on each in-between test run to sync the provisioner state.\ntype TestState struct {\n\t// EntryURL defines the entry point to the application.\n\t// This can be the address of existing Ops Center or local application endpoint URL\n\tEntryURL string 
`json:\"ops_url,omitempty\"`\n\t// Application defines the application package to test as retrieved from the wizard\n\tApplication *loc.Locator `json:\"application,omitempty\"`\n\t// Login specifies optional login to connect to the EntryURL.\n\t// Falls back to TestContext.Login if unspecified\n\tLogin *Login `json:\"login,omitempty\"`\n\t// ServiceLogin specifies optional service login to connect to the EntryURL.\n\tServiceLogin *ServiceLogin `json:\"service_login,omitempty\"`\n\t// Bandwagon specifies bandwagon creation details\n\tBandwagon *BandwagonConfig `json:\"bandwagon,omitempty\"`\n\t// Provisioner defines the provisioner used to create the infrastructure.\n\t// This can be empty for the automatic provisioner\n\tProvisioner *Provisioner `json:\"provisioner,omitempty\"`\n\t// Onprem defines the provisioner state.\n\t// The provisioner used is specified by Provisioner.\n\t// With automatic provisioner, no provisioner state is stored\n\tProvisionerState *infra.ProvisionerState `json:\"provisioner_state,omitempty\"`\n\t// StateDir specifies the location of temporary state used for a single test run\n\t// (from bootstrapping to destroy)\n\tStateDir string `json:\"state_dir\"`\n\t// BackupState defines state of backup.\n\t// Used for backup/restore operations.\n\tBackupState *BackupState `json:\"backup_state,omitempty\"`\n}\n\n// BackupState defines state of backup.\ntype BackupState struct {\n\t// Addr is the address of a node where backup is storing\n\tAddr string `json:\"addr\"`\n\t// Path is an absolute path to the backup file\n\tPath string `json:\"path\"`\n}\n\nfunc (r TestState) Validate() error {\n\tvar errors []error\n\tif r.Provisioner != nil && r.ProvisionerState == nil {\n\t\terrors = append(errors, trace.BadParameter(\"ProvisionerState is required\"))\n\t}\n\tif r.Provisioner == nil && r.ProvisionerState != nil {\n\t\terrors = append(errors, trace.BadParameter(\"Provisioner is required\"))\n\t}\n\tif r.StateDir == \"\" {\n\t\terrors = append(errors, 
trace.BadParameter(\"StateDir is required\"))\n\t}\n\treturn trace.NewAggregate(errors...)\n}\n"}}},{"rowIdx":2389,"cells":{"text":{"kind":"string","value":"package fr.anthonygodin.api.controller;\n\nimport fr.anthonygodin.api.dto.entity.ToolDTO;\nimport fr.anthonygodin.api.dto.entity.ToolToCreateDTO;\nimport fr.anthonygodin.api.service.CrudService;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RestController;\n\n/**\n * Created by AnthoGdn on 26/03/17.\n */\n@RestController\n@RequestMapping(\"api/tools\")\npublic class ToolController extends CrudController {\n\n private static final Logger LOGGER = LoggerFactory.getLogger(ToolController.class);\n\n @Autowired\n private CrudService toolService;\n\n @Override\n protected Logger getLogger() {\n return LOGGER;\n }\n @Override\n protected CrudService getService() {\n return toolService;\n }\n}\n\n"}}},{"rowIdx":2390,"cells":{"text":{"kind":"string","value":"/*\n * InputDialog.java\n *\n * Copyright (C) 1998-2003 Peter Graves\n * $Id: InputDialog.java,v 1.3 2003/07/23 16:13:51 piso Exp $\n *\n * This program is free software; you can redistribute it and/or\n * modify it under the terms of the GNU General Public License\n * as published by the Free Software Foundation; either version 2\n * of the License, or (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.\n */\n\npackage org.armedbear.j;\n\nimport java.awt.BorderLayout;\nimport java.awt.event.InputEvent;\nimport java.awt.event.KeyEvent;\nimport java.awt.event.KeyListener;\nimport java.util.List;\nimport javax.swing.BoxLayout;\nimport javax.swing.JDialog;\nimport javax.swing.JPanel;\nimport javax.swing.border.EmptyBorder;\n\npublic class InputDialog extends JDialog implements KeyListener\n{\n protected final Editor editor;\n\n protected HistoryTextField textField;\n\n private String defaultValue;\n private History history;\n private String input;\n private List completions;\n private int index;\n\n public InputDialog(Editor editor, String prompt, String title,\n String defaultValue)\n {\n super(editor.getFrame(), title, true);\n this.editor = editor;\n this.defaultValue = defaultValue;\n JPanel panel = new JPanel();\n panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS));\n panel.setBorder(new EmptyBorder(5, 5, 5, 5));\n panel.add(new Label(prompt));\n textField = new HistoryTextField(20);\n textField.addKeyListener(this);\n panel.add(textField);\n getContentPane().add(panel, BorderLayout.CENTER);\n pack();\n textField.setFocusTraversalKeysEnabled(false);\n }\n\n public static String showInputDialog(Editor editor, String prompt,\n String title, String defaultValue)\n {\n InputDialog d = new InputDialog(editor, prompt, title, defaultValue);\n editor.centerDialog(d);\n d.show();\n return d.input;\n }\n\n public static String showInputDialog(Editor editor, String prompt,\n String title)\n {\n return showInputDialog(editor, prompt, title, null);\n }\n\n @Override\n\tpublic void show()\n {\n if (defaultValue != null && defaultValue.length() > 0) {\n textField.setText(defaultValue);\n 
textField.selectAll();\n }\n textField.requestFocus();\n super.show();\n }\n\n public final void setDefaultValue(String s)\n {\n defaultValue = s;\n }\n\n public final String getInput()\n {\n return input;\n }\n\n public void setHistory(History history)\n {\n this.history = history;\n textField.setHistory(history);\n }\n\n protected void enter()\n {\n input = textField.getText();\n if (history != null) {\n history.append(input);\n history.save();\n }\n dispose();\n }\n\n protected void escape()\n {\n input = null;\n dispose();\n }\n\n @Override\n\tpublic void keyPressed(KeyEvent e)\n {\n final int keyCode = e.getKeyCode();\n final int modifiers = e.getModifiers();\n switch (keyCode) {\n case KeyEvent.VK_TAB: {\n String s = null;\n if (modifiers == InputEvent.SHIFT_MASK)\n s = previousGuess();\n else\n s = guess(textField.getText());\n e.consume();\n if (s != null) {\n textField.setText(s);\n textField.setCaretPosition(s.length());\n }\n return;\n }\n case KeyEvent.VK_ENTER:\n enter();\n return;\n case KeyEvent.VK_ESCAPE:\n escape();\n return;\n case KeyEvent.VK_SHIFT:\n case KeyEvent.VK_META:\n case KeyEvent.VK_ALT:\n // Ignore modifers.\n return;\n default:\n // Anything but tab, start over.\n completions = null;\n return;\n }\n }\n\n @Override\n\tpublic void keyReleased(KeyEvent e) {}\n\n @Override\n\tpublic void keyTyped(KeyEvent e) {}\n\n @Override\n\tpublic void dispose()\n {\n super.dispose();\n editor.restoreFocus();\n }\n\n private String guess(String prefix)\n {\n if (completions == null) {\n completions = getCompletions(prefix);\n if (completions == null)\n return null;\n index = 0;\n } else if (index >= completions.size())\n index = 0; // Start over.\n if (index < completions.size())\n return (String) completions.get(index++);\n return null;\n }\n\n private String previousGuess()\n {\n if (completions != null) {\n if (completions.size() > 1) {\n index -= 2;\n if (index < 0)\n index += completions.size();\n return (String) completions.get(index++);\n }\n 
}\n return null;\n }\n\n // Derived classes can override this method to provide completion\n // functionality.\n protected List getCompletions(String prefix)\n {\n return null;\n }\n}\n"}}},{"rowIdx":2391,"cells":{"text":{"kind":"string","value":"added = $this->fileAdded;\n }\n\n public function uninstall()\n {\n $parser = MediaParser::i();\n $parser->unbind($this);\n }\n\n public function fileAdded(Event $event)\n {\n $files = Files::i();\n $item = $files->getItem($event->id);\n if ('image' != $item['media']) {\n return;\n }\n\n $fileurl = $files->getUrl($id);\n if ($s = Http::get('http://www.smushit.com/ysmush.it/ws.php?img=' . urlencode($fileurl))) {\n $json = json_decode($s);\n if (isset($json->error) || (-1 === (int)$json->dest_size) || !$json->dest) {\n return;\n }\n\n $div = $item['size'] - (int)$json->dest_size;\n if (($div / ($item['size'] / 100)) < 3) {\n return;\n }\n\n $dest = urldecode($json->dest);\n if (!Str::begin($dest, 'http')) {\n $dest = 'http://www.smushit.com/' . $dest;\n }\n if ($content = Http::get($dest)) {\n return $files->setContent($id, $content);\n }\n }\n }\n}\n"}}},{"rowIdx":2392,"cells":{"text":{"kind":"string","value":"module Plotting\n\nusing Plots\n\nusing ..Structs\n\nexport plot_rectangles\n\nrectangle(x, y, w, h) = Shape(x .+ [0, w, w, 0], y .+ [0, 0, h, h])\n\nfunction plot_rectangles(rect_sizes, positions)\n p = Plots.plot(legend = false)\n\n for (dims, pos) in zip(rect_sizes, positions)\n Plots.plot!(\n p,\n rectangle(pos..., dims...),\n #fillcolor=nothing,\n # linewidth=0\n )\n end\n\n return p\nend\n\nend # module\n"}}},{"rowIdx":2393,"cells":{"text":{"kind":"string","value":"createNewImageAction->execute($depositDTO->checkPicture, $depositDTO->userId);\n\n return Deposit::create([\n \"amount\" => $depositDTO->amount,\n \"description\" => $depositDTO->description,\n \"status\" => DepositStatusEnum::pending(),\n \"image_id\" => $savedCheckImage->id,\n \"user_id\" => $depositDTO->userId\n ])->load(\"image\");\n });\n 
}\n}\n"}}},{"rowIdx":2394,"cells":{"text":{"kind":"string","value":"/*\n * FBOTestApp.cpp\n *\n * Copyright (C) 2007 by Universitaet Stuttgart (VIS). Alle Rechte vorbehalten.\n */\n#ifdef _WIN32\n#include \n#else /* _WIN32 */\n#endif /* _WIN32 */\n\n#include \n#include \"vislib/graphics/gl/IncludeAllGL.h\"\n#include \n#include \"GlutAppManager.h\"\n#include \"vislib/graphics/FpsCounter.h\"\n#include \"vislib/VersionNumber.h\"\n#include \"vislib/graphics/gl/glfunctions.h\"\n\n\n/** not nice! */\nextern vislib::graphics::FpsCounter fpsCounter;\n\n\n/*\n * GlutAppManager::AbstractFactory::AbstractFactory\n */\nGlutAppManager::AbstractFactory::AbstractFactory(const char *name) \n : name(name) {\n}\n\n\n/*\n * GlutAppManager::GlutAppManager\n */\nGlutAppManager::GlutAppManager(void) \n : app(NULL), factories(), windowMenu(0), appMenu(0) {\n}\n\n\n/*\n * GlutAppManager::~GlutAppManager\n */\nGlutAppManager::~GlutAppManager(void) {\n if (app) {\n app->GLDeinit();\n delete app;\n }\n\n for (int i = int(this->factories.Count()) - 1; i >= 0; i--) {\n AbstractFactory *f = this->factories[i];\n this->factories[i] = NULL;\n delete f;\n }\n this->factories.Clear();\n\n if (this->windowMenu != 0) {\n glutDetachMenu(this->windowMenu);\n glutDestroyMenu(this->windowMenu);\n this->windowMenu = 0;\n }\n if (this->appMenu != 0) {\n glutDestroyMenu(this->appMenu);\n this->appMenu = 0;\n }\n}\n\n\n/*\n * GlutAppManager::GetInstance\n */\nGlutAppManager * GlutAppManager::GetInstance(void) {\n static GlutAppManager instance;\n return &instance;\n}\n\n\n/*\n * GlutAppManager::InstallFactory\n */\nvoid GlutAppManager::InstallFactory(AbstractFactory *factory) {\n if (factory == NULL) return;\n\n GlutAppManager::GetInstance()->factories.Append(factory);\n}\n\n\n/*\n * GlutAppManager::InitGlutWindow\n */\nvoid GlutAppManager::InitGlutWindow(void) {\n if (this->windowMenu != 0) {\n return;\n }\n\n this->appMenu = glutCreateMenu(GlutAppManager::OnMenuItemClicked);\n vislib::StringA name;\n for 
(int i = 0; i < int(this->factories.Count()); i++) {\n if (this->factories[i] != NULL) {\n name.Format(\"%d: %s\", (i + 1), this->factories[i]->GetName());\n glutAddMenuEntry(name.PeekBuffer(), i + 1);\n }\n }\n\n this->windowMenu = glutCreateMenu(GlutAppManager::OnMenuItemClicked);\n\n glutAddSubMenu(\"Select Test\", this->appMenu);\n#if defined(VISGLUT_EXTENSIONS)\n ::glutAddMenuSeparator();\n#endif /* VISGLUT_EXTENSIONS */\n glutAddMenuEntry(\"Restart Test\", -2);\n glutAddMenuEntry(\"Exit\", -1);\n\n glutAttachMenu(GLUT_RIGHT_BUTTON);\n}\n\n\n/*\n * GlutAppManager::OnMenuItemClicked\n */\nvoid GlutAppManager::OnMenuItemClicked(int menuID) {\n if (menuID == -1) {\n GlutAppManager::ExitApplication(0);\n } else if (menuID == -2) {\n GlutAppManager *This = GlutAppManager::GetInstance();\n if (This->app) {\n This->app->GLDeinit();\n if (This->app->GLInit() == 0) {\n // TODO: initializes the glut stuff\n This->app->OnResize(This->width, This->height);\n\n printf(\"Test restarted.\\n\");\n\n fpsCounter.Reset();\n\n } else {\n delete This->app;\n This->app = NULL;\n printf(\"Test could not be restarted.\\n\");\n }\n }\n } else if ((menuID > 0) && (menuID <= int(GlutAppManager::GetInstance()->factories.Count()))) {\n GlutAppManager *This = GlutAppManager::GetInstance();\n // select an test application factory\n printf(\"Selecting Test: %s\\n\", This->factories[menuID - 1]->GetName());\n if (This->app) {\n if (This->factories[menuID - 1]->HasCreated(This->app)) {\n printf(\" Test already selected.\\n\");\n } else {\n This->app->GLDeinit();\n delete This->app;\n This->app = NULL;\n }\n }\n if (!This->app) {\n This->app = This->factories[menuID - 1]->CreateApplication();\n if (This->app) {\n if (This->app->GLInit() == 0) {\n // TODO: initializes the glut stuff\n This->app->OnResize(This->width, This->height);\n\n printf(\" Test selected.\\n\");\n\n fpsCounter.Reset();\n\n } else {\n delete This->app;\n This->app = NULL;\n printf(\" Test could not be initialized.\\n\");\n 
}\n\n } else {\n printf(\" Test could not be created.\\n\");\n }\n }\n\n glutPostRedisplay();\n }\n}\n\n\n/*\n * GlutAppManager::ExitApplication\n */\nvoid GlutAppManager::ExitApplication(int exitcode) {\n exit(exitcode);\n}\n\n\n/*\n * GlutAppManager::SetSize\n */\nvoid GlutAppManager::SetSize(int w, int h) {\n this->width = w;\n this->height = h;\n if (this->app == NULL) {\n glViewport(0, 0, this->width, this->height);\n }\n}\n\n\n/*\n * glprintf\n */\nstatic void glprintf(float x, float y, const void *font, const char *string) {\n glRasterPos2f(x, y);\n while (*string) {\n glutBitmapCharacter((void *)font, *string++);\n }\n}\n\n/*\n * GlutAppManager::glRenderEmptyScreen\n */\nvoid GlutAppManager::glRenderEmptyScreen(void) {\n GlutAppManager *This = GlutAppManager::GetInstance();\n glViewport(0, 0, This->width, This->height);\n glClearColor(0.0f, 0.0f, 0.0f, 0.0f);\n glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n\n glDisable(GL_DEPTH_TEST);\n\n glDisable(GL_LIGHTING);\n glDisable(GL_LIGHT0);\n\n glMatrixMode(GL_PROJECTION);\n glLoadIdentity();\n glOrtho(-0.5, This->width, -0.5, This->height, -1.0, 1.0);\n glMatrixMode(GL_MODELVIEW);\n glLoadIdentity();\n\n glColor3f(0.7f, 0.8f, 1.0f);\n\n glprintf(10.0f, float(This->height - 28), GLUT_BITMAP_HELVETICA_18,\n \"VISlib glutTest Application\");\n glprintf(10.0f, float(This->height - 44), GLUT_BITMAP_HELVETICA_12,\n \"Copyright 2007, Universität Stuttgart (VIS). 
Alle Rechte vorbehalten.\");\n vislib::StringA txt;\n txt.Format(\"OpenGL Version: %s\", \n vislib::graphics::gl::GLVersion().ToStringA(3).PeekBuffer());\n glprintf(10.0f, float(This->height - 60), GLUT_BITMAP_HELVETICA_12, \n txt.PeekBuffer());\n glprintf(10.0f, float(This->height - 76), GLUT_BITMAP_HELVETICA_12,\n \"Use the right click context menu to select a test.\");\n\n glFlush();\n\n glutSwapBuffers();\n}\n"}}},{"rowIdx":2395,"cells":{"text":{"kind":"string","value":"describe 'wildfly::profile_path' do\n it { is_expected.to run.with_params('').and_return(nil) }\n it { is_expected.to run.with_params(nil).and_return(nil) }\n it { is_expected.to run.with_params('full-ha').and_return('/profile=full-ha') }\nend\n"}}},{"rowIdx":2396,"cells":{"text":{"kind":"string","value":"package com.uploadcare.android.library.data\n\nimport com.squareup.moshi.Json\nimport com.uploadcare.android.library.api.UploadcareGroup\nimport java.net.URI\n\ndata class GroupPageData(val next: URI? = null,\n val previous: URI? = null,\n val total: Int,\n @Json(name = \"per_page\") val perPage: Int,\n val results: List) : PageData {\n\n override fun getResultsData() = results\n\n override fun hasMore() = next != null\n\n override fun getNextURI() = next\n}"}}},{"rowIdx":2397,"cells":{"text":{"kind":"string","value":"//\n// Created by Zhen Peng on 8/4/19.\n//\n\n#include \n#include \n#include \n#include \n#include \"dglobals.h\"\n#include \"globals.h\"\n\nusing namespace PADO;\n\n/*\n * Create a binary file. 
The format is just sequence of pairs of Vertex IDs.\n */\n\n\nvoid create(const char *filename, VertexID num_v, EdgeID num_e)\n{\n std::ofstream fout(filename);\n if (!fout.is_open()) {\n fprintf(stderr, \"Error: cannot create file %s\\n\", filename);\n exit(EXIT_FAILURE);\n }\n srand(time(0));\n\n double time_running = -WallTimer::get_time_mark();\n for (EdgeID e_i = 0; e_i < num_e; ++e_i) {\n VertexID head = rand() % num_v;\n VertexID tail = rand() % num_v;\n fout.write(reinterpret_cast(&head), sizeof(head));\n fout.write(reinterpret_cast(&tail), sizeof(tail));\n }\n time_running += WallTimer::get_time_mark();\n printf(\"running_time: %f\\n\", time_running);\n}\n\nint main(int argc, char *argv[])\n{\n if (argc < 4) {\n fprintf(stderr,\n \"Usage: ./createfile \\n\");\n exit(EXIT_FAILURE);\n }\n\n create(argv[1], strtoull(argv[2], nullptr, 0), strtoull(argv[3], nullptr, 0));\n return EXIT_SUCCESS;\n}"}}},{"rowIdx":2398,"cells":{"text":{"kind":"string","value":"// Copyright 2021 The Fuchsia Authors. 
All rights reserved.\n// Use of this source code is governed by a BSD-style license that can be\n// found in the LICENSE file.\n\nuse zerocopy::{AsBytes, FromBytes};\n\nuse crate::types::*;\n\n/// Matches iovec_t.\n#[derive(Debug, Default, Clone, Copy, AsBytes, FromBytes)]\n#[repr(C)]\npub struct UserBuffer {\n pub address: UserAddress,\n pub length: usize,\n}\n\nimpl UserBuffer {\n pub fn get_total_length(buffers: &[UserBuffer]) -> usize {\n let mut total = 0;\n for buffer in buffers {\n total += buffer.length;\n }\n total\n }\n}\n"}}},{"rowIdx":2399,"cells":{"text":{"kind":"string","value":"---\ntitle: combinatorics\nicon: 'null'\nrelated: [\"math\"]\nemoji: ⚖ \n---"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":23,"numItemsPerPage":100,"numTotalItems":43696,"offset":2300,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NzE4MDAxMCwic3ViIjoiL2RhdGFzZXRzL1ppaGFvLUxpL0NvZGUiLCJleHAiOjE3NTcxODM2MTAsImlzcyI6Imh0dHBzOi8vaHVnZ2luZ2ZhY2UuY28ifQ.NSYJF3pEexrVYOqWqY2h8nUq1xMGRfubYNEcn2u8DZnagKcOzFTm2kTTkez_8KOil7pgSK1ONN56kAcG1-IUBg","displayUrls":true},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
text
stringlengths
27
775k
package logging import ( "fmt" "log" "os" ) //A logging interface type Logger interface { Debug(v ...interface{}) Debugf(format string, v ...interface{}) Info(v ...interface{}) Infof(format string, v ...interface{}) Warn(v ...interface{}) Warnf(format string, v ...interface{}) Error(v ...interface{}) Errorf(format string, v ...interface{}) } func NewStdLogger(usercolor bool, loglvl int, prefix string) Logger { logPrefix := map[int]string{ ERROR: "[ERROR] ", WARN: "[WARN] ", INFO: "[INFO] ", DEBUG: "[DEBUG] ", } postfix := "" if usercolor { logColor := map[int]string{ ERROR: "\033[0m\033[31m", WARN: "\033[0m\033[33m", INFO: "\033[0m\033[35m", DEBUG: "\033[0m\033[34m", } for lvl, color := range logColor { logPrefix[lvl] = color + logPrefix[lvl] } postfix = "\033[0m" } l := &stdlogger{ logger: log.New(os.Stderr, "", log.LstdFlags|log.Lshortfile|log.Lmicroseconds), LogLevel: loglvl, LogLvlPrefix: logPrefix, LogPrefix: prefix, LogPostfix: postfix, } return l } type stdlogger struct { logger *log.Logger LogLevel int LogLvlPrefix map[int]string LogPrefix string LogPostfix string } func (l *stdlogger) Debug(v ...interface{}) { l.logP(DEBUG, v...) } func (l *stdlogger) Debugf(format string, v ...interface{}) { l.logPf(DEBUG, format, v...) } func (l *stdlogger) Info(v ...interface{}) { l.logP(INFO, v...) } func (l *stdlogger) Infof(format string, v ...interface{}) { l.logPf(INFO, format, v...) } func (l *stdlogger) Warn(v ...interface{}) { l.logP(WARN, v...) } func (l *stdlogger) Warnf(format string, v ...interface{}) { l.logPf(WARN, format, v...) } func (l *stdlogger) Error(v ...interface{}) { l.logP(ERROR, v...) } func (l *stdlogger) Errorf(format string, v ...interface{}) { l.logPf(ERROR, format, v...) 
} func (l *stdlogger) logP(logLvl int, v ...interface{}) { if l.LogLevel >= logLvl && l.logger != nil { l.logger.Output(3, l.LogPrefix+l.LogLvlPrefix[logLvl]+fmt.Sprint(v...)+l.LogPostfix) } } func (l *stdlogger) logPf(logLvl int, format string, v ...interface{}) { if l.LogLevel >= logLvl && l.logger != nil { l.logger.Output(3, l.LogPrefix+l.LogLvlPrefix[logLvl]+fmt.Sprintf(format, v...)+l.LogPostfix) } }
if(!window.Skyline) throw new Error("The skyline component 'image-capture' requires component Skyline."); export default window.Skyline;
#!/bin/bash set -eu -o pipefail if [ -e ./tmp ]; then echo "$dirname exists. Skipped."; exit 0; fi mkdir -p ./tmp/ssh-key set +e yes | ssh-keygen -N "" -f ./tmp/ssh-key/id_rsa set -e cp ./tmp/ssh-key/id_rsa.pub ./tmp/ssh-key/authorized_keys chmod 700 ./tmp/ssh-key chmod 600 ./tmp/ssh-key/*
using OpenRasta.DI; using OpenRasta.Hosting.InMemory; using Shouldly; using Xunit; namespace Tests.Hosting.InMemory { public class custom_resolver { InMemoryHost _host; CustomResolver _customResolver; public custom_resolver() { _customResolver = new CustomResolver(); _host = new InMemoryHost(()=>{}, _customResolver); } [Fact] public void the_resolver_is_a_custom_dependency_resolver() { _host.Resolver.ShouldBe(_customResolver); } class CustomResolver : InternalDependencyResolver { } } }
# Webpack과 Gulp 그리고 Grunt 현재 Build Tool의 대세는 당연코 Webpack이다. 그런데 이 이전에는 Gulp와 Grunt가 있었다. 나는 자바스크립트 빌드의 개념을 배울 때 이미 웹팩이 대중화되는 시기였다. 그래서 그냥 웹팩을 사용했다. 그런데 모든 새로운 기술에는 등장이유가 있으며 장점과 단점이 존재한다. 웹팩이 다른 기술들과는 어떤 차이점을 가지는지 간단하게 알아보자. ## Webpack과 Gulp&Grunt는 태생부터 다르다 ### Gulp와 Grunt --- webpack과 gulp, grunt는 사실 아예 다르다. Gulp와 Grunt(이하 걸프로 통일하겠음)는 Task Runner라고 한다. 용어로 얘기하면 되게 생소해 보이지만 별건 없다. 간단한 Task를 실행해주는 것이다. uglify나 압축등의 반복 가능한 간단한 작업을 자동화해주는 툴이다. ### 서로의 차이점 --- gulp와 grunt의 하는일은 같지만 만들어내는 방식이 다르다. grunt는 package.json처럼 json형식으로 설정을 선언하여 사용한다. 이와 다르게 gulp는 nodeJS의 스트림 기능을 이용하여 자바스크립트 코드를 사용한다. 이건 현재 유행하는 기술도 아니고 크게 중요한 점도 아니니 넘어가도록 하자. ## Webpack 이전 툴들이 Task Runner인 것과 다르게 웹팩은 이름부터 다르다. `Module Bundler` 혹은 `Package Bundler` 라고 불린다. 모듈 개념은 이미 한 번 정리 했었다. [Module](./Module.md) ### 모듈번들러 --- 모듈 번들러가 뭐냐면 이 각각의 모듈들의 의존성을 파악하여 번들(묶는다)해주는 것이다. 모듈과 의존성이라는 단어가 낯설다. 이 단어들에 대해서 다시 생각해보자. ### 모듈과 의존성 --- 자바스크립트는 큰 소스를 나눠 편하게 개발하고 유지보수하기 위해 모듈이라는 추상적인 개념을 사용한다. 마치 클래스와 비슷하다 이렇게 모듈 방식으로 코딩을하고 거기에 모듈별로 파일까지 나누어 개발하면 참 좋다. 모듈과 파일이 분기된 개념은 보통은 노드JS에서 많이 사용한다. 모듈화 된 각각의 파일들은 서로의 의존성을 가진다. 의존성이란 쉽게 말해 `import * from './index'` 이 구문이다. 현재 파일에서 다른 파일을 이용하게되면 서로 의존성이 생긴다. 그런데 브라우저 상에서는 이러한 의존성을 표현하기가 어렵다. 특히 HTTP/1.1을 사용해야 하는 환경이라면 더욱 힘들다. HTTP/2.0의 경우 한 번의 요청에 여러 파일을 받아올 수 있지만 1.1의 경우는 의존성을 통해 여러 파일이 필요하게 된다면 너무 많은 네트워크 자원을 소모하게 된다. 그러면 많은 의존성으로 엮인 JS파일들을 그냥 하나의 JS파일로 압축해서 만들면 어떨까?? 요청 한 번에 그 압축파일 하나만 주면 땡! 하게 말이다. 그게 웹팩이다. 꼭 하나는 아니다. 라이브러리 / 핵심 소스를 나누어 파일을 두개로 분기할 수도 있다. 요점은 다양한 파일들을 번들(bundle)해서 네트워크 비용을 최소화하여 파일을 번들한다! 라는 것이다. Gulp나 Grunt처럼 필요한 자동화 기능까지 더해 빌드 해주는 것이 바로 모듈 번들러다. ### 더 나아가 --- 웹팩은 위에서 말한 자바스크립트의 의존성을 파악하여 번들하는 것만이 아니다. 모든 리소스(javascript, css, image, font, 심지어 typescript, coffeescript, less, sass 등)에 대한 dependancy graph를 생성하여 빌드 시켜준다. 요즘처럼 SPA를 구현하게 되면 이러한 의존성은 꼬리에 꼬리를 물고 Graph(Tree) 형태로 만들어지게 되는데 이걸 번들링하여 하나의 js 파일로 딱 만들어주는게 모듈 번들러의 역할이다. ## 결론 두 종류의 빌드 툴 모두 리소스들을 압축한다는 공통점이 있다. Gulp나 Grunt는 단순 자동화 작업으로 파일을 압축하는 작업을 많이 하게 된다. 
그러나 웹팩의 경우 자바스크립트의 각 모듈 혹은 파일, 심지어는 다양한 리소스들까지 의존성을 파악하여 묶어주기 때문에 엄청나게 큰 차이점을 보인다. 따라서 현재 코드가 모듈화 된 코드가 아니거나, 다양한 의존성을 다루어야 하는 작업이 아니라면 Gulp나 Grunt도 충분히 좋은 빌드 툴이 될 수 있다. 그러나 꽤 준수한 프로젝트 규모를 가지거나, 자바스크립트를 모듈화하여 코딩하거나, 무거워질 수 밖에 없는 프레임워크를 사용하는 프로젝트 등에는 웹팩이 훨씬 더 좋은 툴이 될 것이다.
use super::pmio::{pmio_config_read_addr, pmio_config_write_addr}; use super::PciAddrSpace; use numeric_enum_macro::numeric_enum; #[derive(Debug)] pub struct PciConfig { pub addr_space: PciAddrSpace, pub base: usize, } #[allow(unsafe_code)] impl PciConfig { pub fn read8_offset(&self, offset: usize) -> u8 { trace!("read8 @ {:#x?}", offset); match self.addr_space { PciAddrSpace::MMIO => unsafe { u8::from_le(*(offset as *const u8)) }, PciAddrSpace::PIO => pmio_config_read_addr(offset as u32, 8).unwrap() as u8, } } pub fn read16_offset(&self, addr: usize) -> u16 { trace!("read16 @ {:#x?}", addr); match self.addr_space { PciAddrSpace::MMIO => unsafe { u16::from_le(*(addr as *const u16)) }, PciAddrSpace::PIO => pmio_config_read_addr(addr as u32, 16).unwrap() as u16, } } pub fn read32_offset(&self, addr: usize) -> u32 { trace!("read32 @ {:#x?}", addr); match self.addr_space { PciAddrSpace::MMIO => unsafe { u32::from_le(*(addr as *const u32)) }, PciAddrSpace::PIO => pmio_config_read_addr(addr as u32, 32).unwrap(), } } pub fn read8(&self, addr: PciReg8) -> u8 { self.read8_offset(self.base + addr as usize) } pub fn read8_(&self, addr: usize) -> u8 { self.read8_offset(self.base + addr) } pub fn read16(&self, addr: PciReg16) -> u16 { self.read16_offset(self.base + addr as usize) } pub fn read16_(&self, addr: usize) -> u16 { self.read16_offset(self.base + addr) } pub fn read32(&self, addr: PciReg32) -> u32 { self.read32_offset(self.base + addr as usize) } pub fn read32_(&self, addr: usize) -> u32 { self.read32_offset(self.base + addr) } pub fn read_bar(&self, bar_: usize) -> u32 { self.read32_offset(self.base + PciReg32::BARBase as usize + bar_ * 4) } pub fn write8_offset(&self, addr: usize, val: u8) { match self.addr_space { PciAddrSpace::MMIO => unsafe { *(addr as *mut u8) = val }, PciAddrSpace::PIO => pmio_config_write_addr(addr as u32, val as u32, 8).unwrap(), } } pub fn write16_offset(&self, addr: usize, val: u16) { trace!( "write16 @ {:#x?}, addr_space = {:#x?}", addr, 
self.addr_space ); match self.addr_space { PciAddrSpace::MMIO => unsafe { *(addr as *mut u16) = val }, PciAddrSpace::PIO => pmio_config_write_addr(addr as u32, val as u32, 16).unwrap(), } } pub fn write32_offset(&self, addr: usize, val: u32) { match self.addr_space { PciAddrSpace::MMIO => unsafe { *(addr as *mut u32) = val }, PciAddrSpace::PIO => pmio_config_write_addr(addr as u32, val as u32, 32).unwrap(), } } pub fn write8(&self, addr: PciReg8, val: u8) { self.write8_offset(self.base + addr as usize, val) } pub fn write16(&self, addr: PciReg16, val: u16) { self.write16_offset(self.base + addr as usize, val) } pub fn write16_(&self, addr: usize, val: u16) { self.write16_offset(self.base + addr, val) } pub fn write32(&self, addr: PciReg32, val: u32) { self.write32_offset(self.base + addr as usize, val) } pub fn write32_(&self, addr: usize, val: u32) { self.write32_offset(self.base + addr, val) } pub fn write_bar(&self, bar_: usize, val: u32) { self.write32_offset(self.base + PciReg32::BARBase as usize + bar_ * 4, val) } } numeric_enum! { #[repr(usize)] pub enum PciReg8 { // standard RevisionId = 0x8, ProgramInterface = 0x9, SubClass = 0xA, BaseClass = 0xB, CacheLineSize = 0xC, LatencyTimer = 0xD, HeaderType = 0xE, Bist = 0xF, // bridge PrimaryBusId = 0x18, SecondaryBusId = 0x19, SubordinateBusId = 0x1A, SecondaryLatencyTimer = 0x1B, IoBase = 0x1C, IoLimit = 0x1D, CapabilitiesPtr = 0x34, InterruptLine = 0x3C, InterruptPin = 0x3D, MinGrant = 0x3E, MaxLatency = 0x3F, } } numeric_enum! { #[repr(usize)] pub enum PciReg16 { // standard VendorId = 0x0, DeviceId = 0x2, Command = 0x4, Status = 0x6, // bridge SecondaryStatus = 0x1E, MemoryBase = 0x20, MemoryLimit = 0x22, PrefetchableMemoryBase = 0x24, PrefetchableMemoryLimit = 0x26, IoBaseUpper = 0x30, IoLimitUpper = 0x32, BridgeControl = 0x3E, } } numeric_enum! 
{ #[repr(usize)] pub enum PciReg32 { // standard BARBase = 0x10, // bridge PrefetchableMemoryBaseUpper = 0x28, PrefetchableMemoryLimitUpper = 0x2C, BridgeExpansionRomAddress = 0x38, } } pub const PCIE_BASE_CONFIG_SIZE: usize = 256; pub const PCIE_EXTENDED_CONFIG_SIZE: usize = 4096;
package app import ( "github.com/nwpc-oper/nwpc-message-client/common/consumer" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" ) const productionLongDescription = ` Consume production message from rabbitmq and store them into elasticsearch. ` type productionCommand struct { BaseCommand consumerType string rabbitmqServer string rabbitmqQueueName string elasticServer string workerCount int bulkSize int isDebug bool } func (c *productionCommand) consumeProduction(cmd *cobra.Command, args []string) error { var currentConsumer consumer.Consumer = nil currentSource := consumer.RabbitMQSource{ Server: c.rabbitmqServer, Exchange: "nwpc.operation.production", Topics: []string{"*.production.*"}, Queue: c.rabbitmqQueueName, } if c.consumerType == string(printerConsumerType) { currentConsumer = createPrinterConsumer(currentSource, c.workerCount, c.isDebug) } else if c.consumerType == string(elasticsearchConsumerType) { target := consumer.ElasticSearchTarget{ Server: c.elasticServer, } currentConsumer = createElasticSearchConsumer(currentSource, target, c.workerCount, c.bulkSize, c.isDebug) } if currentConsumer == nil { log.Fatalf("consumer type is not supported: %s", c.consumerType) return nil } log.WithFields(log.Fields{ "component": "production", "event": "consumer", }).Info("start to consume...") err := currentConsumer.ConsumeMessages() if err != nil { log.WithFields(log.Fields{ "component": "production", "event": "consumer", }).Errorf("%v", err) } return err } func newProductionCommand() *productionCommand { pc := &productionCommand{} productionCmd := &cobra.Command{ Use: "production", Short: "consume production message", Long: productionLongDescription, RunE: pc.consumeProduction, } productionCmd.Flags().StringVar(&pc.rabbitmqServer, "rabbitmq-server", "", "rabbitmq server") productionCmd.Flags().StringVar(&pc.rabbitmqQueueName, "rabbitmq-queue-name", "", "rabbitmq queue name") productionCmd.Flags().StringVar(&pc.consumerType, "consumer-type", "print", 
"consumer type") productionCmd.Flags().IntVar(&pc.workerCount, "worker-count", 2, "worker count") productionCmd.Flags().StringVar(&pc.elasticServer, "elasticsearch-server", "", "elasticsearch server") productionCmd.Flags().IntVar(&pc.bulkSize, "bulk-size", 20, "bulk size") productionCmd.Flags().BoolVar(&pc.isDebug, "debug", true, "debug mode") productionCmd.MarkFlagRequired("rabbitmq-server") productionCmd.MarkFlagRequired("rabbitmq-queue-name") pc.cmd = productionCmd return pc }
#include <iostream> #include <string> #include <vector> constexpr size_t alphabet_size = 128; using Table = std::vector<std::vector<size_t>>; // DFA - deterministic finite automata Table buildDFAfromPattern(const std::string& pattern) { Table dfa(alphabet_size, std::vector<size_t>(pattern.size())); dfa[pattern[0]][0] = 1; for (size_t state = 0, j = 1; j < pattern.size(); ++j) { for (size_t c = 0; c < alphabet_size; ++c) { dfa[c][j] = dfa[c][state]; } dfa[pattern[j]][j] = j + 1; state = dfa[pattern[j]][state]; } return dfa; } // Knuth-Morris-Pratt substring search (Internet for more details) size_t KMP(const std::string& str, const std::string& pattern) { Table dfa = buildDFAfromPattern(pattern); size_t i = 0; size_t j = 0; for (; i < str.size() && j < pattern.size(); ++i) { j = dfa[str[i]][j]; } if (j == pattern.size()) { return i - j; } else { return str.size(); } } int main() { std::string str; std::string pattern; std::cout << "Enter string and pattern which need to be found\n"; std::cin >> str >> pattern; size_t index_pattern = KMP(str, pattern); if (index_pattern == str.size()) { std::cout << "Pattern wasn't found in string\n"; } else { std::cout << "First character from pattern has index = " << index_pattern << '\n'; } return 0; }
<?php defined('BASEPATH') OR exit('No direct script access allowed'); class Reasoning extends CI_Controller { protected $page_header = 'Reasoning'; public function __construct() { parent::__construct(); $this->load->model(array('data_model' => 'data', 'centroid_model' => 'centroid')); $this->load->library('cluster_lib'); } public function index() { $data['page_header'] = $this->page_header; $data['panel_heading'] = 'Input New Case'; $data['page'] = 'index'; $this->frontend->view('reasoning_v', $data); } public function similaritas() { $var = $this->input->post('var'); //$var = array(0=>0.32,1=>0.43,2=>0.79,3=>0.28,4=>0.09); $rows = $this->centroid->fields('v1, v2, v3, v4, v5')->get_all(); for($i=0, $centroid = array(); $i < count($rows); $i++){ $centroid[$i][0] = $rows[$i]->v1; $centroid[$i][1] = $rows[$i]->v2; $centroid[$i][2] = $rows[$i]->v3; $centroid[$i][3] = $rows[$i]->v4; $centroid[$i][4] = $rows[$i]->v5; } for ($i=0, $newData = array(); $i < count($var) ; $i++) { $newData[$i] = $var[$i]; } $knn = new knn($centroid, $newData, 1); $nearestCluster = $knn->get_nn() + 1; $rows = $this->data->fields('id, v1, v2, v3, v4, v5')->where('cluster', $nearestCluster)->get_all(); for($i=0, $data = array(); $i < count($rows); $i++){ $data[$i][0] = $rows[$i]->v1; $data[$i][1] = $rows[$i]->v2; $data[$i][2] = $rows[$i]->v3; $data[$i][3] = $rows[$i]->v4; $data[$i][4] = $rows[$i]->v5; } $knn = new knn($data, $newData, 1); $index = $knn->get_nn(); $nearestData = $rows[$index]->id; echo json_encode(array('nearestCluster'=>$nearestCluster, 'nearestData'=> $nearestData, 'row'=>$rows[$index])); } }
package com.eg.timeTrackingHelper.service.meeting import java.time.LocalDate import cats.effect.IO import cats.implicits._ import com.eg.timeTrackingHelper.configuration.model.KeywordMapping import com.eg.timeTrackingHelper.model.DatePeriod import com.eg.timeTrackingHelper.repository.jira.model.TicketId import com.eg.timeTrackingHelper.repository.meeting.MeetingRepository import com.eg.timeTrackingHelper.repository.meeting.model.Meeting import com.eg.timeTrackingHelper.repository.meeting.outlook.exception.UnknownException import com.eg.timeTrackingHelper.service.model.{ActivityType, WorklogEntity} import org.mockito.MockitoSugar import org.scalatest.EitherValues import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ class MeetingServiceSpec extends AnyFlatSpec with MockitoSugar with Matchers with EitherValues { private val meetingRepositoryMock = mock[MeetingRepository] private val defaultTicket = "defaultTicket" private val ticket1 = "ticket1" private val ticket2 = "ticket2" private val keywordMapping = KeywordMapping( defaultTicket, Map((ticket1, Set("test-1", "test-2", "test-3")), (ticket2, Set("test-4", "test-5", "test-6"))) ) private val meetingService = MeetingService(meetingRepositoryMock, keywordMapping) "MeetingService.getTicketIdBySubject" should "return correctly ticketId based on letter subject" in { meetingService.getTicketIdBySubject( "A small discussion about test-2 and test-5".some ) should be(TicketId(ticket1).some) meetingService.getTicketIdBySubject("A small discussion about test-3".some) should be( TicketId(ticket1).some ) meetingService.getTicketIdBySubject( "A small discussion about test-5 and test-2".some ) should be(TicketId(ticket1).some) meetingService.getTicketIdBySubject("A small discussion about test-4".some) should be( TicketId(ticket2).some ) } it should "return the ticket based on a partial match" in { meetingService.getTicketIdBySubject("A small discussion about 
test-25".some) should be( TicketId(ticket1).some ) meetingService.getTicketIdBySubject("A small discussion about test-43".some) should be( TicketId(ticket2).some ) } it should "return default ticketId" in { meetingService.getTicketIdBySubject("A small discussion about vacations".some) should be( TicketId(defaultTicket).some ) meetingService.getTicketIdBySubject(None) should be(None) } "MeetingService.getLogEntityByMeeting" should "return LogEntity by Meeting" in { val start = LocalDate .of(2020, 3, 31) .atStartOfDay .plusHours(10) val end = start.plusHours(1) val subject = "A small discussion about vacations".some val meeting = Meeting(subject, isTakePlace = true, start, end, 1 hour) val logEntity = WorklogEntity(start, end, TicketId(defaultTicket), ActivityType.Major, subject) meetingService.getLogEntityByMeeting(meeting) should be(logEntity.some) } "MeetingService.getMeetingsLogEntity" should "return Map[LocalDate, List[WorklogEntity]] with upcoming meetings" in { val start = LocalDate.of(2020, 3, 31) val end = start.plusDays(3) val datePeriod = DatePeriod(start, end) val meetingStart = start.atStartOfDay.plusHours(10) val meetings = List( Meeting( "meeting #1".some, isTakePlace = true, meetingStart, meetingStart.plusHours(1), 1 hour ), Meeting( "meeting #2".some, isTakePlace = true, meetingStart.plusDays(1), meetingStart.plusDays(1).plusHours(1), 1 hour ), Meeting( "meeting test-6 #1.1".some, isTakePlace = true, meetingStart.plusHours(1).plusMinutes(30), meetingStart.plusHours(2), 30 minutes ), Meeting( "meeting #3".some, isTakePlace = true, meetingStart.plusDays(2), meetingStart.plusDays(2).plusHours(1), 1 hour ), Meeting( "meeting #2.1".some, isTakePlace = true, meetingStart.plusDays(1).plusHours(2), meetingStart.plusDays(1).plusHours(3).plusMinutes(30), 90 minutes ) ) when(meetingRepositoryMock.getTakePlaceMeetings(datePeriod)).thenReturn(IO.pure(meetings)) val result = meetingService.getMeetingsLogEntity(datePeriod).unsafeRunSync() result.size should be(3) 
result.get(start) should be( Some( List( WorklogEntity( meetingStart, meetingStart.plusHours(1), TicketId(defaultTicket), ActivityType.Major, "meeting #1".some ), WorklogEntity( meetingStart.plusHours(1).plusMinutes(30), meetingStart.plusHours(2), TicketId(ticket2), ActivityType.Major, "meeting test-6 #1.1".some ) ) ) ) result.get(start.plusDays(1)) should be( Some( List( WorklogEntity( meetingStart.plusDays(1), meetingStart.plusDays(1).plusHours(1), TicketId(defaultTicket), ActivityType.Major, "meeting #2".some ), WorklogEntity( meetingStart.plusDays(1).plusHours(2), meetingStart.plusDays(1).plusHours(3).plusMinutes(30), TicketId(defaultTicket), ActivityType.Major, "meeting #2.1".some ) ) ) ) result.get(start.plusDays(2)) should be( Some( List( WorklogEntity( meetingStart.plusDays(2), meetingStart.plusDays(2).plusHours(1), TicketId(defaultTicket), ActivityType.Major, "meeting #3".some ) ) ) ) } it should "return exception(Negative scenario)" in { val start = LocalDate.of(2020, 3, 31) val end = start.plusDays(3) val datePeriod = DatePeriod(start, end) val unknownException = UnknownException(new RuntimeException("Boom")) when(meetingRepositoryMock.getTakePlaceMeetings(datePeriod)) .thenReturn(IO.raiseError[List[Meeting]](unknownException)) meetingService.getMeetingsLogEntity(datePeriod).attempt.unsafeRunSync() should be( unknownException.asLeft ) } }
#!/usr/bin/env perl package bin::pumper::upic_fetcher; use lib '/play/backend/lib'; use Moo; use MooX::Options; with 'Play::Pumper'; use Try::Tiny; use Log::Any '$log'; use Play::DB qw(db); use Play::Flux; has 'in' => ( is => 'lazy', default => sub { Play::Flux->upic->in('/data/storage/upic/pos') }, ); sub reschedule { my $self = shift; my ($item) = @_; if ($item->{retries} and $item->{retries} >= 3) { return; } $item->{retries}++; my $storage = Play::Flux->upic; $storage->write($item); # TODO - DelayedQueue $storage->commit; } sub run_once { my $self = shift; while (my $item = $self->in->read) { try { db->images->fetch_upic($item->{upic}, $item->{login}); $self->add_stat('ok'); } catch { $log->warn("Failed to fetch upic for $item->{login}: $_"); $self->reschedule($item); $self->add_stat('failed'); }; $self->in->commit; } } __PACKAGE__->run_script;
<?php // Error reporting ini_set('display_errors', 1); error_reporting(E_ALL); // home page url $home_url="http://localhost/restful/api/"; ?>
use crate::base::Resource; #[derive(Resource)] #[uri_prefix = "/search/"] pub enum SearchSource { Anime, Manga, Person, Character, } pub enum SearchSourceType { Anime(AnimeType), Manga(MangaType), } #[derive(Resource)] #[uri_prefix = "type="] pub enum AnimeType { TV, OVA, Movie, Special, ONA, Music, } #[derive(Resource)] #[uri_prefix = "type="] pub enum MangaType { Manga, Novel, OneShot, Doujin, Manhwa, Manhua, } pub enum SourceStatus { Anime(AnimeStatus), Manga(MangaStatus), } #[derive(Resource, Clone)] pub enum AnimeStatus { Airing, #[rename_uri = "complete"] Completed, ToBeAired, } #[derive(Resource)] pub enum MangaStatus { Publishing, #[rename_uri = "complete"] Completed, ToBePublished, } #[derive(Resource)] #[uri_prefix = "rated="] pub enum Rating { G, Pg, Pg13, R17, R, Rx, } #[derive(Resource)] #[uri_prefix = "order_by="] pub enum OrderBy { Title, StartDate, EndDate, Score, Type, Members, Id, Episodes, Rating, Volumes, Chapters, } #[derive(Resource, Clone)] #[uri_prefix = "sort="] pub enum Sort { Ascending, Descending, } pub enum Genres { Anime(Vec<AnimeGenre>), Manga(Vec<MangaGenre>), } #[derive(Copy, Clone)] pub enum AnimeGenre { Action = 1, Adventure = 2, Cars = 3, Comedy = 4, Dementia = 5, Demons = 6, Mystery = 7, Drama = 8, Ecchi = 9, Fantasy = 10, Game = 11, Hentai = 12, Historical = 13, Horror = 14, Kids = 15, Magic = 16, MartialArts = 17, Mecha = 18, Music = 19, Parody = 20, Samurai = 21, Romance = 22, School = 23, SciFi = 24, Shoujo = 25, ShoujoAi = 26, Shounen = 27, ShounenAi = 28, Space = 29, Sports = 30, SuperPower = 31, Vampire = 32, Yaoi = 33, Yuri = 34, Harem = 35, SliceOfLife = 36, Supernatural = 37, Military = 38, Police = 39, Psychological = 40, Thriller = 41, Seinen = 42, Josei = 43, } #[derive(Copy, Clone)] pub enum MangaGenre { Action = 1, Adventure = 2, Cars = 3, Comedy = 4, Dementia = 5, Demons = 6, Mystery = 7, Drama = 8, Ecchi = 9, Fantasy = 10, Game = 11, Hentai = 12, Historical = 13, Horror = 14, Kids = 15, Magic = 16, MartialArts = 
17, Mecha = 18, Music = 19, Parody = 20, Samurai = 21, Romance = 22, School = 23, SciFi = 24, Shoujo = 25, ShoujoAi = 26, Shounen = 27, ShounenAi = 28, Space = 29, Sports = 30, SuperPower = 31, Vampire = 32, Yaoi = 33, Yuri = 34, Harem = 35, SliceOfLife = 36, Supernatural = 37, Military = 38, Police = 39, Psychological = 40, Seinen = 41, Josei = 42, Doujinshi = 43, GenderBender = 44, Thriller = 45, }
package latis.util import cats.effect.IO import cats.effect.unsafe.implicits.global import cats.syntax.all._ import fs2._ import org.scalatest.funsuite.AnyFunSuite import latis.catalog.Catalog import latis.dataset.Dataset import latis.dsl.DatasetGenerator import latis.util.Identifier.IdentifierStringContext class DatasetTesterSuite extends AnyFunSuite { private lazy val catalog: IO[Catalog] = IO( new Catalog { def datasets: Stream[IO, Dataset] = Stream.emits(List( DatasetGenerator("x -> a", id"datasetA"), DatasetGenerator("(x, y) -> b", id"datasetB") )) }) private lazy val tester = catalog.map(new DatasetTester(_)) private lazy val lines: List[String] = List( "datasetA, 0, 0", "datasetB, 0, 0, 0" ) ignore("test data") { tester.flatMap { dsTester => lines.traverse(dsTester.testLine) }.unsafeRunSync() } }
#include "e.h" #include "e_mod_main.h" #ifdef HAVE_WAYLAND_CLIENTS # include "e_mod_comp_wl.h" # include "e_mod_comp_wl_output.h" #endif # define WL_OUTPUT_FLIPPED 0x01 /* local function prototypes */ static void _e_mod_comp_wl_output_bind(struct wl_client *client, void *data, uint32_t version __UNUSED__, uint32_t id); /* private variables */ static Wayland_Output *_wl_output; Eina_Bool e_mod_comp_wl_output_init(void) { Ecore_X_Window *roots; int num = 0, rw, rh; LOGFN(__FILE__, __LINE__, __FUNCTION__); roots = ecore_x_window_root_list(&num); if ((!roots) || (num <= 0)) { EINA_LOG_ERR("Could not get root window list\n"); return EINA_FALSE; } ecore_x_window_size_get(roots[0], &rw, &rh); free(roots); if (!(_wl_output = malloc(sizeof(Wayland_Output)))) { EINA_LOG_ERR("Could not allocate space for output\n"); return EINA_FALSE; } memset(_wl_output, 0, sizeof(*_wl_output)); _wl_output->mode.flags = (WL_OUTPUT_MODE_CURRENT | WL_OUTPUT_MODE_PREFERRED); _wl_output->mode.w = rw; _wl_output->mode.h = rh; _wl_output->mode.refresh = 60; _wl_output->x = 0; _wl_output->y = 0; _wl_output->w = rw; _wl_output->h = rh; _wl_output->flags = WL_OUTPUT_FLIPPED; wl_list_init(&_wl_output->link); wl_list_init(&_wl_output->frame_callbacks); if (!wl_display_add_global(_wl_disp, &wl_output_interface, _wl_output, _e_mod_comp_wl_output_bind)) { EINA_LOG_ERR("Failed to add output to wayland\n"); free(_wl_output); return EINA_FALSE; } return EINA_TRUE; } void e_mod_comp_wl_output_shutdown(void) { LOGFN(__FILE__, __LINE__, __FUNCTION__); if (!_wl_output) return; wl_list_remove(&_wl_output->frame_callbacks); wl_list_remove(&_wl_output->link); free(_wl_output); } Wayland_Output * e_mod_comp_wl_output_get(void) { return _wl_output; } /* local functions */ static void _e_mod_comp_wl_output_bind(struct wl_client *client, void *data, uint32_t version __UNUSED__, uint32_t id) { Wayland_Output *output; struct wl_resource *resource; LOGFN(__FILE__, __LINE__, __FUNCTION__); if (!(output = data)) return; 
resource = wl_client_add_object(client, &wl_output_interface, NULL, id, data); wl_resource_post_event(resource, WL_OUTPUT_GEOMETRY, output->x, output->y, output->w, output->h, output->subpixel, output->make, output->model); wl_resource_post_event(resource, WL_OUTPUT_MODE, output->mode.flags, output->mode.w, output->mode.h, output->mode.refresh); }
<?php namespace Arch\JWT\Encoder; /** * Interface EncoderInterface * @package Arch\JWT\Encoder */ interface EncoderInterface { /** * Returns an encoded string * * @param string $raw * @return string */ function encode(string $raw): string; /** * Returns the raw string * * @param string $encoded * @return string */ function decode(string $encoded): string; }
//! TCP socket connection to a validator use super::secret_connection::{PublicKey, SecretConnection}; use crate::{ error::{Error, ErrorKind::*}, prelude::*, }; use signatory::{ed25519, public_key::PublicKeyed}; use signatory_dalek::Ed25519Signer; use std::{net::TcpStream, time::Duration}; use subtle::ConstantTimeEq; use tendermint::node; /// Open a TCP socket connection encrypted with SecretConnection pub fn open_secret_connection( host: &str, port: u16, peer_id: &Option<node::Id>, secret_key: &ed25519::Seed, ) -> Result<SecretConnection<TcpStream>, Error> { let signer = Ed25519Signer::from(secret_key); let public_key = PublicKey::from(signer.public_key().map_err(|_| Error::from(InvalidKey))?); info!("KMS node ID: {}", &public_key); let socket = TcpStream::connect(format!("{}:{}", host, port))?; socket.set_read_timeout(Some(Duration::from_secs(2)))?; socket.set_write_timeout(Some(Duration::from_secs(2)))?; let connection = SecretConnection::new(socket, &public_key, &signer)?; let actual_peer_id = connection.remote_pubkey().peer_id(); // TODO(tarcieri): move this into `SecretConnection::new` if let Some(expected_peer_id) = peer_id { if expected_peer_id.ct_eq(&actual_peer_id).unwrap_u8() == 0 { fail!( VerificationError, "{}:{}: validator peer ID mismatch! (expected {}, got {})", host, port, expected_peer_id, actual_peer_id ); } } Ok(connection) }
if ($loglvl) print("--> Mc.rb\n"); end $mc = Mc.new ack("$mc.instance_of?(Mc)", true) ack("$mc.kind_of?(Mc)", true) ack("$mc.class.to_s", "Mc") ack("$mc.debug = 1;", 1); ack("$mc.debug = 0;", 0); # ack('$mc.expand("%{_bindir}")', "/usr/bin"); # XXX noisy, adding pattern/index/level filters todo++ # nack('$mc.list()', null); ack('$mc.add("foo bar")', true); ack('$mc.expand("%{foo}")', "bar"); ack('$mc.del("foo")', true); ack('$mc.expand("%{foo}")', "%{foo}"); # XXX noisy, adding pattern/index/level filters todo++ # ack('$mc.list()', false); ack('$mc.expand("%{lua:print(\\"lua\\")}")', "lua"); # FIXME: reloading rpm modules within embedded interpreter segfaults # ack('$mc.expand("%{perl:print \\"perl\\"}")', "perl"); # ack('$mc.expand("%{python:print \\"python\\"}")', "python"); # FIXME: ruby can't load ruby # ack('$mc.expand("%{ruby:puts \\"ruby\\"}")', "ruby"); ack('$mc.expand("%{tcl:puts \\"tcl\\"}")', "tcl"); # $mc = Mc.new("cli"); ack('$mc.list()', nil); ack('$mc.add("foo bar")', true); ack('$mc.list()', "%foo\tbar"); ack('$mc.expand("%{foo}")', "bar"); ack('$mc.del("foo")', true); ack('$mc.list()', nil); ack('$mc.expand("%{foo}")', "%{foo}"); # $mc = Mc.new("tscripts/macros"); ack('$mc.list()', nil); ack('$mc.add("foo bar")', true); ack('$mc.list()', "%foo\tbar"); ack('$mc.expand("%{foo}")', "bar"); ack('$mc.del("foo")', true); ack('$mc.list()', nil); ack('$mc.expand("%{foo}")', "%{foo}"); # $mc = Mc.new(""); ack('$mc.list()', nil); ack('$mc.add("foo bar")', true); ack('$mc.list()', "%foo\tbar"); ack('$mc.expand("%{foo}")', "bar"); ack('$mc.del("foo")', true); ack('$mc.list()', nil); ack('$mc.expand("%{foo}")', "%{foo}"); # FIXME: there's no internal code path error returns to force an error out. # $mc = Mc.new("tscripts/nonexistent"); if ($loglvl) print("<-- Mc.rb\n"); end
use std::sync::Arc; use crate::{ array::{Array, FixedSizeListArray}, bitmap::MutableBitmap, datatypes::DataType, }; use super::{ make_growable, utils::{build_extend_null_bits, ExtendNullBits}, Growable, }; /// Concrete [`Growable`] for the [`FixedSizeListArray`]. pub struct GrowableFixedSizeList<'a> { arrays: Vec<&'a FixedSizeListArray>, validity: MutableBitmap, values: Box<dyn Growable<'a> + 'a>, extend_null_bits: Vec<ExtendNullBits<'a>>, size: usize, } impl<'a> GrowableFixedSizeList<'a> { /// Creates a new [`GrowableFixedSizeList`] bound to `arrays` with a pre-allocated `capacity`. /// # Panics /// If `arrays` is empty. pub fn new( arrays: Vec<&'a FixedSizeListArray>, mut use_validity: bool, capacity: usize, ) -> Self { assert!(!arrays.is_empty()); // if any of the arrays has nulls, insertions from any array requires setting bits // as there is at least one array with nulls. if !use_validity & arrays.iter().any(|array| array.null_count() > 0) { use_validity = true; }; let size = if let DataType::FixedSizeList(_, size) = &arrays[0].data_type().to_logical_type() { *size as usize } else { unreachable!("`GrowableFixedSizeList` expects `DataType::FixedSizeList`") }; let extend_null_bits = arrays .iter() .map(|array| build_extend_null_bits(*array, use_validity)) .collect(); let inner = arrays .iter() .map(|array| array.values().as_ref()) .collect::<Vec<_>>(); let values = make_growable(&inner, use_validity, 0); Self { arrays, values, validity: MutableBitmap::with_capacity(capacity), extend_null_bits, size, } } fn to(&mut self) -> FixedSizeListArray { let validity = std::mem::take(&mut self.validity); let values = self.values.as_arc(); FixedSizeListArray::new(self.arrays[0].data_type().clone(), values, validity.into()) } } impl<'a> Growable<'a> for GrowableFixedSizeList<'a> { fn extend(&mut self, index: usize, start: usize, len: usize) { (self.extend_null_bits[index])(&mut self.validity, start, len); self.values .extend(index, start * self.size, len * self.size); } fn 
extend_validity(&mut self, additional: usize) { self.values.extend_validity(additional * self.size); self.validity.extend_constant(additional, false); } fn as_arc(&mut self) -> Arc<dyn Array> { Arc::new(self.to()) } fn as_box(&mut self) -> Box<dyn Array> { Box::new(self.to()) } } impl<'a> From<GrowableFixedSizeList<'a>> for FixedSizeListArray { fn from(val: GrowableFixedSizeList<'a>) -> Self { let mut values = val.values; let values = values.as_arc(); Self::new( val.arrays[0].data_type().clone(), values, val.validity.into(), ) } }
package com.osacky.doctor import com.osacky.doctor.internal.Clock import com.osacky.doctor.internal.DirtyBeanCollector import com.osacky.doctor.internal.ScanApi import java.text.NumberFormat class GarbagePrinter( private val clock: Clock, private val collector: DirtyBeanCollector, private val extension: DoctorExtension ) : BuildStartFinishListener, HasBuildScanTag { private val startGarbageTime = collector.collect() private val startBuildTime = clock.upTimeMillis() private val formatter = NumberFormat.getPercentInstance() private val warningThreshold = 10 * 1000 override fun onStart() { } override fun onFinish(): List<String> { val endGarbageTime = collector.collect() val endBuildTime = clock.upTimeMillis() val buildDuration = endBuildTime - startBuildTime val garbageDuration = endGarbageTime - startGarbageTime val percentGarbageCollecting = (garbageDuration * 1f / buildDuration) if (buildDuration > warningThreshold && percentGarbageCollecting > extension.GCWarningThreshold.get()) { val message = """ This build spent ${formatter.format(percentGarbageCollecting)} garbage collecting. If this is the first build with this Daemon, it likely means that this build needs more heap space. Otherwise, if this is happening after several builds it could indicate a memory leak. For a quick fix, restart this Gradle daemon. ./gradlew --stop """.trimIndent() return listOf(message) } return emptyList() } override fun addCustomValues(buildScanApi: ScanApi) { buildScanApi.tag("doctor-high-gc") } }
# EvilNet

Small serialization and network engine for Java. The goal of this engine is to show how to do serialization for network applications properly in Java. The engine handles everything from taking objects, serializing them, and sending them on one end, to receiving them as a byte array on the other end and deserializing them back into objects with the same values. The engine makes use of generic types, so a user can send any type of object into the engine as long as the object implements the Serializable interface. EvilNet can make use of all network traffic protocols, including multicast sockets. Messages are sent based on a tick rate that is defined when EvilNet is initialized.

Download this engine as a zip and put it into your project. Depending on where you put the files, you may have to change the package names for EvilNet. After that, check the file EvilNet.java to get an idea of how to make use of the engine in your project.
<?php

namespace App\Models;

use Illuminate\Database\Eloquent\Model;

/**
 * Eloquent model for one scheduled departure of an itinerary.
 *
 * The table and primary-key names do not follow Laravel's defaults,
 * hence the explicit $table / $primaryKey overrides.
 */
class ItineraryDeparture extends Model
{
    protected $table = 'itinerary_departures';

    protected $primaryKey = 'itinerary_departure_id';

    /**
     * Owning itinerary (inverse side of the one-to-many relation).
     *
     * @return \Illuminate\Database\Eloquent\Relations\BelongsTo
     */
    public function itinerary()
    {
        return $this->belongsTo('App\Models\Itinerary');
    }
}
package xurl.services import xurl.health.Health import cats.effect._ import cats.implicits._ import org.scalacheck.Gen import weaver.SimpleIOSuite import weaver.scalacheck.Checkers object HealthCheckSuite extends SimpleIOSuite with Checkers { private def dummyHealth(b: Boolean): Health[IO] = new Health[IO] { def ok: IO[Boolean] = b.pure[IO] } val tupleBoolGen: Gen[(Boolean, Boolean)] = for { s <- Gen.oneOf(true, false) c <- Gen.oneOf(true, false) } yield (s, c) test("status") { forall(tupleBoolGen) { case (s, c) => val dummyStorage = dummyHealth(s) val dummyCache = dummyHealth(c) val h = HealthCheck.make(dummyStorage, dummyCache) h.status.map { status => expect(status.storage.value === s) && expect(status.cache.value === c) } } } }
class RenameLocationsToCommunities < ActiveRecord::Migration[6.0]
  def change
    rename_table :locations, :communities
    # Keep PaperTrail's audit trail consistent: retarget existing version
    # records from the old model name to the new one.
    # NOTE(review): this data update is not automatically reverted on
    # rollback (unlike rename_table) — confirm that is acceptable.
    PaperTrail::Version.where(item_type: 'Location').update_all(item_type: 'Community')
  end
end
class Candidate < ActiveRecord::Base
  has_many :vote_candidates, dependent: :destroy
  has_many :votes, through: :vote_candidates

  before_save :format_fields

  # Normalizes user-entered fields before saving (trimmed, title-cased name).
  def format_fields
    self.name = name.strip.titleize
  end

  # Weighted score, memoized per instance (see #calc_score).
  def score
    @score ||= calc_score
  end

  # File name for this candidate's exported presentation,
  # e.g. "José María" -> "jose_maria.docx".
  #
  # BUGFIX: the computed +filename+ was never interpolated into the result
  # (the string literal was corrupted); it is now actually used.
  def presentation_filename(extension = "docx")
    filename = ActiveSupport::Inflector.transliterate(name).downcase.split.join('_')
    "#{filename}.#{extension}"
  end

  # Sum over all sections of sqrt(number of voting activists) — the
  # normalization denominator used by #vote_percent.
  def total_score
    factors = 0.0
    Section.all.each do |section|
      nact = section.activists.count(:vote_id)
      factors += Math.sqrt(nact)
    end
    factors
  end

  # Score expressed as a percentage of the total attainable score.
  def vote_percent
    (score / total_score) * 100
  end

  # Weighted vote count: each section's votes for this candidate are scaled
  # by sqrt(n)/n, where n is that section's number of voting activists.
  def calc_score
    sum = 0
    Section.all.each do |section|
      n = section.activists.count(:vote_id)
      weight = Math.sqrt(n) / n
      sum += weight * votes.joins(:activist)
                           .where('section_id = ? AND activists.vote_id IS NOT NULL', section)
                           .count
    end
    sum
  end

  # Placeholder flag read by views/serializers; candidates are never
  # pre-selected.
  def selected
    false
  end
end
// Time-keeping and formatting submodules.
pub mod tick;
pub mod time;
pub mod format;
pub mod sys;

// Flatten the commonly used items into this module's namespace.
// NOTE(review): `sys` is not re-exported here — confirm that is intentional.
pub use self::tick::*;
pub use self::time::*;
pub use self::format::*;
exports.name = 'angel.co'; exports.login = login; exports.reset_password = reset_password; function login(casper, username, password, success_callback) { casper.thenOpen('https://angel.co/login'); casper.waitForUrl('https://angel.co/login'); casper.fillMagically("#new_user", { "user[email]": username, "user[password]": password, }, function success() { return document.body.innerHTML.indexOf("Log Out") > -1; }, null, success_callback); }; function reset_password(casper, username, old_password, new_password, success_callback) { login(casper, username, old_password, function login_success(success) { if (!success) { success_callback(success); casper.bypass(1000); } }); casper.thenOpen('https://angel.co/settings/password'); casper.waitForUrl('https://angel.co/settings/password'); casper.fillMagically("form[action='/settings/password']", { "user[current_password]": old_password, "user[password]": new_password, "user[password_confirmation]": new_password }, function success() { return document.body.innerHTML.indexOf("Password changed. Please re-authenticate with your new password.") > -1; }, null, success_callback); }
# This is a script of example tests you can run in the command line rostest harmoni_tts polly.test # tts rostest harmoni_bot lex.test # bot rostest harmoni_face face.test rostest harmoni_web web.test rostest harmoni_speaker speaker.test rostest harmoni_stt w2l.test rostest harmoni_stt deepspeech.test rostest harmoni_face_detect face_detect.test rostest harmoni_camera camera.test rostest harmoni_microphone microphone.test # harmoni_decision launcher.launch service:='harmoni,hardware'
1 17 2 7 17 19 3 15 19 22 4 5 2 25 6 1 7 15 8 13 23 9 19 10 11 9 12 11 13 5 6 16 14 8 12 15 9 18 21 16 2 20 24 17 7 24 18 21 19 4 20 24 20 3 19 21 6 22 13 23 24 25 25
# Delete the specified tag from both the local repository and the remote.
#
# Usage: <script> <repo_folder> <tag_name> <branch_name>

# --- command line
repo_folder=$1
tag_name=$2
branch_name=$3

# Validate arguments early instead of failing inside git.
if [ -z "$repo_folder" ] || [ -z "$tag_name" ] || [ -z "$branch_name" ]; then
    echo "Usage: $0 <repo_folder> <tag_name> <branch_name>"
    exit 1
fi

# Abort if the repository folder cannot be entered.
cd "$repo_folder" || { echo "Cannot enter repo folder: $repo_folder"; exit 1; }

# --- pull tags from remote so the local view is up to date
if ! git pull origin "$branch_name" --tag; then
    echo "Fail to pull tag from remote."
    exit 1
fi

# --- check whether the tag exists locally
# NOTE(review): grep matches substrings; switch to `git tag -l "$tag_name"`
# if an exact match is required.
res=$(git tag | grep "$tag_name")
if [ -z "$res" ]; then
    echo "$tag_name does not exist!"
    exit 1
fi

# --- delete tag from local
if ! git tag -d "$tag_name"; then
    echo "Fail to delete tag from local."
    exit 1
fi

# --- delete tag from remote (":refs/tags/<tag>" pushes nothing onto the tag,
# which removes it on the remote)
if ! git push origin "$branch_name" ":refs/tags/$tag_name"; then
    echo "Fail to push tag to remote."
    exit 1
fi

# --- finished
exit 0
/*
 * Boolean logic (AND / OR) truth tables:
 *
 * and (&&)        or (||)
 * T T -> T        F F -> F
 * T F -> F        F T -> T
 * F T -> F
 * F F -> F
 */
import java.util.Scanner

fun main(args: Array<String>) {
    // Read two integers from standard input.
    // FIX: the values are assigned exactly once and never null, so use
    // non-nullable `val Int` instead of `var Int?`.
    val input = Scanner(System.`in`)

    println("Masukkan bilangan pertama : ")
    val bilangan: Int = input.nextInt()
    println("Masukkan bilangan kedua : ")
    val bilangan2: Int = input.nextInt()

    // Report which of the two numbers is larger (ties report the second one,
    // matching the original behavior).
    if (bilangan > bilangan2) {
        println("Maka $bilangan yang terbesar ")
    } else {
        println("Maka $bilangan2 yang terbesar ")
    }

    // Report whether the first number is even or odd.
    if (bilangan % 2 == 0) {
        println("Maka $bilangan adalah Genap")
    } else {
        println("Maka $bilangan adalah Ganjil")
    }
}
# Bootstrap the shared OCaml CI build: fetch the community Travis script
# and run it with tracing (-x) and fail-fast (-e).
wget https://raw.githubusercontent.com/ocaml/ocaml-ci-scripts/master/.travis-ocaml.sh
# Tell the script to skip its own `opam init` step.
export OPAM_INIT=false
bash -ex .travis-ocaml.sh
package mod

type (
	// Log holds logging-related settings.
	Log struct {
		// Level is the numeric log verbosity level.
		Level int32 `json:"level" yaml:"level"`
	}

	// Configuration structure is here to describe the application configuration
	Configuration struct {
		Log Log `json:"log" yaml:"log"`
	}
)
/* This file is part of the IrcA2A project, which is released under MIT License. * See LICENSE.md or visit: * https://github.com/michaelpduda/irca2a/blob/main/LICENSE.md */ using System; using System.Collections.Concurrent; using System.Threading; using NetIrc2; using NetIrc2.Events; namespace IrcA2A.Communication { public class MessageReceiver : IDisposable { private readonly IrcClient _ircClient; private readonly BlockingCollection<Received> _receivedMessages = new BlockingCollection<Received>(); private Received _failedMessage; public MessageReceiver(IrcClient ircClient) { _ircClient = ircClient ?? throw new ArgumentNullException(nameof(ircClient)); _ircClient.GotLeaveChannel += IrcClientGotLeaveChannel; _ircClient.GotMessage += IrcClientGotMessage; _ircClient.GotNameChange += IrcClientGotNameChange; _ircClient.GotUserKicked += IrcClientGotUserKick; } public void Dispose() { _ircClient.GotLeaveChannel -= IrcClientGotLeaveChannel; _ircClient.GotMessage -= IrcClientGotMessage; _ircClient.GotNameChange -= IrcClientGotNameChange; _ircClient.GotUserKicked -= IrcClientGotUserKick; } public void GetMessage(CancellationToken cancellationToken, Action<Received> processor) { Received received = null; try { received = _failedMessage ?? 
_receivedMessages.Take(cancellationToken); } catch (OperationCanceledException) { } _failedMessage = null; try { processor(received); } catch (Exception) { _failedMessage = received; throw; } } private void IrcClientGotMessage(object sender, ChatMessageEventArgs e) => _receivedMessages.Add(new ReceivedMessage { Sender = e.Sender.Nickname, Message = e.Message }); private void IrcClientGotNameChange(object sender, NameChangeEventArgs e) => _receivedMessages.Add(new ReceivedNickChange { Sender = e.Identity.Nickname, NewNick = e.NewName }); private void IrcClientGotLeaveChannel(object sender, JoinLeaveEventArgs e) => _receivedMessages.Add(new ReceivedLeft { Sender = e.Identity.Nickname }); private void IrcClientGotUserKick(object sender, KickEventArgs e) => _receivedMessages.Add(new ReceivedLeft { Sender = e.Recipient }); } }
$(document).ready(function() {
  getRedditJson();
});

// Fetch the top posts of r/todayilearned as JSON.
function getRedditJson() {
  $.ajax({
    method: "GET",
    url: "https://www.reddit.com/r/todayilearned/top.json",
    dataType: "json",
    success: returnData,
    error: onError
  });
}

// Render one card per listing and wire up the upvote buttons.
function returnData(data) {
  const listings = data.data.children;
  // FIX: hoisted out of the loop — the container does not change between
  // iterations, so there is no reason to re-query the DOM each time.
  const mainHTML = $("#main");

  for (let i = 0; i < listings.length; i++) {
    const list = listings[i].data;
    const headline = list.title;
    const author = list.author;

    const div = document.createElement("div");
    const authorName = document.createElement("h4");
    const post = document.createElement("p");
    const currentUpvotes = document.createElement("span");
    const upvoteBtn = document.createElement("button");

    div.className = "grid-item";

    // Per-button state kept directly on the element: toggle flag, live
    // count, and a reference to the span that displays the count.
    upvoteBtn.clicked = false;
    upvoteBtn.ups = list.ups;
    upvoteBtn.span = currentUpvotes;
    upvoteBtn.innerHTML = "Upvote ";

    authorName.innerHTML = "Posted by: " + author;
    post.innerHTML = headline;
    currentUpvotes.innerHTML = list.ups;

    div.append(authorName);
    div.append(post);
    div.append(upvoteBtn);
    div.append(currentUpvotes);
    mainHTML.append(div);
  }

  // Toggle the upvote count/highlight on click.
  $("button").click(function() {
    if (this.clicked) {
      this.ups--;
    } else {
      this.ups++;
    }
    this.clicked = !this.clicked;
    $(this).toggleClass("changeColor");
    $(this.span).text(this.ups);
  });
}

// Called when the JSON request fails.
function onError() {
  console.log("error");
}
// Plugin registration unit: declares the TrackingTools propagator and
// trajectory-cleaner ESProducers to the framework's EventSetup module
// factory. No executable code — registration happens via the macros below.
#include "TrackingTools/Producers/interface/AnalyticalPropagatorESProducer.h"
#include "TrackingTools/Producers/interface/StraightLinePropagatorESProducer.h"
#include "TrackingTools/Producers/interface/SmartPropagatorESProducer.h"
#include "TrackingTools/Producers/interface/BeamHaloPropagatorESProducer.h"
#include "TrackingTools/Producers/interface/TrajectoryCleanerESProducer.h"
#include "FWCore/Framework/interface/EventSetup.h"
#include "FWCore/Framework/interface/ESHandle.h"
#include "FWCore/Framework/interface/ModuleFactory.h"
#include "FWCore/Framework/interface/ESProducer.h"
#include "FWCore/Utilities/interface/typelookup.h"

DEFINE_FWK_EVENTSETUP_MODULE(StraightLinePropagatorESProducer);
DEFINE_FWK_EVENTSETUP_MODULE(AnalyticalPropagatorESProducer);
DEFINE_FWK_EVENTSETUP_MODULE(SmartPropagatorESProducer);
DEFINE_FWK_EVENTSETUP_MODULE(BeamHaloPropagatorESProducer);
DEFINE_FWK_EVENTSETUP_MODULE(TrajectoryCleanerESProducer);
package stream

import (
	"context"
	"errors"
	"sync"
)

// Sentinel errors surfaced by the mock connection in tests.
var errClose = errors.New("closed")
var errPingDisabled = errors.New("ping disabled")

// mockConn is an in-memory test double for the conn interface. Buffered
// channels let tests queue inbound data and observe outbound writes and
// pings without blocking.
type mockConn struct {
	pingCh    chan struct{}
	closeCh   chan struct{} // closed exactly once to broadcast shutdown
	closeOnce sync.Once

	readCh  chan []byte
	writeCh chan []byte

	pingDisabled bool // when true, ping() fails with errPingDisabled
}

// Compile-time assertion that *mockConn implements conn.
var _ conn = (*mockConn)(nil)

// newMockConn returns a mock connection with small buffered queues.
func newMockConn() *mockConn {
	return &mockConn{
		pingCh:  make(chan struct{}, 10),
		closeCh: make(chan struct{}),
		readCh:  make(chan []byte, 10),
		writeCh: make(chan []byte, 10),
	}
}

// close marks the connection closed. Safe to call multiple times; always
// returns nil.
func (c *mockConn) close() error {
	select {
	case <-c.closeCh:
	default:
		c.closeOnce.Do(func() { close(c.closeCh) })
	}
	return nil
}

// ping records a ping, or fails if pings are disabled or the conn is closed.
// NOTE(review): the send to pingCh can block once the 10-slot buffer fills —
// confirm tests always drain it.
func (c *mockConn) ping(ctx context.Context) error {
	if c.pingDisabled {
		return errPingDisabled
	}
	select {
	case <-c.closeCh:
		return errClose
	default:
	}
	c.pingCh <- struct{}{}
	return nil
}

// readMessage returns the next queued message, honoring context
// cancellation and connection closure.
func (c *mockConn) readMessage(ctx context.Context) (data []byte, err error) {
	select {
	case <-ctx.Done():
		return nil, ctx.Err()
	case data := <-c.readCh:
		return data, nil
	case <-c.closeCh:
		return nil, errClose
	}
}

// writeMessage queues an outbound message unless the connection is closed.
// NOTE(review): like ping, the send can block when writeCh is full.
func (c *mockConn) writeMessage(ctx context.Context, data []byte) error {
	select {
	case <-c.closeCh:
		return errClose
	default:
	}
	c.writeCh <- data
	return nil
}
package com.fangdd.traffic.common.mongo.reflection.dto;

import com.google.common.collect.Maps;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Reflection metadata cached for a mapped class: its field metadata, the
 * other classes it references, and its auto-increment configuration.
 *
 * Created by ycoe on 17/1/6.
 */
public class ClassMate {
    /** The class this metadata describes. */
    private Class clazz;

    /** Other classes referenced by this class (no duplicates; String excluded). */
    private List<Class> referClassList = new ArrayList<>();

    /** Field name -> field metadata. */
    private Map<String, FieldMate> fieldMateMap = Maps.newHashMap();

    /** Auto-increment configuration for this class. */
    private AutoIncrementInfo autoIncrementInfo;

    public Class getClazz() {
        return clazz;
    }

    public void setClazz(Class clazz) {
        this.clazz = clazz;
    }

    public Map<String, FieldMate> getFieldMateMap() {
        return fieldMateMap;
    }

    public void setFieldMateMap(Map<String, FieldMate> fieldMateMap) {
        this.fieldMateMap = fieldMateMap;
    }

    /**
     * Registers field metadata under the given name. A later registration
     * for the same name replaces the earlier one.
     */
    public void addFieldMate(String fieldName, FieldMate fieldMate) {
        // FIX: removed a dead, empty `if (containsKey)` block — put() already
        // overwrites any existing entry.
        fieldMateMap.put(fieldName, fieldMate);
    }

    /**
     * Records a referenced class once; String is deliberately ignored.
     */
    public void addReferClass(Class clazz) {
        if (clazz == String.class) {
            return;
        }
        if (!referClassList.contains(clazz)) {
            referClassList.add(clazz);
        }
    }

    public List<Class> getReferClassList() {
        return referClassList;
    }

    public FieldMate getFieldMate(String fieldName) {
        return fieldMateMap.get(fieldName);
    }

    public AutoIncrementInfo getAutoIncrementInfo() {
        return autoIncrementInfo;
    }

    public void setAutoIncrementInfo(AutoIncrementInfo autoIncrementInfo) {
        this.autoIncrementInfo = autoIncrementInfo;
    }
}
<?= $this->extend('templates/default'); ?> <?= $this->section('title') ?> <?= $page->name ?> <?= $this->endSection(); ?> <?= $this->section('body') ?> <?= $page->text ?> <?= $this->endSection() ?>
--- title: 'Bar Chart with Mean' path: '/documentation/examples/bar_chart_with_mean' order: 8 --- <bar-chart-with-mean></bar-chart-with-mean> `embed:charts/bar-chart-with-mean.tsx`
<?php

namespace Kyranb\Footprints;

use Illuminate\Database\Eloquent\Model;
use Illuminate\Http\Request;
use Kyranb\Footprints\Jobs\AssignPreviousVisits;

/**
 * Class TrackRegistrationAttribution.
 *
 * Trait for models that should attribute earlier site visits to the user
 * record created at registration time.
 *
 * @method static void created(callable $callback)
 */
trait TrackRegistrationAttribution
{
    public static function bootTrackRegistrationAttribution()
    {
        // Add an observer that upon registration will automatically sync up prior visits.
        static::created(function (Model $model) {
            $model->trackRegistration(request());
        });
    }

    /**
     * Get all of the visits for the user, newest first.
     *
     * @return \Illuminate\Database\Eloquent\Collection
     */
    public function visits()
    {
        return $this->hasMany(Visit::class, config('footprints.column_name'))->orderBy('created_at', 'desc');
    }

    /**
     * Method deprecated, use the 'trackRegistration' method.
     *
     * BUGFIX: trackRegistration() requires a Request argument, so calling it
     * with no arguments raised an ArgumentCountError; the current request is
     * now passed along. No value is returned (trackRegistration is void).
     *
     * @deprecated
     * @return void
     */
    public function assignPreviousVisits()
    {
        $this->trackRegistration(request());
    }

    /**
     * Assign earlier visits using the given request, either asynchronously
     * (queued job) or inline, depending on the footprints.async config flag.
     */
    public function trackRegistration(Request $request): void
    {
        $job = new AssignPreviousVisits($request->footprint(), $this);

        if (config('footprints.async') == true) {
            dispatch($job);
        } else {
            $job->handle();
        }
    }

    /**
     * The initial attribution data that eventually led to a registration
     * (the oldest recorded visit).
     *
     * @return \Illuminate\Database\Eloquent\Collection
     */
    public function initialAttributionData()
    {
        return $this->hasMany(Visit::class, config('footprints.column_name'))->orderBy('created_at', 'asc')->first();
    }

    /**
     * The final attribution data before registration (the newest visit).
     *
     * @return \Illuminate\Database\Eloquent\Collection
     */
    public function finalAttributionData()
    {
        return $this->hasMany(Visit::class, config('footprints.column_name'))->orderBy('created_at', 'desc')->first();
    }
}
<?php namespace SDF; /** * Model Boilerplate. * Add your custom model codes to this file */ class Model extends Core { }
package com.technicalassigments.moviewapp.data.model

// Review-author details as delivered by the remote movie API.
// Property names intentionally keep the payload's snake_case keys so the
// JSON mapper can bind them without annotations.
data class AuthorDetails(
    // Avatar image path; nullable when the author has none.
    // NOTE(review): exact format (relative path vs. full URL) depends on
    // the API — confirm against a real response.
    var avatar_path: String?,
    var name: String?,
    // Declared Any? — the payload apparently carries mixed types here
    // (number or null); confirm before narrowing the type.
    var rating: Any?,
    var username: String?
)
unit AggGammaSpline; //////////////////////////////////////////////////////////////////////////////// // // // Anti-Grain Geometry (modernized Pascal fork, aka 'AggPasMod') // // Maintained by Christian-W. Budde ([email protected]) // // Copyright (c) 2012-2020 // // // // Based on: // // Pascal port by Milan Marusinec alias Milano ([email protected]) // // Copyright (c) 2005-2006, see http://www.aggpas.org // // // // Original License: // // Anti-Grain Geometry - Version 2.4 (Public License) // // Copyright (C) 2002-2005 Maxim Shemanarev (http://www.antigrain.com) // // Contact: [email protected] / [email protected] // // // // Permission to copy, use, modify, sell and distribute this software // // is granted provided this copyright notice appears in all copies. // // This software is provided "as is" without express or implied // // warranty, and with no claim as to its suitability for any purpose. // // // //////////////////////////////////////////////////////////////////////////////// interface {$I AggCompiler.inc} uses AggBasics, AggBSpline, AggVertexSource; type // Class-helper for calculation Gamma-correction arrays. A Gamma-correction // array is an array of 256 Cardinal chars that determine the actual values // of Anti-Aliasing for each pixel coverage value from 0 to 255. If all the // values in the array are equal to its index, i.e. 0,1,2,3,... there's // no Gamma-correction. Class agg::polyfill allows you to use custom // Gamma-correction arrays. You can calculate it using any approach, and // class TAggGammaSpline allows you to calculate almost any reasonable shape // of the Gamma-curve with using only 4 values - kx1, ky1, kx2, ky2. // // kx2 // +----------------------------------+ // | | | . | // | | | . | // | | . ------| ky2 // | | . | // | | . | // |----------------.|----------------| // | . | | // | . | | // ky1 |-------. | | // | . | | | // | . | | | // +----------------------------------+ // kx1 // // Each value can be in range [0...2]. 
Value 1.0 means one quarter of the // bounding rectangle. Function values() calculates the curve by these // 4 values. After calling it one can get the Gamma-array with call Gamma(). // Class also supports the vertex source interface, i.e rewind() and // vertex(). It's made for convinience and used in class GammaControl. // Before calling rewind/vertex one must set the bounding box // box() using pixel coordinates. TAggGammaSpline = class(TAggVertexSource) private FGamma: array [0..255] of Int8u; FX, FY: array [0..3] of Double; FX1, FY1, FX2, FY2, FCurrentX: Double; FSpline: TAggBSpline; public constructor Create; destructor Destroy; override; procedure Values(Kx1, Ky1, Kx2, Ky2: Double); overload; procedure Values(Kx1, Ky1, Kx2, Ky2: PDouble); overload; function Gamma: PInt8u; function GetY(X: Double): Double; procedure Box(X1, Y1, X2, Y2: Double); procedure Rewind(PathID: Cardinal); override; function Vertex(X, Y: PDouble): Cardinal; override; end; implementation { TAggGammaSpline } constructor TAggGammaSpline.Create; begin FSpline := TAggBSpline.Create; FX1 := 0; FY1 := 0; FX2 := 10; FY2 := 10; FCurrentX := 0.0; Values(1.0, 1.0, 1.0, 1.0); end; destructor TAggGammaSpline.Destroy; begin FSpline.Free; inherited; end; procedure TAggGammaSpline.Values(Kx1, Ky1, Kx2, Ky2: Double); var I: Integer; begin if Kx1 < 0.001 then Kx1 := 0.001; if Kx1 > 1.999 then Kx1 := 1.999; if Ky1 < 0.001 then Ky1 := 0.001; if Ky1 > 1.999 then Ky1 := 1.999; if Kx2 < 0.001 then Kx2 := 0.001; if Kx2 > 1.999 then Kx2 := 1.999; if Ky2 < 0.001 then Ky2 := 0.001; if Ky2 > 1.999 then Ky2 := 1.999; FX[0] := 0.0; FY[0] := 0.0; FX[1] := Kx1 * 0.25; FY[1] := Ky1 * 0.25; FX[2] := 1.0 - Kx2 * 0.25; FY[2] := 1.0 - Ky2 * 0.25; FX[3] := 1.0; FY[3] := 1.0; FSpline.Init(4, @FX, @FY); for I := 0 to 255 do FGamma[I] := Trunc(GetY(I / 255.0) * 255.0); end; procedure TAggGammaSpline.Values(Kx1, Ky1, Kx2, Ky2: PDouble); begin Kx1^ := FX[1] * 4.0; Ky1^ := FY[1] * 4.0; Kx2^ := (1.0 - FX[2]) * 4.0; Ky2^ := (1.0 - 
FY[2]) * 4.0; end; function TAggGammaSpline.Gamma; begin Result := @FGamma[0]; end; function TAggGammaSpline.GetY; var Val: Double; begin if X < 0.0 then X := 0.0; if X > 1.0 then X := 1.0; Val := FSpline.Get(X); if Val < 0.0 then Val := 0.0; if Val > 1.0 then Val := 1.0; Result := Val; end; procedure TAggGammaSpline.Box; begin FX1 := X1; FY1 := Y1; FX2 := X2; FY2 := Y2; end; procedure TAggGammaSpline.Rewind(PathID: Cardinal); begin FCurrentX := 0.0; end; function TAggGammaSpline.Vertex(X, Y: PDouble): Cardinal; begin if FCurrentX = 0.0 then begin X^ := FX1; Y^ := FY1; FCurrentX := FCurrentX + (1.0 / (FX2 - FX1)); Result := CAggPathCmdMoveTo; Exit; end; if FCurrentX > 1.0 then begin Result := CAggPathCmdStop; Exit; end; X^ := FX1 + FCurrentX * (FX2 - FX1); Y^ := FY1 + GetY(FCurrentX) * (FY2 - FY1); FCurrentX := FCurrentX + (1.0 / (FX2 - FX1)); Result := CAggPathCmdLineTo; end; end.
# -*- coding: utf-8 -*- require 'test_helper' class PageTrantitionTestTest < ActionDispatch::IntegrationTest test "ログインしてページを遷移したときに、ログイン情報が保持されている" do login get '/' #assert_select 'div[class="navbar-left"] li:nth-child(3)', 'ログアウト' #get '/demands/index' #assert_select 'div[class="navbar-left"] li:nth-child(3)', 'ログアウト' assert_select 'ul[class="nav navbar-nav"] li:nth-child(1)', 'ログアウト' get '/demands/index' assert_select 'ul[class="nav navbar-nav"] li:nth-child(1)', 'ログアウト' end end
import { useState } from "react"; import styled from "styled-components"; import { ImageModal, Price, PriceSmall } from "../../elements"; import { ConfigDetailsType, IStyled } from "../../types"; import { calculatePrice } from "../../utility/calculatePrice"; import useClickOutside from "../../utility/useClickOutside"; interface IProps extends IStyled { details: ConfigDetailsType; price: string; config: string; } const Body = ({ className, config, details, price }: IProps) => { const { baths, bookingPrice, carpet, img } = details; const { p, text } = calculatePrice(bookingPrice); const { ref, isVisible, setIsVisible } = useClickOutside(false); const handleModal = () => { setIsVisible(true); }; return ( <div className={className}> <div className="bhk">{config} Bhk</div> <div> Bathrooms : <span> {baths} {/* <small>x</small> <i className="fas fa-bath"></i> */} </span> </div> <div> Booking Amount : <span> {p} {text} </span> </div> <div> Carpet area : <span>{carpet} sqft</span> </div> <div className="image" onClick={handleModal}> <img src={process.env.REACT_APP_IMAGE_SM_URL + img} alt="" /> </div> {isVisible && ( <div ref={ref}> <ImageModal img={process.env.REACT_APP_IMAGE_LG_URL + img} /> </div> )} </div> ); }; const ConfigDetails = styled(Body)` font-family: ${(props) => props.theme.family.a}; color: ${(props) => props.theme.colors.a + "c0"}; margin: 2rem 0; border: 1px solid ${(props) => props.theme.colors.a + "c0"}; padding: 1rem; .bhk { font-size: ${(props) => props.theme.size.h3}; color: ${(props) => props.theme.colors.c}; background-color: ${(props) => props.theme.colors.a}; text-align: center; font-weight: 300; text-transform: uppercase; } span { padding: 1rem; color: ${(props) => props.theme.colors.b}; font-size: ${(props) => props.theme.size.h3}; font-weight: 300; } .image { text-align: center; padding: 1rem; cursor: pointer; img { width: 14rem; height: 10rem; } } `; export default ConfigDetails;
// AWS Lambda entry point for a Microsoft Teams outgoing webhook that greets
// the sending user by name.
package main

import (
	"github.com/aws/aws-lambda-go/lambda"
	teams "github.com/ericdaugherty/msteams-webhook-go"
)

// webHook implements the library's message-handler interface.
type webHook struct {
}

// OnMessage replies with a greeting addressed to the message's sender.
func (w webHook) OnMessage(req teams.Request) (teams.Response, error) {
	return teams.BuildResponse("Hi " + req.FromUser.Name), nil
}

func main() {
	// NOTE(review): the false/"" arguments appear to disable request
	// verification / the shared secret — confirm and supply a real secret
	// before production use.
	lambda.Start(teams.NewHandler(false, "", webHook{}))
}
/// <reference path="../abstract-builder.ts" /> class SVGBuilder extends AbstractTagBuilder<SVGElement> { constructor(viewBox?: string, id?: string, xmlns = 'http://www.w3.org/2000/svg') { super('svg', id, xmlns); if (Objects.isEmptyOrWhitespace(xmlns)) Objects.requireNonNull(null, 'xmlns'); if (!Objects.isEmptyOrWhitespace(viewBox)) this.attr('viewBox', viewBox); } public bounds(width: string | number, height: string | number): SVGBuilder { this.width(width); this.height(height); return this; } public height(height: string | number): SVGBuilder { this.attr('height', height); return this; } public width(width: string | number): SVGBuilder { this.attr('width', width); return this; } public clone(): SVGBuilder { const builder = new SVGBuilder(); builder.isCached = this.isCached; if (this.isHeadlessMode) builder.hNode = this.hNode.clone(); else builder.node = this.node.cloneNode(true) as SVGElement; return builder; } }
#!/bin/bash # # Functions to fetch languages. # LANGPACKSFOLDER='../../moodle-langpacks' BUCKET='moodle-lang-prod' MOODLEORG_URL='https://download.moodle.org/download.php/direct/langpack' DEFAULT_LASTVERSION='4.0' # Checks if AWS is available and configured. function check_aws { AWS_SERVICE=1 aws --version &> /dev/null if [ $? -ne 0 ]; then AWS_SERVICE=0 echo 'AWS not installed. Check https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html for more info.' return fi # In order to login to AWS, use credentials file or AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY vars. if [ ! -f ~/.aws/credentials ] && ([ -z "$AWS_ACCESS_KEY_ID" ] || [ -z "$AWS_SECRET_ACCESS_KEY" ]); then AWS_SERVICE=0 lastversion=$DEFAULT_LASTVERSION echo 'AWS Cannot authenticate. Use aws configure or set the proper env vars.' return fi } # Get last version of Moodle to fetch latest languages. function get_last_version { if [ ! -z "${lastversion}" ]; then return fi check_aws if [ $AWS_SERVICE -eq 0 ]; then lastversion=$DEFAULT_LASTVERSION echo "Using default version $lastversion" return fi list=`aws s3 ls s3://$BUCKET/` if [ $? -ne 0 ]; then AWS_SERVICE=0 lastversion=$DEFAULT_LASTVERSION echo "AWS Cannot authenticate. Using default version $lastversion" return fi lastversion='' for folder in $list; do if [ $folder != 'PRE' ]; then lastversion=${folder/\//} fi done if [ ! -z "${lastversion}" ]; then echo "Last version $lastversion detected" return fi lastversion=$DEFAULT_LASTVERSION } # Create langfolder function create_langfolder { if [ ! -d $LANGPACKSFOLDER ]; then mkdir $LANGPACKSFOLDER fi } # Get all language list from AWS. function get_all_languages_aws { langsfiles=`aws s3 ls s3://$BUCKET/$lastversion/` langs="" for file in $langsfiles; do if [[ "$file" == *.zip ]]; then file=${file/\.zip/} langs+="$file " fi done } # Get language list from the installed ones (will not discover new translations). 
# Get language list from the installed ones (will not discover new translations).
function get_installed_languages {
    langs=`jq -r '.languages | keys[]' ../moodle.config.json`
}

# Entry function to get a language file.
# $1: language code (dashes are converted to underscores).
function get_language {
    lang=$1
    lang=${lang/-/_}

    get_last_version
    create_langfolder

    echo "Getting $lang language"

    pushd "$LANGPACKSFOLDER" > /dev/null

    curl -s "$MOODLEORG_URL/$lastversion/$lang.zip" --output "$lang.zip" > /dev/null

    # BUGFIX: the original check `[ ! -n $lang.zip ]` tested a literal string
    # (always non-empty, so it never fired); test the downloaded file instead.
    # Also guard the size check so `du` is never run on a missing file.
    if [ ! -f "$lang.zip" ]; then
        echo "Wrong language name or corrupt file for $lang"
        popd > /dev/null
        return
    fi

    size=$(du -k "$lang.zip" | cut -f 1)
    if [ "$size" -le 60 ]; then
        echo "Wrong language name or corrupt file for $lang"
        rm "$lang.zip"
        popd > /dev/null
        return
    fi

    # Remove any stale unpacked copy before extracting the fresh one.
    rm -R "$lang" > /dev/null 2>&1
    unzip -o -u "$lang.zip" > /dev/null

    # This is the AWS version to get the language but right now it's slower.
    # aws s3 cp s3://$BUCKET/$lastversion/$lang.zip . > /dev/null

    rm "$lang.zip"
    popd > /dev/null
}

# Entry function to get all language files.
# $1: optional suffix; a variant "<lang><suffix>" is fetched for each language.
function get_languages {
    suffix=$1
    if [ -z "$suffix" ]; then
        suffix=''
    fi

    get_last_version

    # Skip the refresh entirely when the folder was updated less than an hour ago.
    if [ -d "$LANGPACKSFOLDER" ]; then
        lastupdate=`date -r "$LANGPACKSFOLDER" +%s`
        currenttime=`date +%s`
        ellapsedtime=$((currenttime - lastupdate))
        if [ $ellapsedtime -lt 3600 ]; then
            echo 'Recently updated, skip update languages'
            return
        fi
    else
        create_langfolder
    fi

    # BUGFIX: AWS_SERVICE may be unset when get_last_version returned early
    # (cached $lastversion); default it to 0 so the test cannot error out.
    if [ "${AWS_SERVICE:-0}" -eq 1 ]; then
        get_all_languages_aws
        suffix=''
    else
        echo "Fallback language list will only get current installation languages"
        get_installed_languages
    fi

    for lang in $langs; do
        get_language "$lang"
        # BUGFIX: the original `[ $suffix != '' ]` expands to `[ != '' ]`
        # (a test syntax error) when suffix is empty; use -n with quotes.
        if [ -n "$suffix" ]; then
            get_language "$lang$suffix"
        fi
    done
}
import React from "react";
import { Dimensions, Image, Linking, StyleSheet, Text, TouchableOpacity, View } from "react-native";
import { Feather as Icon } from "@expo/vector-icons";
import { LinearGradient } from "expo-linear-gradient";
import { StyleGuide } from "../components";

// Card is sized relative to the window: 75% of its width, 50% of its height.
const d = Dimensions.get("window");
export const width = d.width * 0.75;
export const height = d.height * 0.5;

const styles = StyleSheet.create({
  container: { width, height, borderRadius: 24, backgroundColor: "white", justifyContent: "center", alignItems: "center" },
  // Gradient fills the whole card; radius repeated so corners stay rounded.
  gradient: { ...StyleSheet.absoluteFillObject, borderRadius: 24 },
  avatar: { width: 100, height: 100, borderRadius: 50 },
  title: { fontWeight: "bold", marginTop: 16 },
  handle: { color: StyleGuide.palette.primary, textDecorationLine: "underline" },
  divider: { height: 1, backgroundColor: "#D8DAE0", width: "100%", marginVertical: 32 },
  row: { flexDirection: "row", alignItems: "center", marginVertical: 8 },
  icon: { marginRight: 8 },
  label: { fontSize: 16, fontWeight: "500" }
});

interface RowProps {
  icon: string;   // Feather icon name
  label: string;  // visible link text
  href: string;   // URL opened in the system browser on press
}

// One tappable link row: icon + label, opens href via Linking.
const Row = ({ icon, label, href }: RowProps) => (
  <TouchableOpacity onPress={() => Linking.openURL(href)}>
    <View style={styles.row}>
      <Icon name={icon} size={24} style={styles.icon} />
      <Text style={styles.label}>{label}</Text>
    </View>
  </TouchableOpacity>
);

// Profile card: gradient background, avatar, name/handle, and external links.
export default () => (
  <View style={styles.container}>
    <LinearGradient style={styles.gradient} colors={["#FEFEFE", "#D2D6DE"]} />
    <Image source={require("./assets/avatar.jpg")} style={styles.avatar} />
    <Text style={styles.title}>William Candillon</Text>
    <Text style={styles.handle}>@wcandillon</Text>
    <View style={styles.divider} />
    <View>
      <Row icon="code" label="Start React Native" href="https://start-react-native.dev/" />
      <Row icon="youtube" label="YouTube" href="https://www.youtube.com/user/wcandill" />
      <Row icon="twitter" label="Twitter" href="https://twitter.com/wcandillon" />
    </View>
  </View>
);
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

// Smoke-test app for the DTO mapping helper mixed in via DTOHelper02.
// NOTE(review): `fe` is provided by DTOHelper02, which is not visible in this
// file — confirm its contract there before changing this code.
object DTOTest02 extends App with DTOHelper02 {
  // Source object whose members mix plain Future, Either, and Future[Either]
  // wrappers; the helper is expected to unwrap all three.
  object Foo {
    val name = Future.successful("name")
    val id = Right(1234)
    val age = Future.successful(Right(6789))
  }
  // Target DTO shape the helper maps Foo's members into (matched by name).
  case class Bar(id: Int, name: String, age: Int)
  val ec = scala.concurrent.ExecutionContext.Implicits.global
  // Compile Foo into a Bar; result is async and may carry a mapping failure.
  val newModel: Future[Either[Exception, Bar]] = fe.effect(fe.singleModel[Bar](Foo).compile).data(ec)
  // Blocks forever on purpose — this is a demo/test entry point, not prod code.
  println(Await.result(newModel, Duration.Inf))
}
package com.github.jmatsu.multipreference.processor.extension

import com.google.common.base.CaseFormat

/**
 * Converts the receiver to lowerCamelCase, guessing its current format.
 * Falls back to assuming UPPER_CAMEL when the format cannot be determined.
 */
fun String.toLowerCamel(): String {
    val detected: CaseFormat = when {
        isLowerCamel -> return this
        isUpperCamel -> CaseFormat.UPPER_CAMEL
        isLowerUnderscore -> CaseFormat.LOWER_UNDERSCORE
        isUpperUnderscore -> CaseFormat.UPPER_UNDERSCORE
        else -> CaseFormat.UPPER_CAMEL // cannot be determined
    }
    return toLowerCamel(detected)
}

/**
 * Converts the receiver to UpperCamelCase, guessing its current format.
 * Falls back to assuming LOWER_CAMEL when the format cannot be determined.
 */
fun String.toUpperCamel(): String {
    val detected: CaseFormat = when {
        isLowerCamel -> CaseFormat.LOWER_CAMEL
        isUpperCamel -> return this
        isLowerUnderscore -> CaseFormat.LOWER_UNDERSCORE
        isUpperUnderscore -> CaseFormat.UPPER_UNDERSCORE
        else -> CaseFormat.LOWER_CAMEL // cannot be determined
    }
    return toUpperCamel(detected)
}

/**
 * Converts the receiver to lower_underscore_case, guessing its current format.
 * Falls back to assuming LOWER_CAMEL when the format cannot be determined.
 */
fun String.toLowerUnderscore(): String {
    val detected: CaseFormat = when {
        isLowerCamel -> CaseFormat.LOWER_CAMEL
        isUpperCamel -> CaseFormat.UPPER_CAMEL
        isLowerUnderscore -> return this
        isUpperUnderscore -> CaseFormat.UPPER_UNDERSCORE
        else -> CaseFormat.LOWER_CAMEL // cannot be determined
    }
    return toLowerUnderscore(detected)
}

/**
 * Converts the receiver to UPPER_UNDERSCORE_CASE, guessing its current format.
 * Falls back to assuming UPPER_CAMEL when the format cannot be determined.
 */
fun String.toUpperUnderscore(): String {
    val detected: CaseFormat = when {
        isLowerCamel -> CaseFormat.LOWER_CAMEL
        isUpperCamel -> CaseFormat.UPPER_CAMEL
        isLowerUnderscore -> CaseFormat.LOWER_UNDERSCORE
        isUpperUnderscore -> return this
        else -> CaseFormat.UPPER_CAMEL // cannot be determined
    }
    return toUpperUnderscore(detected)
}

// Format detection via round-trip: a string is in a given format iff
// converting away from it and back reproduces the original exactly.

val String.isLowerCamel: Boolean
    get() = this == toUpperUnderscore(CaseFormat.LOWER_CAMEL).toLowerCamel(CaseFormat.UPPER_UNDERSCORE)

val String.isUpperCamel: Boolean
    get() = this == toLowerUnderscore(CaseFormat.UPPER_CAMEL).toUpperCamel(CaseFormat.LOWER_UNDERSCORE)

val String.isLowerUnderscore: Boolean
    get() = this == toUpperCamel(CaseFormat.LOWER_UNDERSCORE).toLowerUnderscore(CaseFormat.UPPER_CAMEL)

val String.isUpperUnderscore: Boolean
    get() = this == toLowerCamel(CaseFormat.UPPER_UNDERSCORE).toUpperUnderscore(CaseFormat.LOWER_CAMEL)

// Thin wrappers over Guava's CaseFormat.to with an explicit source format.

private fun String.toLowerCamel(from: CaseFormat): String = from.to(CaseFormat.LOWER_CAMEL, this)

private fun String.toUpperCamel(from: CaseFormat): String = from.to(CaseFormat.UPPER_CAMEL, this)

private fun String.toLowerUnderscore(from: CaseFormat): String = from.to(CaseFormat.LOWER_UNDERSCORE, this)

private fun String.toUpperUnderscore(from: CaseFormat): String = from.to(CaseFormat.UPPER_UNDERSCORE, this)
import { Component, OnInit, OnDestroy } from '@angular/core'; import { ActivatedRoute } from '@angular/router'; import { Variable, Extensions, ILogger, ConsoleLogger, PromiseService } from '../../../../utils/index'; import { SiteEntity } from '../../../../entities/index'; import { ISiteApiService, SiteApiService } from '../../../../services/serverApi/index'; @Component({ selector: 'site-details-relations', styleUrls: ['./siteDetailsRelations.scss'], templateUrl: './siteDetailsRelations.html', }) export class SiteDetailsRelationsComponent implements OnInit, OnDestroy { /// fields private _entityId: number; private _parameterSubscription: any; // type should be Subscription; protected entity: SiteEntity; /// injected dependencies protected logger: ILogger; protected siteApiService: ISiteApiService; protected promiseService: PromiseService; protected route: ActivatedRoute; /// ctor constructor(logger: ConsoleLogger, siteApiService: SiteApiService, promiseService: PromiseService, route: ActivatedRoute) { this.logger = logger; this.siteApiService = siteApiService; this.promiseService = promiseService; this.route = route; } /// methods ngOnInit(): void { let self = this; self._parameterSubscription = self.route.params .subscribe(params => { self._entityId = +params['entityId']; self.getEntity(); }); } ngOnDestroy() { if (this._parameterSubscription && this._parameterSubscription.unsubscribe) { this._parameterSubscription.unsubscribe(); } } protected getEntity(): Promise<void> { let self = this; self.promiseService.applicationPromises.sites.get.entityId = self._entityId; self.promiseService.applicationPromises.sites.get.promise = self.siteApiService .get(self._entityId) .then(function (response: SiteEntity): Promise<void> { self.entity = response; return Promise.resolve(); }) .then( () => { self.promiseService.applicationPromises.sites.get.promise = null; self.promiseService.applicationPromises.sites.get.entityId = null; }, () => { 
self.promiseService.applicationPromises.sites.get.promise = null; self.promiseService.applicationPromises.sites.get.entityId = null; }); return self.promiseService.applicationPromises.sites.get.promise; } // beverages protected getBeveragesFilter(): any { let filter = null; let filterOptionSiteId: number = Variable.isNotNullOrUndefined(this.entity) && Variable.isNotNullOrUndefined(this.entity.id) && this.entity.id !== 0 ? this.entity.id : null; let anyFilterOptionIsDefined: boolean = Variable.isNotNullOrUndefined(filterOptionSiteId); if (anyFilterOptionIsDefined) { filter = {}; if (Variable.isNotNullOrUndefined(filterOptionSiteId)) { filter.siteId = filterOptionSiteId; } } return filter; } protected actualizeBeveragesInfo(changes: any): void { if (changes) { if (Variable.isNotNullOrUndefined(changes.totalCount) && this.entity.beveragesAmount !== changes.totalCount) { this.entity.beveragesAmount = changes.totalCount; } if (changes.entityWasActivated) { this.entity.activeBeveragesAmount++; } if (changes.entityWasDeactivated) { this.entity.activeBeveragesAmount--; } } } // experts protected getExpertsFilter(): any { let filter = null; let filterOptionSiteId: number = Variable.isNotNullOrUndefined(this.entity) && Variable.isNotNullOrUndefined(this.entity.id) && this.entity.id !== 0 ? 
this.entity.id : null; let anyFilterOptionIsDefined: boolean = Variable.isNotNullOrUndefined(filterOptionSiteId); if (anyFilterOptionIsDefined) { filter = {}; if (Variable.isNotNullOrUndefined(filterOptionSiteId)) { filter.siteId = filterOptionSiteId; } } return filter; } protected actualizeExpertsInfo(changes: any): void { if (changes) { if (Variable.isNotNullOrUndefined(changes.totalCount) && this.entity.expertsAmount !== changes.totalCount) { this.entity.expertsAmount = changes.totalCount; } if (changes.entityWasActivated) { this.entity.activeExpertsAmount++; } if (changes.entityWasDeactivated) { this.entity.activeExpertsAmount--; } } } // routes protected getRoutesFilter(): any { let filter = null; let filterOptionSiteId: number = Variable.isNotNullOrUndefined(this.entity) && Variable.isNotNullOrUndefined(this.entity.id) && this.entity.id !== 0 ? this.entity.id : null; let anyFilterOptionIsDefined: boolean = Variable.isNotNullOrUndefined(filterOptionSiteId); if (anyFilterOptionIsDefined) { filter = {}; if (Variable.isNotNullOrUndefined(filterOptionSiteId)) { filter.siteId = filterOptionSiteId; } } return filter; } protected actualizeRoutesInfo(changes: any): void { if (changes) { if (Variable.isNotNullOrUndefined(changes.totalCount) && this.entity.routesAmount !== changes.totalCount) { this.entity.routesAmount = changes.totalCount; } if (changes.entityWasActivated) { this.entity.activeRoutesAmount++; } if (changes.entityWasDeactivated) { this.entity.activeRoutesAmount--; } } } // notifications protected patchSiteContacts(newContacts: string) { let self = this; self.promiseService.applicationPromises.sites.patch.contactsPromise = self.siteApiService .patchContacts(self.entity.id, newContacts) .then( function(): void { self.entity.contacts = newContacts; self.promiseService.applicationPromises.sites.patch.contactsPromise = null; }, function(): void { self.promiseService.applicationPromises.sites.patch.contactsPromise = null; }); } protected 
isSiteContactsReadOnly(): boolean { return Variable.isNotNullOrUndefined(this.promiseService.applicationPromises.sites.patch.contactsPromise); } }
<h1>Mapping App</h1> <h2>Flow Specification</h2> Flow consumes a single Question object and a global-context object, and interacts with the user. ```js module.exports = { flow: { ``` It has the following phases: 1. pre: for loading pre-fill options, etc. 1. question: the UI for the question object (i.e., the current node). 1. answer: how to store the answer of the current node (and its children) into the context. 1. child: flow of the children of the current node. 1. post: for making post-answer hooks. 1. exit: how to exit the node. Default: return to parent; loop questions may use these. Each of the above has specific implementations, which together provide various functionalities. # Pre Flow ```js pre: { ``` 1. fill: specifies list of pre-fill entries. ```js fill: [ // eg. {scope: 'global', name: 'selectedDistrict.villages'}, // eg. {scope: 'answer', name: '2.10'}, ], ``` 2. skip: specifies pre-requisites to do this question. ```js skip: { question: null, // eg. '2.5.1' option: [], // eg. [1, 3, 5] } ``` ```js }, ``` # Question: for question UI, etc. ```js question: { ``` 1. ui: declare UI to use. ```js ui: 'SINGLE_CHOICE', // 'MULTIPLE_CHOICE', // 'GPS', // 'INPUT', // 'INFO' ``` 2. validation: specify validations for the input. ```js validation: null, // 'NUMBER', // 'SURVEYOR_CODE' (4-digit, /[0-9]{4}/) }, ``` # Answering scope for the question ```js answer: { scope: 'once', // 'option', 'multiple' }, ``` # Child flow ```js child: { strategy: 'cascade', // OR 'select' select: { ui: 'grid', repeat: 'once', // OR 'multiple', }, }, ``` # Post flow The use-cases are: 1. Load Surveyor data from API call. 2. Load Districts from Surveyor data, Villages from District, etc. ```js post: { }, ``` # Exit Flow ```js exit: { strategy: 'parent', // OR 'repeat' }, ``` ```js } // flow = } // module.exports = { flow } ```
import 'package:flutter_architecture_template/core/usecases/usecase.dart';
import 'package:flutter_architecture_template/domain/entities/firebase_auth/app_user.dart';
import 'package:flutter_architecture_template/domain/repositories/firebase_auth/app_user_repository.dart';
import 'package:injectable/injectable.dart';

/// Use case that returns the currently cached [AppUser] profile
/// from the repository. Takes [NoParams] per the [Usecase] contract.
@prod
@lazySingleton
@injectable
class GetAppUser extends Usecase<AppUser, NoParams> {
  /// Repository exposing the current user profile.
  final AppUserRepository repository;

  const GetAppUser(this.repository);

  // BUGFIX: the parameter was declared as `call(NoParams)`, which Dart parses
  // as an *untyped parameter named `NoParams`* (shadowing the type), not a
  // typed parameter. Declare it as `NoParams params` to restore type safety.
  @override
  AppUser call(NoParams params) {
    return repository.profile;
  }
}
/*
 * Copyright (C) 2017 Ricky.yao https://github.com/vihuela
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 */
package com.github.kotlin_mvpro.ui

import android.os.Bundle
import android.support.v4.app.Fragment
import android.support.v4.app.FragmentPagerAdapter
import com.github.kotlin_mvpro.R
import com.github.kotlin_mvpro.databinding.ActivityMainBinding
import com.github.kotlin_mvpro.ui.base.BaseActivity
import com.github.kotlin_mvpro.ui.fragment.ImageFragment
import com.github.kotlin_mvpro.ui.fragment.NewsFragment
import com.github.kotlin_mvpro.utils.LIST_TOP
import com.github.library.utils.eventbus.Event
import com.github.library.utils.eventbus.sendEvent
import com.github.library.utils.ext.applyStatusBarDark
import com.ricky.mvp_core.base.defaults.EmptyPresenter

// Main screen: a two-tab pager (image gallery / Zhihu Daily news).
// Uses EmptyPresenter since it has no presenter logic of its own.
class MainActivity : BaseActivity<EmptyPresenter, ActivityMainBinding>() {

    override fun getLayoutId(): Int = R.layout.activity_main

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Tab titles and their fragments are parallel arrays (same order/length).
        val title = arrayOf("美图", "知乎日报")
        val items = arrayOf(ImageFragment(), NewsFragment())
        mBinding.pager.adapter = object : FragmentPagerAdapter(supportFragmentManager) {
            override fun getItem(position: Int): Fragment = items[position] as Fragment
            override fun getCount(): Int = title.size
            override fun getPageTitle(position: Int): CharSequence = title[position]
        }
        // Wire the tab strip to the pager using the same titles.
        mBinding.tab.setViewPager(mBinding.pager, title)
        // The "top" control broadcasts a scroll-to-top event to the list fragments.
        mBinding.top.setOnClickListener { sendEvent(Event.obtain(LIST_TOP, it.tag)) }
        applyStatusBarDark()
    }
}
require 'spec_helper'

# Validation specs for Furs::Models::SalesBookIdentifier.
# Uses the :sales_book_identifier factory for a baseline-valid instance.
RSpec.describe Furs::Models::SalesBookIdentifier do
  it 'inherits from Furs::Models::BaseRequest' do
    expect(Furs::Models::SalesBookIdentifier).to be < Furs::Models::BaseRequest
  end

  it 'is valid if all parameters are valid' do
    expect(build :sales_book_identifier).to be_valid
  end

  it 'is not valid if invoice_number is blank' do
    expect(build :sales_book_identifier, invoice_number: nil).not_to be_valid
  end

  it 'is not valid if invoice_number is longer than 20 characters' do
    # "a1$" * 7 is 21 characters — one over the limit.
    expect(build :sales_book_identifier, invoice_number: "a1$"*7).not_to be_valid
  end

  it 'is not valid if set_number is blank' do
    expect(build :sales_book_identifier, set_number: nil).not_to be_valid
  end

  it 'is not valid if set_number does not have two characters' do
    # One char and three chars both rejected — length must be exactly 2.
    expect(build :sales_book_identifier, set_number: 'a').not_to be_valid
    expect(build :sales_book_identifier, set_number: 'a12').not_to be_valid
  end

  it 'is not valid if serial_number is blank' do
    expect(build :sales_book_identifier, serial_number: nil).not_to be_valid
  end

  it 'is not valid if serial_number does not have 12 characters' do
    # 11 chars and 13 chars both rejected — length must be exactly 12.
    expect(build :sales_book_identifier, serial_number: "abcefg12345").not_to be_valid
    expect(build :sales_book_identifier, serial_number: "abcefg1234567").not_to be_valid
  end
end
;; Copyright © 2016 Dynamic Object Language Labs Inc.
;;
;; This software is licensed under the terms of the
;; Apache License, Version 2.0 which can be found in
;; the file LICENSE at the root of this distribution.

(ns planviz.tplan
  "temporal planning layout algorithm"
  (:require [cljs.pprint :refer [pprint]] ;; DEBUG
            [clojure.set :as set]
            [avenir.utils :as au :refer [assoc-if]]
            [avenir.math :as math :refer [atan]]
            [webtasks.tasks :as tasks
             :refer [chain sleep success! error!]]
            [planviz.ui :as ui
             :refer [node-key-fn edge-key-fn network-key-fn non-zero?
                     constraint? activity-type? begin? composite-key]]))

;; helper function
(defn reversev [s] (vec (reverse s)))

;; min that tolerates nil args; nil/nil -> js-max-int sentinel.
(defn safe-min [a b]
  (if a (if b (min a b) a) (if b b math/js-max-int)))

;; max that tolerates nil args; nil/nil -> js-min-int sentinel.
(defn safe-max [a b]
  (if a (if b (max a b) a) (if b b math/js-min-int)))

;; -------------------------------

;; Layout tuning parameters for TPN plans.
(def graph-params-tpn
  {:xsize 10 :ysize 10 :nodesep 40 :ranksep 50 ;; (* ??? (+ ysize nodesep))
   :label-char 7 :min-length 1 :plid nil :network-plid-id nil :ranking {}})

;; Layout tuning parameters for HTN plans.
(def graph-params-htn
  {:xsize 10 ; 50
   :ysize 20 ; 50
   :nodesep 80 ;; 250
   :ranksep 100 ;; 250 ;; (* ??? (+ ysize nodesep))
   :label-char 7 :min-length 1 :plid nil :network-plid-id nil :ranking {}})

;; Mutable layout state: active params and the graph being laid out.
(def graph-params (atom {}))

(def graph (atom {}))

;; Records the computed overall width/height on the current network.
(defn set-width-height [width height]
  (swap! graph
    (fn [g]
      (let [{:keys [plid network-plid-id]} @graph-params
            {:keys [network/network-by-plid-id]} g
            network (assoc (get network-by-plid-id network-plid-id)
                      :network/width width
                      :network/height height)
            network-by-plid-id (assoc network-by-plid-id
                                 network-plid-id network)]
        (assoc g :network/network-by-plid-id network-by-plid-id)))))

(defn temporal-constraint? [edge]
  (keyword-identical? (:edge/type edge) :temporal-constraint))

(defn all-nodes []
  (keys (:node/node-by-plid-id @graph)))

(defn all-edges []
  (keys (:edge/edge-by-plid-id @graph)))

(defn get-node [node-id]
  (get-in @graph [:node/node-by-plid-id node-id]))

(defn get-edge [edge-id]
  (get-in @graph [:edge/edge-by-plid-id edge-id]))

;; Merges the given node map into the stored node (keyed by node-key-fn).
(defn update-node [node]
  (let [plid-id (node-key-fn node)
        ref [:node/node-by-plid-id plid-id]]
    (swap! graph update-in ref merge node)))

;; Merges the given edge map into the stored edge (keyed by edge-key-fn).
(defn update-edge [edge]
  (let [plid-id (edge-key-fn edge)
        ref [:edge/edge-by-plid-id plid-id]]
    (swap! graph update-in ref merge edge)))

;; Creates a virtual (invisible) node at the given rank, registers it in the
;; network and the ranking table, and returns it.
;; NOTE(review): :node/actvities looks like a typo of :node/activities —
;; keyword kept as-is since consumers elsewhere may depend on it; confirm.
(defn create-vnode [rank]
  (let [{:keys [plid network-plid-id]} @graph-params
        id (keyword (gensym "vnode-"))
        node {:plan/plid plid :node/id id :node/type :virtual
              :node/actvities #{} :node/constraints #{}
              :node/state :normal :node/hidden false
              :node/incoming [] :node/outgoing []
              :node/rank rank :node/x 0 :node/y 0}
        plid-id (node-key-fn node)
        ref [:node/node-by-plid-id plid-id]]
    (swap! graph
      (fn [g]
        (let [{:keys [network/network-by-plid-id node/node-by-plid-id]} g
              network (update-in (get network-by-plid-id network-plid-id)
                        [:network/nodes] conj plid-id) ;; ref)
              network-by-plid-id (assoc network-by-plid-id
                                   network-plid-id network)
              node-by-plid-id (assoc node-by-plid-id plid-id node)]
          (assoc g :network/network-by-plid-id network-by-plid-id
            :node/node-by-plid-id node-by-plid-id))))
    (swap! graph-params update-in [:ranking rank] conj plid-id)
    node))

;; Creates a virtual edge between two existing nodes, wires it into their
;; incoming/outgoing vectors, and returns it.
(defn create-vedge [from-id to-id]
  (let [{:keys [plid network-plid-id]} @graph-params
        from (get-node from-id)
        to (get-node to-id)
        id (keyword (gensym "vedge-"))
        edge {:plan/plid plid :edge/id id :edge/type :virtual
              :edge/from from-id :edge/to to-id :edge/state :normal
              :edge/weight 1 :edge/hidden false}
        plid-id (edge-key-fn edge)
        ref [:edge/edge-by-plid-id plid-id]]
    (swap! graph
      (fn [g]
        (let [{:keys [network/network-by-plid-id edge/edge-by-plid-id]} g
              network (update-in (get network-by-plid-id network-plid-id)
                        [:network/edges] conj plid-id) ;; ref)
              network-by-plid-id (assoc network-by-plid-id
                                   network-plid-id network)
              edge-by-plid-id (assoc edge-by-plid-id plid-id edge)]
          (assoc g :network/network-by-plid-id network-by-plid-id
            :edge/edge-by-plid-id edge-by-plid-id))))
    (update-node (update-in from [:node/outgoing] conj plid-id))
    (update-node (update-in to [:node/incoming] conj plid-id))
    edge))

;; set incoming and outgoing (ignore constraint edges / zero weight)
(defn add-incoming-outgoing [d]
  (println "TPLAN add-incoming-outgoing")
  (doseq [[edge-id edge] (:edge/edge-by-plid-id @graph)]
    (let [{:keys [edge/weight edge/from edge/to]} edge
          ;; _ (swap! notes conj (str "AIO " edge-id " from " from " to " to))
          from (get-node from)
          to (get-node to)]
      (update-node (update-in from [:node/outgoing] conj edge-id))
      (update-node (update-in to [:node/incoming] conj edge-id))
      ))
  true) ;; for chain

;; Applies node-fn to every node (for side effects).
(defn map-nodes [node-fn]
  (doseq [[node-id node] (:node/node-by-plid-id @graph)]
    (node-fn node)))

;; Eagerly maps edge-fn over a node's incoming edges.
(defn map-incoming [node edge-fn]
  (doall (map (comp edge-fn get-edge) (:node/incoming node))))

;; Eagerly maps edge-fn over a node's outgoing edges.
(defn map-outgoing [node edge-fn]
  (doall (map (comp edge-fn get-edge) (:node/outgoing node))))

;; Depth-first traversal from node-id; node-fn returns further node ids to
;; visit. Returns the set of all visited node ids.
(defn visit-nodes [node-id prev-visited node-fn]
  (if (prev-visited node-id)
    prev-visited
    (let [node (get-node node-id)
          visited (atom (conj prev-visited node-id))
          tovisit (remove nil? (node-fn node))]
      ;; visit any nodes returned
      (loop [visit (first tovisit) more (rest tovisit)]
        (when visit
          (swap! visited set/union (visit-nodes visit @visited node-fn))
          (recur (first more) (rest more))))
      @visited)))

;; One relaxation pass: pushes ranks forward along activity-like edges so
;; every edge spans at least min-length ranks; re-queues moved nodes.
(defn rank-sweep [min-length moves]
  (let [node-ids @moves]
    (reset! moves [])
    (doseq [node-id node-ids]
      (let [node (get-node node-id)]
        (map-incoming node
          (fn [edge]
            (when (#{:activity :null-activity :delay-activity
                     :parallel-edge :choice-edge :virtual} (:edge/type edge))
              (let [old-rank (:node/rank node)
                    from (get-node (:edge/from edge))
                    rank (max old-rank (+ (:node/rank from) min-length))]
                ;; (println "DEBUG rank-sweep EDGE rank" rank)
                (when (not= rank old-rank)
                  (update-node (assoc node :node/rank rank))
                  (swap! moves conj (node-key-fn node))
                  nil)))))
        (map-outgoing node
          (fn [edge]
            (when (#{:activity :null-activity :delay-activity
                     :parallel-edge :choice-edge :virtual} (:edge/type edge))
              (let [rank (+ (:node/rank node) min-length)
                    to (get-node (:edge/to edge))
                    old-rank (:node/rank to)
                    rank (max rank old-rank)]
                (when (not= rank old-rank)
                  (update-node (assoc to :node/rank rank))
                  (swap! moves conj (node-key-fn to)))))))))))

;; Assigns ranks: begin node at 0, then sweeps until no node moves.
(defn rank [d]
  (let [{:keys [network-plid-id min-length]} @graph-params
        {:keys [network/network-by-plid-id]} @graph
        network (get network-by-plid-id network-plid-id)
        {:keys [network/begin]} network
        moves (atom [begin])]
    (println "TPLAN rank")
    (update-node (assoc (get-node begin) :node/rank 0))
    (while (pos? (count @moves))
      ;; (println " rank moves" (count @moves))
      (rank-sweep min-length moves))
    true ;; for chain
    ))

;; Rebuilds the :ranking map (rank -> vector of node ids) in outgoing order.
(defn save-ranking [d]
  (let [{:keys [network-plid-id]} @graph-params
        ;; {:keys [node/node-by-plid-id]} @graph
        {:keys [network/network-by-plid-id]} @graph
        network (get network-by-plid-id network-plid-id)
        {:keys [network/begin]} network
        ]
    (println "TPLAN save-ranking")
    ;; save in ranking map -- in outgoing order!!!
    (swap! graph-params assoc :ranking {})
    (visit-nodes begin #{}
      (fn [node]
        (let [{:keys [node/rank]} node
              plid-id (node-key-fn node)]
          (when (not (neg? rank))
            (swap! graph-params
              (fn [gp]
                (let [nodes (get-in gp [:ranking rank])
                      nodes (if nodes (conj nodes plid-id) [plid-id])]
                  (assoc-in gp [:ranking rank] nodes))))
            (map-outgoing node
              (fn [edge]
                (:edge/to edge)))
            ))))
    true)) ;; for chain

;; Async balancing pass: walks backwards from node-ids, pulling state nodes
;; as far right as their successors allow; recurses via the task queue so the
;; UI stays responsive, and resolves deferred d when the frontier is empty.
(defn balance-sweep [d min-length node-ids done]
  ;; (println " balance-sweep" (count node-ids))
  (if (empty? node-ids)
    (do (success! d true) true)
    (let [visit (atom #{})
          _ (doseq [node-id node-ids]
              (swap! done conj node-id)
              (let [node (get-node node-id)
                    real-incoming (atom [])]
                (map-incoming node
                  (fn [edge]
                    (when-not (constraint? edge)
                      (let [from (:edge/from edge)]
                        (if-not (keyword-identical? (:edge/type edge)
                                  :null-activity)
                          (swap! real-incoming conj from))
                        (if-not (or (@visit from) (@done from))
                          (swap! visit conj from))))))
                (map-outgoing node
                  (fn [edge]
                    (when (activity-type? edge)
                      (let [{:keys [node/rank]} node
                            node-plid-id (node-key-fn node)
                            {:keys [edge/to edge/type]} edge
                            to (get-node to)
                            rightmost (- (:node/rank to) min-length)
                            slack (- rightmost rank)]
                        ;; move last state nodes -OR- state nodes in sequence
                        (when (and (pos? slack)
                                (or (keyword-identical? type :null-activity)
                                  (= 1 (count @real-incoming))))
                          ;; move node rank to rightmost
                          (swap! graph-params
                            (fn [gp]
                              (let [old-nodes (get-in gp [:ranking rank])
                                    old-nodes (vec (filter
                                                     #(not= % node-plid-id)
                                                     old-nodes))]
                                (-> gp
                                  (assoc-in [:ranking rank] old-nodes)
                                  (update-in [:ranking rightmost]
                                    conj node-plid-id)))))
                          (update-node
                            (assoc node :node/rank rightmost)))))))
                ))
          start (tasks/deferred)
          finish (-> start
                   (sleep 30) ;; pace
                   (chain #(balance-sweep d min-length @visit done)))]
      (tasks/on-realized finish (constantly true)) ;; consume finish
      (success! start true)
      true ;; finish
      )))

;; Nodes having equal in- and out-edge weights and multiple feasible
;; ranks are moved to a feasible rank with the fewest nodes.
;; As the ranks are initially biased to be low we'll start with the
;; end node and greedily balance ranks moving towards the head.
;; NOTE we must ignore temporal constraint edges
;; Kicks off the async balance pass from the network's end node; returns a
;; deferred that resolves when balancing completes (for the task chain).
(defn balance [d]
  (let [dbalance (tasks/deferred)
        {:keys [network-plid-id min-length]} @graph-params
        {:keys [network/network-by-plid-id]} @graph
        network (get network-by-plid-id network-plid-id)
        {:keys [network/end]} network
        done (atom #{})]
    (println "TPLAN balance")
    (balance-sweep dbalance min-length [end] done)
    dbalance)) ;; for chain

;; values must be a sorted vector
;; Returns the (weighted) median of values; 0 for an empty vector.
(defn calc-median [values]
  (let [len (count values)
        m (quot len 2)]
    (cond
      (zero? len) 0
      (odd? len) (get values m)
      (= 2 len) (/ (+ (first values) (second values)) 2)
      :else (let [vl (get values (dec m))
                  vm (get values m)
                  left (- vl (first values))
                  right (- (last values) vm)]
              (/ (+ (* vl right) (* vm left)) (+ left right))))))

;; the minimum distance between nodes a and b on the same rank
;; (currently a constant; a and b are accepted for a future per-node version)
(defn rho [a b]
  (let [{:keys [ysize nodesep]} @graph-params]
    (+ ysize nodesep)))

;; Minimum feasible y for node given the nodes placed before it in node-ids.
(defn calc-min [node node-ids]
  (loop [y-min 0 node-id (first node-ids) more (rest node-ids)]
    (if (or (nil? node-id) (keyword-identical? (node-key-fn node) node-id))
      y-min
      (let [prev (get-node node-id)
            y (+ (:node/y prev) (rho prev prev)) ;; fix if rho by node
            y-min (max y-min y)]
        (recur y-min (first more) (rest more))))))

;; sort nodes by up-priority
;; if (and (> up-priority 1) (not moved?))
;;   gather the y values of (not (or hidden (zero? weight))) incoming
;;   calc median
;;   set y to median
;; now update y in nodes per rho
;; if y changed set moved?
;; nodes-by-plid-id (reduce #(assoc %1 %2 (get-node %2)) {} node-ids)
;; One median-positioning iteration: odd iterations sweep bottom-to-top using
;; outgoing (down) medians, even iterations top-to-bottom using incoming (up)
;; medians, then enforce minimum separation (rho) within each rank.
(defn medianpos [iteration]
  (let [{:keys [plan-type ranking]} @graph-params
        ranks (count (keys ranking))]
    ;; (println "TPLAN medianpos" iteration) ;; DEBUG
    (if (odd? iteration)
      (doseq [r (range (dec ranks) -1 -1)] ;; bottom to top, downward priority
        (let [node-ids (get ranking r)
              nodes (map get-node node-ids)
              down-priority (map (fn [n]
                                   [(node-key-fn n) (:node/down-priority n)])
                              nodes)
              nodes-down (map first (sort-by second > down-priority))
              rho-y (rho nil nil)]
          ;; (println " ranking" r "node-ids" node-ids) ;; DEBUG
          (loop [node-id (first nodes-down) more (rest nodes-down)]
            (when node-id
              (let [{:keys [node/y node/down-priority] :as node}
                    (get-node node-id)
                    y-min (calc-min node node-ids)
                    y-down (vec (sort (remove nil?
                                        (map-outgoing node
                                          (fn [edge]
                                            (let [{:keys [edge/to edge/hidden
                                                          edge/weight]} edge
                                                  y (if-not (or hidden
                                                              (zero? weight))
                                                      (:node/y (get-node to)))]
                                              y))))))
                    y-median (calc-median y-down)
                    y-new (max y-min
                            (if (or (and (keyword-identical? plan-type
                                           :htn-network)
                                      ;; (nil? down-priority))
                                      (= down-priority 1))
                                  (> down-priority 1)
                                  ;; (neg? y)
                                  )
                              y-median
                              y))]
                (when (not= y-new y)
                  (update-node (assoc node :node/y y-new)))
                (recur (first more) (rest more)))))
          ;; To handle the case where a node with higher down priority
          ;; gets positioned before (op top of or to the left) of
          ;; a lower priority node double check rho here.
          (loop [y-min -1 node-id (first node-ids) more (rest node-ids)]
            (when node-id
              (let [node (get-node node-id)
                    y (:node/y node)
                    y-new (if (neg? y-min)
                            y ;; first one does not change position
                            (max y-min y))
                    y-min (+ y-new rho-y)] ;; min position for next one
                (when (not= y-new y)
                  ;; DEBUG
                  ;; (println " RHO CHECK node" node-id "moved" y "to" y-new)
                  (update-node (assoc node :node/y y-new)))
                (recur y-min (first more) (rest more)))))))
      (doseq [r (range ranks)] ;; top to bottom, upward priority
        (let [node-ids (get ranking r)
              nodes (map get-node node-ids)
              up-priority (map (fn [n]
                                 [(node-key-fn n) (:node/up-priority n)])
                            nodes)
              nodes-up (map first (sort-by second > up-priority))]
          (loop [node-id (first nodes-up) more (rest nodes-up)]
            (when node-id
              (let [{:keys [node/y node/up-priority] :as node}
                    (get-node node-id)
                    y-min (calc-min node node-ids)
                    y-up (vec (sort (remove nil?
                                      (map-incoming node
                                        (fn [edge]
                                          (let [{:keys [edge/from edge/hidden
                                                        edge/weight]} edge
                                                y (if-not (or hidden
                                                            (zero? weight))
                                                    (:node/y (get-node from)))]
                                            y))))))
                    y-median (calc-median y-up)
                    y-new (max y-min
                            (if (or (and (keyword-identical? plan-type
                                           :htn-network)
                                      (= up-priority 1))
                                  (> up-priority 1))
                              y-median
                              y))]
                (if-not (= y-new y)
                  (update-node (assoc node :node/y y-new)))
                (recur (first more) (rest more))))))))))

;; Pixel width reserved for an n-character edge label.
(defn calc-label-width [label-char n]
  (let [extra (if (> n 25) 4 6)]
    (* (+ n extra) label-char)))

;; Converts ranks to final coordinates (x per rank, widened by label widths),
;; swapping the x/y axes for HTN plans, and records the network width/height.
(defn set-coord [plan-type]
  ;; y starts at 2 * nodesep, x starts at 1/2 ranksep
  ;; will calculate w and h
  ;; where w = rank * ranksep (starts at 0.5 ranksep)
  ;; and h = max y + 3 * nodesep (starts at 2 * nodesep)
  ;; (println "set-coord" plan-type)
  (let [{:keys [ranksep nodesep ranking label-char]} @graph-params
        ranks (range (count (keys ranking)))
        x0 (/ ranksep 2)
        ;; allow enough vertical space for long constraints
        y0 (* (if (keyword-identical? plan-type :tpn-network) 5 2) nodesep)
        x-key (if (keyword-identical? plan-type :htn-network) :node/y :node/x)
        y-key (if (keyword-identical? plan-type :htn-network) :node/x :node/y)
        ]
    (loop [r (first ranks) more (rest ranks) x x0 ymax y0]
      (if-not r ;; done
        (if (keyword-identical? plan-type :htn-network)
          ;; normal
          ;; (set-width-height (+ ymax y0) x)
          ;; extra margin on the bottom
          ;; (set-width-height (+ ymax y0) (+ x ranksep))
          (set-width-height (+ ymax nodesep) x)
          ;; normal
          ;; (set-width-height (+ x x0) (+ ymax y0)))
          ;; extra margin on the bottom
          ;; (set-width-height (+ x x0) (+ ymax y0 y0))
          (set-width-height x (+ ymax y0))
          )
        (let [nodes (get ranking r)
              edge-fn (fn [edge] ;; returns xrank
                        (let [{:keys [edge/hidden edge/weight edge/name
                                      edge/label edge/display-name edge/type
                                      edge/value edge/sequence-label edge/plant
                                      edge/plantid edge/command edge/args
                                      edge/cost edge/reward edge/probability
                                      edge/guard]} edge
                              ;; HERE the label symbol should become display-name
                              ;; and construct-label should be construct-display-name
                              label (ui/construct-label name label display-name
                                      sequence-label plant plantid command args
                                      type value)
                              ;; CONSIDER ui/construct-edge-tip length
                              ;; max-label (max (count label) (count tip))
                              max-label (count label)]
                          ;; unhide non-aggregation edges
                          (if (and (keyword-identical? plan-type :tpn-network)
                                hidden
                                (not (keyword-identical? type :aggregation)))
                            (update-edge (assoc edge :edge/hidden false)))
                          (if (or hidden (zero? weight))
                            0
                            (calc-label-width label-char max-label))))
              node-fn (fn [node] ;; returns [xrank ymax]
                        (let [{:keys [node/y]} node
                              y (+ y y0)]
                          (update-node (assoc node x-key x y-key y))
                          [(reduce max 0 (map-outgoing node edge-fn)) y]))
              xrank_ymax (map (comp node-fn get-node) nodes)
              xrank (reduce max ranksep (map first xrank_ymax))
              ymax (reduce max ymax (map second xrank_ymax))]
          (recur (first more) (rest more) (+ x xrank) ymax))))))

;; Replaces a multi-rank edge with a chain of virtual nodes/edges (one per
;; intermediate rank) and hides the original long edge.
(defn virtualize-path [real-from long-edge real-to]
  (let [vranks (range (inc (:node/rank real-from)) (:node/rank real-to))
        vnodes (map create-vnode vranks)]
    (loop [from real-from more (concat vnodes [real-to])]
      (let [to (first more)]
        (when to
          (create-vedge (node-key-fn from) (node-key-fn to))
          (recur to (rest more)))))
    ;; hide long-edge
    (update-edge (assoc long-edge :edge/hidden true))
    nil))

;; Scans all nodes and virtualizes every visible edge spanning > 1 rank.
(defn add-virtual-nodes []
  (let [{:keys [network-plid-id]} @graph-params
        {:keys [node/node-by-plid-id]} @graph]
    (doseq [node-id (keys node-by-plid-id)]
      (let [node (get node-by-plid-id node-id)]
        (map-outgoing node
          (fn [edge]
            (let [{:keys [edge/to edge/weight edge/hidden edge/type]} edge
                  to-id to
                  to (get-node to-id)
                  from-rank (:node/rank node)
                  to-rank (:node/rank to)]
              (if (and (not hidden) (pos? weight) (> to-rank (inc from-rank)))
                (virtualize-path node edge to)))))))))

;; determine number of crossings assuming left-id is to the
;; left of right-id
(defn crossing-pair [left-id right-id]
  (let [left (get-node left-id)
        right (get-node right-id)
        rank (:node/rank left)]
    (reduce + 0
      (map-incoming left
        (fn [left-edge]
          (let [{:keys [edge/hidden edge/weight edge/from edge/type]} left-edge
                left-from (get-node from)
                left-rank (:node/rank left-from)
                left-tc? (constraint? type)
                ignore? (or (> left-rank rank)
                          (and (not left-tc?) (or hidden (zero? weight))))]
            (if ignore?
              0
              (reduce + 0
                (map-incoming right
                  (fn [right-edge]
                    (let [{:keys [edge/hidden edge/weight edge/from
                                  edge/type]} right-edge
                          right-from (get-node from)
                          right-rank (:node/rank right-from)
                          right-tc? (constraint?
type) ignore? (or (> right-rank rank) (and (not right-tc?) (or hidden (zero? weight)))) left-p (:node/p left-from) right-p (:node/p right-from) c (if (or ignore? (<= left-p right-p)) 0 (+ (if left-tc? 0.9 1) (if right-tc? 0.9 1)))] c))))))))))) (defn set-positions [rank] (loop [p 0 node-id (first rank) more (rest rank)] (when node-id (update-node (assoc (get-node node-id) :node/p p)) (recur (inc p) (first more) (rest more))))) ;; calculate crossing cost from ranking (defn crossing [ranking] (let [ranking (or ranking (:ranking @graph-params))] (reduce + 0 (for [r (range (count (keys ranking)))] (let [rank (get ranking r) rank-len (count rank)] (set-positions rank) ;; evaluate crossings (if (zero? r) 0 (let [c (reduce + 0 (for [i (range rank-len) j (range rank-len) :when (< i j)] (crossing-pair (get rank i) (get rank j))))] c))))))) ;; returns [node-id median] (defn median-for-node [top-to-bottom? node-id] (let [node (get-node node-id) positions (vec (remove nil? (if top-to-bottom? (map-incoming node (fn [edge] (let [{:keys [edge/from edge/hidden edge/weight]} edge p (if-not (or hidden (zero? weight)) (:node/p (get-node from)))] p))) (map-outgoing node (fn [edge] (let [{:keys [edge/to edge/hidden edge/weight]} edge p (if-not (or hidden (zero? weight)) (:node/p (get-node to)))] p))))))] [node-id (calc-median positions)])) (defn median-for-node-top [node-id] (median-for-node true node-id)) (defn median-for-node-bottom [node-id] (median-for-node false node-id)) ;; returns new ranking (defn wmedian [ranking i] ;; (println " DEBUG wmedian" i) (let [max-rank (count (keys ranking))] (if (even? i) (loop [r 0 ranking ranking] ;; top to bottom (if (= r max-rank) ranking (let [rank (get ranking r) node-median (map median-for-node-top rank) new-rank (mapv first (sort-by second < node-median))] (set-positions new-rank) (recur (inc r) (assoc ranking r new-rank))))) (loop [r (dec max-rank) ranking ranking] ;; bottom to top (if (neg? 
r) ranking (let [rank (get ranking r) node-median (map median-for-node-bottom rank) new-rank (mapv first (sort-by second < node-median))] (set-positions new-rank) (recur (dec r) (assoc ranking r new-rank)))))))) (defn get-forward-constraints [node] (let [{:keys [node/rank]} node] (remove nil? (for [cnstr-id (:node/constraints node)] (let [edge (get-edge cnstr-id) {:keys [edge/to]} edge to (get-node to) to-rank (:node/rank to)] (if (> to-rank rank) (:node/p to))))))) (defn should-transpose? [u v u-v v-u] (if (> u-v v-u) true (if (< u-v v-u) false (let [left-node (get-node u) left-p (:node/p left-node) left-tcs (get-forward-constraints left-node) left-avg (if (pos? (count left-tcs)) (/ (reduce + 0 left-tcs) (count left-tcs)) left-p) right-node (get-node v) right-p (:node/p right-node) right-tcs (get-forward-constraints right-node) right-avg (if (pos? (count right-tcs)) (/ (reduce + 0 right-tcs) (count right-tcs)) right-p) delta-left (- left-p left-avg) delta-right (- right-avg right-p) transpose? (cond (and (not (neg? delta-left)) (not (neg? delta-right))) false (and (not (neg? delta-left)) (neg? delta-right)) (> (- delta-right) delta-left) (and (neg? delta-left) (not (neg? delta-right))) (> (- delta-left) delta-right) (and (neg? delta-left) (neg? delta-right)) (< delta-left delta-right) :else false)] ;; if the right constraint average is farther to the left ;; then the left constraint average is to the right, then transpose transpose? )))) ;; rank is a vector ;; returns [rank-changed? new-rank] (defn transpose-rank [rank iteration] ;; (reset! citeration iteration) ;; DEBUG (let [rank-size (count rank) forward? (even? iteration) rank (if forward? rank (reversev rank))] (if (= 1 rank-size) [false (if forward? rank (reversev rank))] (loop [changed? false new-rank [] u (first rank) more (rest rank)] (let [v (first more)] (if (nil? v) [changed? (if forward? (conj new-rank u) (reversev (conj new-rank u)))] (if forward? 
(let [u-v (crossing-pair u v) v-u (crossing-pair v u) transposed? (should-transpose? u v u-v v-u) changed? (or changed? transposed?) new-rank (conj new-rank (if transposed? v u)) next-u (if transposed? u v)] (recur changed? new-rank next-u (rest more))) (let [transposed? (> (crossing-pair v u) (crossing-pair u v)) changed? (or changed? transposed?) new-rank (conj new-rank (if transposed? v u)) next-u (if transposed? u v)] (recur changed? new-rank next-u (rest more)))))))))) ;; returns [r c] (defn transpose [ranking iteration] (loop [improved? true ranking ranking] (if-not improved? [ranking (crossing ranking)] (let [max-rank (count (keys ranking)) forward? (even? iteration) rank-start (if forward? 0 (dec max-rank)) rank-end (if forward? max-rank -1) next-rank (if forward? inc dec) [improved? new-ranking] (loop [r rank-start changed? false ranking ranking] (if (= r rank-end) [changed? ranking] (let [rank (get ranking r) [rank-changed? new-rank] (transpose-rank rank iteration) changed? (or changed? rank-changed?)] (when rank-changed? (set-positions new-rank)) (recur (next-rank r) changed? (assoc ranking r new-rank)))))] (recur improved? new-ranking))))) (defn mincross-sweep [d max-iterations i br-bc] (let [[br bc] br-bc] ;; (println "TPLAN mincross-sweep" i "bc" bc) (if (or (zero? bc) (= i max-iterations)) (do (swap! graph-params assoc :ranking br) (tasks/timeout #(success! d true)) true) (let [start (tasks/deferred) finish (-> start (sleep 100) ;; pace (chain #(mincross-sweep d max-iterations (inc i) (transpose (wmedian br i) i))))] (tasks/on-realized finish (constantly true)) ;; consume finish (success! start true) true)))) ;; finish (defn mincross [d] ;; add virtual nodes and edges (println "TPLAN mincross") (let [{:keys [plan-type]} @graph-params] (if (keyword-identical? plan-type :htn-network) true ;; for deferreds (let [dmincross (tasks/deferred) max-iterations 2 ;; heuristic - not found case needing more _ (add-virtual-nodes) ;; will mutate ranking! 
br (:ranking @graph-params)] (mincross-sweep dmincross max-iterations 0 [br (crossing br)]) dmincross)))) ;; ------------------------------------------------------------------- ;; given two vectors [a b] [c d] (defn min-max-fn ([] [math/js-max-int math/js-min-int]) ([ab] (min-max-fn ab [nil nil])) ([ab cd] (let [[a b] ab [c d] cd] [(safe-min a c) (safe-max b d)]))) ;; given opts which is all the options for a given end-id ;; returns beymns for the option option-id (or all option if not specified) ;; where beymns is a vector of beymn ;; beymn is a vector of [begin end y y-min y-max] ;; for each begin in option(s) (defn get-option-beymns [options opts option-id] (let [option-ids (if option-id [option-id] (keys opts)) begins (apply concat (map #(keys (get opts %)) option-ids)) begins (sort ;; right to left #(compare (:node/rank (get-node %2)) (:node/rank (get-node %1))) begins) beys (mapv (fn [begin] (let [{:keys [node/y node/end]} (get-node begin)] [begin end y])) begins) conj-mn (fn [bey] (let [[b e y] bey [m n] (if e ;; else is a state begin (reduce min-max-fn [y y] (apply concat (map vals (vals (get-in @options [e :opts]))))) [y y])] ;; medianpos 2 will enforce this (-> bey (conj m) (conj n)))) beymns (mapv conj-mn beys)] beymns)) (defn min-max-begin [options parent mn replace?] (swap! options update-in parent (if replace? (constantly mn) (partial min-max-fn mn)))) (defn get-nexts [node] (let [nexts (remove nil? (map-outgoing node (fn [edge] (let [{:keys [edge/hidden edge/weight edge/to]} edge] (if-not (or hidden (zero? weight)) to)))))] (vec (if (< (count nexts) 2) nexts ;; order by y (sort #(compare (:node/y (get-node %1)) (:node/y (get-node %2))) nexts))))) (defn move-begin-opts [add-delta-mn] (letfn [(mbo ([opts] (reduce-kv mbo {} opts)) ([opts option-id begins] (assoc opts option-id (reduce-kv (fn [begins begin-id mn] (assoc begins begin-id (add-delta-mn mn))) {} begins))))] mbo)) (defn move-begin [options e add-delta] (swap! 
options update-in [e :opts] (move-begin-opts add-delta))) (defn sg-option-end [options option-id end-id] (let [{:keys [opts]} (get @options end-id) begins (keys (get opts option-id))] (when (> (count begins) 1) (let [beymns (get-option-beymns options opts option-id) min-y (reduce min math/js-max-int (map #(get % 3) beymns)) calc-h (fn [beymn] (let [[b e y m n] beymn] (- y m))) hs (map calc-h beymns) max-h (reduce max 0 hs) new-y (+ min-y max-h)] (loop [stop end-id beymn (first beymns) more (rest beymns)] (let [[b e y m n] beymn delta (- new-y y) add-delta (partial + delta) add-delta-mn (fn [mn] (mapv add-delta mn)) stop (or e stop)] (when-not (zero? delta) ;; offset inside by delta (swap! options update-in [end-id :opts option-id b] add-delta-mn) ;; update for this begin (when e ;; this is a choice/parallel (update-node (update-in (get-node e) [:node/y] add-delta))) (visit-nodes b #{stop} (fn [node] (update-node (update-in node [:node/y] add-delta)) (if-let [end (:node/end node)] ;; interior begin/end (move-begin options end add-delta-mn)) (map-outgoing node (fn [edge] (let [{:keys [edge/to edge/weight edge/hidden]} edge] (if-not (or hidden (zero? weight)) to))))))) (if-not (empty? more) (recur b (first more) (rest more))))))) ;;node end, here we've balanced option-id (swap! options update-in [end-id :todo] dec) (let [{:keys [begin opt-order todo opts parent]} (get @options end-id) y-rho (rho nil nil)] (when (zero? todo) (loop [prev-max math/js-min-int opt-id (first opt-order) more (rest opt-order)] (let [beymns (get-option-beymns options opts opt-id) mn (reduce min-max-fn (map #(subvec % 3) beymns)) [y-min y-max] mn start-y (if (= prev-max math/js-min-int) ;; work around cljs y-min (+ prev-max y-rho)) delta (if (> y-min start-y) ;; round up to next y-rho (- start-y y-min) 0) y-max (+ y-max delta)] ;; delta will change y-max ;; because each option might have been adjusted (when-not (zero? 
delta) (let [add-delta (partial + delta) add-delta-mn (fn [mn] (mapv add-delta mn))] (visit-nodes opt-id #{end-id} (fn [node] (update-node (update-in node [:node/y] add-delta)) (if-let [end (:node/end node)] ;; interior b/end (move-begin options end add-delta-mn)) (map-outgoing node (fn [edge] (let [{:keys [edge/to edge/weight edge/hidden]} edge] (if-not (or hidden (zero? weight)) to)))))))) (if parent (if (keyword-identical? opt-id (first opt-order)) ;; first one (min-max-begin options parent mn true) (min-max-begin options parent mn false))) (if-not (empty? more) (recur y-max (first more) (rest more))))))))) ;; visit from [node-id to end-id) exclusive (defn sg-balance [options option-id begin-id node-id end-id] (let [node (get-node node-id) {:keys [node/type node/y node/end]} node mn (get-in @options [end-id :opts option-id begin-id]) begin-mn (min-max-fn [y y] mn) nexts (get-nexts node)] ;; (println "SB" option-id "BEGIN-ID" begin-id "NODE-ID" node-id ;; "END-ID" end-id "BEGIN-MN" begin-mn "NEXTS" nexts "#" (count nexts)) (when (keyword-identical? node-id option-id) ;; this node-id is a new option for end-id ;; (println "this node-id is a new option for end-id") (swap! options assoc-in [end-id :opts option-id node-id] begin-mn)) (if (begin? type) (do ;; create blank data for this end (swap! options assoc end {:begin node-id :opt-order nexts :parent [end-id :opts option-id node-id] :todo (count nexts) :opts {}}) (when-not (keyword-identical? node-id option-id) ;; new begin ;; (println "New begin within this option") (swap! options assoc-in [end-id :opts option-id node-id] begin-mn)) (doseq [next nexts] (sg-balance options next next next end)) (let [node (get-node end) ;; is there more?? nexts (get-nexts node)] (if (= 1 (count nexts)) ;; continue on same option-id end-id (sg-balance options option-id (first nexts) (first nexts) end-id) (when (zero? 
(count nexts)) ;; (println "TPN END (*-begin)!") (sg-option-end options option-id :network-end))))) (if (keyword-identical? node-id end-id) (sg-option-end options option-id end-id) (if (and (zero? (count nexts)) (keyword-identical? option-id (get-in @options [:network-end :begin]))) (sg-option-end options option-id :network-end) (if (= 1 (count nexts)) ;; NOT the end (do (when-not (= begin-mn mn) (swap! options assoc-in [end-id :opts option-id begin-id] begin-mn)) (sg-balance options option-id begin-id (first nexts) end-id)))))))) (defn subgraph-balance [] (let [options (atom {}) {:keys [network/network-by-plid-id]} @graph {:keys [plid network-plid-id]} @graph-params network (get network-by-plid-id network-plid-id) {:keys [network/begin network/end]} network] (swap! options assoc :network-end {:begin begin :opt-order [begin] :todo 1 ;; do NOT presume the first node will start at 0 0 ;; :opts {begin {begin [0 0]}} }) (sg-balance options begin begin begin :network-end))) (defn position [d] (let [{:keys [plid network-plid-id]} @graph-params {:keys [plan/by-plid network/network-by-plid-id]} @graph plan0 (get by-plid plid) {:keys [plan/type plan/name]} plan0 plan-type type network (get network-by-plid-id network-plid-id) {:keys [network/begin network/end network/nodes]} network] (println "TPLAN position") (doseq [node-id nodes] (let [node (get-node node-id)] (map-incoming node (fn [edge] (let [node (get-node node-id) {:keys [edge/hidden edge/weight edge/type edge/from]} edge edge-type type {:keys [node/type node/outgoing]} (get-node from) from-type type edge-fn (fn [sum edge-id] (let [{:keys [edge/type edge/hidden edge/weight]} (get-edge edge-id)] (+ sum (if (and (activity-type? type) (not (or hidden (zero? weight)))) 1 0)))) n-outgoing (reduce edge-fn 0 outgoing) {:keys [node/up-priority node/type]} node up-priority (+ (or up-priority 0) (if (or (keyword-identical? edge-type :virtual) (and (or (keyword-identical? from-type :state) (keyword-identical? 
type :state)) (= 1 n-outgoing))) 0.1 0) (if hidden 0 weight))] (update-node (assoc node :node/up-priority up-priority))))) (map-outgoing node (fn [edge] (let [node (get-node node-id) {:keys [edge/hidden edge/weight edge/type edge/to]} edge edge-type type {:keys [node/type node/incoming]} (get-node to) to-type type edge-fn (fn [sum edge-id] (let [{:keys [edge/type edge/hidden edge/weight]} (get-edge edge-id)] (+ sum (if (and (activity-type? type) (not (or hidden (zero? weight)))) 1 0)))) n-incoming (reduce edge-fn 0 incoming) {:keys [node/down-priority node/type]} node down-priority (+ (or down-priority 0) (if (and (keyword-identical? plan-type :htn-network) (keyword-identical? to end)) -0.5 0) (if (or (keyword-identical? edge-type :virtual) (and (or (keyword-identical? to-type :state) (keyword-identical? type :state)) (= 1 n-incoming))) 0.1 0) (if hidden 0 weight))] (update-node (assoc node :node/down-priority down-priority))))))) (medianpos 0) (medianpos 1) (when (not= plan-type :htn-network) (medianpos 2) (medianpos 3) (subgraph-balance) (medianpos 4) (medianpos 5) ;; NEW: consider additional phases ;; (subgraph-balance) ;; (medianpos 6) ;; (medianpos 7) ;; (subgraph-balance) ;; (medianpos 8) ;; (medianpos 9) ) (set-coord plan-type) true)) ;; for chain (defn clean-nodes ([nodes] (reduce-kv clean-nodes {} nodes)) ([nodes node-id node] (assoc nodes node-id (dissoc node :node/actvities :node/constraints :node/down-priority :node/hidden :node/incoming ;; :node/outgoing ;; DEBUG :node/p ;; :node/rank ;; DEBUG :node/up-priority)))) (defn edges-ref-nodes ([edges] (reduce-kv edges-ref-nodes {} edges)) ([edges edge-id edge] (assoc edges edge-id (let [{:keys [edge/from edge/to]} edge] (-> edge (dissoc :edge/weight :edge/hidden) (assoc :edge/from [:node/node-by-plid-id from] :edge/to [:node/node-by-plid-id to])))))) (defn initialize-network [network] (assoc network :network/width 700 :network/height 50)) (defn initialize-nodes [m k node] (assoc m k (assoc node :node/state 
:normal :node/hidden false :node/incoming [] :node/outgoing [] :node/rank -1 ;; -1 means NOT in current network :node/x 0 :node/y 0))) (defn initialize-edges [m k edge] (assoc m k (assoc edge :edge/state :normal :edge/hidden (constraint? edge) :edge/weight (if (constraint? edge) 0 1) ))) (defn initialize-graph [] (let [{:keys [plid network-plid-id]} @graph-params] (swap! graph (fn [g] (let [{:keys [network/network-by-plid-id node/node-by-plid-id edge/edge-by-plid-id]} g network (initialize-network (get network-by-plid-id network-plid-id)) network-by-plid-id (assoc network-by-plid-id network-plid-id network) node-by-plid-id (reduce-kv initialize-nodes {} node-by-plid-id) edge-by-plid-id (reduce-kv initialize-edges {} edge-by-plid-id)] (assoc g :network/network-by-plid-id network-by-plid-id :node/node-by-plid-id node-by-plid-id :edge/edge-by-plid-id edge-by-plid-id)))))) (defn initialize-htn [] (println "TPLAN initialize-htn") (map-nodes (fn [node] (when (> (count (:node/outgoing node)) 1) (let [order (atom 0) {:keys [node/outgoing]} node out-edges (map get-edge outgoing) pio-fn #(vector (or (:edge/order %) (swap! order inc)) (:plan/plid %) (:edge/id %)) out-order (sort-by first (mapv pio-fn out-edges)) outgoing (mapv (fn [[o p i]] (composite-key p i)) out-order)] (update-node (assoc node :node/outgoing outgoing)))))) true) ;; annotate c-begin nodes with the sum of probabilities (if apropos) (defn initialize-tpn [] (println "TPLAN initialize-tpn") (map-nodes (fn [node] (let [{:keys [node/type node/outgoing node/end]} node sum-probability (fn [p edge-id] (+ p (or (:edge/probability (get-edge edge-id)) 0))) probability (if (keyword-identical? :c-begin type) (reduce sum-probability 0 outgoing) 0) probability (if (non-zero? probability) probability) order (atom 0) out-edges (map get-edge outgoing) pio-fn #(vector (or (:edge/order %) (swap! 
order inc)) (:plan/plid %) (:edge/id %)) out-order (sort-by first (mapv pio-fn out-edges)) outgoing (mapv (fn [[o p i]] (composite-key p i)) out-order)] (update-node (assoc-if (assoc node :node/outgoing outgoing) :node/probability probability)) ;; if an unchoice node, indicate that on the c-end as well (when probability (update-node (assoc (get-node end) :node/probability probability))) (if (begin? type) ;; create aggregation edge (update-edge (assoc (create-vedge (node-key-fn node) end) :edge/type :aggregation :edge/hidden true :edge/weight 0))) ))) true) (defn initialize-plan [d] (let [{:keys [plid network-plid-id]} @graph-params {:keys [plan/by-plid network/network-by-plid-id]} @graph {:keys [plan/type]} (get by-plid plid)] (if (keyword-identical? type :htn-network) (initialize-htn) (initialize-tpn)) true)) (defn setup-graph [plan] (let [plid (first (keys (:plan/by-plid plan))) plan0 (get-in plan [:plan/by-plid plid]) {:keys [plan/type plan/name plan/begin]} plan0 gp (if (keyword-identical? type :htn-network) graph-params-htn graph-params-tpn)] (reset! graph plan) (reset! graph-params (assoc gp :plid plid :plan-type type :network-plid-id begin)) (initialize-graph) (println "TPLAN" plid "type" type "network" begin) true)) (defn return-graph [& args] (println "TPLAN return-graph") @graph) (defn layout [plan] (let [a-little 30 start (tasks/deferred) finish (-> start (sleep a-little) (chain setup-graph) (sleep a-little) (chain add-incoming-outgoing) (sleep a-little) (chain initialize-plan) (sleep a-little) (chain rank) (sleep (* 3 a-little)) (chain save-ranking) (sleep a-little) (chain balance) (sleep a-little) (chain mincross) (sleep a-little) (chain position) (sleep a-little) (chain return-graph) (sleep a-little) (tasks/catch #(do (println "Layout failed... ") (return-graph))))] (success! start plan) finish))
# Cubic Chunks Minecraft Mod This MinecraftForge mod extends Minecraft height and depth. The only limitation is size of 32-bit integer. [![Build Status](https://travis-ci.org/OpenCubicChunks/CubicChunks.svg?branch=MC_1.12)](https://travis-ci.org/OpenCubicChunks/CubicChunks) * For the most up to date information about this mod and its related mods, as well as the newest Downloads, please join us on the **Cubic Chunks Discord** linked below: * [Discord server](https://discord.gg/kMfWg9m) ### Cubic Chunks (CC) - Links: Github - [Cubic Chunks - 1.12.2 and Lower](https://github.com/OpenCubicChunks/CubicChunks) Github - [Cubic Chunks - After 1.12.2](https://github.com/OpenCubicChunks/CubicChunks2) CurseForge - [Main page](https://www.curseforge.com/minecraft/mc-mods/opencubicchunks) CurseForge - [Downloads (All)](https://www.curseforge.com/minecraft/mc-mods/opencubicchunks/files/all) ### Cubic World Gen (CWG) - Links: Github - [Cubic World Gen](https://github.com/OpenCubicChunks/CubicWorldGen) CurseForge - [Main page](https://www.curseforge.com/minecraft/mc-mods/cubicworldgen) CurseForge - [Downloads (All)](https://www.curseforge.com/minecraft/mc-mods/cubicworldgen/files/all) * Cubic Chunks **Archive** : [The Archive](https://www.mediafire.com/folder/i9adl2fo9yoas/Cubic%20Chunks) - Please download from **Curseforge** to help **Support** the CC Project, if the version you want is available there. - Other **Support** Links are available in the **Cubic Chunks Discord** Linked further up, Thank You. [![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/CubicChunks-dev/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) ### Cloning the repository * Please go to our Discord server linked above for the newest info on compilation of this project. Note: you need git installed to do the following: ``` git clone --recursive ``` You need a git submodule for the project to compile. 
If you don't yet have the submodule but already cloned the repository: ``` git submodule update --init --recursive ``` To get latest version of the submodule: ``` git submodule update --recursive --remote ``` ### Compiling the mod Note: on windows you need to run these commands without `./` This command: ``` ./gradlew build ``` Should be enough to build the mod, but if there are any issues, run `./gradlew setupDecompWorkspace` before `./gradlew build`. The mod uses information from git repository to generate version number. Make sure you have the git repository before compiling. ### Setting up development environment Note: on windows you need to run these commands without `./` ![IntelliJ IDEA](intellij-logo.png) Run: ``` ./gradlew setupDecompWorkspace ``` then import it as gradle project into IDEA (if you already have something open, use File->new->project from existing sources) Then run: ``` ./gradlew genIntellijRuns ``` To be able to run the mod from within IDE. Then edit the generated run configurations and set `use classpath of module` to `CubicChunks_main` Then refresh gradle project in IDEA. For development in IntelliJ IDEA the [MinecraftDev plugin](https://plugins.jetbrains.com/idea/plugin/8327-minecraft-development) is recommended. #### Other IDEs: Importing cubic chunks should be the same as any other Forge mod. If the IDE has gradle integration, import the mod as gradle project after setting up development environment. To run this mod from your IDE you need to add at least the following JVM option: ``` -Dfml.coreMods.load=cubicchunks.asm.CubicChunksCoreMod ``` If you use a different IDE and know how to set up a development environment in that IDE - submit pull request adding that information to this file.
Some other useful options: `-Dmixin.debug.verbose=true` - enable mixin verbose output `-Dmixin.debug.export=true` - export classes after applying mixins to `run/.mixin.out/`, useful for debugging mixins `-Dcubicchunks.debug=true` - enable cubic chunks debug options `-XX:-OmitStackTraceInFastThrow` - some parts of cubic chunks code cause fast throw when they fail, use when you see exception with no stacktrace `-Dmixin.checks.interfaces=true` - check that mixin classes implement all interface methods `-Dfml.noGrab=false` - can be useful for debugging client on some systems, disables hiding mouse cursor
<!-- id: 98020958 link: http://tumblr.atmos.org/post/98020958/network-advertising-initiative slug: network-advertising-initiative date: Sun Apr 19 2009 21:16:12 GMT-0700 (PDT) publish: 2009-04-19 tags: title: Network Advertising Initiative --> Network Advertising Initiative ============================== [http://www.networkadvertising.org/managing/opt\_out.asp](http://www.networkadvertising.org/managing/opt_out.asp)
package cn.xigua366.sample.service;

import cn.xigua366.sample.domain.dto.PageSchoolDTO;
import cn.xigua366.sample.domain.dto.SchoolDTO;
import cn.xigua366.sample.domain.query.PageSchoolQuery;
import com.yangxi.cloud.framework.core.PageResult;

/**
 * <p>
 * School information service component.
 * <p/>
 *
 * @author yangxi
 * @version 1.0
 */
public interface SchoolService {

    /**
     * Looks up a single school record by its ID.
     *
     * @param id the school's database ID
     * @return the school DTO for the given id
     */
    SchoolDTO getSchoolById(Long id);

    /**
     * Queries a page of schools matching the given criteria.
     *
     * @param pageSchoolQuery paging and filter criteria
     * @return one page of {@link SchoolDTO} results
     */
    PageResult<SchoolDTO> pageSchool(PageSchoolQuery pageSchoolQuery);

    /**
     * Queries a page of schools matching the given criteria; variant whose
     * page items are {@link PageSchoolDTO} instead of {@link SchoolDTO}.
     *
     * @param pageSchoolQuery paging and filter criteria
     * @return one page of {@link PageSchoolDTO} results
     */
    PageResult<PageSchoolDTO> pageSchool2(PageSchoolQuery pageSchoolQuery);
}
var jaffy = jaffy || {};

/**
 * Initialises a Google Map with clustered markers inside each matched
 * `component` element — one map instance per matched element.
 *
 * @param {string|Element|jQuery} component selector for the map container(s)
 * @param {Array<Object>} locations marker descriptors: `lat`/`lng` plus
 *        title/address/contact fields rendered into the info window
 */
jaffy.maps = function (component, locations) {
    var $component = $(component);

    // BUG FIX: jQuery collections are iterated with .each(), not .foreach()
    // (the original call threw a TypeError).
    $component.each(function () {
        map_instance($(this), locations);
    });

    function map_instance($component, locations) {
        // BUG FIX: in the original, the statement list ended after `Array();`,
        // so `bounds` leaked to an implicit global. Keep everything in one
        // declaration list.
        var map,
            marker,
            markers = [],
            bounds = new google.maps.LatLngBounds(),
            markerCluster;

        // BUG FIX: render into the element handed to this instance instead of
        // a hard-coded #map, so multiple matched components each get a map.
        map = new google.maps.Map($component.get(0), {
            center: locations[0],
            zoom: 7
        });

        // Tiny helper that wraps `content` in an HTML element of type `tag`.
        function _html(tag, content) {
            return '<' + tag + '>' + content + '</' + tag + '>';
        }

        var infowindow = new google.maps.InfoWindow();

        // Extend the viewport bounds and create one marker per location.
        locations.map(function (location) {
            bounds.extend(new google.maps.LatLng(location.lat, location.lng));
            marker = new google.maps.Marker({
                position: location,
                label: location.title,
                map: map,
                address: {
                    address: location.address,
                    city: location.city,
                    state: location.state,
                    country: location.country,
                    phone: location.phone,
                    email: location.email,
                    url: location.url
                },
                buttons: location.buttons
            });
            // Clicking a marker shows its address details in the shared window.
            marker.addListener('click', function (event) {
                infowindow.setContent(
                    _html('h4', this.label) +
                    _html('p', this.address.address) +
                    _html('p',
                        _html('span', this.address.city) +
                        _html('span', this.address.state) +
                        _html('span', this.address.country)
                    ) +
                    _html('p', this.address.phone) +
                    _html('p', this.address.email) +
                    _html('p', this.address.url)
                );
                infowindow.open(map, this);
            });
            markers.push(marker);
        });

        map.setCenter(bounds.getCenter());
        map.fitBounds(bounds);

        // Cluster markers so dense areas stay readable.
        markerCluster = new MarkerClusterer(map, markers, {
            imagePath: 'https://developers.google.com/maps/documentation/javascript/examples/markerclusterer/m'
        });
    }
};
using System;

namespace OY.TotalCommander.TcPluginInterface.Content
{
    /// <summary>
    /// Event arguments for a content progress notification; carries the
    /// single integer value supplied by the emitter to its subscribers.
    /// </summary>
    [Serializable]
    public class ContentProgressEventArgs : PluginEventArgs
    {
        /// <summary>Creates progress event args wrapping <paramref name="nextBlockData"/>.</summary>
        /// <param name="nextBlockData">Progress value forwarded to handlers.</param>
        public ContentProgressEventArgs(int nextBlockData)
        {
            this.NextBlockData = nextBlockData;
        }

        /// <summary>Gets the progress value supplied at construction time.</summary>
        public int NextBlockData { get; private set; }
    }
}
#!/bin/bash
# Tear down a running free5gc deployment: kill every network function,
# stop packet capture, delete tunnel/IPsec links, remove IPC residue,
# and drop cached NF profiles from MongoDB.

NF_LIST="nrf amf smf udr pcf udm nssf ausf n3iwf free5gc-upfd"

# Force-kill each network function; suppress noise for NFs not running.
for NF in ${NF_LIST}; do
    sudo killall -9 ${NF} 2>/dev/null
done

sudo killall tcpdump 2>/dev/null

# Remove GTP-U / IPsec related links if present.
sudo ip link del upfgtp 2>/dev/null
sudo ip link del ipsec0 2>/dev/null
sudo ip link del xfrmi-default 2>/dev/null

# BUG FIX: the original `rm /dev/mqueue/*` failed with an error when the
# message-queue directory was empty (unexpanded glob); -f makes it a no-op.
sudo rm -f /dev/mqueue/* 2>/dev/null
sudo rm -f /tmp/free5gc_unix_sock

# Drop registered NF profiles so the next run starts from a clean registry.
mongo --eval "db.NfProfile.drop()" free5gc
require 'ddtrace/logger'

module Datadog
  # A simple pub-sub event model for components to exchange messages through.
  #
  # Handlers are registered under a caller-chosen key so they can later be
  # replaced or removed individually; all operations are mutex-guarded.
  class Event
    attr_reader :name, :subscriptions

    def initialize(name)
      @name = name
      @subscriptions = {}
      @mutex = Mutex.new
    end

    # Registers +block+ under +key+, replacing any previous handler for that key.
    def subscribe(key, &block)
      raise ArgumentError, 'Must give a block to subscribe!' unless block

      @mutex.synchronize { @subscriptions[key] = block }
    end

    # Removes (and returns) the handler registered under +key+, if any.
    def unsubscribe(key)
      @mutex.synchronize { @subscriptions.delete(key) }
    end

    # Drops every registered handler.
    def unsubscribe_all!
      @mutex.synchronize { @subscriptions.clear }

      true
    end

    # Invokes every handler with +args+. A failing handler is logged at debug
    # level and does not prevent the remaining handlers from running.
    def publish(*args)
      @mutex.synchronize do
        @subscriptions.each do |key, block|
          begin
            block.call(*args)
          rescue StandardError => e
            Datadog::Logger.log.debug("Error while handling '#{key}' for '#{name}' event: #{e.message}")
          end
        end

        true
      end
    end
  end
end
use std::collections::BTreeSet;
use std::fs::File;
use std::io::Read;

use nom::{
    bytes::complete::tag,
    character::complete::{alpha1, char},
    multi::separated_list1,
    IResult,
};

/// Reads the whole file at `path` into a `String`, panicking on I/O errors
/// (acceptable for a puzzle runner).
fn read(path: &str) -> String {
    let mut file = File::open(path).unwrap();
    let mut s = String::new();
    file.read_to_string(&mut s).unwrap();
    s
}

/// Selects the demo or real input file for day 6.
fn input_path(demo: bool) -> &'static str {
    if demo {
        "inputs/input6demo.txt"
    } else {
        "inputs/input6.txt"
    }
}

/// Parses raw input into groups of answer lines: groups are separated by a
/// blank line; each member line is one run of alphabetic characters.
/// Panics on malformed input, matching the original unwrap behavior.
fn parse_groups(s: &str) -> Vec<Vec<&str>> {
    let parse_result: IResult<_, _> =
        separated_list1(tag("\n\n"), separated_list1(char('\n'), alpha1))(s);
    parse_result.unwrap().1
}

/// Part A: for each group, count the UNION of answered questions; print the sum.
pub fn problem6a(demo: bool) {
    let s = read(input_path(demo));
    let raw = parse_groups(&s);
    let unions = raw.iter().map(|gr| {
        gr.iter()
            .map(|p| p.chars().collect::<BTreeSet<_>>())
            .fold(BTreeSet::new(), |acc, s| acc.union(&s).cloned().collect())
    });
    println!("{:?}", unions.map(|s| s.len()).sum::<usize>());
}

/// Part B: for each group, count the INTERSECTION of answered questions;
/// print the sum. The fold seeds with the first member's set (None sentinel),
/// so the unwrap is safe for non-empty groups as produced by the parser.
pub fn problem6b(demo: bool) {
    let s = read(input_path(demo));
    let raw = parse_groups(&s);
    let intersections = raw.iter().map(|gr| {
        gr.iter()
            .map(|p| p.chars().collect::<BTreeSet<_>>())
            .fold(None, |acc_opt: Option<BTreeSet<char>>, s| match acc_opt {
                None => Some(s),
                Some(acc) => Some(acc.intersection(&s).cloned().collect()),
            })
            .unwrap()
    });
    println!("{:?}", intersections.map(|s| s.len()).sum::<usize>());
}
package spp

import org.scalatest._
import java.io.File

import spp.lexer.Lexer
import spp.structure.Tokens.Token

// Golden-file tests for the lexer: each case runs an input .py file through
// the lexer and compares the rendered token stream against a reference .txt
// file (outputMatch) or checks the output for a marker string (outputContains).
//
// Fixed: removed a duplicated `import spp.structure.Tokens.Token`.
class LexerTests extends OutputComparisonSpec {

  val inputExtension: String = ".py"
  val outputExtension: String = ".txt"

  // Pipeline under test: lex the file, then render tokens one per line.
  val pipeline = path => TokensToString(Lexer(path))

  "lexer" should "tokenize basic input file correctly" in {
    outputMatch("basic-test")
  }

  it should "tokenize file with blank lines correctly" in {
    outputMatch("empty-lines-test")
  }

  it should "tokenize a single-line input file correctly" in {
    outputMatch("single-line")
  }

  it should "fail with an indented first statement" in {
    outputContains("unexpected-indent-1", "Error")
  }

  it should "fail with inconsistent indentation" in {
    outputContains("unexpected-indent-2", "Error")
  }

  it should "handle nested enclosing correctly" in {
    outputMatch("nested-enclosing")
  }

  it should "tokenize numeric literals correctly" in {
    outputMatch("literals")
  }

  it should "tokenize string and bytes literals correctly" in {
    outputMatch("string-literals")
  }

  it should "handle explicit line joinings correctly" in {
    outputMatch("explicit-line-joining")
  }

  it should "tokenize operators and delimiters correctly" in {
    outputMatch("operators-delimiters")
  }
}

// Renders a token stream as newline-separated `toString` values.
// Uses mkString instead of reduce so an empty token stream yields "" rather
// than throwing UnsupportedOperationException.
object TokensToString {
  def apply(tokens: Iterator[Token]) = {
    tokens.map(_.toString()).mkString("\n")
  }
}
# frozen_string_literal: true RSpec.describe QuetzalDb::Cfn::Create do subject(:template) do Helpers::Template.new(File.join(__dir__, '..', '..', '..', 'quetzal-db-create-stack.yml')) end let(:expected_empty_resource) do { 'Type' => 'Custom::EmptyResource', 'Condition' => 'NotCreate' } end it 'adds an empty resource' do empty_resource = template['Resources']['EmptyResource'] expect(empty_resource).to eq(expected_empty_resource) end end
# Shifts every ASCII letter in +str+ forward by +int+ positions, wrapping
# around the alphabet and preserving case; all other characters pass through
# unchanged. Prints the transformed string and returns it.
#
# Fixes over the original implementation:
# * Wrapping used ad-hoc ranges ((90..97) and (122..200)) that mis-shifted
#   boundary letters — e.g. 'Z' or 'z' with a shift of 0 were wrongly
#   "wrapped" by -26 — and broke entirely for shifts >= 26.
# * Modular arithmetic now handles any integer shift, including negatives.
def caesar_cipher(str, int)
  fin = ""
  str.each_char do |c|
    fin += if c =~ /[A-Z]/
             # 65 == 'A'.ord; rotate within the uppercase alphabet.
             ((c.ord - 65 + int) % 26 + 65).chr
           elsif c =~ /[a-z]/
             # 97 == 'a'.ord; rotate within the lowercase alphabet.
             ((c.ord - 97 + int) % 26 + 97).chr
           else
             c
           end
  end
  puts fin
  fin
end

print "Enter some text: "
text = gets.chomp()
puts ""

print "How many letters should this be shifted forward? "
change = gets.chomp().to_i
puts ""

caesar_cipher(text, change)
part of kcaa_controller;

// Updates the model's fleet list from a server payload whose "fleets" entry
// is a list of per-fleet maps.
void handleFleetList(
    Assistant assistant, AssistantModel model, Map<String, dynamic> data) {
  var fleetsLength = data["fleets"].length;
  if (fleetsLength != model.fleets.length) {
    // Wait for the DOM to be updated.
    runLater(0, () => assistant.updateCollapsedSections());
  }
  // Grow/shrink the model list to match, creating fresh Fleet objects.
  resizeList(model.fleets, fleetsLength, () => new Fleet());
  for (var i = 0; i < fleetsLength; i++) {
    model.fleets[i].update(data["fleets"][i], model.shipMap, model.missions);
  }
  notifyShipList(model);
  // A fleet counts as "chargeable" when any of its ships is below full fuel
  // or ammo (presumably: resupply is worthwhile — confirm against UI usage).
  model.someFleetChargeable = model.fleets.any((f) =>
      f.ships.any((s) => s.fuel < s.fuelCapacity || s.ammo < s.ammoCapacity));
}
/*
Sniperkit-Bot
- Status: analyzed
*/

package main

import (
	"strings"
	"testing"

	"github.com/PuerkitoBio/goquery"
)

// TestGetFileAndPath checks the file/path split produced by getFileAndPath.
// NOTE(review): the expected values contain glob wildcards ("file*", "*bin"),
// which suggests getFileAndPath returns search patterns rather than plain
// path components — confirm against its definition.
func TestGetFileAndPath(t *testing.T) {
	args := map[string]map[string]string{
		"bin/file": {
			"file": "file*",
			"path": "*bin",
		},
		"posix": {
			"file": "*posix*",
			"path": "",
		},
		"/usr/bin/bash": {
			"file": "bash*",
			"path": "*/usr/bin",
		},
	}
	for input, output := range args {
		f, p := getFileAndPath(input)
		if output["file"] != f {
			t.Fatalf("expected %q, got %q", output["file"], f)
		}
		if output["path"] != p {
			t.Fatalf("expected %q, got %q", output["path"], p)
		}
	}
}

// TestParseHTML feeds a canned search-results table through goquery and
// checks that getFilesInfo extracts exactly one correctly-populated fileInfo.
func TestParseHTML(t *testing.T) {
	searchResults := `
<table class="pure-table table-striped table-bordered table-condensed" data-toggle="table">
<tbody>
<tr>
<th>File</th>
<th>Package</th>
<th>Branch</th>
<th>Repository</th>
<th>Architecture</th>
</tr>
<tr>
<td>/usr/lib/php7/modules/posix.so</td>
<td><a href="/package/edge/testing/armhf/php7-posix">php7-posix</a></td>
<td>edge</td>
<td>testing</td>
<td>armhf</td>
</tr>
</tbody>
</table>
`
	doc, _ := goquery.NewDocumentFromReader(strings.NewReader(searchResults))
	files := getFilesInfo(doc)
	if len(files) != 1 {
		t.Fatalf("expected %d, got %d", 1, len(files))
	}
	expectedFile := fileInfo{
		path:   "/usr/lib/php7/modules/posix.so",
		pkg:    "php7-posix",
		branch: "edge",
		repo:   "testing",
		arch:   "armhf"}
	if files[0] != expectedFile {
		t.Fatalf("expected %v, got %v", expectedFile, files[0])
	}
}
import React from "react";
import {Switch, Route} from "react-router-dom";

import API from '../apis';

import TableTgRoleList from '../table_list';
import FormTgRoleAdd from '../form_add';
import FormTgRoleEdit from '../form_edit';
import ViewTgRoleInfo from '../view_info';

import TableTgUserRoleList from '../../tg_user_role/table_list';
import FormTgUserRoleAdd from '../../tg_user_role/form_add';
import FormTgUserRoleEdit from '../../tg_user_role/form_edit';

import TableTgRolePermissionList from '../../tg_role_permission/table_list';
import FormTgRolePermissionAdd from '../../tg_role_permission/form_add';
import FormTgRolePermissionEdit from '../../tg_role_permission/form_edit';

// Route table for the "tgRole" section: role CRUD plus nested member
// (tg_user_role) and permission (tg_role_permission) management screens.
// All routes are `exact`, and the static "/tgRole/add" is declared before
// the dynamic "/tgRole/:id" so "add" is not captured as an id.
export default function RouteTgRole() {
    return (
        <Switch>
            <Route path="/tgRole" exact>
                <TableTgRoleList />
            </Route>
            <Route path="/tgRole/add" exact>
                <FormTgRoleAdd />
            </Route>
            <Route path="/tgRole/:id" exact>
                <ViewTgRoleInfo />
            </Route>
            <Route path="/tgRole/:selectedObjectId/edit" exact>
                <FormTgRoleEdit />
            </Route>
            {/* Members of a role; the roleId column is hidden because it is
                fixed by the route. */}
            <Route path="/tgRole/:id/members" exact render={(renderProps) => (
                <TableTgUserRoleList
                    where={{roleId: renderProps.match.params.id}}
                    excludeColumns={["roleId"]}
                />
            )} />
            <Route path="/tgRole/:id/members/add" exact render={(renderProps) => (
                <FormTgUserRoleAdd
                    fixedParams={{roleId: renderProps.match.params.id}}
                    parent={{
                        query: API.GET_BY_ID,
                        variables: { id: renderProps.match.params.id},
                    }}
                />
            )} />
            <Route path="/tgRole/:id/members/:selectedObjectId/edit" exact render={(renderProps) => (
                <FormTgUserRoleEdit
                    fixedParams={{roleId: renderProps.match.params.id}}
                    parent={{
                        query: API.GET_BY_ID,
                        variables: { id: renderProps.match.params.id},
                    }}
                />
            )} />
            {/* Permissions attached to a role. */}
            <Route path="/tgRole/:id/permission" exact render={(renderProps) => (
                <TableTgRolePermissionList
                    where={{roleId: renderProps.match.params.id}}
                    excludeColumns={["roleId"]}
                />
            )} />
            <Route path="/tgRole/:id/permission/add" exact render={(renderProps) => (
                <FormTgRolePermissionAdd
                    fixedParams={{roleId: renderProps.match.params.id}}
                    parent={{
                        query: API.GET_BY_ID,
                        variables: { id: renderProps.match.params.id},
                    }}
                />
            )} />
            <Route path="/tgRole/:id/permission/:selectedObjectId/edit" exact render={(renderProps) => (
                <FormTgRolePermissionEdit
                    fixedParams={{roleId: renderProps.match.params.id}}
                    parent={{
                        query: API.GET_BY_ID,
                        variables: { id: renderProps.match.params.id},
                    }}
                />
            )} />
        </Switch>
    );
}
(defpackage :aoc-20-utils
  (:use :cl)
  (:export :input-file-pathname)
  (:documentation "Common convenience utilities"))

(in-package :aoc-20-utils)

;; Builds the relative filename for day N's puzzle input, zero-padded to
;; two digits (e.g. 7 -> "inputs/day07.txt").
(defun expand-filename (n)
  (format nil "inputs/day~2,'0d.txt" n))

;; Resolves day N's input file against the root of the AOC-20 ASDF system.
(defun input-file-pathname (n)
  (asdf:system-relative-pathname 'aoc-20 (expand-filename n)))
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.fineract.cn.portfolio.service.internal.command.handler;

import org.apache.fineract.cn.portfolio.api.v1.domain.TaskInstance;
import org.apache.fineract.cn.portfolio.api.v1.events.EventConstants;
import org.apache.fineract.cn.portfolio.api.v1.events.TaskInstanceEvent;
import org.apache.fineract.cn.portfolio.service.internal.command.ChangeTaskInstanceCommand;
import org.apache.fineract.cn.portfolio.service.internal.command.ExecuteTaskInstanceCommand;
import org.apache.fineract.cn.portfolio.service.internal.mapper.TaskInstanceMapper;
import org.apache.fineract.cn.portfolio.service.internal.repository.TaskInstanceEntity;
import org.apache.fineract.cn.portfolio.service.internal.repository.TaskInstanceRepository;
import java.time.Clock;
import java.time.LocalDateTime;
import org.apache.fineract.cn.api.util.UserContextHolder;
import org.apache.fineract.cn.command.annotation.Aggregate;
import org.apache.fineract.cn.command.annotation.CommandHandler;
import org.apache.fineract.cn.command.annotation.CommandLogLevel;
import org.apache.fineract.cn.command.annotation.EventEmitter;
import org.apache.fineract.cn.lang.ServiceException;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Command handlers for task-instance updates: replacing a task instance's
 * contents and recording/clearing its execution.
 *
 * @author Myrle Krantz
 */
@SuppressWarnings("unused")
@Aggregate
public class TaskInstanceCommandHandler {
  private final TaskInstanceRepository taskInstanceRepository;

  @Autowired
  public TaskInstanceCommandHandler(TaskInstanceRepository taskInstanceRepository) {
    this.taskInstanceRepository = taskInstanceRepository;
  }

  /**
   * Replaces the stored task instance identified by (product, case, task)
   * with the command's payload.
   *
   * @throws ServiceException (not found) when no such task instance exists.
   */
  @CommandHandler(logStart = CommandLogLevel.INFO, logFinish = CommandLogLevel.INFO)
  @EventEmitter(selectorName = EventConstants.SELECTOR_NAME, selectorValue = EventConstants.PUT_TASK_INSTANCE)
  public TaskInstanceEvent process(final ChangeTaskInstanceCommand changeTaskInstanceCommand) {
    final String productIdentifier = changeTaskInstanceCommand.getProductIdentifier();
    final String caseIdentifier = changeTaskInstanceCommand.getCaseIdentifier();
    final TaskInstance taskInstance = changeTaskInstanceCommand.getInstance();

    final TaskInstanceEntity existingTaskInstance
        = taskInstanceRepository.findByProductIdAndCaseIdAndTaskId(productIdentifier, caseIdentifier, taskInstance.getTaskIdentifier())
        .orElseThrow(() -> ServiceException.notFound("Task instance ''{0}.{1}.{2}'' not found.",
            productIdentifier, caseIdentifier, taskInstance.getTaskIdentifier()));

    // Copy the payload over the existing entity so the JPA identity is kept.
    final TaskInstanceEntity taskInstanceEntity =
        TaskInstanceMapper.mapOverOldEntity(taskInstance, existingTaskInstance);

    taskInstanceRepository.save(taskInstanceEntity);

    return new TaskInstanceEvent(
        changeTaskInstanceCommand.getProductIdentifier(),
        changeTaskInstanceCommand.getCaseIdentifier(),
        changeTaskInstanceCommand.getInstance().getTaskIdentifier());
  }

  /**
   * Marks a task instance as executed (stamping the current UTC time and the
   * calling user) or clears the execution when {@code executed} is false.
   *
   * @throws ServiceException (not found) when no such task instance exists.
   */
  @CommandHandler(logStart = CommandLogLevel.INFO, logFinish = CommandLogLevel.INFO)
  @EventEmitter(selectorName = EventConstants.SELECTOR_NAME, selectorValue = EventConstants.PUT_TASK_INSTANCE_EXECUTION)
  public TaskInstanceEvent process(final ExecuteTaskInstanceCommand changeTaskInstanceExecutionCommand) {
    final String productIdentifier = changeTaskInstanceExecutionCommand.getProductIdentifier();
    final String caseIdentifier = changeTaskInstanceExecutionCommand.getCaseIdentifier();
    final String taskIdentifier = changeTaskInstanceExecutionCommand.getTaskIdentifier();
    final boolean executed = changeTaskInstanceExecutionCommand.getExecuted();

    final TaskInstanceEntity taskInstanceEntity
        = taskInstanceRepository.findByProductIdAndCaseIdAndTaskId(productIdentifier, caseIdentifier, taskIdentifier)
        .orElseThrow(() -> ServiceException.notFound("Task instance ''{0}.{1}.{2}'' not found.",
            productIdentifier, caseIdentifier, taskIdentifier));

    if (executed) {
      taskInstanceEntity.setExecutedOn(LocalDateTime.now(Clock.systemUTC()));
      taskInstanceEntity.setExecutedBy(UserContextHolder.checkedGetUser());
    }
    else {
      taskInstanceEntity.setExecutedOn(null);
      taskInstanceEntity.setExecutedBy(null);
    }

    taskInstanceRepository.save(taskInstanceEntity);

    return new TaskInstanceEvent(productIdentifier, caseIdentifier, taskIdentifier);
  }
}
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;

/// <summary>
/// Fades a full-screen Image to transition between views. "Fade out" covers
/// the screen (alpha rises to the image's authored opacity); "fade in"
/// reveals it (alpha falls to 0) and then deactivates the image.
/// </summary>
public class Transition : MonoBehaviour
{
    public bool IsFading { get; private set; }

    [SerializeField] private Image target = null;
    // Alpha change per unscaled second.
    [SerializeField] private float speed = 1f;
    // When true the overlay starts visible instead of being hidden in Awake.
    [SerializeField] private bool fadedOutByDefault = false;

    private Coroutine fadeCoroutine;
    // Opacity the overlay returns to when fading out; captured from the scene.
    private float defaultOpacity;

    protected virtual void Awake()
    {
        defaultOpacity = target.color.a;

        if (!fadedOutByDefault)
        {
            SetOpacity(target, 0f);
            target.gameObject.SetActive(false);
        }
    }

    /// <summary>
    /// Fades out, runs the method
    /// and fades back in.
    /// </summary>
    public void RunWithFade(Action CallBack)
    {
        StopFade();
        StartCoroutine(FadeOutAndIn(CallBack));
    }

    // Fades the overlay to transparent, then deactivates its GameObject.
    public Coroutine FadeIn()
    {
        StopFade();
        fadeCoroutine = StartCoroutine(FadeInAndDeactivate());
        return fadeCoroutine;

        IEnumerator FadeInAndDeactivate()
        {
            yield return StartCoroutine(Fade(0f));
            target.gameObject.SetActive(false);
        }
    }

    // Activates the overlay and fades it up to its default opacity.
    public Coroutine FadeOut()
    {
        StopFade();
        target.gameObject.SetActive(true);
        fadeCoroutine = StartCoroutine(Fade(defaultOpacity));
        return fadeCoroutine;
    }

    // Cancels a running fade, if any (coroutine handle is not cleared).
    private void StopFade()
    {
        if (fadeCoroutine != null)
        {
            StopCoroutine(fadeCoroutine);
        }
    }

    private IEnumerator FadeOutAndIn(Action Callback = null)
    {
        yield return FadeOut();
        Callback?.Invoke();
        // Brief hold so the callback's changes settle before revealing.
        yield return new WaitForSecondsRealtime(0.05f);
        yield return FadeIn();
    }

    // Steps alpha toward targetAlpha each frame using unscaled time.
    // MoveTowards lands exactly on the target, so the != exit condition
    // terminates despite being a float comparison.
    private IEnumerator Fade(float targetAlpha)
    {
        IsFading = true;
        float alpha = target.color.a;
        while (alpha != targetAlpha)
        {
            alpha = Mathf.MoveTowards(
                alpha,
                targetAlpha,
                Time.unscaledDeltaTime * speed);
            SetOpacity(target, alpha);
            yield return null;
        }
        IsFading = false;
    }

    private void SetOpacity(Image target, float alpha)
    {
        target.color = new Color(
            target.color.r,
            target.color.g,
            target.color.b,
            alpha);
    }
}
!======================================================================== ! ! S P E C F E M 2 D Version 7 . 0 ! -------------------------------- ! ! Main historical authors: Dimitri Komatitsch and Jeroen Tromp ! Princeton University, USA ! and CNRS / University of Marseille, France ! (there are currently many more authors!) ! (c) Princeton University and CNRS / University of Marseille, April 2014 ! ! This software is a computer program whose purpose is to solve ! the two-dimensional viscoelastic anisotropic or poroelastic wave equation ! using a spectral-element method (SEM). ! ! This software is governed by the CeCILL license under French law and ! abiding by the rules of distribution of free software. You can use, ! modify and/or redistribute the software under the terms of the CeCILL ! license as circulated by CEA, CNRS and Inria at the following URL ! "http://www.cecill.info". ! ! As a counterpart to the access to the source code and rights to copy, ! modify and redistribute granted by the license, users are provided only ! with a limited warranty and the software's author, the holder of the ! economic rights, and the successive licensors have only limited ! liability. ! ! In this respect, the user's attention is drawn to the risks associated ! with loading, using, modifying and/or developing or reproducing the ! software by the user in light of its specific status of free software, ! that may mean that it is complicated to manipulate, and that also ! therefore means that it is reserved for developers and experienced ! professionals having in-depth computer knowledge. Users are therefore ! encouraged to load and test the software's suitability as regards their ! requirements in conditions enabling the security of their systems and/or ! data to be ensured and, more generally, to use and operate it in the ! same conditions as regards security. ! ! The full text of the license is available in file "LICENSE". ! 
!========================================================================
! XSUM_KERNELS_ASCII
!
! USAGE
!   NPROC bin/xsum_kernels_ascii INPUT_FILE OUTPUT_DIR
!
!
! COMMAND LINE ARGUMENTS
!   INPUT_FILE  - text file containing list of kernel directories
!   OUTPUT_PATH - directory to which summed kernels are written
!
!
! DESCRIPTION
!   Sums kernels from directories specified in INPUT_FILE.
!   Writes the resulting sums to OUTPUT_DIR.
!
!   INPUT_FILE is a text file containing a list of absolute or relative paths
!   to kernel directories, one directory per line. Each kernel directory must
!   contain the following ASCII files, which are generated by SPECFEM2D when
!   adjoint simulation and ascii kernel options are specified:
!
!     proc000000_rhop_alpha_beta_kernel.dat
!     proc000000_rho_kappa_mu_kernel.dat
!
!   This routine is modeled after xsum_kernels in SPECFEM3D, in so far as
!   similarities between 2D and 3D packages allow.
!
!   Fixes: the GLL coordinates are now read into `coord` (they were
!   previously read into throwaway variables, so the output files carried
!   uninitialized coordinates); the duplicated startup banner was removed.

program sum_kernels_ascii

  use postprocess_par, only: MAX_STRING_LEN, MAX_KERNEL_PATHS, MAX_KERNEL_NAMES, &
                             IIN

  implicit none

  integer, parameter :: NARGS = 2

  integer :: ipath, npath, iker, nlines, i, j
  double precision, allocatable, dimension(:) :: kernel1, kernel2, kernel3
  double precision, allocatable, dimension(:) :: kernel_sum1, kernel_sum2, kernel_sum3
  ! coord(1,:) / coord(2,:) hold the x/z GLL coordinates from the input files
  double precision, allocatable, dimension(:,:) :: coord
  character(len=MAX_STRING_LEN) :: input_file, output_dir, kernel_name
  character(len=MAX_STRING_LEN) :: kernel_paths(MAX_KERNEL_PATHS)
  character(len=MAX_STRING_LEN) :: filename, arg(2), line
  integer :: ier

  write(*,*) 'Running XSUM_KERNELS_ASCII'
  write(*,*)

  ! check command line
  if (command_argument_count() /= NARGS) then
    print *, 'USAGE: bin/xsum_kernels_ascii INPUT_FILE OUTPUT_DIR'
    print *, ''
    stop 'Please check command line arguments'
  endif

  ! parse command line arguments
  do i = 1, NARGS
    call get_command_argument(i, arg(i), status=ier)
  enddo
  read(arg(1),'(a)') input_file
  read(arg(2),'(a)') output_dir

  ! parse paths from INPUT_FILE
  npath = 0
  open(unit = IIN, file = trim(input_file), status = 'old', iostat = ier)
  if (ier /= 0) then
    print *,'Error opening ',trim(input_file)
    stop 'Please check command line argument: INPUT_FILE'
  endif
  do while (1 == 1)
    read(IIN,'(a)',iostat=ier) line
    if (ier /= 0) exit
    npath = npath + 1
    if (npath > MAX_KERNEL_PATHS) stop 'Error number of paths exceeds MAX_KERNEL_PATHS'
    kernel_paths(npath) = line
  enddo
  close(IIN)

  write(*,*) 'Combining arrays from ',npath,' paths'
  write(*,*)

  ! size the work arrays from the first path's kernel file
  call get_number_gll_points((kernel_paths(1)), nlines)
  allocate( coord(2,nlines) )
  allocate( kernel1(nlines) )
  allocate( kernel2(nlines) )
  allocate( kernel3(nlines) )
  allocate( kernel_sum1(nlines) )
  allocate( kernel_sum2(nlines) )
  allocate( kernel_sum3(nlines) )

  kernel1(:) = 0.0d0
  kernel2(:) = 0.0d0
  kernel3(:) = 0.0d0
  kernel_sum1(:) = 0.0d0
  kernel_sum2(:) = 0.0d0
  kernel_sum3(:) = 0.0d0

  ! loop over the two kernel file types
  do iker = 1, 2

    if (iker == 1) write(kernel_name,'(a)') 'proc000000_rhop_alpha_beta_kernel.dat'
    if (iker == 2) write(kernel_name,'(a)') 'proc000000_rho_kappa_mu_kernel.dat'

    ! sum kernels
    do ipath = 1, npath

      filename = trim(kernel_paths(ipath)) //'/'// trim(kernel_name)
      open(unit=3,file=filename, status='old', action='read')
      do j = 1,nlines
        ! BUG FIX: coordinates were previously read into dummy variables
        ! while uninitialized `coord` was written to the output files.
        read(3,*) coord(1,j), coord(2,j), kernel1(j), kernel2(j), kernel3(j)
      enddo
      close(3)

      if (ipath == 1) then
        kernel_sum1 = kernel1
        kernel_sum2 = kernel2
        kernel_sum3 = kernel3
      else
        kernel_sum1(:) = kernel_sum1(:) + kernel1(:)
        kernel_sum2(:) = kernel_sum2(:) + kernel2(:)
        kernel_sum3(:) = kernel_sum3(:) + kernel3(:)
      endif

    enddo

    ! save result
    filename = trim(output_dir) //'/'// trim(kernel_name)
    open(unit=4,file=filename,status='unknown',action='write')
    do j = 1, nlines
      write(4,'(5e11.3)') coord(1,j),coord(2,j),kernel_sum1(j),kernel_sum2(j),kernel_sum3(j)
    enddo
    close(4)

  enddo

  write(*,*) 'Finished writing kernels '
  write(*,*)

end program sum_kernels_ascii

!------------------------------------------------------------------------------
! Counts the data lines (GLL points) in the first kernel file found under
! KERNEL_PATH; used by the main program to size its work arrays.
subroutine get_number_gll_points(kernel_path, nlines)

  use postprocess_par, only: MAX_STRING_LEN, MAX_LINES

  implicit none
  double precision :: dummy1, dummy2, dummy3, dummy4, dummy5
  character(len=MAX_STRING_LEN) :: kernel_path, filename
  integer :: j, ios, nlines

  filename = trim(kernel_path)//'/'//'proc000000_rhop_alpha_beta_kernel.dat'
  open(unit=3,file=filename,status='old',action='read')

  nlines = 0
  do j = 1, MAX_LINES
    read(3,*,iostat=ios) dummy1, dummy2, dummy3, dummy4, dummy5
    if (ios /= 0) exit
    nlines = nlines + 1
  enddo
  close(3)

end subroutine get_number_gll_points
# Demonstrates Ruby's format operator (%) with named %{...} placeholders.
formatter = "%{first} %{second} %{third} %{fourth}"

# Each hash below is substituted into the template and printed, one per line.
[
  # Integers need no quoting.
  { first: 1, second: 2, third: 3, fourth: 4 },
  # Text values are ordinary strings ("" or '').
  { first: "one", second: "two", third: "three", fourth: "four" },
  # Booleans are understood by Ruby as bare values without "".
  { first: true, second: false, third: true, fourth: false },
  # The template string itself can be substituted into itself.
  { first: formatter, second: formatter, third: formatter, fourth: formatter },
  # Values may be spread across multiple lines for readability.
  { first: "I had this thing.",
    second: "That you could type up right.",
    third: "But it didn't sing.",
    fourth: "So I said goodnight." }
].each { |values| puts formatter % values }
class VampPluginSdk < Formula desc "audio processing plugin system sdk" homepage "http://www.vamp-plugins.org" url "https://code.soundsoftware.ac.uk/attachments/download/1520/vamp-plugin-sdk-2.6.tar.gz" sha256 "d0d3578137ac0c1e63f31561081a8d61da526a81152bc1dc9383b629bc07f85f" head "https://code.soundsoftware.ac.uk/hg/vamp-plugin-sdk", :using => :hg stable do # activate osx specific items in Makefile.in # https://code.soundsoftware.ac.uk/issues/1473 patch :p1, :DATA end bottle do cellar :any sha256 "3c1665b45ed9060ddcc00036b760e48e2d8f884877a8976bfb5d5bb8b8dc09b0" => :el_capitan sha256 "9f9faa350b6a0072264107506a243cc627459da143e41b1cde8af2cad1b52079" => :yosemite sha256 "86a5d017be8bccf01f43b6e99fb2f441bde4dc6edff36837d58467926563e4f7" => :mavericks end depends_on "automake" => :build depends_on "pkg-config" => :build depends_on "libsndfile" depends_on "libogg" depends_on "flac" def install system "./configure", "--disable-debug", "--disable-dependency-tracking", "--prefix=#{prefix}" system "make", "install" end test do (testpath/"test.cpp").write <<-EOS.undent #include "vamp-sdk/Plugin.h" #include <vamp-sdk/PluginAdapter.h> class MyPlugin : public Vamp::Plugin { }; const VampPluginDescriptor * vampGetPluginDescriptor(unsigned int version, unsigned int index) { return NULL; } EOS system ENV.cxx, "test.cpp", "-I#{include}", "-Wl,-dylib", "-o", "test.dylib" assert_match /Usage:/, shell_output("#{bin}/vamp-rdf-template-generator 2>&1", 2) cp "#{lib}/vamp/vamp-example-plugins.so", testpath/"vamp-example-plugins.dylib" ENV["VAMP_PATH"]=testpath assert_match /amplitudefollower/, shell_output("#{bin}/vamp-simple-host -l") end end __END__ diff -r 1522e2f6d700 -r 3bfc44d26963 Makefile.in --- a/Makefile.in Fri Sep 04 13:48:28 2015 +0100 +++ b/Makefile.in Fri Dec 11 12:59:21 2015 +0000 @@ -104,16 +104,32 @@ PLUGIN_LDFLAGS = $(DYNAMIC_LDFLAGS) -Wl,--version-script=build/vamp-plugin.map -## For OS/X with g++: -#DYNAMIC_LDFLAGS = -dynamiclib -#PLUGIN_LDFLAGS = $(DYNAMIC_LDFLAGS) 
-#SDK_DYNAMIC_LDFLAGS = $(DYNAMIC_LDFLAGS) -#HOSTSDK_DYNAMIC_LDFLAGS = $(DYNAMIC_LDFLAGS) +# Adapt install details when found to be running on OSX (thanks to David O) +uname_S = $(shell uname -s) +ifeq ($(uname_S),Darwin) + + DYNAMIC_LDFLAGS = -dynamiclib + SDK_DYNAMIC_LDFLAGS = $(DYNAMIC_LDFLAGS) + HOSTSDK_DYNAMIC_LDFLAGS = $(DYNAMIC_LDFLAGS) + PLUGIN_LDFLAGS = $(DYNAMIC_LDFLAGS) -exported_symbols_list build/vamp-plugin.list + + INSTALL_HOSTSDK_LIBNAME = libvamp-hostsdk.3.6.0.dylib + INSTALL_HOSTSDK_LINK_ABI = libvamp-hostsdk.3.dylib + +# The OS X linker doesn't allow you to request static linkage when +# linking by library search path, if the same library name is found in +# both static and dynamic versions. So if we install both static and +# dynamic, the static library will never be used. That's OK for the +# host SDK, but we do want plugins to get static linkage of the plugin +# SDK. So install the dynamic version under a different name. + INSTALL_SDK_LIBNAME = libvamp-sdk-dynamic.2.6.0.dylib + INSTALL_SDK_LINK_ABI = libvamp-sdk-dynamic.2.dylib + +endif ### End of user-serviceable parts - API_HEADERS = \ $(APIDIR)/vamp.h diff -r 1522e2f6d700 -r 3bfc44d26963 build/update-version.sh --- a/build/update-version.sh Fri Sep 04 13:48:28 2015 +0100 +++ b/build/update-version.sh Fri Dec 11 12:59:21 2015 +0000 @@ -42,6 +42,12 @@ $p 's/(INSTALL_SDK_LINK_ABI\s*=\s*libvamp-sdk.so).*/$1.'$sdkmajor'/' \ Makefile.in +$p 's/(INSTALL_SDK_LIBNAME\s*=\s*libvamp-sdk-dynamic).*.dylib/$1.'$sdkmajor'.'$sdkminor'.0.dylib/' \ + Makefile.in + +$p 's/(INSTALL_SDK_LINK_ABI\s*=\s*libvamp-sdk-dynamic).*.dylib/$1.'$sdkmajor'.dylib/' \ + Makefile.in + $p 's/(current)=.*/$1='$sdkmajor'/' \ build/libvamp-sdk.la.in @@ -54,6 +60,12 @@ $p 's/(INSTALL_HOSTSDK_LINK_ABI\s*=\s*libvamp-hostsdk.so).*/$1.'$hostmajor'/' \ Makefile.in +$p 's/(INSTALL_HOSTSDK_LIBNAME\s*=\s*libvamp-hostsdk).*.dylib/$1.'$hostmajor'.'$hostminor'.0.dylib/' \ + Makefile.in + +$p 
's/(INSTALL_HOSTSDK_LINK_ABI\s*=\s*libvamp-hostsdk).*.dylib/$1.'$hostmajor'.dylib/' \ + Makefile.in + $p 's/(current)=.*/$1='$hostmajor'/' \ build/libvamp-hostsdk.la.in diff -r 1522e2f6d700 -r 3bfc44d26963 configure --- a/configure Fri Sep 04 13:48:28 2015 +0100 +++ b/configure Fri Dec 11 12:59:21 2015 +0000 @@ -1,6 +1,6 @@ #! /bin/sh # Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.69 for vamp-plugin-sdk 2.5. +# Generated by GNU Autoconf 2.69 for vamp-plugin-sdk 2.6. # # Report bugs to <[email protected]>. # @@ -580,8 +580,8 @@ # Identity of this package. PACKAGE_NAME='vamp-plugin-sdk' PACKAGE_TARNAME='vamp-plugin-sdk' -PACKAGE_VERSION='2.5' -PACKAGE_STRING='vamp-plugin-sdk 2.5' +PACKAGE_VERSION='2.6' +PACKAGE_STRING='vamp-plugin-sdk 2.6' PACKAGE_BUGREPORT='[email protected]' PACKAGE_URL='' @@ -1243,7 +1243,7 @@ # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF -\`configure' configures vamp-plugin-sdk 2.5 to adapt to many kinds of systems. +\`configure' configures vamp-plugin-sdk 2.6 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... @@ -1304,7 +1304,7 @@ if test -n "$ac_init_help"; then case $ac_init_help in - short | recursive ) echo "Configuration of vamp-plugin-sdk 2.5:";; + short | recursive ) echo "Configuration of vamp-plugin-sdk 2.6:";; esac cat <<\_ACEOF @@ -1402,7 +1402,7 @@ test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF -vamp-plugin-sdk configure 2.5 +vamp-plugin-sdk configure 2.6 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. @@ -1651,7 +1651,7 @@ This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. -It was created by vamp-plugin-sdk $as_me 2.5, which was +It was created by vamp-plugin-sdk $as_me 2.6, which was generated by GNU Autoconf 2.69. 
Invocation command line was $ $0 $@ @@ -4505,7 +4505,7 @@ # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" -This file was extended by vamp-plugin-sdk $as_me 2.5, which was +This file was extended by vamp-plugin-sdk $as_me 2.6, which was generated by GNU Autoconf 2.69. Invocation command line was CONFIG_FILES = $CONFIG_FILES @@ -4558,7 +4558,7 @@ cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ -vamp-plugin-sdk config.status 2.5 +vamp-plugin-sdk config.status 2.6 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\"
using System;
using Fuzzy;
using Xunit;

namespace Chronology
{
    /// <summary>
    /// Tests for the numeric-to-TimeSpan extension methods (Days, Hours,
    /// Milliseconds, Minutes, Seconds, Ticks). Each nested class covers one
    /// extension across the numeric types it accepts, comparing the result
    /// to the matching TimeSpan.From* factory. `fuzzy` comes from the
    /// TestFixture base and supplies randomized inputs.
    /// </summary>
    public class TimeSpanExtensionsTest: TestFixture
    {
        // double/long/int/short inputs for Days().
        public class Days: TimeSpanExtensionsTest
        {
            [Fact]
            public void ReturnsTimeSpanFromDouble() {
                double value = fuzzy.TimeSpan().TotalDays;
                var expected = TimeSpan.FromDays(value);
                TimeSpan actual = value.Days();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromLong() {
                var value = (long)fuzzy.TimeSpan().TotalDays;
                var expected = TimeSpan.FromDays(value);
                TimeSpan actual = value.Days();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromInt() {
                var value = (int)fuzzy.TimeSpan().TotalDays;
                var expected = TimeSpan.FromDays(value);
                TimeSpan actual = value.Days();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromShort() {
                var value = (short)fuzzy.TimeSpan().TotalDays;
                var expected = TimeSpan.FromDays(value);
                TimeSpan actual = value.Days();
                Assert.Equal(expected, actual);
            }
        }

        // double/long/int/short inputs for Hours().
        public class Hours: TimeSpanExtensionsTest
        {
            [Fact]
            public void ReturnsTimeSpanFromDouble() {
                double value = fuzzy.TimeSpan().TotalHours;
                var expected = TimeSpan.FromHours(value);
                TimeSpan actual = value.Hours();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromLong() {
                var value = (long)fuzzy.TimeSpan().TotalHours;
                var expected = TimeSpan.FromHours(value);
                TimeSpan actual = value.Hours();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromInt() {
                var value = (int)fuzzy.TimeSpan().TotalHours;
                var expected = TimeSpan.FromHours(value);
                TimeSpan actual = value.Hours();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromShort() {
                var value = (short)fuzzy.TimeSpan().TotalHours;
                var expected = TimeSpan.FromHours(value);
                TimeSpan actual = value.Hours();
                Assert.Equal(expected, actual);
            }
        }

        // double/long/int/short inputs for Milliseconds().
        public class Milliseconds: TimeSpanExtensionsTest
        {
            [Fact]
            public void ReturnsTimeSpanFromDouble() {
                double value = fuzzy.TimeSpan().TotalMilliseconds;
                var expected = TimeSpan.FromMilliseconds(value);
                TimeSpan actual = value.Milliseconds();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromLong() {
                var value = (long)fuzzy.TimeSpan().TotalMilliseconds;
                var expected = TimeSpan.FromMilliseconds(value);
                TimeSpan actual = value.Milliseconds();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromInt() {
                var value = (int)fuzzy.TimeSpan().TotalMilliseconds;
                var expected = TimeSpan.FromMilliseconds(value);
                TimeSpan actual = value.Milliseconds();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromShort() {
                var value = (short)fuzzy.TimeSpan().TotalMilliseconds;
                var expected = TimeSpan.FromMilliseconds(value);
                TimeSpan actual = value.Milliseconds();
                Assert.Equal(expected, actual);
            }
        }

        // double/long/int/short inputs for Minutes().
        public class Minutes: TimeSpanExtensionsTest
        {
            [Fact]
            public void ReturnsTimeSpanFromDouble() {
                double value = fuzzy.TimeSpan().TotalMinutes;
                var expected = TimeSpan.FromMinutes(value);
                TimeSpan actual = value.Minutes();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromLong() {
                var value = (long)fuzzy.TimeSpan().TotalMinutes;
                var expected = TimeSpan.FromMinutes(value);
                TimeSpan actual = value.Minutes();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromInt() {
                var value = (int)fuzzy.TimeSpan().TotalMinutes;
                var expected = TimeSpan.FromMinutes(value);
                TimeSpan actual = value.Minutes();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromShort() {
                var value = (short)fuzzy.TimeSpan().TotalMinutes;
                var expected = TimeSpan.FromMinutes(value);
                TimeSpan actual = value.Minutes();
                Assert.Equal(expected, actual);
            }
        }

        // double/long/int/short inputs for Seconds().
        public class Seconds: TimeSpanExtensionsTest
        {
            [Fact]
            public void ReturnsTimeSpanFromDouble() {
                double value = fuzzy.TimeSpan().TotalSeconds;
                var expected = TimeSpan.FromSeconds(value);
                TimeSpan actual = value.Seconds();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromLong() {
                var value = (long)fuzzy.TimeSpan().TotalSeconds;
                var expected = TimeSpan.FromSeconds(value);
                TimeSpan actual = value.Seconds();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromInt() {
                var value = (int)fuzzy.TimeSpan().TotalSeconds;
                var expected = TimeSpan.FromSeconds(value);
                TimeSpan actual = value.Seconds();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromShort() {
                var value = (short)fuzzy.TimeSpan().TotalSeconds;
                var expected = TimeSpan.FromSeconds(value);
                TimeSpan actual = value.Seconds();
                Assert.Equal(expected, actual);
            }
        }

        // long/int/short inputs for Ticks() (no double overload: ticks are integral).
        public class Ticks: TimeSpanExtensionsTest
        {
            [Fact]
            public void ReturnsTimeSpanFromLong() {
                long value = fuzzy.Int64();
                var expected = TimeSpan.FromTicks(value);
                TimeSpan actual = value.Ticks();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromInt() {
                int value = fuzzy.Int32();
                var expected = TimeSpan.FromTicks(value);
                TimeSpan actual = value.Ticks();
                Assert.Equal(expected, actual);
            }

            [Fact]
            public void ReturnsTimeSpanFromShort() {
                short value = fuzzy.Int16();
                var expected = TimeSpan.FromTicks(value);
                TimeSpan actual = value.Ticks();
                Assert.Equal(expected, actual);
            }
        }
    }
}
@using Microsoft.AspNetCore.Localization
@addTagHelper *, Microsoft.AspNetCore.Mvc.TagHelpers
@{
    @* Resolve the request culture for the <html lang> attribute.
       Fix: Get<IRequestCultureFeature>() returns null when request
       localization middleware is not registered, which crashed the layout;
       fall back to the invariant "en" in that case. *@
    var culture = Context.Features.Get<IRequestCultureFeature>()?.RequestCulture.Culture.Name ?? "en";
}
<!DOCTYPE HTML>
<html lang="@culture">
<head>
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>@ViewData["title"]</title>
    @* Optional per-page head content (styles, meta). *@
    @if (IsSectionDefined("AddToHead"))
    {
        @await RenderSectionAsync("AddToHead", false)
    }
</head>
<body>
    @RenderBody()
    <partial name="Partials/Footer"/>
</body>
@* Optional per-page scripts, rendered after the body. *@
@if (IsSectionDefined("AddToScripts"))
{
    @await RenderSectionAsync("AddToScripts", false)
}
</html>
# ORC-480: replace the wide unique index on worklogs with a narrower
# unique index on [task_id, date].
class Orc480AddIndexToWorklog < ActiveRecord::Migration[5.1]
  def up
    remove_index :worklogs, %i[task_id user_id project_id date]
    add_index :worklogs, %i[task_id date], unique: true
  end

  # Fix: was misspelled `dowm`, so ActiveRecord saw no `down` method and the
  # migration raised IrreversibleMigration on rollback instead of running this.
  def down
    remove_index :worklogs, %i[task_id date]
    add_index :worklogs, %i[task_id user_id project_id date], unique: true
  end
end
/* Generated by RuntimeBrowser
   Image: /System/Library/PrivateFrameworks/NeutrinoCore.framework/NeutrinoCore

   NOTE(review): header recovered by class-dumping a private framework;
   member semantics are inferred from names only — confirm against the
   actual NeutrinoCore behavior before relying on them.
 */

// Render job that surfaces video properties from a Neutrino render pipeline.
@interface NUVideoPropertiesJob : NURenderJob {
    <NUVideoProperties> * _videoProperties;  // presumably populated while the job runs
}

- (void).cxx_destruct;
// Pre-render setup; arg1 presumably receives an NSError on failure.
- (bool)prepare:(out id*)arg1;
// Job output; presumably returns _videoProperties.
- (id)result;
- (bool)wantsCompleteStage;
- (bool)wantsRenderStage;

@end
// propertyIsEnumerable: a plain assigned own property is enumerable, while a
// property defined via Object.defineProperty with enumerable:false is not.
var o = { p1: 1 };
var descriptor = {
    value: 2,
    writable: true,
    enumerable: false,
    configurable: true
};
var x = Object.defineProperty(o, "p2", descriptor);
var __result1 = x.propertyIsEnumerable("p1");
var __expect1 = true;
var __result2 = x.propertyIsEnumerable("p2");
var __expect2 = false;
/*
 * The Optimal Kick Hamiltonian split evaluates every interaction in the system
 * at the optimal split time step, thereby using the least amount of force
 * evaluations to evolve the system.
 */
#include <tgmath.h>
#include <stdio.h>
#include <stdlib.h>

#include "evolve.h"
#include "evolve_ok.h"

/* Sentinel "no forces" value; recognized via IS_ZEROFORCES below. */
struct forces zeroforces = {0, NULL, NULL};

#define IS_ZEROFORCES(F) (((F).n == 0) && ((F).forc == NULL) && ((F).last == NULL))

/* Debug helper: dump each pairwise force as "parti-id  partj-id  timestep". */
#define LOG_FORCES(F) \
{ \
    for (UINT i = 0; i < (F).n; i++) { \
        printf("%u\t%u\t%f\n", (F).forc[i].parti->id, (F).forc[i].partj->id, (F).forc[i].timestep); \
    } \
};

/*
 * Recompute the pairwise timestep of every force in f.
 * dt is used only for its sign (forward/backward integration direction).
 */
static void ok_timestep_cpu(int clevel, struct forces f, DOUBLE dt)
{
    int dir = SIGN(dt);
    for (UINT i = 0; i < f.n; i++) {
        //if (f.forc[i].timestep != HUGE_VAL) ENDRUN("timestep??");
        f.forc[i].timestep = timestep_ij(f.forc[i].parti, f.forc[i].partj, dir);
    }
    diag->tstep[clevel]++;
    diag->tcount[clevel] += f.n;
}

/*
 * split_ok_forces: split forces into smaller than dt, faster than dt.
 * Hoare-style in-place partition of f.forc: entries with timestep < dt end up
 * at the front ("fast"), the rest at the back ("slow"). The returned slow/fast
 * views alias f's storage — no copying.
 */
static void ok_split(FLOAT dt, struct forces f, struct forces *slow, struct forces *fast)
{
    //LOG("dt=%lf f.n=%u\n", dt, f.n);
    UINT i = 0;
    struct force *left, *right;
    left = f.forc;
    right = f.last;
    dt = fabs(dt);
    while (1) {
        // i guards against pointer-walk bugs: at most f.n partition rounds.
        if (i >= f.n) ENDRUN("forces split error 1\n");
        i++;
        while ((left->timestep < dt) && (left < right)) left++;
        while ((right->timestep >= dt) && (left < right)) right--;
        if (left < right) {
            SWAP(*left, *right, struct force);
        } else {
            break;
        }
    }
    // left may stop on the last fast element; advance past it.
    if (left->timestep < dt) left++;
    slow->n = f.last - left + 1;
    fast->n = left - f.forc;
    // A single fast force is not worth a deeper recursion level:
    // fold it into the slow set instead.
    if (fast->n == 1) {
        fast->n = 0;
        slow->n = f.n;
    }
    if (slow->n > 0) {
        slow->forc = f.forc + fast->n;
        slow->last = f.last; //slow->part+slow->n-1;
    }
    if (fast->n > 0) {
        fast->forc = f.forc;
        fast->last = f.forc + fast->n - 1;
    }
    if (fast->n + slow->n != f.n)
        ENDRUN("forces split error 2: fast->n=%u slow->n=%u f.n=%u\n", fast->n, slow->n, f.n);
    //for (i = 0; i < f.n; i++) f.forc[i].level = clevel;
}

/* Global ordered pairwise force table; allocated once in evolve_ok_init. */
struct forces ok_main_forces = {0, NULL, NULL};

/*
 * Allocate and initialise the n*(n-1) ordered-pair force table for system s.
 * NOTE(review): stores raw pointers into s.part — the particle array must stay
 * alive and unmoved until evolve_ok_stop; re-initialisation is a fatal error.
 */
void evolve_ok_init(struct sys s)
{
    UINT n_forces = s.n * s.n - s.n;
    if (ok_main_forces.forc != NULL) ENDRUN("OK (re)allocation error");
    ok_main_forces.forc = (struct force *) malloc(n_forces * sizeof(struct force));
    ok_main_forces.last = &(ok_main_forces.forc[n_forces - 1]);
    ok_main_forces.n = n_forces;
    // initialize pointers of the forces structure
    UINT k = 0;
    for (UINT i = 0; i < s.n; i++) {
        for (UINT j = 0; j < s.n; j++) {
            if (i != j) {
                ok_main_forces.forc[k].parti = &( s.part[i] );
                ok_main_forces.forc[k].partj = &( s.part[j] );
                k++;
            }
        }
    }
}

/* Free the global force table; safe to call when it was never allocated. */
void evolve_ok_stop()
{
    if (ok_main_forces.forc != NULL) {
        free(ok_main_forces.forc);
        ok_main_forces.forc = NULL;
    }
}

/*
 * Kick step: for every force in f, apply partj's softened Newtonian
 * acceleration to parti's velocity over dt. Only parti is updated — the
 * reverse interaction is a separate entry in the ordered-pair table.
 */
static void ok_kick(int clevel, struct forces f, DOUBLE dt)
{
    FLOAT dx[3], dr3, dr2, dr, acci;
    FLOAT acc[3];
    for (UINT i = 0; i < f.n; i++) {
        acc[0] = 0.;
        acc[1] = 0.;
        acc[2] = 0.;
        dx[0] = f.forc[i].parti->pos[0] - f.forc[i].partj->pos[0];
        dx[1] = f.forc[i].parti->pos[1] - f.forc[i].partj->pos[1];
        dx[2] = f.forc[i].parti->pos[2] - f.forc[i].partj->pos[2];
        // eps2 is the global softening parameter (avoids the r->0 singularity).
        dr2 = dx[0]*dx[0] + dx[1]*dx[1] + dx[2]*dx[2] + eps2;
        if (dr2 > 0) {
            dr = sqrt(dr2);
            dr3 = dr*dr2;
            acci = f.forc[i].partj->mass / dr3;
            f.forc[i].parti->vel[0] -= dt * dx[0] * acci;
            f.forc[i].parti->vel[1] -= dt * dx[1] * acci;
            f.forc[i].parti->vel[2] -= dt * dx[2] * acci;
        }
    }
    diag->kstep[clevel]++;
    diag->kcount[clevel] += f.n;
}

/*
 * Recursive OK integrator over [stime, etime]:
 * forces faster than dt recurse at dt/2 on either side of a kick of the slow
 * forces; the drift happens only at the leaves (no remaining forces).
 * calc_timestep skips the timestep refresh on the first half-recursion,
 * where the parent's timesteps are still valid.
 */
void evolve_ok2(int clevel, struct sys s, struct forces f, DOUBLE stime, DOUBLE etime, DOUBLE dt, int calc_timestep)
{
    // Top-level entry with the zero sentinel means "use the full force table".
    if (IS_ZEROFORCES(f) && clevel == 0) {
        f = ok_main_forces;
    }
    CHECK_TIMESTEP(etime, stime, dt, clevel);
    // all particles are drifted together
    if (f.n == 0) {
        diag->deepsteps++;
        diag->simtime += dt;
        drift(clevel, s, etime, dt);
        return;
    }
    if (calc_timestep) ok_timestep_cpu(clevel, f, dt);
    struct forces slowf = zeroforces, fastf = zeroforces;
    ok_split((FLOAT) dt, f, &slowf, &fastf);
    evolve_ok2(clevel+1, s, fastf, stime, stime+dt/2, dt/2, 0);
    ok_kick(clevel, slowf, dt);
    evolve_ok2(clevel+1, s, fastf, stime+dt/2, etime, dt/2, 1);
}
#!/bin/sh
#
# bear.sh - shell wrapper for bear.php
#
# $Id: bear.sh 707 2009-07-06 18:31:29Z [email protected] $
#

BEAR_HOME="@PEAR-DIR@/BEAR"

# Default to the `php` found on PATH unless the caller provides one.
if test -z "$PHP_COMMAND"; then
    export PHP_COMMAND=php
fi

# Fix: the original assigned the literal string "BEAR_HOME/class" (missing $),
# so PHP_CLASSPATH never pointed at the installed class directory.
if test -z "$PHP_CLASSPATH"; then
    PHP_CLASSPATH="$BEAR_HOME/class"
    export PHP_CLASSPATH
fi

# "$@" preserves each argument's word boundaries; the original unquoted $*
# re-split arguments containing whitespace.
$PHP_COMMAND -d html_errors=off -qC "$BEAR_HOME/BEAR/bin/bear.php" "$@"
<?php

namespace App\Entities;

/**
 * Read-only wrapper around a raw post record (an associative array of
 * database columns). Write attempts via property syntax are silently ignored.
 */
class Post
{
    // Raw record fields keyed by column name.
    private $fields = [];

    public function __construct($data)
    {
        $this->fields = $data;
    }

    /**
     * Returns the created_at timestamp converted to the given timezone
     * and rendered with the given DateTime format string.
     *
     * NOTE(review): "timezome" is a typo, but under PHP 8 named arguments a
     * parameter name is part of the public API — confirm no caller uses
     * `timezome:` before renaming it.
     */
    public function getCreatedAt(string $format = 'Y-m-d H:i', string $timezome = 'Europe/Moscow')
    {
        $datetime = new \DateTime($this->fields['created_at']);
        $datetime->setTimezone(new \DateTimeZone($timezome));
        return $datetime->format($format);
    }

    public function getId()
    {
        return $this->fields['id'];
    }

    public function getDescription()
    {
        return $this->fields['description'];
    }

    // Reads the 'body' column despite the "Textbody" name.
    public function getTextbody()
    {
        return $this->fields['body'];
    }

    public function getTitle()
    {
        return $this->fields['title'];
    }

    // Swallows all dynamic property writes, keeping the entity immutable.
    public function __set($key, $value)
    {
        return null;
    }

    public function getAuthor()
    {
        return $this->fields['author'];
    }
}
#if UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_EDITOR
/**
 * Autogenerated by Thrift Compiler ()
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
namespace GetSocialSdk.Core
{
    /// <summary>
    /// to determine which query to run.
    /// NOTE(review): generated from the Thrift IDL — the numeric values are
    /// part of the wire protocol; regenerate from the .thrift file rather
    /// than editing or reordering by hand.
    /// </summary>
    public enum QueryType
    {
        APP_SESSIONS = 0,
        INVITE_EVENTS = 1,
        INSTALL_EVENTS = 2,
        ACTIVITY_EVENTS = 3,
        INVITE_CYCLE = 4,
        PN_EVENTS = 5,
        CONTENT_SESSIONS = 6,
        REENGAGEMENT = 7,
        KFACTOR = 8,
        COMBINED_INVITE_EVENTS = 9,
        CONVERSION = 10,
        RETENTION = 11,
        INSTALL_DISTRIBUTION = 12,
        USERS = 13,
        UNIQUE_USERS = 14,
        UNIQUE_INVITE_USERS = 15,
        INVITE_EVENTS_PER_1000 = 16,
        ETL_STATUS = 17,
        RETENTION_PER_PERIOD = 18,
        SAVINGS = 19,
        AUDIENCE_COMPARE = 20,
        AUDIENCE_RETENTION = 21,
        USER = 22,
        AUDIENCE_USER = 23,
        AUDIENCE = 24,
        ENGAGEMENT = 25,
        ENGAGEMENT_PER_PERIOD = 26,
        PN_TYPE = 27,
        LOCAL_AUDIENCE_USERS = 28,
        EXPORT_USERS_FACEBOOK = 29,
        LOCAL_AUDIENCE = 30,
        EXPORT_EVENTS = 31,
        EXPORT_USERS = 32,
        AVAILABLE_AUDIENCE = 33,
        APPS_OVERVIEW = 34,
        ENGAGEMENT_DISTRIBUTION = 35,
        BILLING_DAU = 36,
        BILLING_USAGE = 37,
        PURCHASE_EVENTS = 38,
        DAU = 39,
        USERS_PURCHASES = 40,
        PURCHASE_CYCLE = 41,
        PURCHASE_TIMELINE = 42,
        PURCHASE_PAIRS = 43,
        CUSTOM_EVENTS = 44,
        PROMO_CODE_EVENTS = 45,
        USERS_EVENTS = 46,
        USERS_PROPERTIES = 47,
        CUSTOM_EVENTS_PROPERTIES = 48,
        ACTIVITY_ENGAGED_USERS = 49,
        REFERRAL_EVENTS = 50,
    }
}
#endif
package untitled.goose.framework.model.entities.runtime

import untitled.goose.framework.model.events.PlayerEvent

/**
 * A runtime player: a static definition paired with the player's event history.
 */
trait Player extends Defined[PlayerDefinition] with History[PlayerEvent] {

  /** Compares two players: equal when both definition and history match. */
  def ==(obj: Player): Boolean = definition == obj.definition && history == obj.history

  // Delegates to the typed == above; values that are not Players never compare equal.
  override def equals(obj: Any): Boolean = obj match {
    case x: Player => x == this
    case _ => false
  }

  // Deliberately based on definition only: players equal under == always share
  // a definition, so this remains consistent with equals (history is ignored
  // for hashing, which is legal — equal objects still hash alike).
  override def hashCode(): Int = 17 * definition.hashCode + 23

  override def toString: String = this.getClass.getSimpleName + ":" + definition.name
}

object Player {

  // A player freshly created from a definition, with an empty history.
  private class PlayerDefImpl(val definition: PlayerDefinition) extends Player {
    val history: Seq[PlayerEvent] = List()
  }

  case class PlayerImpl(definition: PlayerDefinition, history: Seq[PlayerEvent] = Seq()) extends Player

  /** Factory method that creates a new player from the definition. */
  def apply(playerDefinition: PlayerDefinition): Player = new PlayerDefImpl(playerDefinition)
}
/*
Copyright 2020 Gravitational, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

	http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package framework

import (
	"github.com/gravitational/robotest/infra"
	"github.com/gravitational/robotest/lib/loc"

	"github.com/gravitational/trace"
)

// TestState represents the state of the test between bootstrapping a cluster
// and teardown.
// The state is updated on each in-between test run to sync the provisioner state.
type TestState struct {
	// EntryURL defines the entry point to the application.
	// This can be the address of existing Ops Center or local application endpoint URL
	EntryURL string `json:"ops_url,omitempty"`
	// Application defines the application package to test as retrieved from the wizard
	Application *loc.Locator `json:"application,omitempty"`
	// Login specifies optional login to connect to the EntryURL.
	// Falls back to TestContext.Login if unspecified
	Login *Login `json:"login,omitempty"`
	// ServiceLogin specifies optional service login to connect to the EntryURL.
	ServiceLogin *ServiceLogin `json:"service_login,omitempty"`
	// Bandwagon specifies bandwagon creation details
	Bandwagon *BandwagonConfig `json:"bandwagon,omitempty"`
	// Provisioner defines the provisioner used to create the infrastructure.
	// This can be empty for the automatic provisioner
	Provisioner *Provisioner `json:"provisioner,omitempty"`
	// Onprem defines the provisioner state.
	// The provisioner used is specified by Provisioner.
	// With automatic provisioner, no provisioner state is stored
	ProvisionerState *infra.ProvisionerState `json:"provisioner_state,omitempty"`
	// StateDir specifies the location of temporary state used for a single test run
	// (from bootstrapping to destroy)
	StateDir string `json:"state_dir"`
	// BackupState defines state of backup.
	// Used for backup/restore operations.
	BackupState *BackupState `json:"backup_state,omitempty"`
}

// BackupState defines state of backup.
type BackupState struct {
	// Addr is the address of the node where the backup is stored
	Addr string `json:"addr"`
	// Path is an absolute path to the backup file
	Path string `json:"path"`
}

// Validate checks the state for internal consistency: a configured
// provisioner and its saved state must come in pairs, and a state
// directory is always required. All violations are aggregated into
// a single error (nil when the state is valid).
func (r TestState) Validate() error {
	var errors []error
	if r.Provisioner != nil && r.ProvisionerState == nil {
		errors = append(errors, trace.BadParameter("ProvisionerState is required"))
	}
	if r.Provisioner == nil && r.ProvisionerState != nil {
		errors = append(errors, trace.BadParameter("Provisioner is required"))
	}
	if r.StateDir == "" {
		errors = append(errors, trace.BadParameter("StateDir is required"))
	}
	return trace.NewAggregate(errors...)
}
package fr.anthonygodin.api.controller;

import fr.anthonygodin.api.dto.entity.ToolDTO;
import fr.anthonygodin.api.dto.entity.ToolToCreateDTO;
import fr.anthonygodin.api.service.CrudService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

/**
 * REST controller exposing CRUD endpoints for tools under /api/tools.
 * All endpoint behaviour is inherited from the generic CrudController;
 * this class only wires in the tool-specific service and logger.
 *
 * Created by AnthoGdn on 26/03/17.
 */
@RestController
@RequestMapping("api/tools")
public class ToolController extends CrudController<ToolDTO, ToolToCreateDTO> {

    private static final Logger LOGGER = LoggerFactory.getLogger(ToolController.class);

    @Autowired
    private CrudService<ToolDTO, ToolToCreateDTO> toolService;

    @Override
    protected Logger getLogger() {
        return LOGGER;
    }

    // NOTE(review): raw CrudService return type — presumably matches the
    // base-class signature; confirm before parameterizing it.
    @Override
    protected CrudService getService() {
        return toolService;
    }
}
/*
 * InputDialog.java
 *
 * Copyright (C) 1998-2003 Peter Graves
 * $Id: InputDialog.java,v 1.3 2003/07/23 16:13:51 piso Exp $
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */

package org.armedbear.j;

import java.awt.BorderLayout;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.util.List;
import javax.swing.BoxLayout;
import javax.swing.JDialog;
import javax.swing.JPanel;
import javax.swing.border.EmptyBorder;

/**
 * Modal single-line text input dialog with optional history and
 * Tab-cycled completion (subclasses supply completions via getCompletions).
 */
public class InputDialog extends JDialog implements KeyListener
{
    protected final Editor editor;
    protected HistoryTextField textField;
    private String defaultValue;     // pre-filled (and pre-selected) text
    private History history;         // optional persistent input history
    private String input;            // result: the entered text, or null on escape
    private List completions;        // current completion cycle; null = not cycling
    private int index;               // next completion index within the cycle

    public InputDialog(Editor editor, String prompt, String title, String defaultValue)
    {
        super(editor.getFrame(), title, true);  // modal, parented to the editor frame
        this.editor = editor;
        this.defaultValue = defaultValue;
        JPanel panel = new JPanel();
        panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS));
        panel.setBorder(new EmptyBorder(5, 5, 5, 5));
        panel.add(new Label(prompt));
        textField = new HistoryTextField(20);
        textField.addKeyListener(this);
        panel.add(textField);
        getContentPane().add(panel, BorderLayout.CENTER);
        pack();
        // Let the dialog receive Tab key events (for completion) instead of
        // having Swing consume them for focus traversal.
        textField.setFocusTraversalKeysEnabled(false);
    }

    /**
     * Shows a modal input dialog and returns the entered text,
     * or null if the user cancelled with Escape.
     */
    public static String showInputDialog(Editor editor, String prompt,
        String title, String defaultValue)
    {
        InputDialog d = new InputDialog(editor, prompt, title, defaultValue);
        editor.centerDialog(d);
        d.show();  // blocks until enter()/escape() dispose the dialog
        return d.input;
    }

    public static String showInputDialog(Editor editor, String prompt, String title)
    {
        return showInputDialog(editor, prompt, title, null);
    }

    // NOTE(review): show() is deprecated in modern Swing in favour of
    // setVisible(true); kept here to match the era of this codebase.
    @Override
    public void show()
    {
        if (defaultValue != null && defaultValue.length() > 0) {
            // Pre-select the default so typing replaces it immediately.
            textField.setText(defaultValue);
            textField.selectAll();
        }
        textField.requestFocus();
        super.show();
    }

    public final void setDefaultValue(String s)
    {
        defaultValue = s;
    }

    /** The entered text, or null if the dialog was cancelled. */
    public final String getInput()
    {
        return input;
    }

    public void setHistory(History history)
    {
        this.history = history;
        textField.setHistory(history);
    }

    // Accept: record the text, persist it to history, close the dialog.
    protected void enter()
    {
        input = textField.getText();
        if (history != null) {
            history.append(input);
            history.save();
        }
        dispose();
    }

    // Cancel: null result, close the dialog.
    protected void escape()
    {
        input = null;
        dispose();
    }

    @Override
    public void keyPressed(KeyEvent e)
    {
        final int keyCode = e.getKeyCode();
        final int modifiers = e.getModifiers();
        switch (keyCode) {
            case KeyEvent.VK_TAB: {
                // Tab cycles forward through completions, Shift+Tab backward.
                String s = null;
                if (modifiers == InputEvent.SHIFT_MASK)
                    s = previousGuess();
                else
                    s = guess(textField.getText());
                e.consume();
                if (s != null) {
                    textField.setText(s);
                    textField.setCaretPosition(s.length());
                }
                return;
            }
            case KeyEvent.VK_ENTER:
                enter();
                return;
            case KeyEvent.VK_ESCAPE:
                escape();
                return;
            case KeyEvent.VK_SHIFT:
            case KeyEvent.VK_META:
            case KeyEvent.VK_ALT:
                // Ignore modifers.
                return;
            default:
                // Anything but tab, start over.
                completions = null;
                return;
        }
    }

    @Override
    public void keyReleased(KeyEvent e) {}

    @Override
    public void keyTyped(KeyEvent e) {}

    @Override
    public void dispose()
    {
        super.dispose();
        // Hand keyboard focus back to the editor that opened us.
        editor.restoreFocus();
    }

    // Returns the next completion for prefix, lazily building the completion
    // list on first call and wrapping around at the end; null when none exist.
    private String guess(String prefix)
    {
        if (completions == null) {
            completions = getCompletions(prefix);
            if (completions == null)
                return null;
            index = 0;
        } else if (index >= completions.size())
            index = 0; // Start over.
        if (index < completions.size())
            return (String) completions.get(index++);
        return null;
    }

    // Steps the cycle backward (only meaningful while already cycling);
    // index arithmetic undoes the post-increment done by guess().
    private String previousGuess()
    {
        if (completions != null) {
            if (completions.size() > 1) {
                index -= 2;
                if (index < 0)
                    index += completions.size();
                return (String) completions.get(index++);
            }
        }
        return null;
    }

    // Derived classes can override this method to provide completion
    // functionality.
    protected List getCompletions(String prefix)
    {
        return null;
    }
}
<?php
/**
 * LitePubl CMS
 *
 * @copyright 2010 - 2017 Vladimir Yushko http://litepublisher.com/ http://litepublisher.ru/
 * @license https://github.com/litepubl/cms/blob/master/LICENSE.txt MIT
 * @link https://github.com/litepubl\cms
 * @version 7.08
 */

namespace litepubl\plugins\smushit;

use litepubl\core\Event;
use litepubl\core\Str;
use litepubl\post\Files;
use litepubl\post\MediaParser;
use litepubl\utils\Http;

/**
 * Plugin that recompresses newly uploaded images through the Smush.it
 * web service and stores the optimized copy when the saving is worthwhile.
 */
class Plugin extends \litepubl\core\Plugin
{
    public function install()
    {
        $parser = MediaParser::i();
        $parser->added = $this->fileAdded;
    }

    public function uninstall()
    {
        $parser = MediaParser::i();
        $parser->unbind($this);
    }

    /**
     * Handler for the MediaParser "added" event: sends the new image to
     * Smush.it and replaces the stored content when the optimized copy
     * is at least 3% smaller. Non-image uploads are ignored.
     */
    public function fileAdded(Event $event)
    {
        $files = Files::i();
        $item = $files->getItem($event->id);
        if ('image' != $item['media']) {
            return;
        }

        // Fix: the original read an undefined $id here (and in setContent
        // below), raising a notice and sending an empty img= parameter;
        // the file id comes from the event.
        $fileurl = $files->getUrl($event->id);
        if ($s = Http::get('http://www.smushit.com/ysmush.it/ws.php?img=' . urlencode($fileurl))) {
            $json = json_decode($s);
            // -1 dest_size means the service could not shrink the image.
            if (isset($json->error) || (-1 === (int)$json->dest_size) || !$json->dest) {
                return;
            }

            // Skip when the saving is below 3% of the original size.
            $div = $item['size'] - (int)$json->dest_size;
            if (($div / ($item['size'] / 100)) < 3) {
                return;
            }

            $dest = urldecode($json->dest);
            if (!Str::begin($dest, 'http')) {
                $dest = 'http://www.smushit.com/' . $dest;
            }

            if ($content = Http::get($dest)) {
                return $files->setContent($event->id, $content);
            }
        }
    }
}
module Plotting

using Plots
using ..Structs

export plot_rectangles

# Closed rectangle shape anchored at (x, y) with width w and height h.
rectangle(x, y, w, h) = Shape(x .+ [0, w, w, 0], y .+ [0, 0, h, h])

"""
    plot_rectangles(rect_sizes, positions)

Draw one rectangle per (size, position) pair on a fresh legend-less plot
and return the resulting plot object.
"""
function plot_rectangles(rect_sizes, positions)
    canvas = Plots.plot(legend = false)
    for (extent, origin) in zip(rect_sizes, positions)
        Plots.plot!(canvas, rectangle(origin..., extent...))
    end
    return canvas
end

end # module
<?php

namespace App\Domains\Deposits\Actions;

use App\Domains\Deposits\DTOs\DepositDTO;
use App\Domains\Deposits\Enums\DepositStatusEnum;
use App\Domains\Deposits\Models\Deposit;
use App\Domains\Images\Actions\CreateNewImageAction;
use Illuminate\Support\Facades\DB;

/**
 * Creates a pending deposit together with its check image, atomically.
 */
class CreateNewDepositAction
{
    public function __construct(private CreateNewImageAction $createNewImageAction)
    {
    }

    /**
     * Stores the check picture first, then the deposit row referencing it.
     * Both inserts run inside one DB transaction, so a failed deposit
     * insert also rolls back the image row.
     * NOTE(review): any file written to disk/storage by CreateNewImageAction
     * is presumably NOT removed on rollback — confirm cleanup behaviour.
     *
     * @return Deposit the new deposit with its "image" relation eager-loaded
     */
    public function execute(DepositDTO $depositDTO): Deposit
    {
        return DB::transaction(function () use ($depositDTO) {
            $savedCheckImage = $this->createNewImageAction->execute($depositDTO->checkPicture, $depositDTO->userId);

            return Deposit::create([
                "amount" => $depositDTO->amount,
                "description" => $depositDTO->description,
                "status" => DepositStatusEnum::pending(),
                "image_id" => $savedCheckImage->id,
                "user_id" => $depositDTO->userId
            ])->load("image");
        });
    }
}
/*
 * FBOTestApp.cpp
 *
 * Copyright (C) 2007 by Universitaet Stuttgart (VIS). Alle Rechte vorbehalten.
 *
 * NOTE(review): file header says FBOTestApp.cpp but the content implements
 * GlutAppManager — presumably a copy-paste leftover; confirm the file name.
 */

#ifdef _WIN32
#include <windows.h>
#else /* _WIN32 */
#endif /* _WIN32 */

#include <stdio.h>
#include "vislib/graphics/gl/IncludeAllGL.h"
#include <GL/glut.h>
#include "GlutAppManager.h"
#include "vislib/graphics/FpsCounter.h"
#include "vislib/VersionNumber.h"
#include "vislib/graphics/gl/glfunctions.h"

/** not nice! */
extern vislib::graphics::FpsCounter fpsCounter;

/*
 * GlutAppManager::AbstractFactory::AbstractFactory
 */
GlutAppManager::AbstractFactory::AbstractFactory(const char *name) : name(name) {
}

/*
 * GlutAppManager::GlutAppManager
 */
GlutAppManager::GlutAppManager(void) : app(NULL), factories(), windowMenu(0), appMenu(0) {
}

/*
 * GlutAppManager::~GlutAppManager
 * Tears down the running app, the installed factories and the glut menus.
 */
GlutAppManager::~GlutAppManager(void) {
    if (app) {
        app->GLDeinit();
        delete app;
    }
    // Delete factories back to front; null each slot before deleting it.
    for (int i = int(this->factories.Count()) - 1; i >= 0; i--) {
        AbstractFactory *f = this->factories[i];
        this->factories[i] = NULL;
        delete f;
    }
    this->factories.Clear();
    if (this->windowMenu != 0) {
        glutDetachMenu(this->windowMenu);
        glutDestroyMenu(this->windowMenu);
        this->windowMenu = 0;
    }
    if (this->appMenu != 0) {
        glutDestroyMenu(this->appMenu);
        this->appMenu = 0;
    }
}

/*
 * GlutAppManager::GetInstance
 * Meyers singleton: constructed on first use, destroyed at exit.
 */
GlutAppManager * GlutAppManager::GetInstance(void) {
    static GlutAppManager instance;
    return &instance;
}

/*
 * GlutAppManager::InstallFactory
 * Registers a test application factory; the manager takes ownership.
 */
void GlutAppManager::InstallFactory(AbstractFactory *factory) {
    if (factory == NULL) return;
    GlutAppManager::GetInstance()->factories.Append(factory);
}

/*
 * GlutAppManager::InitGlutWindow
 * Builds the right-click context menu (test selection submenu plus
 * restart/exit entries). Idempotent: a second call is a no-op.
 */
void GlutAppManager::InitGlutWindow(void) {
    if (this->windowMenu != 0) {
        return;
    }
    this->appMenu = glutCreateMenu(GlutAppManager::OnMenuItemClicked);
    vislib::StringA name;
    // Menu ids are factory index + 1; 0 is not a valid glut menu entry value.
    for (int i = 0; i < int(this->factories.Count()); i++) {
        if (this->factories[i] != NULL) {
            name.Format("%d: %s", (i + 1), this->factories[i]->GetName());
            glutAddMenuEntry(name.PeekBuffer(), i + 1);
        }
    }
    this->windowMenu = glutCreateMenu(GlutAppManager::OnMenuItemClicked);
    glutAddSubMenu("Select Test", this->appMenu);
#if defined(VISGLUT_EXTENSIONS)
    ::glutAddMenuSeparator();
#endif /* VISGLUT_EXTENSIONS */
    glutAddMenuEntry("Restart Test", -2);
    glutAddMenuEntry("Exit", -1);
    glutAttachMenu(GLUT_RIGHT_BUTTON);
}

/*
 * GlutAppManager::OnMenuItemClicked
 * Menu dispatch: -1 exit, -2 restart current test, 1..N select factory N-1.
 */
void GlutAppManager::OnMenuItemClicked(int menuID) {
    if (menuID == -1) {
        GlutAppManager::ExitApplication(0);
    } else if (menuID == -2) {
        // Restart: deinit and re-init the currently running test app.
        GlutAppManager *This = GlutAppManager::GetInstance();
        if (This->app) {
            This->app->GLDeinit();
            if (This->app->GLInit() == 0) { // TODO: initializes the glut stuff
                This->app->OnResize(This->width, This->height);
                printf("Test restarted.\n");
                fpsCounter.Reset();
            } else {
                delete This->app;
                This->app = NULL;
                printf("Test could not be restarted.\n");
            }
        }
    } else if ((menuID > 0) && (menuID <= int(GlutAppManager::GetInstance()->factories.Count()))) {
        GlutAppManager *This = GlutAppManager::GetInstance();
        // select an test application factory
        printf("Selecting Test: %s\n", This->factories[menuID - 1]->GetName());
        if (This->app) {
            if (This->factories[menuID - 1]->HasCreated(This->app)) {
                printf("  Test already selected.\n");
            } else {
                // Different test requested: tear the current one down first.
                This->app->GLDeinit();
                delete This->app;
                This->app = NULL;
            }
        }
        if (!This->app) {
            This->app = This->factories[menuID - 1]->CreateApplication();
            if (This->app) {
                if (This->app->GLInit() == 0) { // TODO: initializes the glut stuff
                    This->app->OnResize(This->width, This->height);
                    printf("  Test selected.\n");
                    fpsCounter.Reset();
                } else {
                    delete This->app;
                    This->app = NULL;
                    printf("  Test could not be initialized.\n");
                }
            } else {
                printf("  Test could not be created.\n");
            }
        }
        glutPostRedisplay();
    }
}

/*
 * GlutAppManager::ExitApplication
 */
void GlutAppManager::ExitApplication(int exitcode) {
    exit(exitcode);
}

/*
 * GlutAppManager::SetSize
 * Records the window size; without a running app the manager owns the
 * viewport and must update it itself.
 */
void GlutAppManager::SetSize(int w, int h) {
    this->width = w;
    this->height = h;
    if (this->app == NULL) {
        glViewport(0, 0, this->width, this->height);
    }
}

/*
 * glprintf
 * Draws a bitmap string at raster position (x, y) in the current projection.
 */
static void glprintf(float x, float y, const void *font, const char *string) {
    glRasterPos2f(x, y);
    while (*string) {
        // glutBitmapCharacter takes a non-const font pointer; cast is safe here.
        glutBitmapCharacter((void *)font, *string++);
    }
}

/*
 * GlutAppManager::glRenderEmptyScreen
 * Splash screen shown while no test is selected: title, copyright,
 * GL version and a usage hint, rendered in a pixel-aligned ortho projection.
 */
void GlutAppManager::glRenderEmptyScreen(void) {
    GlutAppManager *This = GlutAppManager::GetInstance();
    glViewport(0, 0, This->width, This->height);
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_LIGHTING);
    glDisable(GL_LIGHT0);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(-0.5, This->width, -0.5, This->height, -1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glColor3f(0.7f, 0.8f, 1.0f);
    glprintf(10.0f, float(This->height - 28), GLUT_BITMAP_HELVETICA_18,
        "VISlib glutTest Application");
    glprintf(10.0f, float(This->height - 44), GLUT_BITMAP_HELVETICA_12,
        "Copyright 2007, Universität Stuttgart (VIS). Alle Rechte vorbehalten.");
    vislib::StringA txt;
    txt.Format("OpenGL Version: %s", vislib::graphics::gl::GLVersion().ToStringA(3).PeekBuffer());
    glprintf(10.0f, float(This->height - 60), GLUT_BITMAP_HELVETICA_12, txt.PeekBuffer());
    glprintf(10.0f, float(This->height - 76), GLUT_BITMAP_HELVETICA_12,
        "Use the right click context menu to select a test.");
    glFlush();
    glutSwapBuffers();
}
# Unit tests for the wildfly::profile_path Puppet function:
# blank or nil input yields nil; otherwise the profile name is rendered
# as a CLI path segment ("/profile=<name>").
describe 'wildfly::profile_path' do
  it { is_expected.to run.with_params('').and_return(nil) }
  it { is_expected.to run.with_params(nil).and_return(nil) }
  it { is_expected.to run.with_params('full-ha').and_return('/profile=full-ha') }
end
package com.uploadcare.android.library.data

import com.squareup.moshi.Json
import com.uploadcare.android.library.api.UploadcareGroup
import java.net.URI

/**
 * One page of group results as returned by the Uploadcare REST API,
 * with cursors to the neighbouring pages.
 */
data class GroupPageData(val next: URI? = null,
                         val previous: URI? = null,
                         val total: Int,
                         @Json(name = "per_page") val perPage: Int,
                         val results: List<UploadcareGroup>) : PageData<UploadcareGroup> {

    override fun getResultsData(): List<UploadcareGroup> {
        return results
    }

    override fun hasMore(): Boolean {
        // A further page exists exactly when the API supplied a "next" cursor.
        return next != null
    }

    override fun getNextURI(): URI? {
        return next
    }
}
// // Created by Zhen Peng on 8/4/19. // #include <stdio.h> #include <stdlib.h> #include <fstream> #include <time.h> #include "dglobals.h" #include "globals.h" using namespace PADO; /* * Create a binary file. The format is just sequence of pairs of Vertex IDs. */ void create(const char *filename, VertexID num_v, EdgeID num_e) { std::ofstream fout(filename); if (!fout.is_open()) { fprintf(stderr, "Error: cannot create file %s\n", filename); exit(EXIT_FAILURE); } srand(time(0)); double time_running = -WallTimer::get_time_mark(); for (EdgeID e_i = 0; e_i < num_e; ++e_i) { VertexID head = rand() % num_v; VertexID tail = rand() % num_v; fout.write(reinterpret_cast<char *>(&head), sizeof(head)); fout.write(reinterpret_cast<char *>(&tail), sizeof(tail)); } time_running += WallTimer::get_time_mark(); printf("running_time: %f\n", time_running); } int main(int argc, char *argv[]) { if (argc < 4) { fprintf(stderr, "Usage: ./createfile <output_binary_file> <num_v> <num_e>\n"); exit(EXIT_FAILURE); } create(argv[1], strtoull(argv[2], nullptr, 0), strtoull(argv[3], nullptr, 0)); return EXIT_SUCCESS; }
// Copyright 2021 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use zerocopy::{AsBytes, FromBytes}; use crate::types::*; /// Matches iovec_t. #[derive(Debug, Default, Clone, Copy, AsBytes, FromBytes)] #[repr(C)] pub struct UserBuffer { pub address: UserAddress, pub length: usize, } impl UserBuffer { pub fn get_total_length(buffers: &[UserBuffer]) -> usize { let mut total = 0; for buffer in buffers { total += buffer.length; } total } }
---
title: combinatorics
icon: 'null'
related: ["math"]
emoji: "⚖️"
---