{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \n\n\"\"\"\n\nMODEL_TEMPLATE = \"\"\"\\\ndefine([\n 'underscore',\n 'backbone'\n], function(_, Backbone){\n var $model_variable = Backbone.Model.extend({\n defaults: $defaults\n });\n // You usually don't return a model instantiated\n return $model_variable;\n});\"\"\"\n\nCOLLECTION_TEMPLATE = \"\"\"\\\ndefine([\n 'underscore',\n 'backbone',\n // Model dependencies\n $model_dir\n], function(_, Backbone$model_parameter){\n var $collection_variable = Backbone.Collection.extend({\n $model_content\n });\n // You don't usually return a collection instantiated\n return new $collection_variable;\n});\"\"\"\n\nDIRS = {\n\t'css': 'css',\n\t'templates': 'templates',\n\t'js': 'js',\n\t'jslibs': 'js' + os.sep + 'libs',\n\t'models': 'js' + os.sep + 'models',\n\t'collections': 'js' + os.sep + 'collections',\n\t'views': 'js' + os.sep + 'views'\n}\n\nFILES = {\n\tDIRS['js'] + os.sep + 'app.js': APP_JS,\n\tDIRS['js'] + os.sep + 'main.js': MAIN_JS,\n\tDIRS['js'] + os.sep + 'router.js': ROUTER_JS,\n\tDIRS['js'] + os.sep + 'template.js': TEMPLATE_JS,\n}\n\nLIBS = {\n\t'require': 'http://requirejs.org/docs/release/2.1.5/minified/require.js',\n\t'jquery': 'http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js',\n\t'underscore': 'http://documentcloud.github.com/underscore/underscore-min.js',\n\t'backbone': 'http://documentcloud.github.com/backbone/backbone-min.js'\n}\n\ndef LIB_FILENAME(name, foldername=None):\n\tif not foldername:\n\t\tfoldername = name\n\treturn DIRS['jslibs'] + os.sep + foldername + os.sep + name + '.js'\n\n\nREQUIREJS_LIBS = {\n\t'text': 'https://raw.github.com/requirejs/text/latest/text.js'\n}\n\n# for every command 'c' there needs to be a 'process_c()' function\nCOMMANDS = {\n\t'check': [],\n\t'about': [],\n\t'init': [],\n\t'generate': [\n\t\t'model',\n\t\t'view',\n\t\t'collection'\n\t]\n}\n\n\ndef initialize_project():\n\tif already_initialized():\n\t\tprint 'Project already initialized.'\n\t\treturn\n\n\t# create dir structure\n\tprint ''\n\tprint 'Creating directory structure'\n\tfor directory in sorted(DIRS.values()):\n\t\ttry:\n\t\t\tos.mkdir(directory)\n\t\t\tprint \" '%s' created.\" % directory\n\t\texcept OSError:\n\t\t\tprint \" '%s' already exists\" % directory\n\n\t# download required libs\n\tprint '\\nDownloading required libs'\n\tfor name,url in LIBS.items():\n\t\tlibfile = LIB_FILENAME(name)\n\t\tif os.path.exists(libfile):\n\t\t\tprint \" File '%s' already exists, skipping.\" % os.path.basename(libfile)\n\t\telse:\n\t\t\ttry:\n\t\t\t\tos.mkdir(os.path.dirname(libfile))\n\t\t\texcept OSError as e:\n\t\t\t\tNone\n\t\t\tprint \" Downloading '%s' from %s\" % (name,url)\n\t\t\turllib.urlretrieve(url, libfile)\n\n\t# download require.js libs\n\tprint '\\nDownloading libs for require.js'\n\tfor name, url in REQUIREJS_LIBS.items():\n\t\tlibfile = LIB_FILENAME(name, 'require')\n\t\tif os.path.exists(libfile):\n\t\t\tprint \" File '%s' already exists, skipping.\" % os.path.basename(libfile)\n\t\telse:\n\t\t\ttry:\n\t\t\t\tos.mkdir(os.path.dirname(libfile))\n\t\t\texcept OSError as e:\n\t\t\t\tNone\n\t\t\tprint \" Downloading '%s' from %s\" % (name,url)\n\t\t\turllib.urlretrieve(url, libfile)\n\n\t# create base .js files\n\tprint '\\nCreating base .js files'\n\tfor file, content in FILES.items():\n\t\tif os.path.exists(file):\n\t\t\tprint \" File '%s' already exists, skipping.\" % file\n\t\telse:\n\t\t\tprint \" Creating '%s'\" % file\n\t\t\tf = open(file, 'w')\n\t\t\tf.write(content)\n\t\t\tf.close()\n\n\t# create base index.html\n\tprint ''\n\tif os.path.exists('index.html'):\n\t\tprint \"Index.html already exists, skipping.\"\n\telse:\n\t\tprint 'Creating base index.html'\n\t\tf = open('index.html', 'w')\n\t\ttemplate = Template(INDEX_HTML_TEMPLATE)\n\t\tcontent = template.substitute(\n\t\t\tmain_js = DIRS['js'] + os.sep + 'main',\n\t\t\trequire_js = DIRS['jslibs'] + os.sep + 'require' + os.sep + 'require.js'\n\t\t)\n\t\tf.write(content)\n\t\tf.close()\n\tprint\n\ndef create_model(name, model_defaults):\n\tprint \"Creating Model '%s'\" % name\n\n\t# enforce format of 'key:value' of the module_attrs\n\tfor model_default in model_defaults:\n\t\tif not ':' in model_default:\n\t\t\tprint \"Error - Wrong format of model defaults.\"\n\t\t\tprint \" Example: 'name:Harry age:11'\"\n\t\t\treturn 1\n\n\t\n\tfilename = DIRS['models'] + os.sep + name.lower() + '.js'\n\ttry:\n\t\tf = open(filename, 'r')\n\t\tprint \"File '%s' already exists, overwrite? [y/N] \" % filename,\n\t\toverwrite = \"n\"\n\t\ttry:\n\t\t\toverwrite = sys.stdin.readline().strip()\n\t\texcept KeyboardInterrupt:\n\t\t\tNone\n\t\tprint '\\r', # remove space in beginning of line when reading from stdin\n\n\t\tif overwrite.lower() != 'y':\n\t\t\treturn\n\n\t\tprint \"Overwriting file '%s'\" % filename\n\t\tf = open(filename, 'w')\n\texcept IOError:\n\t\tf = open(filename, 'w')\n\t\tprint \"File '%s' created.\" % filename\n\n\t# build 'defaults' string with the defaults passed as parameter\n\tl = len(model_defaults)\n\tdefaults = \"{\\n\"\n\tfor attr in model_defaults:\n\t\tl -= 1\n\t\tattr_name,attr_val = attr.split(':')\n\t\tdefaults += \" \"*6 + \"'\" + attr_name + \"': '\" + attr_val\n\t\tif l==0:\n\t\t\tdefaults += \"'\\n\"\n\t\telse:\n\t\t\tdefaults += \"',\\n\"\n\tdefaults += \" \"*4 + \"}\"\n\n\ttemplate = Template(MODEL_TEMPLATE)\n\tcontent = template.substitute(\n\t\t# lowercase 1st letter of model_variable\n\t\t#model_variable = name[0].lower() + name[1:] + \"Model\",\n\t\tmodel_variable = name + \"Model\",\n\t\tdefaults = defaults\n\t)\n\tf.write(content)\n\tf.close()\n\tprint \"Model '%s' created.\" % (name,)\n\ndef create_collection(name, model):\n\t# variables format for the collection template:\n\t# model_dir: 'models/model_name'\n\t# model_parameter: ', modelnameModel'\n\t# mode_content: 'model: modelnameModel'\n\t# collection_variable: 'collecionnameCollection'\n\tmodel_dir = ''\n\tmodel_parameter = ''\n\tmodel_content = ''\n\n\t# lowercase 1st letter\n\tcollection_variable = name[0].lower() + name[1:] + 'Collection'\n\n\t# first check that the model exists\n\tif model:\n\t\tmodel_filename = DIRS['models'] + os.sep + model.lower() + '.js'\n\t\ttry:\n\t\t\tmodel_file = open(model_filename, 'r')\n\t\texcept IOError:\n\t\t\tprint \"Model '%s' not found, aborting.\" % model\n\t\t\treturn\n\n\t\t# format of model_dir is 'models/modelname' and not\n\t\t# 'js/models/modelname', therefore we need to remove the initial 'js/'\n\t\ttrim = len(DIRS['js'] + os.sep)\n\t\tmodel_dir = \"'%s'\" % (DIRS['models'] + os.sep + model.lower())[trim:]\n\t\tmodel_parameter = ', %sModel' % model\n\t\tmodel_content = 'model: %sModel' % model\n\n\t# check if collection exists\n\tfilename = DIRS['collections'] + os.sep + name.lower() + '.js'\n\ttry:\n\t\tf = open(filename, 'r')\n\t\tprint \"File '%s' already exists, overwrite? [y/N] \" % filename,\n\t\toverwrite = \"n\"\n\t\ttry:\n\t\t\toverwrite = sys.stdin.readline().strip()\n\t\texcept KeyboardInterrupt:\n\t\t\tNone\n\t\tprint '\\r', # remove space in beginning of line when reading from stdin\n\n\t\tif overwrite.lower() != 'y':\n\t\t\treturn\n\n\t\tprint \"Overwriting file '%s'\" % filename\n\t\tf = open(filename, 'w')\n\texcept IOError:\n\t\tf = open(filename, 'w')\n\t\tprint \"File '%s' created.\" % filename\n\n\n\ttemplate = Template(COLLECTION_TEMPLATE)\n\tcontent = template.substitute(\n\t\tmodel_dir = model_dir,\n\t\tmodel_parameter = model_parameter,\n\t\tmodel_content = model_content,\n\t\tcollection_variable = collection_variable\n\t)\n\tf.write(content)\n\tf.close()\n\tprint \"Collection '%s' created.\" % name\n\ndef create_view(name):\n\tNone\n\n\"\"\"\nCheck if the project was already initialized. Checks directories in DIRS,\nfiles in FILES and LIBS, and finally 'index.html'.\n\nReturns True only if all directories and files are present.\n\"\"\"\ndef already_initialized():\n\tfor directory in DIRS.values():\n\t\tif not os.path.exists(directory):\n\t\t\treturn False\n\n\tfor f in FILES.keys() + [LIB_FILENAME(l) for l in LIBS.keys()]:\n\t\tif not os.path.exists(f):\n\t\t\treturn False\n\n\tif not os.path.exists('index.html'):\n\t\treturn False\n\n\treturn True\t\t\n\n\"\"\"\nProcessors\n For every command 'c' found in COMMAND, there needs to be a function\n 'process_c(args)' to process it\n\"\"\"\ndef process_about(args):\n\tprint ''\n\tprint ABOUT\n\ndef process_check(args):\n\tif already_initialized():\n\t\tprint 'Project already initialized.'\n\telse:\n\t\tprint \"Project not yet initialized. Use 'init' to initialize.\"\n\ndef process_init(args):\n\tprint 'Init project'\n\tinitialize_project()\n\n\n\n\"\"\"\nargs array:\n [0] type of module to create (Model, View, ...)\n [1] name of module\n [2:] attributes\n\"\"\"\ndef process_generate(args):\n\tif not args:\n\t\tprint 'Missing type and/or name of module.'\n\t\tprint ' Syntax: generate [model|view|collection]'\n\n\t\tsys.exit(2)\n\n\tif len(args) < 2:\n\n\t\tif len(args) == 1 and args[0] == 'model':\n\t\t\tprint 'Error: missing name of the new Model'\n\t\t\tprint\n\t\t\tprint ' Syntax: generate model [defaults]'\n\t\t\tprint ' Examples: generate model Person'\n\t\t\tprint ' generate model Person name:Harry age:11 isWizard:true'\n\t\telif len(args) == 1 and args[0] == 'collection':\n\t\t\tprint 'Error: missing name of the new Collection'\n\t\t\tprint\n\t\t\tprint ' Syntax: generate collection [model dependency name]'\n\t\t\tprint ' Examples: generate collection Persons'\n\t\t\tprint ' generate collection Persons Person'\n\t\t\n\n\t\tsys.exit(2)\n\n\tallowed_types = COMMANDS['generate']\n\tmodule_type = args[0]\n\tmodule_name = args[1]\n\tmodule_attrs = args[2:]\n\n\tif module_type not in allowed_types:\n\t\tprint 'Unable to create module with type %s.' % module_type\n\t\tsys.exit(2)\n\n\tif module_type == 'model':\n\t\tcreate_model(module_name, module_attrs)\n\telif module_type == 'view':\n\t\tcreate_view(module_name, module_attrs)\n\telif module_type == 'collection':\n\t\tif module_attrs and isinstance(module_attrs, list):\n\t\t\tmodule_attrs = module_attrs[0]\n\t\tcreate_collection(module_name, module_attrs)\n\n\"\"\"\nReturns operation_name, operation_args\n\"\"\"\ndef parse_args(args):\n\tdef print_available_commands():\n\t\tfor c in COMMANDS.keys():\n\t\t\tprint ' %s' % c,\n\t\t\tif COMMANDS[c]:\n\t\t\t\tprint '[',\n\t\t\t\tfor arg in COMMANDS[c]: print arg,\n\t\t\t\tprint ']',\n\t\t\tprint\n\n\tprint HEADER\n\n\tif not args:\n\t\tprint 'No command given. Available commands:'\n\t\tprint_available_commands()\n\t\tsys.exit(2)\n\telif args[0] not in COMMANDS.keys():\n\t\tprint \"Unknown command '%s'. Available commands:\" % args[0]\n\t\tprint_available_commands()\n\t\tsys.exit(2)\n\n\t# pass remaining command line arguments as parameters\n\toperation_name = args[0]\n\toperation_args = args[1:] or None\n\treturn operation_name, operation_args\n\n\ndef main():\n\tcommand, command_args = parse_args(sys.argv[1:])\n\tprocessor = getattr(self, 'process_' + command)\n\tprocessor(command_args)\n\t#print '\\nDone'\n\n\n\nif __name__ == \"__main__\":\n\tmain()\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41668,"cells":{"__id__":{"kind":"number","value":9998683909699,"string":"9,998,683,909,699"},"blob_id":{"kind":"string","value":"b710f4433174664c19400df864cee321a081fddb"},"directory_id":{"kind":"string","value":"bb0bcccef27f815cba02399bcd157c5cb46f2a18"},"path":{"kind":"string","value":"/codility/brick.py"},"content_id":{"kind":"string","value":"5849fb772e1b5c1410d713dc1e84e901f6a45816"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"nkatre/technical_interviews"},"repo_url":{"kind":"string","value":"https://github.com/nkatre/technical_interviews"},"snapshot_id":{"kind":"string","value":"4880a6d89afb5b6926e018fcd6db3f3f27d9d432"},"revision_id":{"kind":"string","value":"0df97ad2e2b3abdf7d221c4e9a5acd90789cf74a"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-02-26T13:14:55.764785","string":"2020-02-26T13:14:55.764785"},"revision_date":{"kind":"timestamp","value":"2013-08-07T09:35:38","string":"2013-08-07T09:35:38"},"committer_date":{"kind":"timestamp","value":"2013-08-07T09:35:38","string":"2013-08-07T09:35:38"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"def solution(A,B):\n list=[0 for i in range(0,len(A))]\n count=len(A)-1\n count_f=0\n for i in B:\n #print i\n if((i<=A[count]) & (count>=0)):\n count_f+=1\n count-=1\n #print \"count\" , count\n else:\n found=False\n #print \"here\"\n while(count>=0):\n if((i<=A[count])& (count>=0)):\n count_f+=1\n count-=1\n #print \"count\" ,count\n break\n count-=1\n\n return count_f\n \n \nif __name__ == '__main__':\n A=[5,6,4,3,6,2,3]\n B=[2,3,5,2,4]\n print solution(A,B)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41669,"cells":{"__id__":{"kind":"number","value":16956530903549,"string":"16,956,530,903,549"},"blob_id":{"kind":"string","value":"3744bc85e6c0bb41291fb78827276f94eafa3444"},"directory_id":{"kind":"string","value":"e17bc92c0ca25f5953b0d2e8c7aec5d7974fbb1c"},"path":{"kind":"string","value":"/lib/summon/multiwindow.py"},"content_id":{"kind":"string","value":"edb4a539d1d216e1b2a9c2d87d072a92913f0fc2"},"detected_licenses":{"kind":"list like","value":["LGPL-2.0-or-later","GPL-2.0-only","LGPL-2.1-only"],"string":"[\n \"LGPL-2.0-or-later\",\n \"GPL-2.0-only\",\n \"LGPL-2.1-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"mdrasmus/summon"},"repo_url":{"kind":"string","value":"https://github.com/mdrasmus/summon"},"snapshot_id":{"kind":"string","value":"ad8aea266f01b871f160ee45d76ae82badf4ee69"},"revision_id":{"kind":"string","value":"16bd8cbd952d4462232c4a199677021649ad35d4"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T21:38:39.320110","string":"2021-01-10T21:38:39.320110"},"revision_date":{"kind":"timestamp","value":"2014-06-06T15:54:08","string":"2014-06-06T15:54:08"},"committer_date":{"kind":"timestamp","value":"2014-06-06T15:54:08","string":"2014-06-06T15:54:08"},"github_id":{"kind":"number","value":1784067,"string":"1,784,067"},"star_events_count":{"kind":"number","value":3,"string":"3"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\n\n SUMMON - Multiple Window Management\n\n\"\"\"\n\nimport time\n\nfrom summon.core import *\nfrom summon import util\nimport summon\n\n \n\n\nclass WindowEnsemble:\n \"\"\"This class coordinates the position, size, translation, and zoom of \n multiple SUMMON Windows.\n \"\"\"\n\n def __init__(self, windows, stackx=False, stacky=False, \n samew=False, sameh=False,\n tiex=False, tiey=False, pinx=False, piny=False,\n coordsx=None, coordsy=None,\n master=None,\n close_with_master=None):\n \"\"\"windows -- windows to coordinate\n stackx -- (bool) windows should stack with same x-coordinate\n stacky -- (bool) windows should stack with same y-coordinate\n samew -- (bool) windows should have same width\n sameh -- (bool) windows should have same height\n tiex -- (bool) translation along x-axis should be coordinated\n tiey -- (bool) translation along y-axis should be coordinated\n pinx -- (bool) translation along x-axis should be offset by window position\n piny -- (bool) translation along x-axis should be offset by window position\n coordsx -- a list of x-offsets for translation\n coordsy -- a list of y-offsets for translation\n master -- master window\n close_with_master -- (bool) if true, all windows close with master\n \"\"\"\n \n self.windows = windows[:]\n self.pos = {}\n self.sizes = {}\n self.stackx = stackx\n self.stacky = stacky\n self.samew = samew\n self.sameh = sameh\n self.listeners = {}\n self.ties = {}\n self.lock = False\n self.recentPos = util.Dict(default=[])\n self.recentSize = util.Dict(default=[])\n \n self.tiex = tiex\n self.tiey = tiey\n self.pinx = pinx\n self.piny = piny\n self.coordsx = coordsx\n self.coordsy = coordsy\n \n # setup master window\n if master != None:\n self.master = master\n \n # close_with_master defaults to True if master is given\n if close_with_master == None:\n self.close_with_master = True\n else:\n self.close_with_master = close_with_master\n else:\n self.master = windows[0]\n \n # close_with_master defaults to False if master is not given\n if close_with_master == None: \n self.close_with_master = False\n else:\n self.close_with_master = close_with_master\n \n \n # record window positions and sizes\n for win in windows:\n self.pos[win] = win.get_position()\n self.sizes[win] = win.get_size()\n \n \n # setup window listeners\n for win in windows:\n self.init_listeners(win)\n \n\n # setup window stacking\n if stackx or stacky:\n self.stack(self.master)\n \n # setup scrolling ties\n if tiex or tiey:\n self.tie(windows, tiex=tiex, tiey=tiey, pinx=pinx, piny=piny,\n coordsx=coordsx, coordsy=coordsy, master=master)\n \n \n\n def add_window(self, win, index=-1, coordx=0, coordy=0):\n \"\"\"add a window to the existing ensemble\"\"\"\n \n if self.tiex or self.tiey:\n self.untie()\n \n if index == -1:\n index = len(self.windows)\n self.windows.insert(index, win)\n \n self.pos[win] = win.get_position()\n self.sizes[win] = win.get_size()\n \n self.init_listeners(win)\n \n self.recentPos.clear()\n self.recentSize.clear()\n \n # setup window stacking\n if self.stackx or self.stacky:\n self.stack(self.master)\n \n if self.coordsx != None:\n self.coordsx.insert(index, coordx)\n if self.coordsy != None:\n self.coordsy.insert(index, coordy)\n \n # setup scrolling ties\n if self.tiex or self.tiey:\n self.tie(self.windows, tiex=self.tiex, tiey=self.tiey, \n pinx=self.pinx, piny=self.piny,\n coordsx=self.coordsx, coordsy=self.coordsy, \n master=self.master)\n\n \n \n def init_listeners(self, win):\n \"\"\"initialize listeners for a window managed by the ensemble\"\"\"\n \n self.listeners[win] = util.Bundle(\n close=lambda: self._on_window_close(win),\n resize=lambda w, h: self._on_window_resize(win, w, h),\n move=lambda x, y: self._on_window_move(win, x, y))\n win.add_close_listener(self.listeners[win].close)\n win.add_resize_listener(self.listeners[win].resize)\n win.add_move_listener(self.listeners[win].move)\n \n \n def stop(self):\n \"\"\"stop the window ensemble from coordinating window movements\"\"\"\n \n # pretend all the windows have closed\n for win in list(self.windows):\n self._on_window_close(win)\n \n \n def _on_window_close(self, win):\n \"\"\"callback for when a window in the ensemble closes\"\"\"\n \n self.remove_window(win)\n \n # close all windows if master closes\n if self.close_with_master and win == self.master:\n for win2 in self.windows:\n win2.close()\n \n \n def remove_window(self, win):\n \"\"\"removes a window from the ensemble\"\"\"\n \n # do nothing if window is not in ensemble\n if win not in self.windows:\n return \n \n self.windows.remove(win)\n \n # remove all callbacks\n win.remove_close_listener(self.listeners[win].close)\n win.remove_resize_listener(self.listeners[win].resize)\n win.remove_move_listener(self.listeners[win].move)\n\n del self.listeners[win]\n \n self.untie(win)\n \n\n \n \n \n def _on_window_resize(self, win, width, height):\n \"\"\"callback for when a window resizes\"\"\"\n \n # ignore windows that have been changed by the ensemble\n size = (width, height)\n if size in self.recentSize[win]:\n ind = self.recentSize[win].index(size)\n self.recentSize[win] = self.recentSize[win][ind+1:]\n \n # process windows that have been changed by outside forces\n elif self.sizes[win] != (width, height):\n if self.stackx or self.stacky:\n self.stack(win)\n else:\n self.align(win)\n self.raise_windows(win)\n \n \n def _on_window_move(self, win, x, y):\n \"\"\"callback for when a window moves\"\"\"\n \n # ignore windows that have been changed by the ensemble\n pos = (x, y)\n if pos in self.recentPos[win]:\n ind = self.recentPos[win].index(pos)\n self.recentPos[win] = self.recentPos[win][ind+1:]\n \n # process windows that have been changed by outside forces \n elif self.pos[win] != (x, y):\n if self.stackx or self.stacky:\n self.stack(win)\n else:\n self.align(win)\n self.raise_windows(win)\n \n \n def stack(self, win):\n \"\"\"restack windows together\"\"\"\n \n target_pos = win.get_position()\n target_size = win.get_size()\n self.pos[win] = target_pos\n self.sizes[win] = target_size\n \n # get window sizes\n widths = []\n heights = []\n x = []\n y = []\n totalx = 0\n totaly = 0\n target = []\n \n for win2 in self.windows:\n # update size\n if win2 == win:\n w, h = target_size\n \n # determine destination positions\n target = [totalx, totaly]\n else:\n w2, h2 = win2.get_size()\n \n if self.samew:\n w = target_size[0]\n else:\n w = w2\n if self.sameh:\n h = target_size[1]\n else:\n h = h2\n \n if (w,h) != (w2, h2):\n self.recentSize[win2].append((w,h))\n self.sizes[win2] = (w, h)\n win2.set_size(w, h)\n \n widths.append(w)\n heights.append(h)\n x.append(totalx)\n y.append(totaly)\n deco = win2.get_decoration()\n totalx += w + deco[0]\n totaly += h + deco[1]\n \n # set window positions\n for i, win2 in enumerate(self.windows):\n if win == win2:\n continue\n \n if self.stackx:\n newx = target_pos[0]\n newy = target_pos[1] + y[i] - target[1]\n elif self.stacky:\n newx = target_pos[0] + x[i] - target[0]\n newy = target_pos[1]\n \n oldpos = self.pos[win2] #win2.get_position()\n self.pos[win2] = (newx, newy)\n \n if (newx, newy) != oldpos:\n win2.set_position(newx, newy)\n self.recentPos[win2].append((newx, newy))\n \n \n def align(self, win):\n \"\"\"move all windows the same amount window 'win' has moved\"\"\"\n now = win.get_position()\n now = [now[0], now[1]]\n pos1 = self.pos[win]\n \n # move all other windows to match moved window\n for win2 in self.windows:\n if win2 != win:\n pos2 = self.pos[win2]\n pos3 = [now[0] + pos2[0] - pos1[0],\n now[1] + pos2[1] - pos1[1]]\n win2.set_position(*pos3)\n self.recentPos[win2].append(tuple(pos3))\n self.pos[win2] = pos3\n\n # record new position for main window\n self.pos[win] = now\n \n \n def tie(self, windows, tiex=False, tiey=False, pinx=False, piny=False,\n coordsx=None, coordsy=None, master=None):\n \"\"\"ties the scrolling and zooming of multiple windows together\"\"\"\n \n if len(windows) < 2:\n return\n \n self.tiex = tiex\n self.tiey = tiey\n self.pinx = pinx\n self.piny = piny\n self.coordsx = coordsx\n self.coordsy = coordsy\n \n if master == None:\n master = windows[0] \n \n if coordsx == None:\n coordsx = [0] * len(windows)\n\n if coordsy == None:\n coordsy = [0] * len(windows)\n\n # make coordinate lookup\n self.coords = {}\n for win, x, y in zip(windows, coordsx, coordsy):\n self.coords[win] = util.Bundle(x=x, y=y)\n\n # set callbacks for each window\n for win in windows:\n others = util.remove(windows, win)\n \n tie = WindowTie(win, others, self)\n self.ties[win] = tie\n \n win.add_view_change_listener(tie.update_scroll)\n win.add_focus_change_listener(tie.update_focus)\n\n if master == win:\n master_trans = tie.update_scroll\n master_focus = tie.update_focus\n master_focus()\n master_trans()\n \n\n def untie(self, win=None):\n \"\"\"remove a window from any ties\"\"\"\n \n if win == None:\n # untie all windows\n for win2 in self.windows:\n self.untie(win2)\n else:\n if win not in self.ties:\n return\n \n win.remove_view_change_listener(self.ties[win].update_scroll)\n win.remove_focus_change_listener(self.ties[win].update_focus) \n\n del self.ties[win]\n\n # make sure window ties remove their callbacks\n for tie in self.ties.itervalues():\n tie.remove_window(win)\n\n\n def raise_windows(self, top=None):\n \"\"\"raises all windows in ensemble above other windows on the desktop\"\"\"\n for win in self.windows:\n win.raise_window(True)\n \n if top != None:\n top.raise_window(True)\n \n\n\n\nclass WindowTie:\n \"\"\"This class coordinates the translation and zoom of multiple SUMMON Windows.\n \"\"\"\n\n def __init__(self, win, others, ensemble):\n self.win = win\n self.others = others\n self.ensemble = ensemble\n\n def remove_window(self, win):\n \"\"\"removes a window from the list of tied windows\"\"\"\n if win in self.others:\n self.others.remove(win)\n \n \n def update_scroll(self):\n \"\"\"call back that sets translation and zoom\"\"\"\n\n # prevent infinite loops\n if self.ensemble.lock:\n return\n self.ensemble.lock = True\n \n \n w1 = self.win\n others = self.others\n coords = self.ensemble.coords\n \n needpin = self.ensemble.pinx or self.ensemble.piny\n \n if needpin:\n pos1 = w1.get_position()\n trans1 = w1.get_trans()\n zoom1 = w1.get_zoom()\n \n \n for w2 in others:\n if needpin:\n pos2 = w2.get_position()\n \n oldtrans2 = list(w2.get_trans())\n oldzoom2 = list(w2.get_zoom())\n trans2 = oldtrans2[:]\n zoom2 = oldzoom2[:]\n\n if self.ensemble.tiex:\n trans2[0] = trans1[0] - coords[w2].x + coords[w1].x\n zoom2[0] = zoom1[0]\n\n if self.ensemble.pinx:\n trans2[0] += pos1[0] - pos2[0]\n \n if self.ensemble.tiey:\n trans2[1] = trans1[1] - coords[w2].y + coords[w1].y\n zoom2[1] = zoom1[1]\n\n if self.ensemble.piny:\n trans2[1] -= pos1[1] - pos2[1]\n\n # check to see if there is a change (prevents infinite loops)\n if trans2 != oldtrans2:\n w2.set_trans(*trans2)\n if zoom2 != oldzoom2:\n w2.set_zoom(*zoom2)\n self.ensemble.lock = False\n\n\n \n def update_focus(self):\n \"\"\"callback that sets focus\"\"\"\n \n # prevent infinite loops \n if self.ensemble.lock:\n return\n self.ensemble.lock = True\n \n coords = self.ensemble.coords \n \n fx1, fy1 = self.win.get_focus()\n fx1 -= coords[self.win].x\n fy1 -= coords[self.win].y\n\n for w2 in self.others:\n newpos = (fx1 + coords[w2].x, fy1 + coords[w2].y)\n oldpos = w2.get_focus()\n\n if newpos != oldpos:\n w2.set_focus(* newpos)\n self.ensemble.lock = False\n\n\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41670,"cells":{"__id__":{"kind":"number","value":12773232781656,"string":"12,773,232,781,656"},"blob_id":{"kind":"string","value":"bbfd81ec40f71bee2c8882a5f72c0e993c8f4d1d"},"directory_id":{"kind":"string","value":"26591af2901a305e5082ef07fec15ceb54ed886f"},"path":{"kind":"string","value":"/python_exercise.py"},"content_id":{"kind":"string","value":"cdca4b0137128f2daffd2c275098387085fe21b2"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"rappiah/EXERCISE-SESSION"},"repo_url":{"kind":"string","value":"https://github.com/rappiah/EXERCISE-SESSION"},"snapshot_id":{"kind":"string","value":"017ff4b088ade8754ba24103a6b59a463bfc29f2"},"revision_id":{"kind":"string","value":"a113843c84d4c68d67fcbd5e359d6ec85cfae2a9"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-09-21T15:29:52.163316","string":"2020-09-21T15:29:52.163316"},"revision_date":{"kind":"timestamp","value":"2012-02-21T21:12:53","string":"2012-02-21T21:12:53"},"committer_date":{"kind":"timestamp","value":"2012-02-21T21:12:53","string":"2012-02-21T21:12:53"},"github_id":{"kind":"number","value":3505535,"string":"3,505,535"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import os\nimport glob\ndef files(path):\n listing = os.listdir(path)\n return listing\ndef filestring(line)\n search='N'\n s=line.find(search)\n if s>=0\n k=True\n else:\n k=False\n return b;\ndef replace(line)\n nline = line.replace(\"N','M')\nreturn nline;\npath = 'cleandata'\nfilelist=files(path)\nfor infile in filelist:\nfor currentFile in glob.glob( os.path.join(path, '*') ):\n\tprint(\"current file is: \" + infile)\n\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41671,"cells":{"__id__":{"kind":"number","value":14010183334637,"string":"14,010,183,334,637"},"blob_id":{"kind":"string","value":"eac4289ac67417be0cd470ed2232c373162ea05f"},"directory_id":{"kind":"string","value":"ca20e366c9cb1b0c92f355e58a36456bd0f4c5ac"},"path":{"kind":"string","value":"/packnaturals.py"},"content_id":{"kind":"string","value":"6d81b51f34a95d990766519a57294745d0b2c77f"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"tumist/packnaturals"},"repo_url":{"kind":"string","value":"https://github.com/tumist/packnaturals"},"snapshot_id":{"kind":"string","value":"0b3d94c073949ea91c76b2c96688ca2bc7a2c2dc"},"revision_id":{"kind":"string","value":"6b76b7c62d04d1069a179c1f43319467ba94196a"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-10T19:15:06.728046","string":"2016-09-10T19:15:06.728046"},"revision_date":{"kind":"timestamp","value":"2014-01-04T01:39:17","string":"2014-01-04T01:39:17"},"committer_date":{"kind":"timestamp","value":"2014-01-04T01:39:17","string":"2014-01-04T01:39:17"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\"\"\"URL-safe natural-number packer.\n\nThis module extends the packnaturals_ordered implementation to achieve\nmore compression (especially when you have clusters of numbers) at the\nexpense of ordering.\n\"\"\"\nfrom __future__ import print_function\nimport packnaturals_ordered\n\nto_list_decor = lambda func: lambda arg: list(func(arg))\n\ndef pack(numbers):\n s = sorted(numbers)\n rel = s[:1] + [a-b for a, b in zip(s[1:], s)]\n return packnaturals_ordered.pack(rel)\n\n@to_list_decor\ndef unpack(string):\n rel = packnaturals_ordered.unpack(string)\n incr = 0\n for n in rel:\n incr += n\n yield incr\n\nif __name__ == \"__main__\":\n import sys\n try:\n numbers = [int(num) for num in sys.argv[1:]]\n if not numbers or not all([num >= 0 for num in numbers]):\n raise ValueError\n except ValueError:\n print(\"Usage: {0} [num2 num3 ...]\".format(sys.argv[0]))\n sys.exit(1)\n packed = pack(numbers)\n print(\"packed :\", packed)\n unpacked = unpack(packed)\n print(\"unpacked :\", ' '.join([str(n) for n in unpacked]))\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41672,"cells":{"__id__":{"kind":"number","value":16819091931318,"string":"16,819,091,931,318"},"blob_id":{"kind":"string","value":"446099c78b2c208c7cf62123b2149adbda349427"},"directory_id":{"kind":"string","value":"7dcdd5de0640f07b01b1707c134ec0bd168f641d"},"path":{"kind":"string","value":"/fedora_college/modules/content/media.py"},"content_id":{"kind":"string","value":"c37f7dbc768d7084060d61a95647ccc2f647cbac"},"detected_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"MSheezan/fedora-college"},"repo_url":{"kind":"string","value":"https://github.com/MSheezan/fedora-college"},"snapshot_id":{"kind":"string","value":"8e3e741f6ddac481c2bb7bbcde1e70e2b4b56774"},"revision_id":{"kind":"string","value":"07dbce3652c6c1796fb0f7b208a706c9e9d90dc1"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-15T22:38:16.831830","string":"2021-01-15T22:38:16.831830"},"revision_date":{"kind":"timestamp","value":"2014-06-26T07:04:33","string":"2014-06-26T07:04:33"},"committer_date":{"kind":"timestamp","value":"2014-06-26T07:04:33","string":"2014-06-26T07:04:33"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\nfrom flask import Blueprint, render_template\nfrom flask import url_for, g\nfrom fedora_college.modules.content.forms import * # noqa\nfrom fedora_college.core.models import * # noqa\n\nbundle = Blueprint('content', __name__, template_folder='templates')\n\n\n@bundle.route('/media/view')\n@bundle.route('/media/view/')\n@bundle.route('/media/view/')\n@bundle.route('/media/view//')\ndef displaymedia(mediaid=None):\n url = url_for('content.displaymedia')\n if mediaid is not None:\n media = Media.query.filter_by(media_id=mediaid).all()\n return render_template('media/index.html', data=media, url=url)\n else:\n media = Media.query.all()\n return render_template('media/index.html', data=media, url=url)\n\n\n@bundle.route('/media/add/', methods=['GET', 'POST'])\n@bundle.route('/media/add', methods=['GET', 'POST'])\ndef uploadmedia():\n user = UserProfile.query. \\\n filter_by(username=g.fas_user['username']).first_or_404()\n token = user.token\n form_action = url_for('api.uploadvideo', token=token)\n return render_template('media/uploadmedia.html',\n form_action=form_action,\n title=\"add media\"\n )\n\n\n@bundle.route('/media/view//revise')\n@bundle.route('/media/view//revise/')\ndef revisemedia(mediaid=None):\n user = UserProfile.query. \\\n filter_by(username=g.fas_user['username']).first_or_404()\n token = user.token\n form_action = url_for('api.revisevideo',\n videoid=mediaid,\n token=token)\n return render_template('media/revise.html',\n form_action=form_action,\n title=\"add media\")\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41673,"cells":{"__id__":{"kind":"number","value":5729486390751,"string":"5,729,486,390,751"},"blob_id":{"kind":"string","value":"9184556e6342adbb6f2a5aed258c74725ec769bc"},"directory_id":{"kind":"string","value":"0e4ea484cd62854f691aabab86323708f5f951a8"},"path":{"kind":"string","value":"/libgsync/drive/mimetypes.py"},"content_id":{"kind":"string","value":"2984b2c7824027fd035577e0bf758dd5f06aa7a9"},"detected_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"omriiluz/gsync"},"repo_url":{"kind":"string","value":"https://github.com/omriiluz/gsync"},"snapshot_id":{"kind":"string","value":"767ede8782b39263c1038553220027a6e9f74f1a"},"revision_id":{"kind":"string","value":"fa33bce69d0b9988345b1df3d821f7e3c027d758"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-31T06:01:30.773956","string":"2020-05-31T06:01:30.773956"},"revision_date":{"kind":"timestamp","value":"2013-10-22T22:21:53","string":"2013-10-22T22:21:53"},"committer_date":{"kind":"timestamp","value":"2013-10-22T22:21:53","string":"2013-10-22T22:21:53"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# Copyright (C) 2013 Craig Phillips. All rights reserved.\n\nfrom __future__ import absolute_import\n\nclass MimeTypes(object):\n NONE = \"none/unknown-mimetype\"\n FOLDER = \"application/vnd.google-apps.folder\"\n BINARY_FILE = \"application/octet-stream\"\n\n @staticmethod\n def get(path):\n mimeType = None\n try:\n import magic\n if callable(magic.from_file):\n mimeType = magic.from_file(path, mime = True)\n except Exception, e:\n import mimetypes\n mimeType = mimetypes.guess_type(path)[0]\n\n if mimeType is not None:\n return mimeType\n\n return MimeTypes.NONE\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41674,"cells":{"__id__":{"kind":"number","value":15204184272664,"string":"15,204,184,272,664"},"blob_id":{"kind":"string","value":"5a38543cc6d842598a0ccd7509e822400aa3e77c"},"directory_id":{"kind":"string","value":"48249ea1614676d292df4d029686efb5d4c91bf9"},"path":{"kind":"string","value":"/devilry/apps/core/tests/basenode.py"},"content_id":{"kind":"string","value":"a867a3a34916ab9a8aca6757ca2ec7ce3d4a9afc"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"erlendve/devilry-django"},"repo_url":{"kind":"string","value":"https://github.com/erlendve/devilry-django"},"snapshot_id":{"kind":"string","value":"f99f73abbd313621f5dfbd4ea2fccd9d5a9fd826"},"revision_id":{"kind":"string","value":"3a4caf2433ee7eeceac83a09002563a03a14ea7e"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-04-08T11:04:21.573151","string":"2020-04-08T11:04:21.573151"},"revision_date":{"kind":"timestamp","value":"2011-09-04T17:54:34","string":"2011-09-04T17:54:34"},"committer_date":{"kind":"timestamp","value":"2011-09-04T17:54:34","string":"2011-09-04T17:54:34"},"github_id":{"kind":"number","value":2072574,"string":"2,072,574"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django.contrib.auth.models import User\n\nfrom django.test import TestCase\nfrom ..models import Node, Subject\nfrom ..testhelper import TestHelper\n\nclass TestBaseNode(TestCase, TestHelper):\n\n def setUp(self):\n self.add(nodes=\"uio:admin(uioadmin).ifi:admin(ifiadmin,ifitechsupport)\")\n self.add(nodes=\"uio.deepdummy1\")\n self.thesuperuser = User.objects.create(username='thesuperuser', is_superuser=True)\n\n def test_is_admin(self):\n self.assertTrue(self.uio.is_admin(self.uioadmin))\n self.assertFalse(self.uio.is_admin(self.ifiadmin))\n self.assertTrue(self.uio_ifi.is_admin(self.uioadmin))\n self.assertTrue(self.uio_ifi.is_admin(self.ifiadmin))\n\n def test_get_admins(self):\n def split_and_sort(admins):\n l = admins.split(', ')\n l.sort()\n return ', '.join(l)\n self.assertEquals(self.uio.get_admins(), 'uioadmin')\n self.assertEquals(split_and_sort(self.uio_ifi.get_admins()),\n 'ifiadmin, ifitechsupport')\n\n def test_can_save(self):\n self.assertTrue(self.uio.can_save(self.uioadmin))\n self.assertFalse(self.uio.can_save(self.ifiadmin))\n self.assertTrue(self.uio_ifi.can_save(self.ifiadmin))\n self.assertTrue(self.uio_ifi.can_save(self.uioadmin))\n\n self.assertTrue(Node().can_save(self.thesuperuser))\n self.assertFalse(Node(parentnode=None).can_save(self.uioadmin))\n self.assertTrue(Node(parentnode=self.uio).can_save(self.uioadmin))\n self.assertFalse(Node(parentnode=self.uio).can_save(self.ifiadmin))\n\n def test_can_save_id_none(self):\n self.assertTrue(Subject(parentnode=self.uio_deepdummy1).can_save(self.uioadmin))\n self.assertFalse(Subject(parentnode=self.uio_deepdummy1).can_save(self.ifiadmin))\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41675,"cells":{"__id__":{"kind":"number","value":5231270173042,"string":"5,231,270,173,042"},"blob_id":{"kind":"string","value":"da34399c47b54ef969009dd5ae3b9a3b2d23597d"},"directory_id":{"kind":"string","value":"339744d9a1816f4338f38043fb946e8387432cf2"},"path":{"kind":"string","value":"/functions.py"},"content_id":{"kind":"string","value":"d4a68ab0b1aca6fb624e193c81e0d2fe55975dc2"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"Vertrexia/ccs"},"repo_url":{"kind":"string","value":"https://github.com/Vertrexia/ccs"},"snapshot_id":{"kind":"string","value":"20539aaac2c79e66833aea2bd70c404afc151599"},"revision_id":{"kind":"string","value":"6661d47f3850a250016b84cd912cce1c0b707771"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-02T12:57:27.716018","string":"2021-01-02T12:57:27.716018"},"revision_date":{"kind":"timestamp","value":"2013-01-08T09:48:28","string":"2013-01-08T09:48:28"},"committer_date":{"kind":"timestamp","value":"2013-01-08T09:48:28","string":"2013-01-08T09:48:28"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import classes\r\nimport math\r\n\r\n# checks if players exist in the list\r\ndef playerExists(name):\r\n for key in classes.players_:\r\n player = classes.players_[key]\r\n\r\n if player.name == name:\r\n return True\r\n return False\r\n\r\n# player entered the server\r\ndef playerEntered(name):\r\n if playerExists(name) == False:\r\n classes.players_[classes.pCounter] = classes.Player()\r\n player = classes.players_[classes.pCounter]\r\n\r\n player.name = name\r\n\r\n classes.pCounter += 1\r\n\r\ndef getPlayer(name):\r\n if name != \"\":\r\n for key in classes.players_:\r\n player = classes.players_[key]\r\n\r\n if player.name == name:\r\n return player\r\n return False\r\n\r\n# player left the server\r\ndef playerLeft(name):\r\n if playerExists(name) == True:\r\n player = getPlayer(name)\r\n\r\n if player != False:\r\n player.isAlive = False"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41676,"cells":{"__id__":{"kind":"number","value":11991548728659,"string":"11,991,548,728,659"},"blob_id":{"kind":"string","value":"6f0448ee8b7855a8cb835f3bc43849c0123b26cf"},"directory_id":{"kind":"string","value":"1f1c9bb2c09652925952e191544c5e3dff727b67"},"path":{"kind":"string","value":"/yuce/settings.py"},"content_id":{"kind":"string","value":"e439c5763cf8ad3d5caf7a899284b1c6be44f461"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"julyzergcn/yuce"},"repo_url":{"kind":"string","value":"https://github.com/julyzergcn/yuce"},"snapshot_id":{"kind":"string","value":"0627cff7961454eac191376917d659e3ceeff0a9"},"revision_id":{"kind":"string","value":"5e85c2a44cb7015bf1f0838d298a47a13868ce30"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-10T15:06:53.680184","string":"2016-09-10T15:06:53.680184"},"revision_date":{"kind":"timestamp","value":"2013-09-20T23:15:36","string":"2013-09-20T23:15:36"},"committer_date":{"kind":"timestamp","value":"2013-09-20T23:15:36","string":"2013-09-20T23:15:36"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#coding=utf-8\n\nBITCOIN_SERVER_ADDR = 'localhost:8333'\nBITCOIN_SERVER_USER = 'ee'\nBITCOIN_SERVER_PASS = 'ee33'\n\nBITCOIN_SERVER_URL = 'http://%s:%s@%s' % (BITCOIN_SERVER_USER, BITCOIN_SERVER_PASS, BITCOIN_SERVER_ADDR)\n\nBITCOIN_WITHDRAW = True # can withdraw\n#~ BITCOIN_WITHDRAW = False # cannot withdraw\n\nimport djcelery\ndjcelery.setup_loader()\n\nBROKER_URL = 'django://'\nCELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'\n\nTOPIC_START_WEIGHT = 10**5\nTOPIC_END_WEIGHT = 10**4\nTOPIC_POST_COST = 10\nTOPIC_EVENT_CLOSED_EMAILS = []\n\nimport decimal\nTOPIC_SUBMITTED_COST = decimal.Decimal(0.0000001)\n\n# max bet score per topic, in one or more times\nTOPIC_MAX_BET_SCORE = 1\n\n# when topic is completed, divide the profit to site and the topic submitter\nSITE_WIN_RATE = 0.1\nSUBMITTER_WIN_RATE = 0.1\n\nDATE_FORMAT = 'n/j/y'\nDATETIME_FORMAT = 'n/j/y H:i'\n\nEMAIL_HOST = 'smtp.yeah.net'\nEMAIL_HOST_USER = 'yuce_yuce@yeah.net'\nEMAIL_HOST_PASSWORD = 'Yuce321'\nEMAIL_PORT = 25\nDEFAULT_FROM_EMAIL = 'yuce_yuce@yeah.net'\n\n\nfrom os.path import dirname, join, abspath\n\n\nROOT = dirname(abspath(__file__))\n\nLOCALE_PATHS = (\n join(dirname(ROOT), 'conf', 'locale'),\n)\n\nAUTH_USER_MODEL = 'core.User'\n\nDEBUG = True\nTEMPLATE_DEBUG = DEBUG\n\nADMINS = (\n # ('Your Name', 'your_email@example.com'),\n)\n\nMANAGERS = ADMINS\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.\n 'NAME': join(ROOT, 'dev.db'),\n 'USER': '',\n 'PASSWORD': '',\n 'HOST': '',\n 'PORT': '',\n }\n}\n\nimport dj_database_url\nDATABASES = {'default': dj_database_url.config()}\n\n# Hosts/domain names that are valid for this site; required if DEBUG is False\n# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts\nALLOWED_HOSTS = []\n\n# Local time zone for this installation. Choices can be found here:\n# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name\n# although not all choices may be available on all operating systems.\n# In a Windows environment this must be set to your system time zone.\nTIME_ZONE = 'Asia/Shanghai'\n\n# Language code for this installation. All choices can be found here:\n# http://www.i18nguy.com/unicode/language-identifiers.html\nLANGUAGE_CODE = 'zh-cn'\ngettext_noop = lambda s: s\nLANGUAGES = (\n ('en', gettext_noop('English')),\n ('zh-cn', gettext_noop(u'中文')),\n)\n\nSITE_ID = 1\n\n# If you set this to False, Django will make some optimizations so as not\n# to load the internationalization machinery.\nUSE_I18N = True\n\n# If you set this to False, Django will not format dates, numbers and\n# calendars according to the current locale.\nUSE_L10N = True\n\n# If you set this to False, Django will not use timezone-aware datetimes.\nUSE_TZ = True\n\nMEDIA_ROOT = join(ROOT, 'media')\n\nMEDIA_URL = '/media/'\n\nSTATIC_ROOT = join(ROOT, 'static')\n\nSTATIC_URL = '/static/'\n\nSTATICFILES_DIRS = (\n)\n\nSTATICFILES_FINDERS = (\n 'django.contrib.staticfiles.finders.FileSystemFinder',\n 'django.contrib.staticfiles.finders.AppDirectoriesFinder',\n# 'django.contrib.staticfiles.finders.DefaultStorageFinder',\n)\n\n# Make this unique, and don't share it with anybody.\nSECRET_KEY = 'g*i8%1++w4qbhd&qtl^(hjw_w8x6yq5^cct6v1k)4t)_yq_g9y'\n\n# List of callables that know how to import templates from various sources.\nTEMPLATE_LOADERS = (\n 'django.template.loaders.filesystem.Loader',\n 'django.template.loaders.app_directories.Loader',\n# 'django.template.loaders.eggs.Loader',\n)\n\nMIDDLEWARE_CLASSES = (\n 'django.middleware.common.CommonMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.locale.LocaleMiddleware',\n)\n\nTEMPLATE_CONTEXT_PROCESSORS = (\n 'django.contrib.auth.context_processors.auth',\n 'django.core.context_processors.debug',\n 'django.core.context_processors.i18n',\n 'django.core.context_processors.media',\n 'django.core.context_processors.static',\n 'django.core.context_processors.tz',\n 'django.core.context_processors.request',\n 'django.contrib.messages.context_processors.messages',\n)\n\nROOT_URLCONF = 'yuce.urls'\n\n# Python dotted path to the WSGI application used by Django's runserver.\nWSGI_APPLICATION = 'yuce.wsgi.application'\n\nTEMPLATE_DIRS = (\n join(ROOT, 'templates'),\n)\n\nINSTALLED_APPS = (\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.sites',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'django.contrib.humanize',\n 'django.contrib.admin',\n 'gunicorn',\n 'django_reset',\n 'endless_pagination',\n 'bootstrapform',\n 'south',\n 'registration',\n 'captcha',\n 'djcelery',\n 'kombu.transport.django',\n 'core',\n\n 'task_tracker',\n)\n\n# A sample logging configuration. The only tangible logging\n# performed by this configuration is to send an email to\n# the site admins on every HTTP 500 error when DEBUG=False.\n# See http://docs.djangoproject.com/en/dev/topics/logging for\n# more details on how to customize your logging configuration.\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'filters': {\n 'require_debug_false': {\n '()': 'django.utils.log.RequireDebugFalse'\n }\n },\n 'handlers': {\n 'mail_admins': {\n 'level': 'ERROR',\n 'filters': ['require_debug_false'],\n 'class': 'django.utils.log.AdminEmailHandler'\n }\n },\n 'loggers': {\n 'django.request': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': True,\n },\n }\n}\n\ntry:\n from settings_local import *\nexcept ImportError:\n pass\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41677,"cells":{"__id__":{"kind":"number","value":7610682080931,"string":"7,610,682,080,931"},"blob_id":{"kind":"string","value":"366d11e5d32e72527c3132b2ecd56933a6453e8d"},"directory_id":{"kind":"string","value":"36118546fce229f6e31cb83ee9f6c5131cdeade9"},"path":{"kind":"string","value":"/double_linked/dll.py"},"content_id":{"kind":"string","value":"7e6cb120ddcdbb44ac6cfd44cff950ee31b5dbe8"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"markableidinger/data_structures"},"repo_url":{"kind":"string","value":"https://github.com/markableidinger/data_structures"},"snapshot_id":{"kind":"string","value":"54e09ae1a06bd00eae69baf9902714f2d184b6cc"},"revision_id":{"kind":"string","value":"b485132945ecf08ad921d2477b93744cb4498524"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-19T00:47:15.571258","string":"2021-01-19T00:47:15.571258"},"revision_date":{"kind":"timestamp","value":"2014-10-22T04:35:42","string":"2014-10-22T04:35:42"},"committer_date":{"kind":"timestamp","value":"2014-10-22T04:35:42","string":"2014-10-22T04:35:42"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"class Node:\n\n def __init__(self, value, previous, next):\n self.value = value\n self.previous = previous\n self.next = next\n\n\nclass Doubly_linked_list:\n\n def __init__(self):\n self.tail = Node(None, None, None)\n self.head = Node(None, None, self.tail)\n self.tail.previous = self.head\n\n def insert(self, val):\n new = Node(val, self.head, self.head.next)\n self.head.next.previous = new\n self.head.next = new\n\n def append(self, val):\n new = Node(val, self.tail.previous, self.tail)\n self.tail.previous.next = new\n self.tail.previous = new\n\n def pop(self):\n if self.head.next.value is None:\n return None\n else:\n return_item = self.head.next\n self.head.next = return_item.next\n return_item.next.previous = self.head\n return return_item.value\n\n def shift(self):\n if self.tail.previous.value is None:\n return None\n else:\n return_item = self.tail.previous\n self.tail.previous = return_item.previous\n return_item.previous.next = self.tail\n return return_item.value\n\n def remove(self, val):\n currently_selected = self.head.next\n previously_selected = self.head\n while currently_selected.value is not None:\n if currently_selected.value == val:\n previously_selected.next = currently_selected.next\n currently_selected.next.previous = previously_selected\n break\n else:\n previously_selected = currently_selected\n currently_selected = currently_selected.next\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41678,"cells":{"__id__":{"kind":"number","value":12137577594189,"string":"12,137,577,594,189"},"blob_id":{"kind":"string","value":"50985fe143f4b2b5e8a21f2c82dd4c061f518ea7"},"directory_id":{"kind":"string","value":"9df60e44b27d57e72cd7da551527379c4e8ce6d1"},"path":{"kind":"string","value":"/data_loader/WUSCHEDParser.py"},"content_id":{"kind":"string","value":"7874bd90bae80c824d4d1bf4892004087bbe0c6e"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"ngermer/wusched"},"repo_url":{"kind":"string","value":"https://github.com/ngermer/wusched"},"snapshot_id":{"kind":"string","value":"19f67c53f69386fae29f0546d444dad94afd52fb"},"revision_id":{"kind":"string","value":"42648e2a4b525a49693da12a6ca5b2b6189488e8"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-21T13:49:01.359312","string":"2021-01-21T13:49:01.359312"},"revision_date":{"kind":"timestamp","value":"2013-05-16T03:00:44","string":"2013-05-16T03:00:44"},"committer_date":{"kind":"timestamp","value":"2013-05-16T03:00:44","string":"2013-05-16T03:00:44"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from HTMLParser import HTMLParser\n\nclass WUSCHEDParser(HTMLParser):\n def __init__(self, course_handler):\n HTMLParser.__init__(self)\n self.course_handler = course_handler\n self.found_table = False\n self.in_data = False\n self.course = None\n self.cell_num = -1\n self.title_caught = False\n print \"Parser initialized.\"\n\n def handle_starttag(self,tag,attrs):\n tag = tag.lower()\n # wait until we find the table.\n if tag == \"thead\":\n self.found_table = True\n return\n if not self.found_table:\n return\n\n # wait until we reach the appropriate location in the data.\n if tag == \"tbody\":\n self.in_data = True\n return\n if not self.in_data:\n return\n\n # begin parsing a row of data.\n if tag == \"tr\":\n self.course = {}\n self.cell_num = -1\n self.title_caught = False\n\n elif tag == \"td\":\n self.cell_num += 1\n\n elif tag == \"a\":\n if self.cell_num == 0:\n pass\n elif self.cell_num == 1:\n pass\n elif self.cell_num == 2:\n if not self.title_caught:\n self.title_caught = True\n else:\n #found a syllabus link.\n for k,v in attrs:\n if k == \"href\":\n self.course[\"syl\"]=v\n elif self.cell_num == 3:\n pass\n elif self.cell_num == 4:\n pass\n elif self.cell_num == 5:\n pass\n elif self.cell_num == 6:\n pass\n elif self.cell_num == 7:\n pass\n elif self.cell_num == 8:\n pass\n elif self.cell_num == 9:\n pass\n elif self.cell_num == 10:\n pass\n elif self.cell_num == 11:\n pass\n elif self.cell_num == 12:\n pass\n\n def handle_endtag(self,tag):\n if not self.found_table:\n return\n if not self.in_data:\n return\n tag = tag.lower()\n\n if tag==\"tbody\":\n self.in_data = False\n self.found_table = False\n\n elif tag==\"tr\":\n self.cell_num = -1\n if len(self.course)!=0:\n self.course_handler.add_course(self.course)\n\n def handle_data(self,data):\n # wait until we find the table.\n if (not self.found_table) or (not self.in_data):\n return\n\n #strip whitespace.\n data = data.strip()\n\n # begin parsing a row of data.\n if self.cell_num == 0:\n self.course[\"dept\"],data = data.split(\" \",1)\n data,self.course[\"num\"] = data.rsplit(\" \",1)\n elif self.cell_num == 1:\n self.course[\"sec\"] = data\n elif self.cell_num == 2:\n #avoid overwriting names with \"syllabus\"\n if \"name\" not in self.course:\n self.course[\"name\"] = data\n elif self.cell_num == 3:\n pass\n elif self.cell_num == 4:\n self.course[\"days\"] = data\n elif self.cell_num == 5:\n self.course[\"begin\"] = data\n elif self.cell_num == 6:\n self.course[\"end\"] = data\n elif self.cell_num == 7:\n self.course[\"inst\"] = data\n elif self.cell_num == 8:\n pass\n elif self.cell_num == 9:\n self.course[\"seats\"] = int(data)\n elif self.cell_num == 10:\n self.course[\"enrolled\"] = int(data)\n elif self.cell_num == 11:\n self.course[\"waits\"] = int(data)\n elif self.cell_num == 12:\n self.course[\"attr\"] = data.split(\", \")\n\nif __name__ == '__main__':\n print \"WUSCHEDParser provider running at\", asctime()\n course_handler = CourseHandler()\n parser = WUSCHEDParser(course_handler)\n \n with open(\"../../wu_l_list.html\") as f:\n for line in f:\n parser.feed(line)\n\n print \"Done.\"\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41679,"cells":{"__id__":{"kind":"number","value":12824772357860,"string":"12,824,772,357,860"},"blob_id":{"kind":"string","value":"1644b29f6d8190be5e2585feee94f0a70a47cbb0"},"directory_id":{"kind":"string","value":"5717e45d653a675a749dbd496b62c9852bef0cd2"},"path":{"kind":"string","value":"/chef-repo/cookbooks/ycsb/files/default/generateChart.py"},"content_id":{"kind":"string","value":"d6bfb9d832a41d02cad3f4d87b28e8fcca04d1da"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"myownthemepark/csde"},"repo_url":{"kind":"string","value":"https://github.com/myownthemepark/csde"},"snapshot_id":{"kind":"string","value":"b7dab355adaa7d2a54c01e5ca33035b8446021dc"},"revision_id":{"kind":"string","value":"11bc441b4e34fe24d76d357317f0736b5e7d350d"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2017-04-28T22:07:30.204559","string":"2017-04-28T22:07:30.204559"},"revision_date":{"kind":"timestamp","value":"2013-03-25T22:48:32","string":"2013-03-25T22:48:32"},"committer_date":{"kind":"timestamp","value":"2013-03-25T22:48:32","string":"2013-03-25T22:48:32"},"github_id":{"kind":"number","value":8025192,"string":"8,025,192"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\n# columns = [\"Type\", \"Elapsed Time (seconds)\", \"Operations\", \"Ops/Sec\", \"Average Latency\"]\n\nimport sys\n\nif len(sys.argv) > 1:\n filename = sys.argv[1]\nelse:\n print \"Usage: generateChart.py \"\n print \"Produces: .html\"\n print\n sys.exit()\n\nopsCols = [\"Elapsed Time (seconds)\", \"Ops/Sec\"]\nopsColsString = \"\"\nfor heading in opsCols:\n opsColsString += \" opsData.addColumn('number', '\" + heading + \"');\\n\"\n\nlatencyCols = [\"Elapsed Time (seconds)\", \"Average Latency\"]\nlatencyColsString = \"\"\nfor heading in latencyCols:\n latencyColsString += \" latencyData.addColumn('number', '\" + heading + \"');\\n\"\n\nopsData = \"\"\nlatencyData = \"\"\nwith open(filename, 'r') as f:\n read_data = f.readlines()\n for line in read_data:\n if \"sec\" in line and \"operations\" in line and \"current ops/sec\" in line:\n line = line.strip().split()\n try:\n dataType = line[7].strip('[')\n dataTime = line[0]\n dataOps = str(int(line[2]))\n dataOpsSec = line[4]\n dataLatency = line[8].strip(\"]\").split(\"=\")[1]\n # dataString += \" ['\" + dataType + \"', \" + dataTime + \", \" + dataOps + \", \" + dataOpsSec + \", \" + dataLatency + \"],\\n\"\n opsData += \" [\" + dataTime + \", \" + dataOpsSec + \"],\\n\"\n latencyData += \" [\" + dataTime + \", \" + dataLatency + \"],\\n\"\n except Exception:\n pass\n\nhtml = \"\"\"\n\n \n \n \n \n \n\n \n \n
\n
\n \n\n\"\"\"\nwith open(filename + '.html', 'w') as f:\n f.write(html)\n\nprint filename + \".html has been created.\"\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41680,"cells":{"__id__":{"kind":"number","value":4587025122072,"string":"4,587,025,122,072"},"blob_id":{"kind":"string","value":"89c6df8cf8fe1da8aeba9a919d81bf5c77911c95"},"directory_id":{"kind":"string","value":"1d7ca3b94912b4159e7aac76bd3e1fedfc703094"},"path":{"kind":"string","value":"/syncthing_gtk/__init__.py"},"content_id":{"kind":"string","value":"c3d04feedc1756880018c1a6309de7ec61b41ad4"},"detected_licenses":{"kind":"list like","value":["GPL-2.0-only"],"string":"[\n \"GPL-2.0-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"sandeepone/syncthing-gtk"},"repo_url":{"kind":"string","value":"https://github.com/sandeepone/syncthing-gtk"},"snapshot_id":{"kind":"string","value":"8ac17eeda0336be78358a7ae5922a6d0af26e56a"},"revision_id":{"kind":"string","value":"97c80a697b7bc22aa3134760ef7e5b8b5d613a1d"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-17T21:02:36.600848","string":"2021-01-17T21:02:36.600848"},"revision_date":{"kind":"timestamp","value":"2014-12-21T20:37:38","string":"2014-12-21T20:37:38"},"committer_date":{"kind":"timestamp","value":"2014-12-21T20:37:38","string":"2014-12-21T20:37:38"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python2\nimport tools\nfrom timermgr\t\t\timport TimerManager\nfrom daemonprocess\t\timport DaemonProcess\nfrom daemon\t\t\t\timport Daemon, InvalidConfigurationException, \\\n\t\t\t\t\t\t\tTLSUnsupportedException, ConnectionRestarted\nfrom watcher\t\t\timport Watcher, HAS_INOTIFY\nfrom notifications\t\timport Notifications, HAS_DESKTOP_NOTIFY\nfrom infobox\t\t\timport InfoBox\nfrom editordialog\t\timport EditorDialog\nfrom deviceeditor\t\timport DeviceEditorDialog\nfrom foldereditor\t\timport FolderEditorDialog\nfrom daemonsettings\t\timport DaemonSettingsDialog\nfrom statusicon\t\t\timport StatusIcon, HAS_INDICATOR\nfrom uisettings\t\t\timport UISettingsDialog\nfrom configuration\t\timport Configuration\nfrom iddialog\t\t\timport IDDialog\nfrom about\t\t\t\timport AboutDialog\nfrom ignoreeditor\t\timport IgnoreEditor\nfrom ribar\t\t\t\timport RIBar\nfrom daemonoutputdialog\timport DaemonOutputDialog\nfrom stdownloader\t\timport StDownloader\nfrom wizard\t\t\t\timport Wizard\nfrom finddaemondialog\timport FindDaemonDialog\nfrom app\t\t\t\timport App\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41681,"cells":{"__id__":{"kind":"number","value":17635135729421,"string":"17,635,135,729,421"},"blob_id":{"kind":"string","value":"fbde8a2e6a779ea9f6ce237fe1f082f11190e046"},"directory_id":{"kind":"string","value":"e91b9ae12b4d52c37985bc62c1d29500595393b8"},"path":{"kind":"string","value":"/src/game/swarm/Swarm.py"},"content_id":{"kind":"string","value":"e0c253e861446f580f165f293295a1b602062855"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"Wuji/wuSwarm"},"repo_url":{"kind":"string","value":"https://github.com/Wuji/wuSwarm"},"snapshot_id":{"kind":"string","value":"243a3527a9c42acac57e8a86044eee6b1cf84c99"},"revision_id":{"kind":"string","value":"6bd2aa4a537a77366061c03ce308ecf56a7c9629"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-06T20:16:44.908498","string":"2016-09-06T20:16:44.908498"},"revision_date":{"kind":"timestamp","value":"2011-08-05T22:29:44","string":"2011-08-05T22:29:44"},"committer_date":{"kind":"timestamp","value":"2011-08-05T22:29:44","string":"2011-08-05T22:29:44"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"'''\nCreated on 31 Jul 2011\n\n@author: wuji\n'''\nfrom game.environment.Simple2dEnv import NumberTile\nfrom game.swarm import ai\n\nclass Swarm(object):\n '''\n This class represents a swarm\n '''\n\n def __init__(self, environment, start_coordinates = (0, 0), entities = []):\n '''\n Constructor\n '''\n self.entities = entities\n if len(self.entities) == 0:\n self.entities.append(SwarmEntity(self, environment))\n self.real_env = environment\n self.known_universe = []\n self.unknown_tiles = set()\n for x in range(self.real_env.length_x):\n column = []\n for y in range(self.real_env.length_y):\n tile = NumberTile(x, y, 0)\n column.append(tile)\n self.unknown_tiles.add(tile.coordinates)\n self.known_universe.append(column)\n for entity in entities:\n entity.position = (0, 0)\n self.assign_new_leader()\n self.leader.scan()\n \n def number_of_entities(self):\n return len(self.entities)\n \n def assign_new_leader(self):\n self.leader = self.entities[0]\n\n def next_turn(self):\n for entity in self.entities:\n new_territory = entity.scan()\n for tile in new_territory:\n x = tile.x\n y = tile.y\n self.known_universe[x][y] = tile\n if tile.coordinates in self.unknown_tiles:\n self.unknown_tiles.remove(tile.coordinates)\n if not len(entity.path) == 0:\n entity.move()\n if not len(self.unknown_tiles) == 0:\n dest = self.unknown_tiles.pop()\n self.unknown_tiles.add(dest)\n entity.move_to(dest)\n\nclass SwarmEntity(object):\n '''\n This class represents on entity of a swarm\n '''\n def __init__(self, swarm, environment):\n '''\n Constructor\n '''\n self.env = environment\n self.swarm = swarm\n self.path = list()\n self.position = (0, 0)\n \n def scan(self):\n return self.env.scan(self)\n\n def move(self):\n self.position = self.path.pop()\n \n def move_to(self, destination):\n origin = self.env.get_tile_at_position(self.position[0], self.position[1])\n dest = self.env.get_tile_at_position(destination[0], destination[1])\n path = ai.a_star_2d(origin, dest, self.env)\n if not path == None and not len(path) == 0:\n self.path = path\n self.move()\n \n \n "},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41682,"cells":{"__id__":{"kind":"number","value":11948599058091,"string":"11,948,599,058,091"},"blob_id":{"kind":"string","value":"fedff6d3dbcb8c4d95628c0b099b3da3c6df58f9"},"directory_id":{"kind":"string","value":"ff48e587ae0005d327ffd1dc2531362785bbade7"},"path":{"kind":"string","value":"/judge/models.py"},"content_id":{"kind":"string","value":"352113517ca3321c9a85308fd9a6048dea47a910"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"westandskif/swiss_elo_system"},"repo_url":{"kind":"string","value":"https://github.com/westandskif/swiss_elo_system"},"snapshot_id":{"kind":"string","value":"f33d16fe60760688924b0317b73f358238bf8ddc"},"revision_id":{"kind":"string","value":"4bb9d91c773a5b98a244faee2bc5c607defe2e21"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2015-08-08T15:53:11.827784","string":"2015-08-08T15:53:11.827784"},"revision_date":{"kind":"timestamp","value":"2013-08-13T10:47:41","string":"2013-08-13T10:47:41"},"committer_date":{"kind":"timestamp","value":"2013-08-13T10:47:41","string":"2013-08-13T10:47:41"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django.db import models\n# Create your models here.\nclass Player(models.Model):\n\tfirst_name = models.CharField(max_length=30)\n\tlast_name = models.CharField(max_length=30)\n\telo = models.FloatField()\n\twhite_played = models.IntegerField(default=0)\n\tblack_played = models.IntegerField(default=0)\n\tscore = models.FloatField(default=0)\n\topponents = models.ManyToManyField('self')\n\tdef __unicode__(self):\n\t\treturn u\" \".join([unicode(self.first_name), unicode(self.last_name)])\n\tclass Meta:\n\t\tordering = ('first_name',)\n\nclass PlayerReport(models.Model):\n\tplayer = models.OneToOneField(Player, related_name='report')\n\trank = models.IntegerField(default=0)\n\tscore = models.FloatField(default=0)\n\tnew_elo = models.FloatField(default=0)\n\topponents_score = models.FloatField(default=0)\n\tdef __unicode__(self):\n\t\treturn u\"Rank {0} player info.\".format(self.rank)\n\nclass Game(models.Model):\n\t\"\"\"\n\tCreates a game between 2 players; None is possible (for odd number of players),\n\tsuch game is played and opponent of None is winner.\n\t\"\"\"\n\tdef __init__(self, *args, **kwargs):\n\t\tsuper(Game, self).__init__(*args, **kwargs)\n\t\tif 'white' in kwargs:\n\t\t\tself.save()\n\t\t\tself.white = kwargs['white']\n\t\t\tself.black = kwargs['black']\n\t\t\tif 'tour' in kwargs:\n\t\t\t\tself.tour = kwargs['tour']\n\t\t\tif not (self.white is None or self.black is None):\n\t\t\t\tself.members.add(kwargs['white'], kwargs['black'])\n\t\t\t\tself.white.white_played += 1\n\t\t\t\tself.black.black_played += 1\n\t\t\t\tself.white.save(update_fields=['white_played'])\n\t\t\t\tself.black.save(update_fields=['black_played'])\t\t\t\t\n\t\t\telse:\n\t\t\t\tif self.white is None:\n\t\t\t\t\tself.black.score += 1\n\t\t\t\t\tself.black.black_played += 1\n\t\t\t\t\tself.winner = self.black\n\t\t\t\telse:\n\t\t\t\t\tself.white.score += 1\n\t\t\t\t\tself.white.white_played += 1\n\t\t\t\t\tself.winner = self.white\n\t\t\t\tself.winner.save()\n\t\t\t\tself.members.add(self.winner)\n\t\t\t\tself.loser = None\n\t\t\t\tself.played = True\n\t\t\t\t\t\t\t\n\t\t\tself.save()\n\t\t\n\tmembers = models.ManyToManyField(Player, related_name='games', null=True)\n\ttour = models.IntegerField(default=1, unique=False)\n\twhite = models.ForeignKey(Player, related_name='white_games', null=True, unique=False)\n\tblack = models.ForeignKey(Player, related_name='black_games', null=True, unique=False)\n\twinner = models.ForeignKey(Player, related_name='wins', null=True, unique=False)\n\tloser = models.ForeignKey(Player, related_name='losings', null=True, unique=False)\n\tplayed = models.BooleanField(default=False)\n\n\tdef __unicode__(self):\n\t\tif self.played:\n\t\t\tif self.winner is None:\n\t\t\t\treturn \"{0} vs {1}. The game has been finished with draw.\".format(self.white, self.black)\n\t\t\treturn \"{0} vs {1}. {2} has won.\".format(self.white, self.black, self.winner)\n\t\treturn \"{0} vs {1}. The game hasn't been played yet.\".format(self.white, self.black)\n\t\n\tclass Meta:\n\t\tordering = ('tour',)\n\nclass Tournament(models.Model):\n\tactive = models.BooleanField(default=True)\n\tcurrent_tour = models.IntegerField(default=1)\n\tmax_tour = models.IntegerField(default=-1)\n\tplayers = models.ManyToManyField(Player, related_name='tournament', null=True)\n\tgames = models.ManyToManyField(Game, related_name='tournament', null=True)\n\n\tdef __unicode__(self):\n\t\treturn u\"Tournament #{0}\".format(unicode(self.id))\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41683,"cells":{"__id__":{"kind":"number","value":5119601045248,"string":"5,119,601,045,248"},"blob_id":{"kind":"string","value":"c0763de1e7e7574377e5872df08a0888414926ec"},"directory_id":{"kind":"string","value":"65c2ee2aa77587268ab7e621bd94c52feeea43f2"},"path":{"kind":"string","value":"/project/apps/blog/forms.py"},"content_id":{"kind":"string","value":"059938556381232849b57b168923482e5e84d177"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"bornleft/django-simple_blog"},"repo_url":{"kind":"string","value":"https://github.com/bornleft/django-simple_blog"},"snapshot_id":{"kind":"string","value":"187d6568043bcaa69a943c0a363bd75829e917c1"},"revision_id":{"kind":"string","value":"fbb78fce54a449aed82ed5fca7e78785daf8cf5e"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-04-06T04:35:57.755273","string":"2020-04-06T04:35:57.755273"},"revision_date":{"kind":"timestamp","value":"2011-08-02T21:57:05","string":"2011-08-02T21:57:05"},"committer_date":{"kind":"timestamp","value":"2011-08-02T21:57:05","string":"2011-08-02T21:57:05"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\nfrom django import forms\nfrom django.forms import ModelForm\nfrom django.forms import widgets\nfrom django.utils.translation import ugettext as _\nfrom project.apps.blog.models import Entry, Tag\n\nclass CommentForm(forms.Form):\n entry_pk = forms.CharField(widget= forms.HiddenInput)\n fname = forms.CharField(label = _(u'Имя'))\n lname = forms.CharField(label = _(u'Фамилия'))\n comment = forms.CharField(label = _(u'Комментарий'), widget = forms.Textarea(attrs={'cols': 60, 'rows': 18}))\n\nclass EntryForm(ModelForm):\n class Meta:\n model = Entry\n #exclude = ('author',)\n widgets = {\n 'entry': forms.Textarea(attrs = {'cols': 60, 'rows': 18}),\n 'author': forms.HiddenInput,\n }\n\nclass TagForm(ModelForm):\n class Meta:\n model = Tag\n exclude = ('entrys',)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41684,"cells":{"__id__":{"kind":"number","value":7859790169573,"string":"7,859,790,169,573"},"blob_id":{"kind":"string","value":"97822ce370b3886b6e6ac1efb98329dfb407438a"},"directory_id":{"kind":"string","value":"6bf4867b690f59a77f7caddc1238c3bae6b3e1c3"},"path":{"kind":"string","value":"/tests/benchmark/scenarios/vm/test_utils.py"},"content_id":{"kind":"string","value":"d8f6fbddbbfa9100a8a94d5a9dc376793188c4d7"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"kambiz-aghaiepour/rally"},"repo_url":{"kind":"string","value":"https://github.com/kambiz-aghaiepour/rally"},"snapshot_id":{"kind":"string","value":"641c044cc24c10eb15e4d6b4ab3bc4885779e076"},"revision_id":{"kind":"string","value":"be708bacf0bc898a9538b9b6cb0ba4e1c015c1f2"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-15T19:35:15.318291","string":"2021-01-15T19:35:15.318291"},"revision_date":{"kind":"timestamp","value":"2014-08-18T23:51:30","string":"2014-08-18T23:51:30"},"committer_date":{"kind":"timestamp","value":"2014-08-18T23:51:30","string":"2014-08-18T23:51:30"},"github_id":{"kind":"number","value":23090342,"string":"23,090,342"},"star_events_count":{"kind":"number","value":3,"string":"3"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# Copyright 2013: Mirantis Inc.\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\nimport subprocess\n\nimport mock\nfrom oslotest import mockpatch\n\nfrom rally.benchmark.scenarios.vm import utils\nfrom tests import fakes\nfrom tests import test\n\n\nVMTASKS_UTILS = \"rally.benchmark.scenarios.vm.utils\"\n\n\nclass VMScenarioTestCase(test.TestCase):\n\n def setUp(self):\n super(VMScenarioTestCase, self).setUp()\n self.wait_for = mockpatch.Patch(VMTASKS_UTILS +\n \".bench_utils.wait_for\")\n self.useFixture(self.wait_for)\n\n @mock.patch('__builtin__.open')\n def test_run_action(self, mock_open):\n mock_ssh = mock.MagicMock()\n mock_file_handle = mock.MagicMock()\n mock_open.return_value = mock_file_handle\n vm_scenario = utils.VMScenario()\n vm_scenario.run_action(mock_ssh, 'interpreter', 'script')\n mock_ssh.execute.assert_called_once_with('interpreter',\n stdin=mock_file_handle)\n\n def test_wait_for_ssh(self):\n ssh = mock.MagicMock()\n vm_scenario = utils.VMScenario()\n vm_scenario.wait_for_ssh(ssh)\n ssh.wait.assert_called_once_with()\n\n @mock.patch(VMTASKS_UTILS + \".VMScenario.ping_ip_address\",\n return_value=True)\n def test_wait_for_ping(self, mock_ping):\n vm_scenario = utils.VMScenario()\n vm_scenario.wait_for_ping(\"1.2.3.4\")\n self.wait_for.mock.assert_called_once_with(\"1.2.3.4\",\n is_ready=mock_ping,\n timeout=120)\n\n @mock.patch(VMTASKS_UTILS + \".VMScenario.run_action\")\n @mock.patch(VMTASKS_UTILS + \".VMScenario.wait_for_ping\")\n @mock.patch(\"rally.sshutils.SSH\")\n def test_run_command(self, mock_ssh_class, mock_wait_ping,\n mock_run_action):\n mock_ssh_instance = mock.MagicMock()\n mock_ssh_class.return_value = mock_ssh_instance\n\n vm_scenario = utils.VMScenario()\n vm_scenario._context = {\"user\": {\"keypair\": {\"private\": \"ssh\"}}}\n vm_scenario.run_command(\"1.2.3.4\", 22, \"username\", \"int\", \"script\")\n\n mock_wait_ping.assert_called_once_with(\"1.2.3.4\")\n mock_ssh_class.assert_called_once_with(\"username\", \"1.2.3.4\", port=22,\n pkey=\"ssh\")\n mock_ssh_instance.wait.assert_called_once_with()\n mock_run_action.assert_called_once_with(mock_ssh_instance,\n \"int\", \"script\")\n\n def test_check_network(self):\n vm_scenario = utils.VMScenario()\n fake_server = fakes.FakeServer()\n fake_server.addresses = {}\n self.assertRaises(ValueError,\n vm_scenario.check_network, fake_server, \"private\")\n fake_server.addresses[\"private_1\"] = {\n \"version\": 4,\n \"addr\": \"1.2.3.4\"\n }\n vm_scenario.check_network(fake_server, \"private_1\")\n\n @mock.patch(\"subprocess.Popen\")\n def test_ping_ip_address(self, mock_subprocess):\n\n ping_process = mock.MagicMock()\n ping_process.returncode = 0\n mock_subprocess.return_value = ping_process\n\n vm_scenario = utils.VMScenario()\n host_ip = \"1.2.3.4\"\n self.assertTrue(vm_scenario.ping_ip_address(host_ip))\n\n mock_subprocess.assert_called_once_with(\n ['ping', '-c1', '-w1', host_ip],\n stderr=subprocess.PIPE, stdout=subprocess.PIPE)\n ping_process.wait.assert_called_once_with()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41685,"cells":{"__id__":{"kind":"number","value":8057358695124,"string":"8,057,358,695,124"},"blob_id":{"kind":"string","value":"8d1d7c699b539af23bf170262e6d7193c06fc54e"},"directory_id":{"kind":"string","value":"a31a20618104828b51b78ee2b68b18fbda07c001"},"path":{"kind":"string","value":"/python/0001.py"},"content_id":{"kind":"string","value":"a49b90f643855843768ec1131a141cf0aa75a738"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"lionbee/euler"},"repo_url":{"kind":"string","value":"https://github.com/lionbee/euler"},"snapshot_id":{"kind":"string","value":"a5dd02575e8a6d5f9e37b83fcd06ab167e1b580a"},"revision_id":{"kind":"string","value":"4342f77ebc83f08c7ae92d2e3c54319ee498dcc6"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-05-26T21:21:47.831318","string":"2021-05-26T21:21:47.831318"},"revision_date":{"kind":"timestamp","value":"2014-08-01T12:55:57","string":"2014-08-01T12:55:57"},"committer_date":{"kind":"timestamp","value":"2014-08-01T12:55:57","string":"2014-08-01T12:55:57"},"github_id":{"kind":"number","value":22506556,"string":"22,506,556"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"def addFactors(factor, maxvalue):\n\tnumber = 0;\n\ttotal = 0;\n\n\twhile number < maxvalue:\n\t\tprint number\n\t\tif number % factor == 0:\n\t\t\t\ttotal+=number\n\t\tnumber+=1\n\tprint total\n\treturn total\n\nprint addFactors(3, 1000) + addFactors(5, 1000) - addFactors(15, 1000)\n\n# now that I know python a little better\nprint reduce(lambda a, b: a + b, (x for x in range(1, 1000) if x % 3 == 0 or x % 5 == 0))\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41686,"cells":{"__id__":{"kind":"number","value":4389456607755,"string":"4,389,456,607,755"},"blob_id":{"kind":"string","value":"8e90b3d94d7e8758aceec92e8c99d18cbeed944e"},"directory_id":{"kind":"string","value":"9e437371ec09ae830bbcae0076d5d87b523d1d00"},"path":{"kind":"string","value":"/PhyloTreeHeatmapVis.py"},"content_id":{"kind":"string","value":"fbdc797b2f238b03ae8d69a9706098368c6018ce"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"XiaoxiaoLiu/py-arbor"},"repo_url":{"kind":"string","value":"https://github.com/XiaoxiaoLiu/py-arbor"},"snapshot_id":{"kind":"string","value":"08239fb4b10e323c5da1cd073cdc7c0331831f25"},"revision_id":{"kind":"string","value":"36caaec2dd8f88aea5c65cd3907b9e89292413b9"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-25T10:43:57.626415","string":"2021-01-25T10:43:57.626415"},"revision_date":{"kind":"timestamp","value":"2013-07-24T18:11:54","string":"2013-07-24T18:11:54"},"committer_date":{"kind":"timestamp","value":"2013-07-24T18:11:54","string":"2013-07-24T18:11:54"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/python\n\nfrom vtk import *\n\n#read in a tree\ntreeReader = vtkNewickTreeReader()\ntreeReader.SetFileName('/home/xiaoxiao/work/data/Arbor/anolis.phy')\ntreeReader.Update()\ntr = treeReader.GetOutput()\nprint(tr.GetNumberOfVertices())\n\n#read in a table\ntableReader = vtkDelimitedTextReader()\ntableReader.SetFileName('/home/xiaoxiao/work/data/Arbor/anolisDataAppended.csv')\ntableReader.Update()\ntable = tableReader.GetOutput()\n\n\n#play with the heatmap vis\ntreeHeatmapItem = vtkTreeHeatmapItem()\ntreeHeatmapItem.SetTree(tr);\ntreeHeatmapItem.SetTable(table);\n\n# setup the window\nview = vtkContextView()\nview.GetRenderer().SetBackground(1,1,1)\nview.GetRenderWindow().SetSize(800,600)\n\niren = view.GetInteractor()\niren.SetRenderWindow(view.GetRenderWindow())\n\ntransformItem = vtkContextTransform()\ntransformItem.AddItem(treeHeatmapItem)\ntransformItem.SetInteractive(1)\n\nview.GetScene().AddItem(transformItem)\nview.GetRenderWindow().SetMultiSamples(0)\n\niren.Initialize()\nview.GetRenderWindow().Render()\niren.Start()\n\n\n\n\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41687,"cells":{"__id__":{"kind":"number","value":1133871372141,"string":"1,133,871,372,141"},"blob_id":{"kind":"string","value":"8bdae2184f5e0164284d867349861e4d775b3557"},"directory_id":{"kind":"string","value":"d6566a46d7eac45de6f71a4175c1f4c4b5e5835a"},"path":{"kind":"string","value":"/sorts/Mergesort.py"},"content_id":{"kind":"string","value":"1fb12b6d7ce694bbe8c21a3f90c19ee839aba013"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"jonpo/algo"},"repo_url":{"kind":"string","value":"https://github.com/jonpo/algo"},"snapshot_id":{"kind":"string","value":"03da18b13373931020dc462097def4b121975709"},"revision_id":{"kind":"string","value":"fdc1ce5a4c7104d17427f2e8dc3db5eba57d263a"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-06T08:40:20.729667","string":"2016-09-06T08:40:20.729667"},"revision_date":{"kind":"timestamp","value":"2014-11-26T20:18:24","string":"2014-11-26T20:18:24"},"committer_date":{"kind":"timestamp","value":"2014-11-26T20:18:24","string":"2014-11-26T20:18:24"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\n#Filename: Mergesort.py\n#Author: Jon Poley\n\n#Uses merge sort to sort an array given as a comma separated string. \n#Ex: \"4, 3, 5, 1\"\n\nimport sys\nimport math\n\ndef merge(array, min, mid, max):\n\tindex = mid - min + 1\n\tindex2 = max - mid\n\tleft = []\n\tright = []\n\tfor i in range(0, index):\n\t\tleft.append(array[min + i])\n\tfor j in range(0, index2):\n\t\tright.append(array[mid + j +1])\n\tleft.append(sys.maxint)\n\tright.append(sys.maxint)\n\ti = 0\n\tj = 0\n\tfor k in range(min, max+1):\n\t\tif left[i] <= right[j]:\n\t\t\tarray[k] = left[i]\n\t\t\ti = i + 1\n\t\telse: \n\t\t\tarray[k] = right[j]\n\t\t\tj = j + 1\n\ndef mergesort(array, min, max):\n\tif min < max:\n\t\tmid = int((min + max)/2)\n\t\tmergesort(array, min, mid)\n\t\tmergesort(array, mid +1, max)\n\t\tmerge(array, min, mid, max)\n\narray = sys.argv[1]\narray = map(int, array.split(\",\"))\nMin = 0;\nMax = len(array) -1\nMid = math.floor((Max + Min)/2)\nmergesort(array, Min, Max)\nprint array\n\n\n\t"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41688,"cells":{"__id__":{"kind":"number","value":17343077960381,"string":"17,343,077,960,381"},"blob_id":{"kind":"string","value":"2d14bedd9ffec342fada4acf9a09b8d96f14a1e4"},"directory_id":{"kind":"string","value":"25087d59c4bee1c8a8c3363de71eaa704d628a5a"},"path":{"kind":"string","value":"/test/python/qcqpsolver.py"},"content_id":{"kind":"string","value":"44b4c256140911b36e1292ffb69fe0c49f4f27e2"},"detected_licenses":{"kind":"list like","value":["LGPL-3.0-only"],"string":"[\n \"LGPL-3.0-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"zhenglei-gao/casadi"},"repo_url":{"kind":"string","value":"https://github.com/zhenglei-gao/casadi"},"snapshot_id":{"kind":"string","value":"604bf08b92187d3f42f372e5913c76ff5ebf89e4"},"revision_id":{"kind":"string","value":"c01d4951610263db03e5f6363ab0c7259ea13869"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-14T13:16:20.594162","string":"2021-01-14T13:16:20.594162"},"revision_date":{"kind":"timestamp","value":"2014-04-03T11:48:58","string":"2014-04-03T11:48:58"},"committer_date":{"kind":"timestamp","value":"2014-04-03T11:48:58","string":"2014-04-03T11:48:58"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#\n# This file is part of CasADi.\n# \n# CasADi -- A symbolic framework for dynamic optimization.\n# Copyright (C) 2010 by Joel Andersson, Moritz Diehl, K.U.Leuven. All rights reserved.\n# \n# CasADi is free software; you can redistribute it and/or\n# modify it under the terms of the GNU Lesser General Public\n# License as published by the Free Software Foundation; either\n# version 3 of the License, or (at your option) any later version.\n# \n# CasADi is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n# Lesser General Public License for more details.\n# \n# You should have received a copy of the GNU Lesser General Public\n# License along with CasADi; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n# \n# \nfrom casadi import *\nimport casadi as c\nfrom numpy import *\nimport unittest\nfrom types import *\nfrom helpers import *\n\nqcqpsolvers = []\ntry:\n qcqpsolvers.append((SOCPQCQPSolver,{\"socp_solver\": SDPSOCPSolver, \"socp_solver_options\": {\"sdp_solver\": DSDPSolver} },False))\nexcept:\n pass\n\n\nclass QCQPSolverTests(casadiTestCase):\n\n def testboundsviol(self):\n H = 1e-6*DMatrix([[1,0],[0,1]])\n G = DMatrix([2,1])\n A = DMatrix.sparse(0,2)\n P = 2*DMatrix([[1,0],[0,2]])\n Q = DMatrix([2,3])\n R = DMatrix([-7])\n LBX = DMatrix([ -inf,-3 ])\n UBX = DMatrix([ inf, -inf ])\n \n for qcqpsolver, qcqp_options, re_init in qcqpsolvers:\n solver = qcqpsolver(qcqpStruct(a=A.sparsity(),p=P.sparsity(),h=H.sparsity()))\n solver.setOption(qcqp_options)\n solver.init()\n\n solver.setInput(H,\"h\")\n solver.setInput(G,\"g\")\n solver.setInput(A,\"a\")\n solver.setInput(P,\"p\")\n solver.setInput(Q,\"q\")\n solver.setInput(R,\"r\")\n solver.setInput(LBX,\"lbx\")\n solver.setInput(UBX,\"ubx\")\n\n with self.assertRaises(Exception):\n solver.solve()\n \n def test_bounds(self):\n # min 1/2 x' H x + 2 x + y\n # x,y\n #\n # s.t. x^2 + 2y^2 + 2*x + 3*y - 7 <= 0\n H = 1e-6*DMatrix([[1,0],[0,1]])\n G = DMatrix([2,1])\n A = DMatrix.sparse(0,2)\n P = 2*DMatrix([[1,0],[0,2]])\n Q = DMatrix([2,3])\n R = DMatrix([-7])\n LBX = DMatrix([ -inf, -inf ])\n UBX = DMatrix([ inf, inf ])\n \n for qcqpsolver, qcqp_options, re_init in qcqpsolvers:\n self.message(\"qcqpsolver: \" + str(qcqpsolver))\n\n solver = qcqpsolver(qcqpStruct(a=A.sparsity(),p=P.sparsity(),h=H.sparsity()))\n solver.setOption(qcqp_options)\n solver.init()\n\n solver.setInput(H,\"h\")\n solver.setInput(G,\"g\")\n solver.setInput(A,\"a\")\n solver.setInput(P,\"p\")\n solver.setInput(Q,\"q\")\n solver.setInput(R,\"r\")\n solver.setInput(LBX,\"lbx\")\n solver.setInput(UBX,\"ubx\")\n\n solver.solve()\n \n socp = solver.getSolver()\n \n self.checkarray(solver.getOutput(),DMatrix([-(sqrt(73)+3)/3,-(sqrt(73)+9)/12]),str(qcqpsolver),digits=5)\n self.checkarray(solver.getOutput(\"lam_x\"),DMatrix([0,0]),str(qcqpsolver),digits=5)\n\n self.checkarray(solver.getOutput(\"lam_a\"),DMatrix([]),str(qcqpsolver),digits=5)\n \n self.checkarray(solver.getOutput(\"cost\"),mul(G.T,solver.getOutput()),str(qcqpsolver),digits=4)\n\n def test_qp(self):\n # min 1/2 x' H x + 2 x + y\n # x,y\n #\n H = DMatrix([[1,0],[0,1]])\n G = DMatrix([2,1])\n A = DMatrix.sparse(0,2)\n P = DMatrix.sparse(2,0)\n Q = DMatrix.sparse(0,1)\n R = DMatrix.sparse(0,1)\n LBX = DMatrix([ -inf, -inf ])\n UBX = DMatrix([ inf, inf ])\n \n for qcqpsolver, qcqp_options, re_init in qcqpsolvers:\n self.message(\"qcqpsolver: \" + str(qcqpsolver))\n\n solver = qcqpsolver(qcqpStruct(a=A.sparsity(),p=P.sparsity(),h=H.sparsity()))\n solver.setOption(qcqp_options)\n solver.init()\n\n solver.setInput(H,\"h\")\n solver.setInput(G,\"g\")\n solver.setInput(A,\"a\")\n solver.setInput(P,\"p\")\n solver.setInput(Q,\"q\")\n solver.setInput(R,\"r\")\n solver.setInput(LBX,\"lbx\")\n solver.setInput(UBX,\"ubx\")\n\n solver.solve()\n \n socp = solver.getSolver()\n \n self.checkarray(solver.getOutput(),DMatrix([-2,-1]),str(qcqpsolver),digits=5)\n self.checkarray(solver.getOutput(\"lam_x\"),DMatrix([0,0]),str(qcqpsolver),digits=5)\n\n self.checkarray(solver.getOutput(\"lam_a\"),DMatrix([]),str(qcqpsolver),digits=5)\n \n self.checkarray(solver.getOutput(\"cost\"),-2.5,str(qcqpsolver),digits=4)\n \n \nif __name__ == '__main__':\n unittest.main()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41689,"cells":{"__id__":{"kind":"number","value":14096082679989,"string":"14,096,082,679,989"},"blob_id":{"kind":"string","value":"4a2ad827b35462efaa2c4028162422314a8ffc82"},"directory_id":{"kind":"string","value":"f6ad34f1eed97340f796ea083a71e6e2d38a3d26"},"path":{"kind":"string","value":"/src/libs/lwip/SConstruct"},"content_id":{"kind":"string","value":"8b06b31aca08f93f63e7ee4ace2da74fba04e072"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"gz/aos10"},"repo_url":{"kind":"string","value":"https://github.com/gz/aos10"},"snapshot_id":{"kind":"string","value":"606abb223563c4f6df6f163c07b0290ab2d95795"},"revision_id":{"kind":"string","value":"b204e8fc29860ce03155a08f7e8d8748180a4f14"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-03-26T02:38:11.918982","string":"2020-03-26T02:38:11.918982"},"revision_date":{"kind":"timestamp","value":"2011-02-10T12:39:06","string":"2011-02-10T12:39:06"},"committer_date":{"kind":"timestamp","value":"2011-02-10T12:39:06","string":"2011-02-10T12:39:06"},"github_id":{"kind":"number","value":1464801,"string":"1,464,801"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"Import(\"env\")\n\npublic_headers = [\"#libs/lwip/include\", \"#libs/lwip/include/ipv4\"]\n\nsrccorelist = \"\"\"core/inet.c core/ipv4/icmp.c core/ipv4/ip.c core/ipv4/ip_addr.c\n\t\t core/mem.c core/memp.c core/netif.c core/pbuf.c core/stats.c\n\t\t core/stats.c core/sys.c core/tcp.c core/tcp_input.c\n\t\t core/tcp_output.c core/tcp_pcb.c core/udp.c\"\"\"\nsrcotherlist = \"netif/etharp.c sos/sosif.c\"\n\nliblist = \"c ixp_osal ixp400_xscale_sw\"\n\ncppdefines = env[\"CPPDEFINES\"] + [\"LWIP_DEBUG\", \"l4aos\"]\ncpppath = env[\"CPPPATH\"] + [\"#sos\"] # Grab sos headers\ncc_warnings = env[\"CC_WARNINGS\"] + [\"no-redundant-decls\", \"no-format\"]\n \nlib = env.MyLibrary(\"lwip\",\n\t source = Split(srccorelist) + Split(srcotherlist),\n\t\t public_headers = public_headers,\n\t\t LIBS = Split(liblist),\n\t\t CPPDEFINES = cppdefines,\n\t\t CPPPATH = cpppath,\n\t\t CC_WARNINGS = cc_warnings)\n\nReturn(\"lib\")\n\n# vim: filetype=python\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41690,"cells":{"__id__":{"kind":"number","value":17575006207728,"string":"17,575,006,207,728"},"blob_id":{"kind":"string","value":"3dfe5ad1bec554b123c2efb06525db1d5e93845b"},"directory_id":{"kind":"string","value":"caf4b86a585138df032dc1f3fc6575bf496647f4"},"path":{"kind":"string","value":"/checkout/session.py"},"content_id":{"kind":"string","value":"e5d556f85ce158af2447e9f65ce3358556782591"},"detected_licenses":{"kind":"list like","value":["GPL-3.0-only"],"string":"[\n \"GPL-3.0-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"nka11/store-fr"},"repo_url":{"kind":"string","value":"https://github.com/nka11/store-fr"},"snapshot_id":{"kind":"string","value":"7e564eae15b0d0822a18e0e856edd82d755fe16b"},"revision_id":{"kind":"string","value":"080782a3030dcaefcf0fde0f8f6823d488932843"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-10T22:16:10.659889","string":"2016-09-10T22:16:10.659889"},"revision_date":{"kind":"timestamp","value":"2014-08-05T17:03:06","string":"2014-08-05T17:03:06"},"committer_date":{"kind":"timestamp","value":"2014-08-05T17:03:06","string":"2014-08-05T17:03:06"},"github_id":{"kind":"number","value":22606342,"string":"22,606,342"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django import http\nfrom django.contrib import messages\nfrom django.core.urlresolvers import reverse\nfrom oscar.apps.checkout import exceptions\nfrom oscar.apps.checkout.session import CheckoutSessionMixin as OscarCheckoutSessionMixin\nfrom django.utils.translation import ugettext as _\n\nfrom checkout.utils import CheckoutSessionData\n\n\nclass CheckoutSessionMixin(OscarCheckoutSessionMixin):\n def dispatch(self, request, *args, **kwargs):\n self.checkout_session = CheckoutSessionData(request)\n\ttry:\n self.check_preconditions(request)\n except exceptions.FailedPreCondition as e:\n for message in e.messages:\n messages.warning(request, message)\n return http.HttpResponseRedirect(e.url)\n # call super() from superclass\n return super(OscarCheckoutSessionMixin, self).dispatch(\n request, *args, **kwargs)\n def check_user_cgu(self, request):\n if not self.checkout_session.get_cgu_status():\n raise exceptions.FailedPreCondition(\n url=reverse('checkout:index'),\n message=_(\"Please accept CGU\")\n )\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41691,"cells":{"__id__":{"kind":"number","value":14499809591629,"string":"14,499,809,591,629"},"blob_id":{"kind":"string","value":"4f7d91307d8c63fa2de8ff96eb08f57ad2702fa0"},"directory_id":{"kind":"string","value":"c5a7d8b9f813989fbdeb51734e7acafe426cb048"},"path":{"kind":"string","value":"/weibo_test.py"},"content_id":{"kind":"string","value":"af2cdeed048c098b9b6b2697a591d326bec1944e"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"jiafangdi-guang/PHSE-PyCommDete"},"repo_url":{"kind":"string","value":"https://github.com/jiafangdi-guang/PHSE-PyCommDete"},"snapshot_id":{"kind":"string","value":"f07a8758a7112560663b0ebea43b788936a9e7b9"},"revision_id":{"kind":"string","value":"c4290d5f0481980184dd7d63befce7c0c947bda2"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-12-03T12:10:29.517661","string":"2021-12-03T12:10:29.517661"},"revision_date":{"kind":"timestamp","value":"2014-05-31T02:29:31","string":"2014-05-31T02:29:31"},"committer_date":{"kind":"timestamp","value":"2014-05-31T02:29:31","string":"2014-05-31T02:29:31"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"__author__ = 'nourl'\n\nimport networkx as nx\nfrom inputs.formal_edgelist import *\nfrom sys import exit\nfrom copy import deepcopy\n\nC = nx.DiGraph(formal_edgelist('./benchmark_directed_networks/network.dat'))\nprint C.in_edges(2)\nprint C.out_edges(2)\neg = nx.DiGraph()\noutedges = C.out_edges(2)\nout_edges_final = deepcopy(outedges)\na = len(outedges)\nprint a\nfor x in outedges:\n\tout_two = C.out_edges(x[1])\n\tout_edges_final += out_two\n\nprint len(out_edges_final),\"_______\",out_edges_final\n\nout_edges_weighted = []\nfor x in out_edges_final:\n\tout_edges_weighted.append((x[0],x[1],1))\neg.add_weighted_edges_from(out_edges_weighted)\negnodes=eg.nodes()\nnw = {}\nfor x in egnodes:\n\tweight = eg.degree(x,weight=True) * nx.closeness_centrality(C,x)\n\tnw[x] = weight\n#print \"node_weighted: \",nw\nnw_nor = {}\nfor key in nw.keys():\n\tnw_nor[key] = nw[key]/nw[2]\n#print \"nw_nor:\",nw_nor\nnw_sorted = sorted(nw_nor.iteritems(), key=lambda x:x[1],reverse=True)\n#print \"nw_sorted\",nw_sorted\nw_mean = sum(nw_nor.itervalues())/len(eg)\nw_mean_filter = filter(lambda x:x>w_mean, nw_nor.itervalues())\nw_mean_mean = sum(w_mean_filter)/len(w_mean_filter)\n#print \"w_mean: \",w_mean\n#print \"w_mean_mean: \",w_mean_mean\nnw_percent = []\nnw_percent_dic = {}\nfor nwx in nw_sorted:\n\tif nwx[0] != 2:\n\t\tnw_percent.append((nwx[0],nwx[1]))\nnw_percent_dic[2] = nw_percent\nprint \"nw_percent_dic\",nw_percent_dic\n\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41692,"cells":{"__id__":{"kind":"number","value":2465311232903,"string":"2,465,311,232,903"},"blob_id":{"kind":"string","value":"a8758970de058f3aec6eea006d10f798453ce5ac"},"directory_id":{"kind":"string","value":"39759112ee3a84aa78b15be8cc4888ff6a6b1bc0"},"path":{"kind":"string","value":"/webcast/models.py"},"content_id":{"kind":"string","value":"aa8c9fb86f737620b328851969ae43f9906693d3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"ecolemo/showbox"},"repo_url":{"kind":"string","value":"https://github.com/ecolemo/showbox"},"snapshot_id":{"kind":"string","value":"bd8b5c8eb30fc3704a7aaf559c0fa0820014a8f7"},"revision_id":{"kind":"string","value":"6cb0f3d6394897ebb34f0602787793c8a49f0953"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-22T14:45:38.704992","string":"2021-01-22T14:45:38.704992"},"revision_date":{"kind":"timestamp","value":"2011-12-03T05:46:57","string":"2011-12-03T05:46:57"},"committer_date":{"kind":"timestamp","value":"2011-12-03T05:46:57","string":"2011-12-03T05:46:57"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django.db import models\nimport feedparser\nimport time\nfrom datetime import datetime, timedelta\nfrom webcast.scheduler import Scheduler\nfrom django.db import IntegrityError\nfrom django.conf import settings\nfrom django.db import transaction\n\nclass Channel(models.Model):\n seq = models.IntegerField(default=99)\n name = models.CharField(max_length=100, unique=True)\n \n def __unicode__(self):\n return self.name \n \nclass Feed(models.Model):\n url = models.CharField(max_length=500)\n title = models.CharField(max_length=500, null=True)\n channel = models.ForeignKey(Channel)\n \n def __unicode__(self):\n return '[' + self.channel.name + '] ' + self.title + ' ---- ' + self.url \n\nclass Entry(models.Model):\n feed = models.ForeignKey(Feed)\n link = models.CharField(max_length=500)\n title = models.CharField(max_length=500)\n updated_at = models.DateTimeField()\n screenshot_path = models.CharField(max_length=500)\n def __unicode__(self):\n return self.link \n\nclass UpdateLog(models.Model):\n updated_at = models.DateTimeField(auto_now=True)\n count = models.IntegerField()\n \nclass CastUpdater(object):\n instance = None\n PERIOD = 300\n \n @staticmethod\n def getInstance():\n if not CastUpdater.instance:\n CastUpdater.instance = CastUpdater()\n return CastUpdater.instance\n\n def __init__(self):\n self.sched = Scheduler(self.update, CastUpdater.PERIOD)\n self.recent_entries = []\n \n def start(self):\n self.sched.start()\n self.next_update_time = datetime.today() + timedelta(seconds=CastUpdater.PERIOD)\n \n def running(self):\n return self.sched.timer != None\n\n def update(self):\n self.last_update_time = datetime.today()\n self.next_update_time = datetime.today() + timedelta(seconds=CastUpdater.PERIOD)\n self.recent_entries = []\n \n feeds = Feed.objects.all()\n for feed in feeds:\n d = feedparser.parse(feed.url)\n print feed.url\n feed.title = d.feed.title\n feed.save()\n \n for e in d.entries:\n updated_at = datetime.today()\n if 'updated_parsed' in e:\n updated_at = datetime.fromtimestamp((time.mktime(e.updated_parsed))) + timedelta(hours=9)\n \n link = e.link\n if '/http://' in e.link:\n link = link[link.find('/http://') + 1:]\n try:\n Entry.objects.get(link=link)\n except Entry.MultipleObjectsReturned:\n pass\n except Entry.DoesNotExist:\n entry = Entry.objects.create(feed=feed, title=e.title, link=link, updated_at=updated_at)\n self.recent_entries.append(entry)\n \n UpdateLog.objects.create(count=len(self.recent_entries))\n \n def count(self):\n if not self.recent_entries: return 0\n \n return len(self.recent_entries)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41693,"cells":{"__id__":{"kind":"number","value":5239860115272,"string":"5,239,860,115,272"},"blob_id":{"kind":"string","value":"f3dbd835003ffc8074ab7655631206c205ab2684"},"directory_id":{"kind":"string","value":"2dc33f2fd71c1a0063183f26751a8ef4a2f2cfe9"},"path":{"kind":"string","value":"/backend/utils.py"},"content_id":{"kind":"string","value":"86eb8df0074ac5daa6ac964e75bc9db538d53903"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"ruiaf/sumnews"},"repo_url":{"kind":"string","value":"https://github.com/ruiaf/sumnews"},"snapshot_id":{"kind":"string","value":"40c6ab773738ec3b75474372d9a8bdab85022a4b"},"revision_id":{"kind":"string","value":"a93e0757046015b5fa785c6fcf95467b505a6912"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-08T02:35:21.091167","string":"2016-09-08T02:35:21.091167"},"revision_date":{"kind":"timestamp","value":"2014-04-05T17:17:30","string":"2014-04-05T17:17:30"},"committer_date":{"kind":"timestamp","value":"2014-04-05T17:17:30","string":"2014-04-05T17:17:30"},"github_id":{"kind":"number","value":17527511,"string":"17,527,511"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"def max2(iterable):\n \"\"\"\n\n :param iterable: An iterable\n :return: A list with with the 2 largest elements in the iterable\n \"\"\"\n first = None\n second = None\n for ele in iterable:\n if first is None or ele > first:\n second = first\n first = ele\n elif second is None or ele > second:\n second = ele\n return [first, second]"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41694,"cells":{"__id__":{"kind":"number","value":12859132112253,"string":"12,859,132,112,253"},"blob_id":{"kind":"string","value":"9647983bbed0bf4a6b5eeec8332b5fbb895eb9f2"},"directory_id":{"kind":"string","value":"1ff6c9a930d94a5e1d536b103a1c3869222d0d56"},"path":{"kind":"string","value":"/main_window.py"},"content_id":{"kind":"string","value":"739458b888bf64e703cab41cec9e82f8709bf20a"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"ahlfors/yeelink_tester"},"repo_url":{"kind":"string","value":"https://github.com/ahlfors/yeelink_tester"},"snapshot_id":{"kind":"string","value":"9acfe357195a337c9c1689d4072c56feaed552ef"},"revision_id":{"kind":"string","value":"b2a616e64afcb0d82753059ef43f813dee8b5132"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-18T08:26:46.133488","string":"2021-01-18T08:26:46.133488"},"revision_date":{"kind":"timestamp","value":"2014-09-22T02:07:39","string":"2014-09-22T02:07:39"},"committer_date":{"kind":"timestamp","value":"2014-09-22T02:07:39","string":"2014-09-22T02:07:39"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\n\"\"\"\nYeelink Tester by wendal.net\n\"\"\"\n\n# 修正windows UTF-8控制台下报错\nimport codecs\ncodecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None)\n\nfrom PyQt4.QtGui import *\nfrom PyQt4.QtCore import *\n\nfrom Ui_main_window import Ui_MainWindow\nimport urllib2\nimport json\nimport traceback\nimport serial.tools.list_ports\nfrom threading import Thread\nimport time\nimport paho.mqtt.client as mqtt\nimport socket\n\n#日志头部的标签\nTAG_SELF = \"SELF\"\nTAG_API = \"API\"\nTAG_MQTT = \"MQTT\"\nTAG_MOCK = \"MOCK\"\n\n#传感器表格每列的含义\nSENSOR_COLUMN_ID = 0\nSENSOR_COLUMN_NAME = 1\nSENSOR_COLUMN_TYPE = 2\nSENSOR_COLUMN_VALUE = 3\nSENSOR_COLUMN_DATA_WRITE = 4\nSENSOR_COLUMN_DATA_READ = 5\nSENSOR_COLUMN_UPDATE_TIME = 6\n\n#传感器类型\nSENSOR_TYPE_NUMBER = \"0\"\nSENSOR_TYPE_GPS = \"6\"\nSENSOR_TYPE_IMAGE = \"9\"\nSENSOR_TYPE_SWITCH = \"5\"\nSENSOR_TYPE_RAW = \"8\"\n\n# 读数据的key\nREAD_KEY = \"r_key\"\n# 上传数据的前缀\nWRITE_KEY = \"w_key\"\n\nYEELINK = \"yeelink\"\nUIOT = \"uiot\"\n\n# 传感器类型的中文对应\nsensor_type_map = {\n SENSOR_TYPE_NUMBER : u\"数值型\",\n SENSOR_TYPE_IMAGE : u\"图像型\",\n SENSOR_TYPE_SWITCH : u\"开关型\",\n SENSOR_TYPE_GPS : u\"GPS型\",\n SENSOR_TYPE_RAW : u\"泛型\",\n \"number\" : u\"数值型\",\n \"gps\" : u\"地理位置型\",\n \"kv\" : u\"泛型\",\n \"onoff\" : u\"开关型\",\n \"image\" : u\"图像型\"\n }\n\nclass MainWindow(QMainWindow, Ui_MainWindow):\n \"\"\"\n Class documentation goes here.\n \"\"\"\n def __init__(self, parent = None):\n \"\"\"\n Constructor\n \"\"\"\n QMainWindow.__init__(self, parent)\n self.setupUi(self)\n \n # 初始化日志输出timer, 因为Qt的UI更新不能在子线程中执行\n self.log_timer = QTimer()\n self.logs = []\n self.log_timer.setInterval(1)\n self.log_timer.start(1)\n self.connect(self.log_timer, SIGNAL(\"timeout()\"), self.append_log)\n \n # 初始化传感器表格更新timer\n self.table_data = []\n self.table_timer = QTimer()\n self.table_timer.setInterval(1)\n self.table_timer.start(1)\n self.connect(self.table_timer, SIGNAL(\"timeout()\"), self.table_update)\n \n # 启动完成, 自然卖卖广告咯...\n self.D(TAG_SELF, u\"启动完成 . Power by wendal http://wendal.net\")\n \n def apikey(self):\n \"\"\"全局获取API KEY的帮助方法\"\"\"\n return unicode(self.ui_text_uapikey.text())\n \n def devid(self):\n \"\"\"当前的设计只允许一个设备,所以全局来吧\"\"\"\n return unicode(self.ui_combo_devid.currentText()).split(\" \")[0]\n \n def srv_type(self):\n return str(self.ui_txt_srv_type.currentText())\n \n def api_url(self, uri):\n if self.srv_type() == YEELINK :\n return \"http://\" + str(self.ui_txt_srv_api_url.text()) + \"/v1.1\" + uri\n elif self.srv_type() == UIOT :\n return \"http://\" + str(self.ui_txt_srv_api_url.text()) + \"/iot\" + uri\n \n def mqtt_topit(self, sensor_id):\n if self.srv_type() == YEELINK :\n return \"v1.1/device/%s/sensor/%s/datapoints\" % (self.devid(), str(sensor_id))\n elif self.srv_type() == UIOT :\n return \"iot/sensor/%s\" % (str(sensor_id),)\n \n def mqtt_srv(self):\n if self.srv_type() == YEELINK :\n return \"mqtt.yeelink.net\"\n elif self.srv_type() == UIOT :\n srv = str(self.ui_txt_srv_api_url.text())\n if \":\" in srv :\n return srv[:srv.index(\":\")]\n return srv\n \n def yeelink_send(self, uri, data):\n url = self.api_url(uri)\n req = urllib2.Request(url, data)\n req.add_header(\"U-ApiKey\", self.apikey())\n if data :\n self.D(TAG_API+\".W\", u\"POST \" + url)\n try :\n self.D(TAG_API+\".W\", str(data))\n except:\n self.D(TAG_API+\".W\", u\"...\")\n else :\n self.D(TAG_API+\".W\", u\"GET \" + url)\n try :\n resp = urllib2.urlopen(req)\n self.D(TAG_API + \".R\", u\"code=%d\" % resp.code)\n return resp.read()\n except:\n self.D(TAG_API, u\"FAIL\" + traceback.format_exc())\n raise\n \n def D(self, TAG, msg):\n \"\"\"日志方法\"\"\"\n self.logs.append(QString(\"%-5s > %s\\r\\n\" % (TAG, msg)))\n \n def append_log(self):\n tmp = self.logs\n self.logs = []\n if not tmp :\n return\n for p in tmp :\n self.ui_debug_console.moveCursor(QTextCursor.End)\n self.ui_debug_console.insertPlainText(p)\n sb = self.ui_debug_console.verticalScrollBar()\n sb.setValue(sb.maximum())\n \n def table_update(self):\n tmp = self.table_data\n self.table_data = []\n if not tmp :\n return\n for row,column,s in tmp :\n self.ui_table_sensors.setItem(row, column, QTableWidgetItem(s))\n \n def mqtt_sensor_run(self, sensor):\n \n \"\"\"MQTT监听\"\"\"\n try :\n mqttc = mqtt.Client()\n def on_message(client, userdata, msg):\n self.D(TAG_SELF, \"MQTT sensor update %s %s > %s\" % (str(sensor[\"id\"]), sensor[\"title\"], str(msg.payload)))\n try :\n re = json.loads(msg.payload)\n s = \"%s%s\" % (sensor[WRITE_KEY], re[\"value\"])\n self.D(self.ser.port, s)\n self.ser.write(s + \"\\n\")\n except:\n traceback.print_exc()\n def on_connect(client, userdata, flags, rc):\n self.D(TAG_SELF, \"MQTT Connected with result code \"+str(rc))\n #topic = \"u/%s/v1.1/device/%s/sensor/%s/datapoints\" % (self.apikey(), self.devid(), str(sensor[\"id\"]))\n #print topic\n topic = self.mqtt_topit(sensor[\"id\"])\n try :\n mqttc.subscribe([(str(topic), 0), ])\n except:\n pass\n mqttc.on_message = on_message\n mqttc.on_connect = on_connect\n #mqttc.connect(\"mqtt.yeelink.net\")\n mqttc.username_pw_set(str(self.ui_txt_username.text()), self.apikey())\n mqttc.connect(self.mqtt_srv())\n \n mqttc.loop_forever()\n except:\n self.D(TAG_SELF, u\"MQTT 启动失败 : \" + traceback.format_exc())\n \n @pyqtSignature(\"\")\n def on_ui_button_help_pressed(self):\n \"\"\"\n Slot documentation goes here.\n \"\"\"\n # TODO: not implemented yet\n QMessageBox.about(self, u\"帮助\", \"http://wendal.net\")\n \n @pyqtSignature(\"\")\n def on_ui_button_check_api_pressed(self):\n \"\"\"\n Slot documentation goes here.\n \"\"\"\n try :\n re = json.loads(self.yeelink_send(\"/devices\", None))\n if not re :\n QMessageBox.about(self, u\"无可用设备\", u\"该密钥下的帐号无任何设备\")\n return\n self.ui_combo_devid.clear()\n for dev in re :\n #print dev\n self.ui_combo_devid.addItem(QString(\"%s %s\" % (dev[\"id\"], dev[\"title\"])))\n self.ui_button_get_sensors.setEnabled(True)\n self.ui_button_start_read.setEnabled(True)\n self.ui_button_check_api.setEnabled(False)\n self.ui_text_uapikey.setEnabled(False)\n \n if len(re) == 1 :\n self.D(TAG_SELF, u\"只有一个设备,自动加载传感器\")\n self.on_ui_button_get_sensors_pressed()\n except:\n traceback.print_exc()\n QMessageBox.about(self, u\"密钥错误\", u\"密钥不对: \" + self.apikey())\n \n @pyqtSignature(\"\")\n def on_ui_button_get_sensors_pressed(self):\n \"\"\"\n Slot documentation goes here.\n \"\"\"\n try :\n #print self.devid()\n sensors = json.loads(self.yeelink_send(\"/device/%s/sensors\" % self.devid(), None))\n self.ui_table_sensors.setRowCount(len(sensors))\n index = 0\n for sensor in sensors :\n sensor[\"row_index\"] = index\n self.ui_table_sensors.setItem(index, SENSOR_COLUMN_ID, QTableWidgetItem(str(sensor[\"id\"])))\n self.ui_table_sensors.setItem(index, SENSOR_COLUMN_NAME, QTableWidgetItem(sensor[\"title\"]))\n sensor_type = sensor_type_map.get(str(sensor[\"type\"]))\n if not sensor_type :\n sensor_type = u\"其他类型\"\n self.ui_table_sensors.setItem(index, SENSOR_COLUMN_TYPE, QTableWidgetItem(sensor_type))\n if sensor.get(\"last_data_gen\") :\n self.ui_table_sensors.setItem(index, SENSOR_COLUMN_VALUE, QTableWidgetItem(sensor[\"last_data_gen\"]))\n elif sensor.get(\"last_data\") :\n self.ui_table_sensors.setItem(index, SENSOR_COLUMN_VALUE, QTableWidgetItem(sensor[\"last_data\"]))\n if sensor.get(\"last_update\") :\n self.ui_table_sensors.setItem(index, SENSOR_COLUMN_UPDATE_TIME, QTableWidgetItem(sensor[\"last_update\"]))\n \n it = self.ui_table_sensors.item(index, SENSOR_COLUMN_DATA_WRITE)\n if not it :\n self.ui_table_sensors.setItem(index, SENSOR_COLUMN_DATA_WRITE, QTableWidgetItem(\"w\"+str(sensor[\"id\"])+\":\"))\n it = self.ui_table_sensors.item(index, SENSOR_COLUMN_DATA_READ)\n if not it :\n self.ui_table_sensors.setItem(index, SENSOR_COLUMN_DATA_READ, QTableWidgetItem(\"r\"+str(sensor[\"id\"])))\n \n sensor[\"w_key\"] = str(self.ui_table_sensors.item(index, SENSOR_COLUMN_DATA_WRITE).text())\n sensor[\"r_key\"] = str(self.ui_table_sensors.item(index, SENSOR_COLUMN_DATA_READ).text())\n \n index += 1\n if sensor[\"type\"] in (SENSOR_TYPE_SWITCH, \"onoff\") :\n self.D(TAG_SELF, u\"启动MQTT监听 sensor id=%s name=%s\" % (str(sensor[\"id\"]), sensor[\"title\"]))\n t = Thread(target=self.mqtt_sensor_run, name=(\"Yeelink MQTT id=\" + str(sensor[\"id\"])), args=[sensor])\n t.setDaemon(True)\n t.start()\n \n self.sensors = sensors #保存起来,这样就能快捷访问了\n \n self.ui_button_get_sensors.setEnabled(False)\n self.ui_combo_devid.setEnabled(False)\n \n coms = sorted(serial.tools.list_ports.comports())\n if coms :\n self.ui_text_com_number.clear()\n for port, _, _ in coms:\n self.ui_text_com_number.addItem(QString(port))\n except:\n self.D(TAG_SELF, u\"出错啦: \" + traceback.format_exc())\n \n @pyqtSignature(\"\")\n def on_ui_button_clear_debug_pressed(self):\n \"\"\"\n Slot documentation goes here.\n \"\"\"\n self.ui_debug_console.clear()\n \n def com_run(self, ser):\n while self.com_reading :\n try :\n line = ser.readline()\n if not line :\n continue\n line = str(line).strip()\n self.D(ser.port+\".R\", line)\n self.handle_com_line(ser, line)\n except:\n traceback.print_exc()\n time.sleep(1)\n try :\n if ser.isOpen():\n ser.close()\n except:\n traceback.print_exc()\n self.ui_button_stop_read.setEnabled(False)\n self.ui_button_start_read.setEnabled(True)\n \n def handle_com_line(self, ser, line):\n line = str(line).strip()\n if not line :\n return\n \n if line[0] == '[' or line[0] == '{' :\n try :\n try :\n j = json.loads(line)\n except:\n self.D(ser.port, u\"非法的json字符串\")\n return\n if not j :\n self.D(ser.port, u\"没有包含任何数据\")\n return\n if line[0] == '[' :\n for d in j :\n if d.get(\"sensor_id\") :\n self.D(ser.port, u\"数据是列表,且包含sensor_id,所以这是'多数据点(同一设备)', 执行上传\")\n try :\n self.yeelink_send(\"/device/%s/datapoints\" % self.devid(), line)\n except:\n self.D(ser.port, u\"上传失败\")\n return\n self.D(ser.port, u\"数据是列表,但不包含sensor_id,所以这是'多数据点(单个设备)', 查找'数据上传'键为空字符的传感器\")\n for sensor in self.sensors :\n if sensor[WRITE_KEY] == \"\" :\n self.D(ser.port, u\"作为传感器[id=%s,name=%s]的数据进行上传\")\n self.yeelink_send(\"/device/%s/sensor/%s/datapoints\" % (self.devid(), str(sensor[\"id\"])), line)\n return\n self.D(ser.port, u\"没有找到'数据上传'键为空字符的传感器,忽略数据\")\n return\n if len(self.sensors) == 1 :\n self.D(ser.port, u\"只有一个传感器, 而且数据看上去ok, 那就上传吧\")\n self.yeelink_send(\"/device/%s/sensor/%s/datapoints\" % (self.devid(), str(sensor[\"id\"])), line)\n return\n for sensor in self.sensors :\n if sensor[WRITE_KEY] == \"\" :\n self.D(ser.port, u\"作为传感器[id=%s,name=%s]的数据进行上传\")\n self.yeelink_send(\"/device/%s/sensor/%s/datapoints\" % (self.devid(), str(sensor[\"id\"])), line)\n return\n self.D(ser.port, u\"没有找到'数据上传'键为空字符的传感器,忽略数据\")\n return\n except:\n self.D(ser.port, u\"出错了\")\n \n for sensor in self.sensors :\n if line.startswith(sensor[READ_KEY]) :\n self.D(ser.port, u\"与传感器[id=%s, name=%s]的'数据读取'键匹配\" % (str(sensor[\"id\"]), sensor[\"title\"]))\n try :\n re = self.yeelink_send(\"/device/%s/sensor/%s/datapoints\" % (self.devid(), str(sensor[\"id\"])), None)\n re = json.loads(re)\n if re.get(\"key\") :\n msg = sensor[WRITE_KEY] + json.dumps(re) + \"\\n\"\n else :\n msg = sensor[WRITE_KEY] + json.dumps(re.get(\"value\")) + \"\\n\"\n self.D(ser.port + \".W\", msg)\n ser.write(msg)\n except:\n self.D(ser.port, u\"出错了\" + traceback.format_exc())\n return\n \n for sensor in self.sensors :\n if line.startswith(sensor[WRITE_KEY]) :\n data = line[len(sensor[WRITE_KEY]):]\n if not data :\n self.D(ser.port, u\"没数据\")\n return\n if data[0] == \":\" :\n data = data[1:]\n if data[0] == '{' :\n try :\n try :\n re = json.loads(data)\n except:\n self.D(ser.port, u\"非法的JSON字符串\" + traceback.format_exc(2))\n return\n if re and re.get(\"value\") :\n self.yeelink_send(\"/device/%s/sensor/%s/datapoints\" % (self.devid(), str(sensor[\"id\"])), data)\n return\n except:\n self.D(ser.port, \"Bad Bad\")\n self.yeelink_send(\"/device/%s/sensor/%s/datapoints\" % (self.devid(), str(sensor[\"id\"])), \"\"\"{\"value\":%s}\"\"\" % data)\n return\n self.D(ser.port, u\"没匹配任何传感器\")\n \n @pyqtSignature(\"\")\n def on_ui_button_start_read_pressed(self):\n \"\"\"\n Slot documentation goes here.\n \"\"\"\n try :\n self.D(TAG_SELF, u\"尝试打开串口 ... \")\n ser = serial.Serial()\n ser.baudrate = int(str(self.ui_text_com_bitrate.currentText()))\n ser.bytesize = int(str(self.ui_text_com_databit.currentText()))\n ser.stopbits = int(str(self.ui_text_com_stopbit.currentText()))\n ser.port = str(self.ui_text_com_number.currentText())\n ser.timeout = 3\n ser.open()\n self.D(TAG_SELF, u\"打开串口成功\")\n self.ui_button_start_read.setEnabled(False)\n self.ui_button_stop_read.setEnabled(True)\n self.ser = ser\n t = Thread(target=self.com_run, args=[ser], name=\"Yeelink COM Listener\", )\n t.setDaemon(True)\n self.com_reading = True\n t.start()\n except:\n traceback.print_exc()\n self.D(TAG_SELF, u\"串口打开识别!!\" + traceback.format_exc())\n\n \n @pyqtSignature(\"\")\n def on_ui_button_stop_read_pressed(self):\n \"\"\"\n Slot documentation goes here.\n \"\"\"\n self.D(TAG_SELF, u\"触发串口关闭\")\n self.com_reading = False\n\n \n @pyqtSignature(\"\")\n def on_ui_button_mock_start_pressed(self):\n \"\"\"\n Slot documentation goes here.\n \"\"\"\n try :\n t = Thread(name=\"yeelink api proxy\", target=self.yeelink_api_proxy)\n t.setDaemon(True)\n t.start()\n \n self.ui_button_mock_stop.setEnabled(True)\n self.ui_button_mock_start.setEnabled(False)\n self.mock_running = True\n self.D(TAG_MOCK, u\"启动成功\")\n except:\n self.D(TAG_MOCK, u\"启动失败\" + traceback.format_exc())\n \n @pyqtSignature(\"\")\n def on_ui_button_mock_stop_pressed(self):\n \"\"\"\n Slot documentation goes here.\n \"\"\"\n # TODO: not implemented yet\n self.mock_running = False\n self.D(TAG_MOCK, u\"关闭\")\n self.ui_button_mock_start.setEnabled(True)\n self.ui_button_mock_stop.setEnabled(False)\n \n @pyqtSignature(\"\")\n def on_ui_button_api_test_pressed(self):\n \"\"\"\n Slot documentation goes here.\n \"\"\"\n # TODO: not implemented yet\n import yeelink_api_test\n t = yeelink_api_test.YeelinkTestDialog(self)\n t.ui_text_uapikey.setText(self.ui_text_uapikey.text())\n try :\n t.ui_text_url.clear()\n for sensor in self.sensors :\n t.ui_text_url.addItem(QString(self.api_url(\"/device/%s/sensor/%s/datapoints\" % (self.devid(), str(sensor[\"id\"])))))\n except:\n pass\n t.show()\n \n def yeelink_api_proxy(self):\n PORT = int(str(self.ui_spin_mock_port.text()))\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n #s.settimeout(3)\n s.bind((\"\", PORT))\n s.listen(PORT)\n while self.mock_running :\n conn = None\n _out = None\n try:\n conn, addr = s.accept()\n self.D(TAG_MOCK, \"Connected by \" + str(addr))\n _in = conn.makefile()\n _out = conn.makefile(\"w\")\n \n try :\n #读取请求头\n data = _in.read(4)\n if str(data) != \"POST\" :\n self.D(TAG_MOCK, u\"不是POST请求,拒绝之.\")\n continue\n data = _in.read(1)\n if str(data) != \" \" :\n self.D(TAG_MOCK, u\"POST之后的不是空格,非法请求\")\n continue\n #开始读取URI\n data = \"\"\n for i in xrange(1024) :\n d = _in.read(1)\n if d == ' ' :\n self.D(TAG_MOCK, u\"读取到URI之后的空格, 识别URI为 \" + data)\n break\n else :\n data += str(d)\n if i == 1023 :\n self.D(TAG_MOCK, u\"读取1024字节之后还没结束, URI太长了,拒绝\")\n data = None\n if data == None :\n continue\n if data == \"\" :\n self.D(TAG_MOCK, u\"URI以空格开头,肯定是多输入了一个空格导致的,拒绝\")\n continue\n #然后就是HTTP/1.1或者HTTP/1.0,然后接\\r\\n\n data = _in.read(len(\"HTTP/1.0\\r\\n\"))\n #print data\n if not str(data).startswith(\"HTTP/1.0\") and not str(data).startswith(\"HTTP/1.1\") :\n self.D(TAG_MOCK, u\"请求行不包含HTTP/1.0或HTTP/1.1,拒绝\")\n continue\n if not str(data).endswith(\"\\r\\n\") :\n self.D(TAG_MOCK, u\"请求行不是以\\\\r\\\\n结束,拒绝\")\n continue\n key_ok = False\n cnt_len = 0\n while 1 :\n header_line = \"\"\n while 1 :\n d = _in.read(1)\n if d == '\\n' :\n break\n header_line += str(d)\n if header_line == \"\" :\n self.D(TAG_MOCK, u\"检测到非法的Header,拒绝\")\n break\n if header_line == \"\\r\" :\n self.D(TAG_MOCK, u\"检测到Header结束\")\n break\n header_line = header_line.strip()\n self.D(TAG_MOCK, \"Read Header --> \" + str(header_line))\n if header_line.startswith(\"U-ApiKey: \") :\n self.D(TAG_MOCK, u\"检测到U-ApiKey,对比本地数据中\");\n _key = header_line.split(\" \", 2)[1]\n if _key == self.apikey() :\n self.D(TAG_MOCK, u\"U-ApiKey合法\")\n key_ok = True\n else :\n self.D(TAG_MOCK, u\"U-ApiKey不合法 [%s] [%s]\" % (_key, self.apikey()))\n break\n elif header_line.startswith(\"Content-Length: \") :\n self.D(TAG_MOCK, u\"检测到Content-Length: \")\n try :\n cnt_len = int(header_line.split(\" \", 2)[1])\n self.D(TAG_MOCK, u\"获取到请求主体的长度为\" + str(cnt_len))\n except:\n self.D(TAG_MOCK, u\"Content-Length 不是合法的整数值\")\n break\n if not key_ok :\n self.D(TAG_MOCK, u\"没有在Header里面找到合法U-ApiKey,拒绝\")\n continue\n if cnt_len < 5 :\n self.D(TAG_MOCK, u\"请求体太小,肯定不合法\")\n continue\n #开始读取body\n try :\n body = _in.read(cnt_len)\n j = json.loads(body)\n self.D(TAG_MOCK, u\"请求中的JSON数据(经过格式化) --> \" + json.dumps(j))\n if not j.get(\"value\") :\n self.D(TAG_MOCK, u\"数据里面没有名为value的键,肯定非法\")\n break\n \n if j.get(\"key\") :\n self.D(TAG_MOCK, u\"看来是泛型数据,放行\")\n elif j.get(\"value\") :\n if json.dumps(j.get(\"value\")).startswith(\"{\") :\n self.D(TAG_MOCK, u\"看上去是GPS数据,分析里面的key\")\n gps = j.get(\"value\")\n if not gps.get(\"lat\") :\n self.D(TAG_MOCK, u\"缺失lan值\")\n continue\n if not gps.get(\"lng\") :\n self.D(TAG_MOCK, u\"缺失lng值\")\n continue\n if str(gps.get(\"speed\")) == \"None\" :\n self.D(TAG_MOCK, u\"缺失speed值\")\n continue\n self.D(TAG_MOCK, u\"GPS数据 看上去合法\")\n else :\n self.D(TAG_MOCK, u\"看来不是GPS,那只能是数值型数据了,校验之\")\n if isinstance(j.get(\"value\"), float) :\n self.D(TAG_MOCK, u\"看来是合法的数值\")\n else :\n self.D(TAG_MOCK, u\"不是JSON格式中的数值,拒绝\")\n break\n else :\n self.D(TAG_MOCK, u\"数据里面没有名为key或timestamp的键,肯定非法\")\n break\n \n # 看来是合法的哦, 返回个赞\n _out.write(\"HTTP/1.1 200 OK\\r\\nPower: wendal\\r\\nContent-Length: 0\\r\\n\\r\\n\")\n _out.flush()\n conn.shutdown(1)\n conn.close()\n conn = None\n except:\n self.D(TAG_MOCK, u\"yeelink上传的数据必然是json格式,然后它报错了,所以,你的数据不是合法JSON!!\" + traceback.format_exc())\n break\n except:\n self.D(TAG_MOCK, u\"出错了!!\" + traceback.format_exc())\n \n except:\n traceback.print_exc()\n finally:\n if conn != None :\n try :\n self.D(TAG_MOCK, u\"关闭连接 \" + str(conn))\n _out.write(\"HTTP/1.1 403 Error\\r\\nPower: wendal\\r\\nContent-Length: 0\\r\\n\\r\\n\")\n _out.flush()\n conn.shutdown(1)\n conn.close()\n except:\n self.D(TAG_MOCK, u\"关闭连接失败!!\" + traceback.format_exc())\n s.close()\n \n @pyqtSignature(\"QTableWidgetItem*\")\n def on_ui_table_sensors_itemChanged(self, item):\n \"\"\"\n Slot documentation goes here.\n \"\"\"\n try :\n _ = self.sensors\n except:\n return\n sensor = self.sensors[item.row()]\n if item.column() == SENSOR_COLUMN_DATA_READ :\n sensor[READ_KEY] = str(item.text())\n self.D(TAG_SELF, u\"传感器[id=%s, name=%s]的'数据读取'键修改为%s\" % (str(sensor[\"id\"]), sensor[\"title\"], sensor[READ_KEY]))\n return\n if item.column() == SENSOR_COLUMN_DATA_WRITE :\n sensor[WRITE_KEY] = str(item.text())\n self.D(TAG_SELF, u\"传感器[id=%s, name=%s]的'数据上传'键修改为%s\" % (str(sensor[\"id\"]), sensor[\"title\"], sensor[WRITE_KEY]))\n return\n # TODO 如果修改的是值, 发请求更新服务器的值\n "},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41695,"cells":{"__id__":{"kind":"number","value":19602230768147,"string":"19,602,230,768,147"},"blob_id":{"kind":"string","value":"03c818bca550b6ef427c3b0ad883f0f4266a08ac"},"directory_id":{"kind":"string","value":"a1892072674ac9adbbf21bc221d3fdfc337a9268"},"path":{"kind":"string","value":"/medium/endianness.py2"},"content_id":{"kind":"string","value":"96889635ff4d22126342d018c4b1ec68ec357f4d"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"Makpoc/codeeval"},"repo_url":{"kind":"string","value":"https://github.com/Makpoc/codeeval"},"snapshot_id":{"kind":"string","value":"27bc83c1a52203b7e84d5102175e5c0385beaa62"},"revision_id":{"kind":"string","value":"791f7024b09b77e12d315475031b2ea11eb3013f"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-17T19:37:52.453726","string":"2020-05-17T19:37:52.453726"},"revision_date":{"kind":"timestamp","value":"2014-07-03T07:54:28","string":"2014-07-03T07:54:28"},"committer_date":{"kind":"timestamp","value":"2014-07-03T07:54:28","string":"2014-07-03T07:54:28"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python2.7\n# encoding=utf-8\n\nimport sys\n\nif __name__ == '__main__':\n if 'little' in sys.byteorder.lower():\n print 'LittleEndian'\n else:\n print 'BigEndian'\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41696,"cells":{"__id__":{"kind":"number","value":7722351245653,"string":"7,722,351,245,653"},"blob_id":{"kind":"string","value":"f2bbb3f700b9ef2da6796a1749f9f66dc8eee490"},"directory_id":{"kind":"string","value":"a29c1ab6370ccd86b87b077294523cb674816ea8"},"path":{"kind":"string","value":"/migrations/versions/creates_craiglist_listing-3d08656dfba2.py"},"content_id":{"kind":"string","value":"690804cf307bc730539b478a47aebff463eff071"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"jessedhillon/bonanza"},"repo_url":{"kind":"string","value":"https://github.com/jessedhillon/bonanza"},"snapshot_id":{"kind":"string","value":"a677a98e37b827e3dcf8fec903b9cdd288f88543"},"revision_id":{"kind":"string","value":"a20f446884b10ad6ea0596440175b798aac03084"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-06T08:36:37.431405","string":"2016-09-06T08:36:37.431405"},"revision_date":{"kind":"timestamp","value":"2014-12-09T06:57:13","string":"2014-12-09T06:57:13"},"committer_date":{"kind":"timestamp","value":"2014-12-09T06:57:13","string":"2014-12-09T06:57:13"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"creates craiglist listing\n\nRevision ID: 3d08656dfba2\nRevises: 168d2892efbe\nCreate Date: 2014-10-02 16:51:06.228153\n\n\"\"\"\n\n# revision identifiers, used by Alembic.\nrevision = '3d08656dfba2'\ndown_revision = '168d2892efbe'\n\nfrom alembic import op\nimport sqlalchemy as sa\nfrom batteries.model.types import Ascii, UTCDateTime\nfrom geoalchemy2 import Geometry\nfrom sqlalchemy.dialects import postgresql\n\ndef upgrade():\n ### commands auto generated by Alembic - please adjust! ###\n op.create_table('craigslist_listing',\n sa.Column('key', Ascii(length=40), nullable=False),\n sa.Column('id', sa.Unicode(length=20), nullable=False),\n sa.Column('title', sa.UnicodeText(), nullable=False),\n sa.Column('url', sa.Unicode(length=200), nullable=False),\n sa.Column('image_thumbnail_url', sa.Unicode(length=300), nullable=True),\n sa.Column('bedrooms', sa.Integer(), nullable=True),\n sa.Column('posted_date', sa.Date(), nullable=False),\n sa.Column('ask', sa.Numeric(precision=12, scale=2), nullable=False),\n sa.Column('location', Geometry(geometry_type='POINT'), nullable=True),\n sa.Column('ctime', UTCDateTime(), nullable=True),\n sa.Column('mtime', UTCDateTime(), nullable=True),\n sa.PrimaryKeyConstraint('key', name=op.f('pk_craigslist_listing'))\n )\n op.drop_table('listing')\n ### end Alembic commands ###\n\n\ndef downgrade():\n ### commands auto generated by Alembic - please adjust! ###\n op.create_table('listing',\n sa.Column('key', sa.VARCHAR(length=40), autoincrement=False, nullable=False),\n sa.Column('location', Geometry(geometry_type=u'POINT'), autoincrement=False, nullable=True),\n sa.Column('ctime', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),\n sa.Column('mtime', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),\n sa.PrimaryKeyConstraint('key', name=u'pk_listing')\n )\n op.drop_table('craigslist_listing')\n ### end Alembic commands ###\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41697,"cells":{"__id__":{"kind":"number","value":6751688590674,"string":"6,751,688,590,674"},"blob_id":{"kind":"string","value":"cd4847edb6ab198e65b3bcc32942a6194bc0be5b"},"directory_id":{"kind":"string","value":"018a5c8bfeb0e010deb67a6dfeeacce414fb3dc5"},"path":{"kind":"string","value":"/script-events/matrix_io.py"},"content_id":{"kind":"string","value":"2063f33d8baf3600c190e1e061863ef6922d0c91"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"tiberiu-popa/historic-events-research"},"repo_url":{"kind":"string","value":"https://github.com/tiberiu-popa/historic-events-research"},"snapshot_id":{"kind":"string","value":"4ea9f127d02f9dd03ef27d32430fd442bb72cdb1"},"revision_id":{"kind":"string","value":"661932d22bf64605737103ddbdb9a7308e5347a2"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-04T15:54:31.585130","string":"2020-05-04T15:54:31.585130"},"revision_date":{"kind":"timestamp","value":"2013-06-25T05:54:23","string":"2013-06-25T05:54:23"},"committer_date":{"kind":"timestamp","value":"2013-06-25T05:54:23","string":"2013-06-25T05:54:23"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/python\nfrom __future__ import print_function\nimport csv\nimport numpy as np\nimport os\nfrom scipy.sparse import csr_matrix\n\ndef save_csr_matrix(filename, matrix):\n\tnp.savez(filename, data=matrix.data, indices=matrix.indices,\n\t\tindptr=matrix.indptr, shape=matrix.shape)\n\ndef load_csr_matrix(filename):\n\tzmat = np.load(filename)\n\treturn csr_matrix((zmat['data'], zmat['indices'], zmat['indptr']), zmat['shape'], dtype=np.uint8)\n\ndef save_csv_as_csr_matrix(csv_filename, matrix_filename):\n\tvalues = [ ]\n\trow_indices = [ ]\n\tcolumn_indices = [ ]\n\tm = 0\n\tn = 0\n\twith open(csv_filename, 'r') as f:\n\t\treader = csv.reader(f)\n\t\tfor i, row in enumerate(reader):\n\t\t\tif i % 10000 == 0:\n\t\t\t\tprint('Progress:', i)\n\t\t\tfor j, elem in enumerate(map(int, row)):\n\t\t\t\tif elem != 0:\n\t\t\t\t\tvalues.append(elem)\n\t\t\t\t\trow_indices.append(i)\n\t\t\t\t\tcolumn_indices.append(j)\n\t\t\tn = max(n, len(row))\n\t\t\tm += 1\n\tindices = (row_indices, column_indices)\n\tmatrix = csr_matrix((values, indices), (m, n), dtype=np.uint8)\n\tsave_csr_matrix(matrix_filename, matrix)\n\ndef transforms_csvs(in_directory, out_directory):\n\tfilenames = os.listdir(in_directory)\n\tfilenames.sort()\n\tfor filename in filenames:\n\t\tbase_filename = os.path.splitext(filename)[0]\n\t\tfull_filename = os.path.join(in_directory, filename)\n\t\tprint('Processing', full_filename)\n\t\tmatrix_filename = os.path.join(out_directory, base_filename + '.npz')\n\t\tif not os.path.isfile(matrix_filename):\n\t\t\tsave_csv_as_csr_matrix(full_filename, matrix_filename)\n\ndef read_matrices(directory):\n\tfilenames = os.listdir(directory)\n\tmatrices = { }\n\tfor filename in filenames:\n\t\tbase_filename = os.path.splitext(filename)[0]\n\t\tfull_filename = os.path.join(directory, filename)\n\t\tmatrix = load_csr_matrix(full_filename)\n\t\tmatrices[base_filename] = matrix\n\treturn matrices\n\ndef main():\n\tcsv_directory = os.path.join('data', 'relevance')\n\tsparse_directory = os.path.join('data', 'sparse_relevance')\n\ttransforms_csvs(csv_directory, sparse_directory)\n\tmatrices = read_matrices(sparse_directory)\n\tprint(matrices)\n\nif __name__ == '__main__':\n\tmain()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41698,"cells":{"__id__":{"kind":"number","value":17428977309518,"string":"17,428,977,309,518"},"blob_id":{"kind":"string","value":"25ceb40c8b3145a9f3071fafb383c08dd7817783"},"directory_id":{"kind":"string","value":"d407f3bdbcdf70920bb8f0790c401dfb023af5de"},"path":{"kind":"string","value":"/sound/sound.gypi"},"content_id":{"kind":"string","value":"9a20d55d0b502dfbcf6e022162b7e3518c88f6c2"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-unknown-license-reference","BSD-2-Clause"],"string":"[\n \"LicenseRef-scancode-unknown-license-reference\",\n \"BSD-2-Clause\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"mathall/nanaka"},"repo_url":{"kind":"string","value":"https://github.com/mathall/nanaka"},"snapshot_id":{"kind":"string","value":"3f02ffb4f2e19af3446d43af61226c122b18498c"},"revision_id":{"kind":"string","value":"0304f444702318a83d221645d4e5f3622082c456"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-11T04:01:22.986788","string":"2016-09-11T04:01:22.986788"},"revision_date":{"kind":"timestamp","value":"2014-04-16T20:31:46","string":"2014-04-16T20:31:46"},"committer_date":{"kind":"timestamp","value":"2014-04-26T12:56:01","string":"2014-04-26T12:56:01"},"github_id":{"kind":"number","value":11401646,"string":"11,401,646"},"star_events_count":{"kind":"number","value":2,"string":"2"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"{\n 'sources': [\n '../sound/OggDecoder.cpp',\n '../sound/OggDecoder.h',\n '../sound/Sound.cpp',\n '../sound/Sound.h',\n '../sound/SoundDecoder.h',\n '../sound/SoundLoader.cpp',\n '../sound/SoundLoader.h',\n '../sound/SoundResource.h',\n ],\n}\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41699,"cells":{"__id__":{"kind":"number","value":3358664466613,"string":"3,358,664,466,613"},"blob_id":{"kind":"string","value":"372d7b270fc8afec6279b0ad7be9dcd495529a27"},"directory_id":{"kind":"string","value":"f60898b49d9b6b1d71da954313bb4962f1201a4b"},"path":{"kind":"string","value":"/flacon/config.py"},"content_id":{"kind":"string","value":"076652ac80fd66644a2f43d7531db07b67b63200"},"detected_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"bayazee/flacon"},"repo_url":{"kind":"string","value":"https://github.com/bayazee/flacon"},"snapshot_id":{"kind":"string","value":"6cb0067a9762b89274c5e43083186daf31416f7d"},"revision_id":{"kind":"string","value":"2e5833cf98a137df968a9257467a1a041ce66de9"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-01T15:55:06.120732","string":"2021-01-01T15:55:06.120732"},"revision_date":{"kind":"timestamp","value":"2013-04-28T10:02:12","string":"2013-04-28T10:02:12"},"committer_date":{"kind":"timestamp","value":"2013-04-28T10:02:12","string":"2013-04-28T10:02:12"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"class DefaultConfig(object):\n # Essentials\n\n DEBUG = True\n DEPLOYMENT = False\n\n SECRET_KEY = 'SECRET_KEY'\n\n MAIN_URL = 'http://127.0.0.1:5000'\n MAIN_STATIC_URL = 'http://static.127.0.0.1:5000'\n\n INSTALLED_EXTENSIONS = []\n INSTALLED_BLUEPRINTS = []\n\n if DEBUG:\n LOG_FORMAT = '\\033[1;35m[%(asctime)s]\\033[1;m [\\033[1;31m %(levelname)s \\033[1;m] \\033[1;32m[%(logger_name)s]\\033[1;m: \\\n \\033[1;33m %(message)s \\033[1;m'\n else:\n LOG_FORMAT = '[%(asctime)s] %(levelname)s [%(logger_name)s]: %(message)s'\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":416,"numItemsPerPage":100,"numTotalItems":42509,"offset":41600,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NjU2NjI1Mywic3ViIjoiL2RhdGFzZXRzL2xvdWJuYWJubC9vbGRfcHl0aG9uIiwiZXhwIjoxNzU2NTY5ODUzLCJpc3MiOiJodHRwczovL2h1Z2dpbmdmYWNlLmNvIn0._KneaL1_rbb5SNBeAczkK-RQKorpw60i9s6VxzpJU0WOvHbXugLJUFUMJKhXkDbDmYq0zk_BQlTGDafV_7YLDA","displayUrls":true},"discussionsStats":{"closed":0,"open":1,"total":1},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
__id__
int64
3.09k
19,722B
blob_id
stringlengths
40
40
directory_id
stringlengths
40
40
path
stringlengths
2
256
content_id
stringlengths
40
40
detected_licenses
list
license_type
stringclasses
3 values
repo_name
stringlengths
5
109
repo_url
stringlengths
24
128
snapshot_id
stringlengths
40
40
revision_id
stringlengths
40
40
branch_name
stringlengths
4
42
visit_date
timestamp[ns]
revision_date
timestamp[ns]
committer_date
timestamp[ns]
github_id
int64
6.65k
581M
star_events_count
int64
0
1.17k
fork_events_count
int64
0
154
gha_license_id
stringclasses
16 values
gha_fork
bool
2 classes
gha_event_created_at
timestamp[ns]
gha_created_at
timestamp[ns]
gha_updated_at
timestamp[ns]
gha_pushed_at
timestamp[ns]
gha_size
int64
0
5.76M
gha_stargazers_count
int32
0
407
gha_forks_count
int32
0
119
gha_open_issues_count
int32
0
640
gha_language
stringlengths
1
16
gha_archived
bool
2 classes
gha_disabled
bool
1 class
content
stringlengths
9
4.53M
src_encoding
stringclasses
18 values
language
stringclasses
1 value
is_vendor
bool
2 classes
is_generated
bool
2 classes
year
int64
1.97k
2.01k
19,662,360,306,625
318eff9bff337473407ec10ff0f07c3877657f1e
61ccb08e90b3c6c956329cdb0d5a475f3fb25e5b
/social_levenshtein.py
d749bcf041e4ae060fe51cd71de09e24cc2cd321
[]
no_license
calebbarr/code-eval
https://github.com/calebbarr/code-eval
6e1d03b1dd4938694e3b61eccb4b99ae4efd50d4
d4df5992ea39194dfde4b7d7ce7f1ff427287b3a
refs/heads/master
2021-01-19T13:02:41.387409
2014-01-24T05:43:35
2014-01-24T05:43:35
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import sys words = [ word.strip("\n") for word in open(sys.argv[1]).readlines() ] TARGET_WORD = "abcde" network = set() lev_distances = { # (word1,word2) : is_one # (word2,word1) : is_one } def levenshtein(word1,word2): table = [[0 for j in xrange(len(word2)+1)] for i in xrange(len(word1)+1) ] for i in xrange(len(word1)): table[i][0] = i for j in xrange(len(word2)): table[0][j] = j for i in range(1,len(word1)+1): for j in range(1,len(word2)+1): if word1[i-1] == word2[j-1]: table[i][j] = table[i-1][j-1] else: table[i][j] = min(table[i-1][j] + 1, table[i][j-1] + 1, table[i-1][j-1] + 1) return table[-1][-1] def get_network(word): global network global words if word in network: return else: network.add(word) friends = [] for friend in words: if friend not in network: if (word,friend) in lev_distances: if lev_distances[(word,friend)] == True: friends.append(friend) else: if levenshtein(word,friend) == 1: friends.append(friend) lev_distances[(word,friend)] = True lev_distances[(friend,word)] = True else: lev_distances[(word,friend)] = False lev_distances[(friend,word)] = False while len(friends) > 0: friend = friends.pop() get_network(friend) return get_network(TARGET_WORD) print len(network)
UTF-8
Python
false
false
2,014
8,710,193,709,411
6aa01c5d19230717915338d6593dd93997cb733c
767bed235bc4e1ec3630063ef546b47f8e1a77a4
/g.py
1f294d593c375789d8f8e42a83ed06fed9347e3a
[ "LicenseRef-scancode-unknown-license-reference", "Python-2.0" ]
non_permissive
pombredanne/jinn
https://github.com/pombredanne/jinn
b0ad326b2e779f2e9622e7fb26139d102aea413e
e5a932a8ca001b1f4a397bea5ef43b7662a17fdf
HEAD
2018-05-05T20:25:26.799956
2013-04-16T16:15:18
2013-04-16T16:15:18
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
feedback = None jinn = None os = None
UTF-8
Python
false
false
2,013
395,137,000,632
73a04d2fb0a36e79ce2dcaa29c3e0e03c05caf5e
ad3f64165e84ca816f4678d7d6c8f6d886408d9b
/postprocess.py
5b25c2bd13e3ff59e249293297f1f20e73ff2b96
[ "GPL-3.0-only" ]
non_permissive
sazamore/PyTrack
https://github.com/sazamore/PyTrack
12652a6e4f6ea85a2c9adf85d12455d26e5bd99b
59bce762a2e5e5799a357bd2b22a356423037cad
refs/heads/master
2021-01-19T11:02:16.030260
2012-07-19T05:04:47
2012-07-19T05:04:47
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # Filename: postprocess.py # Project Github: http://github.com/super3/PyTrack # Author: Shawn Wilkinson <[email protected]> # Author Website: http://super3.org/ # License: GPLv3 <http://gplv3.fsf.org/> # Imports import datetime from Classes.CompareImages import * from Classes.Helper import * # Benchmark Class class Benchmark: """Allows for timing of load and process events.""" def __init__(self): """ Initializes a Benchmark Object. Note: It will break if it grabs a time less than a second. Data members: startTime -- Datetime object at start of benchmark. num -- Stores amount of objects loaded/processed so we can get a rate """ self.reset() def start(self, prefix, num): """Starts the timer.""" self.startTime = datetime.datetime.now() self.num = num print(prefix + " " + str(num) + " Images...") def end(self): """Ends the timer. Returns seconds took.""" # Returns Datetime Timedelta result = datetime.datetime.now() - self.startTime rate = round(self.num / result.seconds, 3) print("Done in " + str(result.seconds) + " seconds. (" + str(rate) + " objects/sec)") self.reset() return result.seconds def reset(self): """Resets the timer.""" self.startTime = None self.num = 0 # PostProcess Class # This would work really nice with threading. class PostProcess: """Used to process a batch of images.""" def __init__(self, startFrame, endFrame, benchmark = False): """ Initializes a PostProcess Object. Data members: startFrame -- Start frame of post process endFrame -- End frame of post process totalFrames -- Total frames to post process queue -- A list of CompareFiles objects to process benchmark -- To enable/disable benchmarking """ # Frame Vars self.startFrame = startFrame self.endFrame = endFrame self.totalFrames = abs(endFrame - startFrame) # Queue Var self.queue = [] # Benchmark Object self.benchmark = benchmark if self.benchmark: self.bench = Benchmark() def load(self): """Load frames from disk to memory, and add them to queue.""" if self.benchmark: self.bench.start("Loading", self.totalFrames) for i in range(self.startFrame, self.endFrame): img1 = ImageFile(genFile(i)) img2 = ImageFile(genFile(i+1)) self.queue.append( CompareFiles(img1,img2) ) if self.benchmark: self.bench.end() def process(self): """Process frames in queue.""" if self.benchmark: self.bench.start("Processing", self.totalFrames) for obj in self.queue: obj.process( 0.3, (0.5, 0.5, 0.5), 300 ) # Grab Obj's Internal Data Here self.queue.remove(obj) if self.benchmark: self.bench.end() def run(self): """Load and process selected frames.""" self.load() self.process() # Main if __name__ == "__main__": # Vars start = 1 end = 1450 limit = 500 inter = abs(end-start) # Intended to split a process job into managemable chunks # The range function is a little wonky, and there is probably a better way # See: http://stackoverflow.com/q/312443 for i in range(1, inter, limit): # To prevent going over with silly range if i+limit-1 > end: process = PostProcess(i, end, True) else: process = PostProcess(i, i+limit-1, True) process.run()
UTF-8
Python
false
false
2,012
9,620,726,778,436
7ea3b8a3fbbb5037e3ecc8cf4ace5044c810684d
9f9005887be5ab7b395da48ce66ccac92d3bd273
/apps/analytics/utils.py
fcb15e8d2a6b5bd24816223983c865203551bddf
[]
no_license
bbarclay/Willet-Referrals
https://github.com/bbarclay/Willet-Referrals
114680624ff9dda1699212d7987c2a55f2f8cfc3
d1e046d5b7bf1ba0febb337a31ec04f5888fb341
refs/heads/master
2021-12-02T14:35:40.282061
2013-08-12T21:06:40
2013-08-12T21:06:40
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from util.consts import GOOGLE_ANALYTICS_API_KEY, APP_DOMAIN from pyga.entities import Event from pyga.requests import Tracker, Session, Visitor import logging def track_event(category, action, label=None, value=None): """Save the event to our event backend. Currently the only event backend supported is Google Analytics. """ tracker = Tracker(GOOGLE_ANALYTICS_API_KEY, APP_DOMAIN) # dummy visitor and session for now. # not doing user analytics, just aggregate events visitor = Visitor() session = Session() event = Event(category=category, action=action, label=label, value=value) # send GA event to google & log tracker.track_event(event, session, visitor) logging.info("Logged event: category=%s, action=%s, label=%s, value=%s", category, action, label, value)
UTF-8
Python
false
false
2,013
11,304,353,956,679
af73b88b47a27bb2b276fad19f241745a4cc986b
cc28174b5e18f0e5d0778ce6dfb30836805cd62e
/getb2g/base.py
b314b1effa398a5aef4666a0751b8c5809f11bd1
[]
no_license
ahal/getb2g
https://github.com/ahal/getb2g
08bf9e6b4d05d87c0171598c2681c484eb182502
71f9d7fc4c5585a65973b098c7726402c8ea32be
refs/heads/master
2016-09-11T10:39:55.755316
2013-07-12T15:53:08
2013-07-12T15:53:08
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from abc import ABCMeta, abstractmethod from bs4 import BeautifulSoup import inspect import os import shutil import stat import sys import tempfile import traceback from mixins import DownloadMixin, StorageMixin import mozfile import mozinfo import mozlog log = mozlog.getLogger('GetB2G') class Base(DownloadMixin, StorageMixin): __metaclass__ = ABCMeta _default_busybox_url = 'http://busybox.net/downloads/binaries/latest/' def __init__(self, **metadata): self.metadata = metadata super(Base, self).__init__() @classmethod def handled_resources(cls, request): """ Returns a subset of the resources that this class is capable of handling for a specified request """ handled_resources = [] methods = [name for name, ref in inspect.getmembers(cls, inspect.ismethod)] for res in request.resources: if 'prepare_%s' % res in methods: parents = get_parent_resources(res) if all(p not in request.resources for p in parents) \ or any('prepare_%s' % p in methods for p in parents) \ or res in valid_resources['device']: handled_resources.append(res) return handled_resources @classmethod def execute_request(cls, request): """ Executes the specified request """ handled_resources = cls.handled_resources(request) for resource in handled_resources: log.info("preparing '%s'" % resource) try: h = cls(**request.metadata) getattr(h, 'prepare_%s' % resource)() request.metadata = h.metadata request.resources.remove(resource) except (KeyboardInterrupt, SystemExit): raise except: log.warning("%s encountered an error while attempting to prepare '%s'" % (cls.__name__, resource)) log.debug(traceback.format_exc()) break def prepare_busybox(self): """ Prepares the busybox binary """ url = self._default_busybox_url platform = self.metadata.get('busybox_platform', 'armv6l') doc = self.download_file(url, tempfile.mkstemp()[1], silent=True) soup = BeautifulSoup(open(doc, 'r')) os.remove(doc) for link in soup.find_all('a'): if 'busybox-%s' % platform in link['href']: path = os.path.join(self.metadata['workdir'], 'busybox') if os.path.isfile(path): os.remove(path) file_name = self.download_file(url + link['href'], 'busybox') os.chmod(file_name, stat.S_IEXEC | stat.S_IREAD) break else: log.error("Couldn't find a busybox binary for platform '%s'" % platform) prepare_busybox.groups = ['default'] class SymbolsBase(object): __metaclass__ = ABCMeta _default_minidump_stackwalk_url = 'https://hg.mozilla.org/build/tools/file/tip/breakpad/%s/minidump_stackwalk' @abstractmethod def prepare_symbols(self): """ Prepares the symbols directory """ prepare_symbols.groups = ['unagi', 'panda', 'leo', 'hamachi', 'inari', 'otoro'] def prepare_minidump_stackwalk(self, url=None): """ Prepares the minidump stackwalk binary """ if not url: arch = '64' if mozinfo.bits == 64 else '' if mozinfo.isLinux: url = self._default_minidump_stackwalk_url % ('linux%s' % arch) elif mozinfo.isMac: url = self._default_minidump_stackwalk_url % ('osx%s' % arch) elif mozinfo.isWin: url = self._default_minidump_stackwalk_url % 'win32' path = os.path.join(self.metadata['workdir'], 'minidump_stackwalk') if os.path.isfile(path): os.remove(path) file_name = self.download_file(url, 'minidump_stackwalk') os.chmod(file_name, stat.S_IEXEC | stat.S_IREAD) prepare_minidump_stackwalk.groups = ['symbols'] class TestBase(object): __metaclass__ = ABCMeta _default_xre_url = 'http://people.mozilla.com/~ahalberstadt/getb2g/xre.zip' @abstractmethod def prepare_tests(self): """ Prepares the tests bundle """ prepare_tests.groups = ['emulator', 'b2g_desktop', 'panda'] def prepare_xre(self, url=None): """ Prepares the xre directory """ url = url or self._default_xre_url file_name = self.download_file(url) path = os.path.join(self.metadata['workdir'], 'xre') if os.path.isdir(path): shutil.rmtree(path) files = mozfile.extract(file_name) os.remove(file_name) prepare_xre.groups = ['tests'] class EmulatorBase(object): __metaclass__ = ABCMeta @abstractmethod def prepare_emulator(self): """ Prepares the emulator package """ prepare_emulator.groups = ['device', 'cli'] class UnagiBase(object): __metaclass__ = ABCMeta @abstractmethod def prepare_unagi(self): """ Prepares the unagi build """ prepare_unagi.groups = ['device', 'cli'] class PandaBase(object): __metaclass__ = ABCMeta @abstractmethod def prepare_panda(self): """ Prepares the panda build """ prepare_panda.groups = ['device', 'cli'] class B2GDesktopBase(object): __metaclass__ = ABCMeta @abstractmethod def prepare_b2g_desktop(self): """ Prepares the b2g desktop build """ prepare_b2g_desktop.groups = ['device', 'cli'] class LeoBase(object): __metaclass__ = ABCMeta @abstractmethod def prepare_leo(self): """ Prepares the leo build """ prepare_leo.groups = ['device', 'cli'] class HamachiBase(object): __metaclass__ = ABCMeta @abstractmethod def prepare_hamachi(self): """ Prepares the hamachi build """ prepare_hamachi.groups = ['device', 'cli'] class InariBase(object): __metaclass__ = ABCMeta @abstractmethod def prepare_inari(self): """ Prepares the inari build """ prepare_inari.groups = ['device', 'cli'] class OtoroBase(object): __metaclass__ = ABCMeta @abstractmethod def prepare_otoro(self): """ Prepares the otoro build """ prepare_otoro.groups = ['device', 'cli'] def get_parent_resources(resource): parents = [] for res in valid_resources['all']: if res in valid_resources: if resource in valid_resources[res]: parents.append(res) return parents # inspect the abstract base classes and extract the valid resources valid_resources = {'all': set([])} for cls_name, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass): for name, method in inspect.getmembers(cls, inspect.ismethod): if name.startswith('prepare'): name = name[len('prepare_'):].lower() valid_resources['all'].add(name) for group in getattr(method, 'groups', []): if group not in valid_resources: valid_resources[group] = set([]) valid_resources[group].add(name)
UTF-8
Python
false
false
2,013
5,171,140,664,578
00cad5b39e4ecae55546bdc01b470f279fb41ddb
e8842b7d1e7c2abb14c4b03aae30ce3e5b6f6fa1
/src/variables.py
ff1a80fa18a79bf3d0bf082f8a8ffeb75171afcb
[]
no_license
abhiramkothapalli/Stock-Trading-Framework
https://github.com/abhiramkothapalli/Stock-Trading-Framework
05fccfa8261660b453515991510cdb50f4a960ea
d954ba426d3a1a532d193139718f0d2dcd97de61
refs/heads/master
2016-09-06T05:02:16.593641
2014-11-02T01:38:00
2014-11-02T01:38:00
25,757,396
1
1
null
null
null
null
null
null
null
null
null
null
null
null
null
import os import sys #environment variables of the entire process. Set this up first sys.dont_write_bytecode = True #FIELD VARIABLES path = os.path.dirname(os.path.realpath(__file__)) hourMin = int(0) day = "" portfolio = [] #TESTING METRICS #forces a cycle of buy and sell forceCycle = False #forces to buy all of list forceBuy = False #forces to sell all of portfolio forceSell = False #turns on/off log log = False #STRATEGY METRICS #name of the strategy(s) strategy = ["trueDrop"] #starting bank balance balance = float(100000) #assumed price of transaction tradePrice = float(10) #start time given in numeric format startTime = int(930) #end time given in numeric format endTime = int(1600) #amount of time between each cycle (seconds) sleepTime = 900 #SMTP METRICS(FOR SENDMAIL) #decides to send mail or not SENDMAIL = False #your email username USER = "" #your email password PASS = "" #email you want to send it to TO = "" #Server that your email is hosted on SERVER = "smtp.gmail.com"
UTF-8
Python
false
false
2,014
14,173,392,096,034
77201ac57ef94a07d8819ba98c3aad63f07b16bc
6b0e699f3f8580db0dd6bc5d58c35d4963e08a83
/src/maluuba_ros/maluuba.py
c6d6a94ac7798613b5d333cb2d93efde4c1689d9
[]
no_license
LoyVanBeek/maluuba_ros
https://github.com/LoyVanBeek/maluuba_ros
5a962c23bfe44cc3840a3d3a26087b3bade4d2f1
7acc1ecf17c58cb52c5f4cb49a6bd4cfcafbb524
refs/heads/master
2021-01-10T20:07:45.168343
2013-06-17T20:23:27
2013-06-17T20:23:27
10,084,400
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python """ROS node for the Maluuba nAPI. Takes phrases from a rosservice, puts these through the API and encodes these is a ROS service response.""" import roslib roslib.load_manifest('maluuba_ros') from maluuba_ros.srv import Interpret, InterpretResponse, NormalizeResponse, Normalize from maluuba_ros.msg import Entities, TimeRange, Contact, Interpretation import sys import rospy from dateutil import parser from maluuba_napi import client allFields = ["album", "rating", "playlist", "song", "artist", "station", "genre", "originalTitle", "replacementTitle", "originalDate", "replacementDate", "originalLocation", "replacementLocation", "originalTime", "replacementTime", "contactName", "deleteContactName", "duration", "dateRange", "timeRange", "repeatDaysLength", "lengthOfTime", "repeatDays", "meetingTitle", "location", "date", "contacts", "message", "subject", "keyword", "time", "leaveLocation", "destination", "origin", "transitType", "route", "searchTerm", "numPeople", "appName", "contactField", "contactFieldValue", "mpaaRating", "actor", "theatre", "title", "numTickets", "departureTime", "departureDay", "returnTime", "returnDay", "departing", "carrier", "sortOrder", "noReturn", "child", "adult", "senior", "price", "luxury", "event"] intFields = ["rating", "numPeople", "numTickets", "child", "adult"] timeFields = ["originalDate", "replacementDate", "originalTime", "replacementTime", "date", "time", "departureTime", "returnTime"] floatFields = ["price"] durationFields = ["duration"] TimeRangeFields = ["dateRange", "timeRange"] ContactFields = ["contacts"] stringArrayFields = ["repeatDays"] specialFields = ["repeatDays", "luxury"] otherFields = list(set(allFields) - set(intFields) - set(timeFields) - set(floatFields) - set(durationFields) - set(TimeRangeFields) - set(ContactFields) - set(stringArrayFields) - set(specialFields)) class Maluuba(object): """Wrapper for the Maluuba natural language understanding API. the nAPI supports two services, interpret and normalize. """ def __init__(self, key): c = client.NAPIClient(key) super(Maluuba, self).__init__() self.client = c rospy.Service( 'maluuba/interpret', Interpret, self.interpret) rospy.Service( 'maluuba/normalize', Normalize, self.normalize) def interpret(self, request): """Interprets a phrase via the maluuba nAPI. The response packs the orignal phrase as well, so users of the interpretation get a complete package.""" rospy.loginfo("Interpreting '{0}'".format(request.phrase)) response = self.client.interpret(request.phrase) entities = response.entities try: if "REPEATDAYS" in entities.keys(): entities["repeatDays"] = entities["REPEATDAYS"] entities.pop("REPEATDAYS", None) if "LUXURY" in entities.keys(): entities["luxury"] = entities["LUXURY"][0] entities.pop("LUXURY", None) if "contacts" in entities.keys(): # TODO: there is a dict for each contact, so this code makes # multiple contacts. # Replace the phone_NUMBER-key by phoneNumber contact_entities = entities["contacts"] entities["contacts"] = [] for contact in contact_entities: if "phone_NUMBER" in contact.keys(): contact["phoneNumber"] = contact["phone_NUMBER"] contact.pop("phone_NUMBER", None) # import ipdb;ipdb.set_trace() entities["contacts"] += [Contact(**contact)] for field in [field for field in intFields if field in entities.keys()]: entities[field] = int(entities[field][0]) for field in [field for field in floatFields if field in entities.keys()]: if field in entities.keys(): entities[field] = float(entities[field]) for field in [field for field in TimeRangeFields if field in entities.keys()]: if field in entities.keys(): _range = entities[field][0] #import ipdb; ipdb.set_trace() start = self.time_to_int(_range["start"]) end = self.time_to_int(_range["end"]) entities[field] = TimeRange(start, end) for field in [field for field in durationFields if field in entities.keys()]: if field in entities.keys(): entities[field] = str(entities[field]) # Time as returned by Maluuba cannot be put in a ROS Time message. for field in [field for field in timeFields if field in entities.keys()]: if field in entities.keys(): from dateutil import parser dt = parser.parse(entities[field][0]) since_epoch = self.time_to_int(dt) entities[field] = since_epoch for field in [field for field in stringArrayFields if field in entities.keys()]: entities[field] = [str(item) for item in entities[field]] for field in [field for field in otherFields if field in entities.keys()]: entities[field] = [str(value) for value in entities[field]][0] ents = Entities(**entities) return InterpretResponse( Interpretation(ents, str(response.category), str(response.action), request.phrase)) except Exception as e: rospy.logerr("Phrase '{0}' yields exception: '{1}'. Response: {2.entities}, {2.category}, {2.action}".format( request.phrase, e, response)) raise def normalize(self, request): """Normalize a phrase. For example, 'tomorrow' is normalized to a DateTime""" # import ipdb; ipdb.set_trace() response = self.client.normalize( request.phrase, request.type, request.timezone) return NormalizeResponse( str(response.entities), str(response.context)) @staticmethod def time_to_int(entity): #import ipdb; ipdb.set_trace() stamp = entity.strftime("%s") since_epoch = int(stamp) return since_epoch if __name__ == "__main__": import sys rospy.init_node("maluuba_node") apikey = "" try: keyfile = open(sys.argv[1]) apikey = keyfile.readline() apikey = apikey.strip() except IOError: rospy.logerr( "First argument must be a file containing a Maluuba API key") exit(-1) m = Maluuba(apikey) rospy.spin()
UTF-8
Python
false
false
2,013
1,288,490,193,152
e989ea9a56f76a63fdfec661ce8fb9d57472bcc1
0d16b1b4537f732429f07e0b0e29a0c5579f124e
/bundle.py
18ffb2bf4437e007081c86a2ba54502af6e2fb2c
[]
no_license
clarinova-data/clarinova.com-sandag_crime_incidents-aa-orig-429e-dbundle
https://github.com/clarinova-data/clarinova.com-sandag_crime_incidents-aa-orig-429e-dbundle
2f83915e8a419f9d933bbcca8e369ac984dc7978
ba989f3ad4883bbb1b4fb674a521da48859487eb
refs/heads/master
2016-08-07T18:57:36.084658
2014-02-14T04:35:20
2014-02-14T04:35:20
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
''' ''' from ambry.bundle import BuildBundle class Bundle(BuildBundle): ''' ''' def __init__(self,directory=None): self.super_ = super(Bundle, self) self.super_.__init__(directory) def prepare(self): from ambry.identity import PartitionIdentity from ambry.geo.analysisarea import get_analysis_area if not self.database.exists(): self.database.create() if len(self.schema.tables) == 0 and len(self.schema.columns) == 0: self.log("Loading schema from file") with open(self.config.build.schema_file, 'rbU') as f: self.schema.schema_from_file(f) self.database.commit() for aa_id, aa_name in self.config.build.analysisareas.items(): aa = get_analysis_area(self.library, geoid=aa_id) if aa: pid = PartitionIdentity(self.identity, table='incidents', space=aa_id) p = self.partitions.find_or_new(pid) self.log("Created partition {} ".format(p.name)) return True def build(self): from ambry.geo.analysisarea import get_analysis_area from ambry.identity import PartitionIdentity # # Create an array of all of the AAs, and compute the combined bounding box, # so we can restrict the query # aas = [] lonmin = 180 lonmax = -180 latmin = 180 latmax = -180 for aa_id, aa_name in self.config.build.analysisareas.items(): aa = get_analysis_area(self.library, geoid=aa_id) part = self.partitions.find(PartitionIdentity(self.identity, table='incidents', space=aa_id)) aas.append((aa, part, part.database.inserter(), aa.get_translator())) lonmax = max(lonmax, aa.lonmax ) lonmin = min(lonmin, aa.lonmin ) latmax = max(latmax, aa.latmax ) latmin = min(latmin, aa.latmin ) # # Select only the points in the combined analysis area, then partition the points to each of # the AA sets. # q = self.config.build.incident_query.format(lonmax=lonmax, lonmin=lonmin, latmax=latmax, latmin=latmin) cr = self.library.dep('crime') type_map = self.config.build.type_map rows = 0 for row in cr.bundle.query(q): row = dict(row) nr = dict(row) nr['lat'] = row['lat'] nr['lon'] = row['lon'] nr['type'] = type_map[row['legend']] for aa, part, ins, trans in aas: if aa.is_in_ll(row['lon'],row['lat']): nr['analysisarea'] = aa.geoid p = trans(row['lon'], row['lat']) nr['cellx'] = p.x nr['celly'] = p.y ins.insert(nr) for aa, part, ins, trans in aas: ins.close() return True import sys if __name__ == '__main__': import ambry.run ambry.run.run(sys.argv[1:], Bundle)
UTF-8
Python
false
false
2,014
7,378,753,860,649
21a36d881ddf839e6e61b578fd0a39f0af556bb2
e9ceabd8414cdc90b374ee0b39c522b9df80c074
/tools/occam/occam/targets/par.py
c101d7a9934e9991a6318cb13efafc42ce306244
[]
no_license
Wajihulhassan/SelfContainedPrevirt
https://github.com/Wajihulhassan/SelfContainedPrevirt
275605079442ce97111350e999b1a6cdbbadb7c7
ea2f9127921e3bb3e72a72436f49ec3698137073
refs/heads/master
2020-05-17T23:52:39.022564
2013-06-09T23:26:35
2013-06-09T23:26:35
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# ------------------------------------------------------------------------------ # OCCAM # # Copyright © 2011-2012, SRI International # # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of SRI International nor the names of its contributors may # be used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ------------------------------------------------------------------------------ from occam import target, driver from .. import config import os, sys import shlex import logging def isChild(): cmd = open('/proc/%d/cmdline' % os.getppid(),'r').read().split('\0')[0] # sys.stderr.write('=' * 60) # sys.stderr.write('\n') # sys.stderr.write(cmd) # sys.stderr.write('\n') # sys.stderr.write('-' * 60) # sys.stderr.write('\n') # exit(1) dir(config) return ('llvm-ld' == cmd) or \ (os.path.abspath(cmd) == os.path.abspath(config.STD['clang'])) class ParallelTool (target.Target): def opts(self, args): return ([], args) def occam(self, cfg, args): pass def usage(self): return "%s ...%s args..." % (self.name, self.name) def run(self, cfg, _, args): logging.getLogger().info('%(cwd)s', {'cwd' : os.getcwd()}) if not isChild(): self.occam(cfg, args) return driver.run(cfg.getStdTool(self.name), args, pipe=False, inp=sys.stdin, resetPath=True) def createLo(self,old,new): if not os.path.isfile(new): oldFile = open(old, 'r') newFile = open(new, 'w') newFile.write(oldFile.read().replace('.o','.bc.o')) oldFile.close() newFile.close() def createLa(self,old,new): if not os.path.isfile(new): oldFile = open(old, 'r') newFile = open(new, 'w') newFile.write(oldFile.read().replace('.a','.bc.a')) oldFile.close() newFile.close() def fixinputname(self, name, keep=True, create=True): fixed = self.fixname(name) if os.path.isfile(fixed): return fixed else: if create and name.endswith(".lo") or name.endswith(".loT"): self.createLo(name,fixed) return fixed elif create and name.endswith(".la") or name.endswith(".laT"): self.createLa(name,fixed) return fixed else: logging.getLogger().warning('WARNING: bitcode version of %s not found, using original\n', name) if not keep: return None return name def fixname(self, name): if name == '-': return name if '.bc.' in name: return name if name.endswith(".S"): return name if name.endswith(".bin"): # XXX Should this be more clever or should creating # .bin files be handled by some target? return name if '.' in name: return '%s.bc%s' % (name[:name.rfind('.')], name[name.rfind('.'):]) return '%s.bc' % name
UTF-8
Python
false
false
2,013
17,970,143,195,743
73e66fcfccda08db8aff8bce88a55b05c7dfce6e
13350b352d3832d41147e7674268bf53697a7bde
/collector/Collector.py
ca37b34ad58d1499498527e77d7ccd6495bed664
[]
no_license
Positronics/lumberjack
https://github.com/Positronics/lumberjack
b97930c7d537a43ce80c28f8336ea85ed88c0dda
5c44b49b61019edbd034484477cf902de74663b5
refs/heads/master
2019-06-17T12:56:38.028759
2013-03-26T21:04:08
2013-03-26T21:04:08
8,961,137
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python from pymongo import MongoClient import pika import json import config # 1. connect to rabbitmq # 2. connect to mongodb # -- fail if any of the above fails # 3. wait for incoming message # 4. normalize message. # 5. save into database # 6. go to stpe 3. # -- quit if SIGTERM is received. # # NOTE: the interesting part of this collectos is the processing of its own logging # messages. Logging message should be printed to stdout and persistent storage # without passing through the message broker. # def connectAMQP(conf): """ connect to an AMQP. @return queue. """ params = pika.ConnectionParameters(host=conf['host'], port=conf['port']) conn = pika.BlockingConnection(params) chan = conn.channel() chan.exchange_declare(exchange=conf['exchange']['name'], type=conf['exchange']['type'], passive=conf['exchange']['passive']) result = chan.queue_declare(exclusive=True) chan.queue_bind(exchange=conf['exchange']['name'], queue=result.method.queue) return chan, result.method.queue def connectMongo(conf): """ initialize MongoDB connection. @return (connection, collection) """ conn = MongoClient(conf['host'], conf['port']) db = conf['database'] collection = conf['collection'] return (conn, conn[db][collection]) def isValid(msg, fields): """ validate that received message contains the required fields """ return not len([ item for item in fields if item not in msg.keys()]) > 0 def run(): """ initialize Collector """ conf = config.load() required = conf['global']["message"]["fields"] mongo=None logs=None channel=None queue = None mongo, logs = connectMongo(conf['mongodb']) channel, queue = connectAMQP(conf['global']['amqp']) def onMessage(ch, method, properties, body): msg = json.loads(body) if isValid(msg, required): oid = logs.insert(json.loads(body)) print "[Collector] Log message accepted: objectID=%s, msg=%s" % (oid, msg) else: print "[Collector] Rejected invalid log message: %s" % body channel.basic_consume(onMessage, queue=queue, no_ack=True) try: print "[Collector] ready to consume log messages" channel.start_consuming() except KeyboardInterrupt: mongo.disconnect() channel.close() if __name__ == "__main__": run()
UTF-8
Python
false
false
2,013
15,865,609,205,136
a81d4047c81baa5b75a591e203e968002d24e4b6
a4bbc9519b68d182dd9cf231d487f4eb04934cf3
/IntermediatePython/moviedates.py
6dfe643d628a6193c17c30b79d67d10099e1a313
[]
no_license
deoxyribose/IntermediatePython
https://github.com/deoxyribose/IntermediatePython
ff2b551060cf9e53b673ae7c3b487a543b2a18a6
9ce44950e9fd930269e15bc645ac7da9ffe71f5b
refs/heads/master
2020-06-02T00:05:05.334795
2012-12-22T16:21:15
2012-12-22T16:21:15
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#find -size +300M -not -iname "Game*" -printf '%T+\n' > moviedates import pylab as pl from collections import Counter datetime = ['2011-12-07+12:52:30.2418460000', '2012-04-21+21:16:31.0008350000', '2012-03-12+16:06:33.3107310000', '2012-02-20+21:39:46.9344130000', '2012-08-29+15:42:43.1153350000', '2012-02-15+15:49:59.6475600000', '2012-05-02+08:51:12.0283480000', '2012-04-21+12:01:20.0378610000', '2012-05-16+20:23:09.6871760000', '2012-08-31+19:07:04.6080800000', '2012-04-28+13:03:39.7369440000', '2012-08-19+16:58:43.1766360000', '2012-08-30+22:08:53.1748640000', '2012-02-05+23:10:41.7673080000', '2012-02-25+11:54:49.3197680000', '2012-08-30+17:04:40.2120590000', '2012-02-13+12:58:16.3994300000', '2012-04-20+13:36:14.1126670000', '2012-03-03+12:54:06.4379160000', '2012-04-29+19:15:06.3451340000', '2012-03-24+20:20:25.3826220000', '2012-02-23+10:45:47.0532070000', '2012-02-19+21:53:36.4038130000', '2012-01-19+14:17:12.4583820000', '2012-02-26+13:27:14.5672340000', '2012-02-26+13:27:14.7294570000', '2011-09-26+16:12:20.0283239000', '2012-02-15+14:33:36.6934010000', '2012-06-27+14:26:37.6967520000', '2012-03-05+15:56:07.4134800000', '2012-05-11+10:38:20.1511780000', '2012-05-11+13:15:13.2300150000', '2012-02-23+12:40:05.9358990000', '2012-02-22+23:15:23.7939130000', '2012-05-20+22:40:53.9874280000', '2012-05-20+22:46:15.2558130000', '2012-01-19+15:01:41.5891640000', '2012-06-17+17:47:55.5319040000', '2012-02-06+15:01:55.7489960000', '2012-08-21+21:08:32.2117980000', '2012-04-28+10:45:56.6418410000', '2012-07-25+19:25:35.7219240000', '2012-06-30+11:44:08.2356160000', '2012-07-01+17:34:30.6814530000', '2012-07-01+17:34:18.9263080000', '2012-07-01+17:34:18.4152280000', '2012-07-01+17:30:25.4460200000', '2012-03-13+17:55:40.1508420000', '2012-09-01+15:45:57.8474560000', '2012-05-06+19:48:07.1831750000', '2012-03-21+14:19:20.5846630000', '2012-03-28+13:15:31.5384450000', '2012-07-31+20:47:52.6916860000', '2012-05-17+15:01:31.0034210000', '2012-03-06+16:19:09.4011370000', '2012-02-24+15:52:51.5715860000', '2012-03-09+20:03:00.6181580000', '2012-02-28+21:57:25.7404210000', '2012-03-07+15:28:18.1042660000', '2012-03-02+21:42:21.1787270000', '2012-03-06+18:58:00.2278210000', '2012-08-30+14:37:32.1881710000', '2012-02-26+22:09:14.8178900000', '2012-06-03+17:13:02.9818210000', '2012-05-09+12:27:00.8134940000', '2012-08-02+19:43:02.4490870000', '2012-07-25+15:32:05.8442050000', '2012-07-25+15:32:00.9352930000', '2012-07-25+15:32:53.6176950000', '2012-03-06+23:31:32.0553830000', '2012-06-21+22:42:33.6188800000', '2012-06-25+19:46:05.3303390000', '2012-06-26+19:52:22.7164200000', '2012-02-13+16:12:38.7579290000', '2012-08-09+15:58:36.4658190000'] months = [int(i.split('-')[1]) for i in datetime] days = [int(i.split('-')[2][0:2]) for i in datetime] daysinmonths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] totaldays = sorted([sum(daysinmonths[0:months[i]-1])+days[i] for i in range(len(months))]) movies = Counter(totaldays) for i in range(1,342): if i not in totaldays: movies.update({i:0}) movies = sorted(movies.items()) totaldays = zip(*movies)[0] movies = zip(*movies)[1] prop_mov = sum([1 for i in movies if i])/float(len(movies)) prop_mov_yesterday = sum([1 for i in range(len(movies)) if movies[i-1]!=0])/float(len(movies)) cond_prop_mov = (sum([1 for i in range(len(movies)) if movies[i-1]!=0 and movies[i]!=0])/float(len(movies)))/prop_mov_yesterday prop_see_more = (sum([1 for i in range(len(movies)) if movies[i]>1 and movies[i]!=0])/float(len(movies)))/prop_mov print 'Probability of having seen a movie a particular day is %f' %(prop_mov) print 'Probability of seeing a movie given that I saw at least one movie the day before is %f' %(cond_prop_mov) print 'Probability of seeing more than one movie given that I have already seen one is %f' %(prop_see_more) pl.plot(totaldays, movies, '-') # smoothing = [movies[0]] # for i in xrange(1,len(movies)): # smoothing.append(0.6*movies[i-1] + 0.4*smoothing[i-1]) # pl.plot(totaldays, smoothing,'-') # pl.ylim(-1,5) pl.savefig('movieclusters.png')
UTF-8
Python
false
false
2,012
515,396,112,611
1ca6c211c2d557b767a9142102ddafaac1a0eeb9
6fd848402f80dd701562a91aa011f34b30f52754
/extended11.py
2eab97c5800b728ecdd7afb253639944e68beed1
[]
no_license
LeifAndersen/compilers_project_tests2
https://github.com/LeifAndersen/compilers_project_tests2
28a5e6299453ed719a1159fd9066fea9d5ef6b91
b9cea73c2737853e53ba3bee6ed12fbee9e67213
refs/heads/master
2020-04-13T09:39:24.594112
2013-02-28T05:39:22
2013-02-28T05:39:22
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
x = [i for i in range(10)] y = [i*2 for i in range(100) if i < 20 or i > 40] z = {z for z in range(10)} x = {z:2 for z in range(50)}
UTF-8
Python
false
false
2,013
9,577,777,084,185
3738236c1d95d84521823cd3086f1c18871680f2
6a4120c019b96946b01e7e296d76a2a6a525395c
/peakseq.py
2cf98409e8703847b061d27198df7cf45e055c38
[ "LicenseRef-scancode-proprietary-license", "CC-BY-NC-ND-3.0" ]
non_permissive
dnanexus/Scoring
https://github.com/dnanexus/Scoring
3ca93ab8abfaf28de51c0bd9f4f711cb616c3d21
baef0b051c51833a6a2fa17644f624a0220794de
refs/heads/master
2021-01-18T10:52:23.299757
2013-07-19T03:33:26
2013-07-19T03:33:26
10,806,425
1
1
null
null
null
null
null
null
null
null
null
null
null
null
null
import sjm import os import chr_maps import idr import conf BIN_DIR = conf.BIN_DIR SUBMISSION_BIN_DIR = conf.SUBMISSION_BIN_DIR QUEUE = conf.QUEUE PROJECT = conf.SGE_PROJECT PEAKSEQ_BINARY = conf.PEAKSEQ_BINARY BIN_SIZE = conf.PEAKSEQ_BIN_SIZE NAME = 'peakseq' USE_CONTROL_LOCK = False def archive_results(name, results_dir, archive_file): if os.path.exists(archive_file): raise Exception("Archive file %s already exists" % archive_file) archive_cmd = '%s %s %s' % (os.path.join(BIN_DIR, 'archive_results.py'), results_dir, archive_file) return sjm.Job('Archive_%s' % name, archive_cmd, queue=QUEUE, project=PROJECT) def check_control_inputs(control): if control.genome not in chr_maps.genomes: raise Exception("Genome %s not found. Valid genomes: %s" % (control.genome, ' '.join(chr_maps.genomes.keys()))) for mr in control.mapped_read_files: if not os.path.exists(mr): raise Exception("Cannot find mapped reads file %s" % mr) if os.path.exists(control.archive_file): raise Exception("Archive of control results already exists as %s" % control.archive_file) def check_sample_inputs(sample): if os.path.exists(os.path.join(sample.results_dir, 'rep_stats')): raise Exception("Sample results non-empty.") if os.path.exists(sample.archive_file): raise Exception("Archive of sample results already exists as %s" % sample.archive_file) if not sample.genome in chr_maps.peakseq_mappability_file: raise Exception("PeakSeq mappability file not defined in chr_maps.py") if not os.path.exists(chr_maps.peakseq_mappability_file[sample.genome]): raise Exception("Cannot find sample mappability file %s" % chr_maps.peakseq_mappability_file[sample.genome]) if sample.genome not in chr_maps.genomes: raise Exception("Genome %s not found. Valid genomes: " % (sample_conf.GENOME, ' '.join(chr_maps.genomes.keys()))) def prep_control(control): if not os.path.isdir(control.results_dir): os.makedirs(control.results_dir) if not os.path.isdir(control.temp_dir): os.makedirs(control.temp_dir) if not os.path.isdir(control.sgr_dir): os.makedirs(control.sgr_dir) def prep_sample(sample): if not os.path.isdir(sample.results_dir): os.makedirs(sample.results_dir) if not os.path.isdir(sample.temp_dir): os.makedirs(sample.temp_dir) def form_control_files(name, control): cmds = [] control.merged_file_location = os.path.join(control.temp_dir, '%s_merged_eland.txt' % control.run_name) # Merge eland files cmd = os.path.join(BIN_DIR, 'merge_and_filter_reads.py') cmd += ' %s' % control.merged_file_location for mr in control.mapped_read_files: cmd += ' %s' % mr cmds.append(cmd) # Divide merged file by chr cmd = os.path.join(BIN_DIR, 'divide_eland.py') cmd += " %s %s %s" % (control.merged_file_location, control.genome, control.results_dir) cmds.append(cmd) # Create Signal Map cmd = os.path.join(BIN_DIR, 'create_signal_map.py') cmd += ' %s %s' % (control.sgr_dir, control.results_dir) cmds.append(cmd) control.add_jobs(name, [sjm.Job(control.run_name, cmds, queue=QUEUE, project=PROJECT),]) def form_sample_files(name, sample): jobs = [] for rep in sample.replicates: jobs.append(sjm.Job(rep.rep_name(sample) + '_merge', form_replicate_files(rep, sample), queue=QUEUE, project=PROJECT)) jobs.append(sjm.Job(sample.run_name + '_All_merge', form_replicate_files(sample.combined_replicate, sample), queue=QUEUE, project=PROJECT)) sample.add_jobs(name, jobs) def form_replicate_files(rep, sample): cmds = [] # Make directories if not os.path.exists(rep.temp_dir(sample)): os.makedirs(rep.temp_dir(sample)) if not os.path.exists(rep.results_dir(sample)): os.makedirs(rep.results_dir(sample)) if not os.path.exists(rep.sgr_dir(sample)): os.makedirs(rep.sgr_dir(sample)) # Merge and filter rep.merged_file_location = os.path.join(rep.temp_dir(sample), rep.rep_name(sample) + '_merged_eland.txt') cmd = os.path.join(BIN_DIR, 'merge_and_filter_reads.py') cmd += ' %s' % rep.merged_file_location for f in rep.mapped_read_files: cmd += ' %s' % f cmds.append(cmd) # Divide by chr cmd = os.path.join(BIN_DIR, 'divide_eland.py') cmd += ' %s %s %s' % (rep.merged_file_location, sample.genome, rep.temp_dir(sample)) cmds.append(cmd) # Make Pseudoreplicates rep.pr1_name = rep.rep_name(sample) + '_PR1' rep.pr1_results_dir = os.path.join(sample.results_dir, rep.pr1_name) if not os.path.exists(rep.pr1_results_dir): os.makedirs(rep.pr1_results_dir) rep.pr1_sgr_dir = os.path.join(rep.pr1_results_dir, 'sgr') if not os.path.exists(rep.pr1_sgr_dir): os.makedirs(rep.pr1_sgr_dir) rep.pr1_merged = os.path.join(rep.temp_dir(sample), rep.pr1_name + '_merged_eland.txt') rep.pr2_name = rep.rep_name(sample) + '_PR2' rep.pr2_results_dir = os.path.join(sample.results_dir, rep.pr2_name) if not os.path.exists(rep.pr2_results_dir): os.makedirs(rep.pr2_results_dir) rep.pr2_sgr_dir = os.path.join(rep.pr2_results_dir, 'sgr') if not os.path.exists(rep.pr2_sgr_dir): os.makedirs(rep.pr2_sgr_dir) rep.pr2_merged = os.path.join(rep.temp_dir(sample), rep.pr2_name + '_merged_eland.txt') cmd = os.path.join(BIN_DIR, 'shuffle_mapped_reads.py') cmd += ' %s %s %s' % (rep.merged_file_location, rep.pr1_merged, rep.pr2_merged) cmds.append(cmd) cmd = os.path.join(BIN_DIR, 'divide_eland.py') cmd += ' %s %s %s' % (rep.pr1_merged, sample.genome, rep.temp_dir(sample)) cmds.append(cmd) cmd = os.path.join(BIN_DIR, 'divide_eland.py') cmd += ' %s %s %s' % (rep.pr2_merged, sample.genome, rep.temp_dir(sample)) cmds.append(cmd) return cmds def complete_control(name, control): if USE_CONTROL_LOCK: cmd = 'python ' cmd += os.path.join(BIN_DIR, 'complete_control_scoring.py') cmd += ' %s' % control.results_dir cmd += ' %s' % control.peakcaller control.add_jobs(name, [sjm.Job('complete_control', [cmd,], queue=QUEUE, project=PROJECT, host='localhost'),]) def archive_control(name, control): control.add_jobs(name, [archive_results(control.run_name, control.results_dir, control.archive_file),]) def archive_sample(name, sample, control): # Put archive file locations in stats file for SNAP f = open(os.path.join(sample.results_dir, 'rep_stats'), 'a') f.write('sample_tar_complete=%s\n' % sample.archive_file) f.write('control_tar_complete=%s\n' % control.archive_file) f.close() sample.add_jobs(name, [archive_results(sample.run_name, sample.results_dir, sample.archive_file),]) def calc_pbc(name, sample): pbc_stats_file = os.path.join(sample.results_dir, 'pbc_stats.txt') cmds = [] for r in sample.replicates: cmd = "python " + os.path.join(BIN_DIR, 'calc_pbc.py') cmd += ' %s' % r.merged_file_location cmd += ' %s' % pbc_stats_file cmd += ' %s' % r.rep_name(sample) cmds.append(cmd) sample.add_jobs(name, [sjm.Job('calc_pbc', cmds, queue=QUEUE, project=PROJECT, memory='6G'),]) def run_peakcaller(name, control, sample, options=None): if not options: options = {} mappability_file = chr_maps.peakseq_mappability_file[sample.genome] for r in sample.replicates + [sample.combined_replicate,]: for chr in chr_maps.genomes[sample.genome]: # Regular Run chr = chr[:-3] # remove .fa input = os.path.join(r.temp_dir(sample), '%s_eland.txt' % chr) cmd = PEAKSEQ_BINARY + " %s %s %s %s %s %s" % ( os.path.join(r.temp_dir(sample), '%s_eland.txt' % chr), #input os.path.join(control.results_dir, '%s_eland.txt' % chr), #control os.path.join(r.sgr_dir(sample), '%s.sgr' % chr), os.path.join(r.results_dir(sample), '%s_hits.bed' % chr), BIN_SIZE, mappability_file,) cmd = os.path.join(BIN_DIR, 'peakseq_wrapper.py') + ' ' + cmd sample.add_jobs(name, [sjm.Job(r.rep_name(sample) + '_%s' % chr, [cmd,], queue=QUEUE, project=PROJECT),]) # Pseudoreplicate Runs input = os.path.join(r.temp_dir(sample), '%s_eland.txt' % chr) cmd = PEAKSEQ_BINARY + " %s %s %s %s %s %s" % ( os.path.join(r.temp_dir(sample), '%s_eland.txt' % chr), #input os.path.join(control.results_dir, '%s_eland.txt' % chr), #control os.path.join(r.pr1_sgr_dir, '%s.sgr' % chr), os.path.join(r.pr1_results_dir, '%s_hits.bed' % chr), BIN_SIZE, mappability_file,) cmd = os.path.join(BIN_DIR, 'peakseq_wrapper.py') + ' ' + cmd sample.add_jobs(name, [sjm.Job(r.rep_name(sample) + '_PR1_%s' % chr, [cmd,], queue=QUEUE, project=PROJECT),]) input = os.path.join(r.temp_dir(sample), '%s_eland.txt' % chr) cmd = PEAKSEQ_BINARY + " %s %s %s %s %s %s" % ( os.path.join(r.temp_dir(sample), '%s_eland.txt' % chr), #input os.path.join(control.results_dir, '%s_eland.txt' % chr), #control os.path.join(r.pr2_sgr_dir, '%s.sgr' % chr), os.path.join(r.pr2_results_dir, '%s_hits.bed' % chr), BIN_SIZE, mappability_file,) cmd = os.path.join(BIN_DIR, 'peakseq_wrapper.py') + ' ' + cmd sample.add_jobs(name, [sjm.Job(r.rep_name(sample) + '_PR2_%s' % chr, [cmd,], queue=QUEUE, project=PROJECT),]) def merge_results(name, sample): for r in sample.replicates + [sample.combined_replicate,]: for q_val in sample.conf.Q_VALUE_THRESHOLDS + [0,]: if q_val: output = os.path.join(r.results_dir(sample), '%s_%f_hits_filtered.bed' % (r.rep_name(sample), q_val)) else: output = os.path.join(r.results_dir(sample), '%s_hits_filtered.bed' % r.rep_name(sample)) r.unfiltered_results = output cmd = filter_hits_cmd(r.results_dir(sample), r.sgr_dir(sample), sample.genome, output, q_val) sample.add_jobs(name, [sjm.Job('merge_' + r.rep_name(sample) + '%g' % (q_val), [cmd,], queue=QUEUE, project=PROJECT),]) # Merge Pseudoreplicate Hits output = os.path.join(r.results_dir(sample), '%s_hits.bed' % (r.rep_name(sample) + '_PR1')) r.unfiltered_results_pr1 = output cmd = filter_hits_cmd(r.pr1_results_dir, r.pr1_sgr_dir, sample.genome, output) sample.add_jobs(name, [sjm.Job('merge_' + r.rep_name(sample) + '_PR1', [cmd,], queue=QUEUE, project=PROJECT),]) output = os.path.join(r.results_dir(sample), '%s_hits.bed' % (r.rep_name(sample) + '_PR2')) r.unfiltered_results_pr2 = output cmd = filter_hits_cmd(r.pr1_results_dir, r.pr1_sgr_dir, sample.genome, output) sample.add_jobs(name, [sjm.Job('merge_' + r.rep_name(sample) + '_PR2', [cmd,], queue=QUEUE, project=PROJECT),]) def filter_hits_cmd(results_dir, sgr_dir, genome, output, q_val=None): cmd = os.path.join(BIN_DIR, 'filter_hits.py') cmd += ' %s' % results_dir cmd += ' %s' % sgr_dir cmd += ' %s' % genome cmd += ' %s' % output if q_val: cmd += ' %f' % q_val return cmd def replicate_scoring(name, sample): cmds = [] # Mapped Read Statistics cmd = os.path.join(BIN_DIR, 'read_stats.py') cmd += ' %s' % os.path.join(sample.results_dir, 'rep_stats') cmd += ' ' + sample.conf.path cmds.append(cmd) # Replicate Overlap Statistics for q in sample.conf.Q_VALUE_THRESHOLDS: for r1 in sample.replicates: for r2 in sample.replicates: if r1 == r2: continue cmd = os.path.join(BIN_DIR, 'overlap_stats.py') cmd += ' ' + r1.narrowPeak cmd += ' ' + r2.narrowPeak cmd += ' ' + os.path.join(sample.results_dir, 'rep_stats') cmd += ' %f' % q cmd += ' %s_VS_%s_%f' % (r1.rep_name(sample), r2.rep_name(sample), q) cmds.append(cmd) j = sjm.Job('replicate_stats', cmds, queue=QUEUE, project=PROJECT) sample.add_jobs(name, [j,]) def form_idr_inputs(name, sample): os.makedirs(os.path.join(sample.results_dir, 'idr')) jobs = [] for rep in sample.replicates + [sample.combined_replicate,]: rep.narrowPeak = os.path.join(rep.results_dir(sample), rep.rep_name(sample) + '_unfiltered_narrowPeak.bed') cmd = os.path.join(SUBMISSION_BIN_DIR, 'normalhits2narrowPeak') cmd += ' %s > %s' % (rep.unfiltered_results, rep.narrowPeak) jobs.append(sjm.Job(rep.rep_name(sample) + '_hits2narrowPeak', [cmd,], queue=QUEUE, project=PROJECT)) # Pseudoreplicates rep.narrowPeak_pr1 = os.path.join(rep.results_dir(sample), rep.rep_name(sample) + '_PR1_unfiltered_narrowPeak.bed') cmd = os.path.join(SUBMISSION_BIN_DIR, 'normalhits2narrowPeak') cmd += ' %s > %s' % (rep.unfiltered_results_pr1, rep.narrowPeak_pr1) jobs.append(sjm.Job(rep.rep_name(sample) + '_PR1_hits2narrowPeak', [cmd,], queue=QUEUE, project=PROJECT)) rep.narrowPeak_pr2 = os.path.join(rep.results_dir(sample), rep.rep_name(sample) + '_PR2_unfiltered_narrowPeak.bed') cmd = os.path.join(SUBMISSION_BIN_DIR, 'normalhits2narrowPeak') cmd += ' %s > %s' % (rep.unfiltered_results_pr2, rep.narrowPeak_pr2) jobs.append(sjm.Job(rep.rep_name(sample) + '_PR2_hits2narrowPeak', [cmd,], queue=QUEUE, project=PROJECT)) sample.add_jobs(name, jobs) def mail_results(sample, control, run_name, emails): cmds = [] cmd = os.path.join(BIN_DIR, 'build_report_text.py') cmd += ' %s' % sample.run_name cmd += ' %s' % sample.archive_file_download cmd += ' %s' % control.archive_file_download cmd += ' %s' % os.path.join(sample.results_dir, 'rep_stats') cmd += ' %s' % os.path.join(sample.results_dir, 'spp_stats.txt') cmd += ' %s' % os.path.join(sample.results_dir, 'idr_results.txt') cmd += ' %s' % os.path.join(sample.results_dir, 'full_report.txt') cmds.append(cmd) cmd = os.path.join(BIN_DIR, 'mail_wrapper.py') cmd += ' "%s Scoring Results"' % sample.run_name cmd += ' %s' % os.path.join(sample.results_dir, 'full_report.txt') for email in emails: cmd += ' %s' % email cmds.append(cmd) return sjm.Job('mail_results', cmds, queue=QUEUE, project=PROJECT, host='localhost', dependencies=sample.all_jobs() + control.all_jobs()) def cleanup(sample, control): cmds = [] if sample: temp_dirs = [] for r in sample.replicates: temp_dirs.append(r.temp_dir(sample)) if sample.combined_replicate.mapped_read_files: temp_dirs.append(sample.combined_replicate.temp_dir(sample)) for td in temp_dirs: if td and os.path.exists(td): cmds.append('rm -rf %s' % td) if control and control.jobs: if os.path.exists(control.merged_file_location): cmds.append('rm %s' % control.merged_file_location) return sjm.Job('cleanup', cmds, queue=QUEUE, project=PROJECT, dependencies=sample.all_jobs() + control.all_jobs()) def idr_analysis(name, sample): jobs = [] for i, rep_a in enumerate(sample.replicates): for j in range(i+1, len(sample.replicates)): rep_b = sample.replicates[j] idr_name = '%s_VS_%s' % (rep_a.rep_name(sample), rep_b.rep_name(sample)) cmd = idr.idr_analysis_cmd(rep_a.narrowPeak, rep_b.narrowPeak, os.path.join(sample.idr_dir, idr_name), 'q.value', sample.genome) jobs.append(sjm.Job('idr_analysis_' + idr_name, [cmd,], queue=QUEUE, project=PROJECT)) # Pseudoreplicates idr_name = '%s_PR1_VS_%s_PR2' % (rep_a.rep_name(sample), rep_a.rep_name(sample)) cmd = idr.idr_analysis_cmd(rep_a.narrowPeak_pr1, rep_a.narrowPeak_pr2, os.path.join(sample.idr_dir, idr_name+'_PR'), 'q.value', sample.genome) jobs.append(sjm.Job('idr_analysis_' + idr_name, [cmd,], queue=QUEUE, project=PROJECT)) # Pooled Pseudoreplicates idr_name = '%s_PR1_VS_%s_PR2' % (sample.combined_replicate.rep_name(sample), sample.combined_replicate.rep_name(sample)) cmd = idr.idr_analysis_cmd(sample.combined_replicate.narrowPeak_pr1, sample.combined_replicate.narrowPeak_pr2, os.path.join(sample.idr_dir, idr_name), 'q.value', sample.genome) jobs.append(sjm.Job('idr_analysis_'+ idr_name, [cmd,], queue=QUEUE, project=PROJECT)) sample.add_jobs(name, jobs) def idr_filter(name, sample): cmd = os.path.join(BIN_DIR, 'idr_filter.py') cmd += ' %s' % sample.run_name cmd += ' %s' % sample.genome cmd += ' %i' % len(sample.replicates) cmd += ' %s' % sample.idr_dir cmd += ' %s' % os.path.join(os.path.join(sample.results_dir, 'All'), sample.combined_replicate.unfiltered_results) cmd += ' %s' % sample.results_dir cmd += ' 5' # sort column (p.value) sample.add_jobs(name, [sjm.Job('idr_filter_' + sample.run_name, [cmd,], queue=QUEUE, project=PROJECT),])
UTF-8
Python
false
false
2,013
10,050,223,489,035
c2edcdd35b248b5a24398a72acda974f13c5930f
55fc17c487c1799a62cb2948f8834a4c0c882581
/collect.py
a157066ae57d17331390bc488fe54531b42520f2
[]
no_license
alexandrustr/bac2012
https://github.com/alexandrustr/bac2012
ad78dadd577aee7885d3967b6a53f12796ca3b7e
d82546fef713abd69e25a2e1d46458ddcca39ca7
refs/heads/master
2021-01-19T14:57:27.715057
2013-06-10T14:25:24
2013-06-10T14:25:24
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from urllib2 import urlopen import re import pyodbc from BeautifulSoup import BeautifulSoup def clean_var (var): return var.strip().lower() url = 'http://examene.calificativ.ro/examene/rezultate-bacalaureat/2012/' page = urlopen(url).read() areas = [] # Read district names for val in re.sub('<(.*?)>','',page).split('\n'): if len(val.split('/')[0].strip()) < 3 and len(val.split('/')[0].strip()) > 0: areas.append(val.split('/')[0]) for area in areas: # Read html filenames html = [] url = 'http://examene.calificativ.ro/examene/rezultate-bacalaureat/2012/'+area+'/' page = urlopen(url).read() for val in re.sub('<(.*?)>',' ',page).split('\n'): if re.match('.*html', val.strip().split(' ')[0]): html.append(val.strip().split(' ')[0]) for file in html: url = 'http://examene.calificativ.ro/examene/rezultate-bacalaureat/2012/'+area+'/'+file page = urlopen(url).read() soup = BeautifulSoup(page) table = soup.find('table') rows = table.findAll('tr') i = -1 list = [] for tr in rows: i += 1 if i == 2: i = 0 print '|'.join(list) list = [] cols = tr.findAll('td') for td in cols: if len(td.contents) > 0: if len(td.contents) > 2: list.append(clean_var(td.contents[0] + td.contents[2])) else: list.append(clean_var(td.contents[0])) else: list.append('None')
UTF-8
Python
false
false
2,013
11,733,850,690,306
20df99688759a94f81d2e2c72c4c72d1979ea55e
1fac3f78e26130cb8bfe51721ebb5c20bb453222
/KTN/kodeovingsforelesning/client.py
7eb843dec0a2190e9b8d6571c8b900a7391f81a0
[]
no_license
krekle/fellesprosjekt
https://github.com/krekle/fellesprosjekt
52511561a9622907be122897d223ae4d21611831
0682649e977d9ad6f6b7c90a2a2cd19ae04225ec
refs/heads/master
2021-01-02T09:08:09.619019
2014-03-21T22:41:10
2014-03-21T22:41:10
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- import socket import json class Client(object): def __init__(self): # Initialiser en tilkobling self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM) def start(self, host, port): # Start tilkoblingen self.connection.connect((host, port)) # Send en melding til serveren self.send('May the Force be with you') # Motta data fra serveren # Setter maks datastørrelse til 1kb received_data = self.connection.recv(1024) # Si ifra at klienten har mottatt en melding print 'Received from server: ' + received_data # Lukk tilkoblingen self.connection.close() # Lag en metode for å sende en melding til serveren def send(self, data): self.connection.send(data) # Kjøres når programmet startes if __name__ == "__main__": # Definer host og port for serveren HOST = 'localhost' PORT = 9999 # Initialiser klienten client = Client() # Start klienten client.start(HOST, PORT)
UTF-8
Python
false
false
2,014
10,264,971,851,080
05c2a5066dcc2ca6c74d0500ba40f44f51c027b3
8624ca4c10237bc4489c2b2a6b893958ec180a90
/python/roach_gui.py
640a06f8326caa399f47dfece9ef3b312b16dca8
[ "BSD-3-Clause" ]
permissive
CCATObservatory/ccat-wfs-software
https://github.com/CCATObservatory/ccat-wfs-software
428e326aec4ff04d5706f21648384d9884b5a854
c1a9a9cd8fb31ff0fceb263c1a3b6f1e06f1f495
refs/heads/master
2016-08-06T12:54:48.315906
2014-08-13T18:19:50
2014-08-13T18:19:50
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from PyQt4 import QtCore, QtGui,Qt from plot_template import plot_template import PyQt4.Qwt5 as Qwt from numpy import arange,sqrt from custom_qt_widgets import * try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: _fromUtf8 = lambda s: s """ Small class to create a widget with two plots in it """ class plot_widget(QtGui.QWidget): def __init__(self,plot_type=False,gui_parent=None): QtGui.QWidget.__init__(self, gui_parent) self.plot_type = plot_type self.layout = QtGui.QVBoxLayout() if plot_type == "fft": self.ch0_plot = plot_template(x_log=False,y_log=True) self.ch1_plot = plot_template(x_log=False,y_log=True) elif plot_type == "corr": self.ch0_plot = plot_template(x_log=False,y_log=True) self.ch1_plot = plot_template() else: self.ch0_plot = plot_template() self.ch1_plot = plot_template() self.layout.addWidget(self.ch0_plot) self.layout.addWidget(self.ch1_plot) self.setLayout(self.layout) self.setup_plots() def setup_plots(self): self.ch0_plot.plot_region.setCanvasBackground(Qt.Qt.black) self.ch1_plot.plot_region.setCanvasBackground(Qt.Qt.black) if self.plot_type == "corr" or "fft": #We'll make a grid self.fft_ch0_grid = Qwt.QwtPlotGrid() self.fft_ch0_grid.enableXMin(True) self.fft_ch0_grid.setMajPen(Qt.QPen(Qt.Qt.white,0,Qt.Qt.DotLine)) self.fft_ch0_grid.setMinPen(Qt.QPen(Qt.Qt.gray,0,Qt.Qt.DotLine)) self.fft_ch0_grid.attach(self.ch0_plot.plot_region) if self.plot_type == "fft" or "corr": self.fft_ch1_grid = Qwt.QwtPlotGrid() self.fft_ch1_grid.enableXMin(True) self.fft_ch1_grid.setMajPen(Qt.QPen(Qt.Qt.white,0,Qt.Qt.DotLine)) self.fft_ch1_grid.setMinPen(Qt.QPen(Qt.Qt.gray,0,Qt.Qt.DotLine)) self.fft_ch1_grid.attach(self.ch1_plot.plot_region) class roach_gui(QtGui.QDialog): def __init__(self,parent,gui_parent=None): QtGui.QDialog.__init__(self, gui_parent) self.p = parent self.setupUi() self.plot_timer = QtCore.QTimer() self.setup_plots() self.setup_slots() self.plot_timer.start(100) def setupUi(self): self.setWindowTitle("CORR WFS") #Setup the control portion self.roach_layout = QtGui.QGridLayout() self.roach_layout.addWidget(QtGui.QLabel("Tick"),0,0,1,1) self.roach_tick = QtGui.QLabel("0") self.roach_layout.addWidget(self.roach_tick,0,1,1,1) self.roach_layout.addWidget(QtGui.QLabel("Data Rate"),1,0,1,1) self.roach_acclen = QtGui.QSpinBox() self.roach_acclen.setRange(2,1000) self.roach_acclen.setProperty("value",self.p.data_rate) self.roach_layout.addWidget(self.roach_acclen,1,1,1,1) self.roach_layout.addWidget(QtGui.QLabel("Ave Len"),2,0,1,1) self.roach_avelen = QtGui.QSpinBox() self.roach_avelen.setRange(1,1000) self.roach_avelen.setProperty("value",self.p.average_no) self.roach_layout.addWidget(self.roach_avelen,2,1,1,1) self.sync_Button = QtGui.QPushButton("Sync") self.reset_Button = QtGui.QPushButton("Reset") self.clk_Button = QtGui.QPushButton("Roach Clk") self.dl_Button = QtGui.QPushButton("Logging") self.reset_Button.setEnabled(False) self.roach_layout.addWidget(self.sync_Button,3,0,1,2) self.roach_layout.addWidget(self.reset_Button,4,0,1,2) self.roach_layout.addWidget(self.clk_Button,5,0,1,2) self.roach_layout.addWidget(self.dl_Button,6,0,1,2) self.controlGroupBox = QtGui.QGroupBox("Roach Control") self.controlGroupBox.setLayout(self.roach_layout) #And now the monitoring stations #Ch0 self.ch0_layout = QtGui.QGridLayout() self.ch0_layout.addWidget(QtGui.QLabel("RMS"),0,0,1,1) self.ch0_rms = QtGui.QLabel("0") self.ch0_layout.addWidget(self.ch0_rms,0,1,1,1) self.ch0_layout.addWidget(QtGui.QLabel("Power"),1,0,1,1) self.ch0_power = QtGui.QLabel("0") self.ch0_layout.addWidget(self.ch0_power,1,1,1,1) self.ch0_layout.addWidget(QtGui.QLabel("Atten (dB)"),2,0,1,1) self.ch0_attenval = QtGui.QSpinBox() self.ch0_attenval.setRange(0,31) self.ch0_layout.addWidget(self.ch0_attenval,2,1,1,1) self.ch0if_Button = QtGui.QPushButton("Control") self.ch0_layout.addWidget(self.ch0if_Button,3,0,1,2) self.ch0GroupBox = QtGui.QGroupBox("CH0") self.ch0GroupBox.setLayout(self.ch0_layout) #Ch1 self.ch1_layout = QtGui.QGridLayout() self.ch1_layout.addWidget(QtGui.QLabel("RMS"),0,0,1,1) self.ch1_rms = QtGui.QLabel("0") self.ch1_layout.addWidget(self.ch1_rms,0,1,1,1) self.ch1_layout.addWidget(QtGui.QLabel("Power"),1,0,1,1) self.ch1_power = QtGui.QLabel("0") self.ch1_layout.addWidget(self.ch1_power,1,1,1,1) self.ch1if_Button = QtGui.QPushButton("Control") self.ch1_layout.addWidget(QtGui.QLabel("Atten (dB)"),2,0,1,1) self.ch1_attenval = QtGui.QSpinBox() self.ch1_attenval.setRange(0,31) self.ch1_layout.addWidget(self.ch1_attenval,2,1,1,1) self.ch1_layout.addWidget(self.ch1if_Button,3,0,1,2) self.ch1GroupBox = QtGui.QGroupBox("CH1") self.ch1GroupBox.setLayout(self.ch1_layout) #Correlator self.corr_layout = QtGui.QGridLayout() self.corr_layout.addWidget(QtGui.QLabel("Mag"),0,0,1,1) self.corr_mag = QtGui.QLabel("0") self.corr_layout.addWidget(self.corr_mag,0,1,1,1) self.corr_layout.addWidget(QtGui.QLabel("Phase"),1,0,1,1) self.corr_phase = QtGui.QLabel("0") self.corr_layout.addWidget(self.corr_phase,1,1,1,1) self.corrGroupBox = QtGui.QGroupBox("Correlator") self.corrGroupBox.setLayout(self.corr_layout) #And the PPMAC section self.ppmac_layout = QtGui.QGridLayout() self.ppmac_layout.addWidget(QtGui.QLabel("Tick"),0,0,1,1) self.ppmac_tick = QtGui.QLabel("0") self.ppmac_layout.addWidget(self.ppmac_tick,0,1,1,1) self.ppmac_layout.addWidget(QtGui.QLabel("X Pos"),1,0,1,1) self.ppmac_xpos = QtGui.QLabel("0") self.ppmac_layout.addWidget(self.ppmac_xpos,1,1,1,1) self.ppmac_layout.addWidget(QtGui.QLabel("Y Pos"),2,0,1,1) self.ppmac_ypos = QtGui.QLabel("0") self.ppmac_layout.addWidget(self.ppmac_ypos,2,1,1,1) self.ppmacGroupBox = QtGui.QGroupBox("PPMAC") self.ppmacGroupBox.setLayout(self.ppmac_layout) #final panel layout self.left_panel = QtGui.QVBoxLayout() self.left_panel.addWidget(self.controlGroupBox) self.left_panel.addWidget(self.ch0GroupBox) self.left_panel.addWidget(self.ch1GroupBox) self.left_panel.addWidget(self.corrGroupBox) self.left_panel.addWidget(self.ppmacGroupBox) self.left_panel.addStretch() #And now work on the tabs and plots self.tabWidget = QtGui.QTabWidget() self.adc_plots = plot_widget(gui_parent=self.tabWidget) self.tabWidget.addTab(self.adc_plots, "ADC Data") self.spectro_plots = plot_widget(plot_type="fft",gui_parent=self.tabWidget) self.tabWidget.addTab(self.spectro_plots, "spectro Data") self.corr_plots = plot_widget(plot_type="corr",gui_parent=self.tabWidget) self.tabWidget.addTab(self.corr_plots, "Corr Data") #And now have the final layout self.layout = QtGui.QHBoxLayout() self.layout.addLayout(self.left_panel) self.layout.addWidget(self.tabWidget) self.setLayout(self.layout) def setup_plots(self): self.adc0_curve = Qwt.QwtPlotCurve("ADC0 Monitor") self.adc0_curve.attach(self.adc_plots.ch0_plot.plot_region) self.adc0_curve.setPen(Qt.QPen(Qt.Qt.green)) self.adc1_curve = Qwt.QwtPlotCurve("adc1 Monitor") self.adc1_curve.attach(self.adc_plots.ch1_plot.plot_region) self.adc1_curve.setPen(Qt.QPen(Qt.Qt.green)) self.spectro0_curve = Qwt.QwtPlotCurve("FB DS Monitor") self.spectro0_curve.attach(self.spectro_plots.ch0_plot.plot_region) self.spectro0_curve.setPen(Qt.QPen(Qt.Qt.green)) self.spectro1_curve = Qwt.QwtPlotCurve("SA DS Monitor") self.spectro1_curve.attach(self.spectro_plots.ch1_plot.plot_region) self.spectro1_curve.setPen(Qt.QPen(Qt.Qt.green)) self.magnitude_curve = Qwt.QwtPlotCurve("FB DS Monitor") self.magnitude_curve.attach(self.corr_plots.ch0_plot.plot_region) self.magnitude_curve.setPen(Qt.QPen(Qt.Qt.green)) self.phase_curve = Qwt.QwtPlotCurve("SA DS Monitor") self.phase_curve.attach(self.corr_plots.ch1_plot.plot_region) self.phase_curve.setPen(Qt.QPen(Qt.Qt.green)) def setup_slots(self): QtCore.QObject.connect(self.clk_Button,QtCore.SIGNAL("clicked()"), self.show_clk_gui) QtCore.QObject.connect(self.ch0if_Button,QtCore.SIGNAL("clicked()"), self.show_bba_gui) QtCore.QObject.connect(self.dl_Button,QtCore.SIGNAL("clicked()"), self.show_dl_gui) QtCore.QObject.connect(self.plot_timer, QtCore.SIGNAL("timeout()"), self.update_plots) QtCore.QObject.connect(self.roach_acclen,QtCore.SIGNAL("valueChanged(int)"), self.p.set_acc_len) QtCore.QObject.connect(self.roach_avelen,QtCore.SIGNAL("valueChanged(int)"), self.p.reset_ave_buffer) QtCore.QObject.connect(self.sync_Button,QtCore.SIGNAL("clicked()"), self.p.setup_spectro) QtCore.QObject.connect(self.ch0_attenval,QtCore.SIGNAL("valueChanged(int)"), self.p.atten.setatten0) QtCore.QObject.connect(self.ch1_attenval,QtCore.SIGNAL("valueChanged(int)"), self.p.atten.setatten1) def show_clk_gui(self): self.p.clk_gui.show() def show_bba_gui(self): self.p.bba_gui.show() def show_dl_gui(self): self.p.dl_gui.show() def update_plots(self): self.p.adc_lock.acquire() adc0_x = arange(len(self.p.adc0_data)) self.adc0_curve.setData(adc0_x, self.p.adc0_data) adc1_x = arange(len(self.p.adc1_data)) self.adc1_curve.setData(adc1_x, self.p.adc1_data) self.p.adc_lock.release() self.p.spectro_lock.acquire() self.spectro0_curve.setData(self.p.spectro_x, self.p.ave_ch0_power) self.spectro1_curve.setData(self.p.spectro_x, self.p.ave_ch1_power) self.magnitude_curve.setData(self.p.spectro_x, self.p.ave_mag) self.phase_curve.setData(self.p.spectro_x, self.p.ave_phase) self.p.spectro_lock.release() self.adc_plots.ch0_plot.plot_region.replot() self.adc_plots.ch1_plot.plot_region.replot() self.spectro_plots.ch0_plot.plot_region.replot() self.spectro_plots.ch1_plot.plot_region.replot() self.corr_plots.ch0_plot.plot_region.replot() self.corr_plots.ch1_plot.plot_region.replot() #Update the spectro count self.roach_tick.setText(QtCore.QString.number(self.p.tick)) #Update mean values: self.ch0_rms.setText(QtCore.QString.number(self.p.adc0_rms, "g", 4)) self.ch1_rms.setText(QtCore.QString.number(self.p.adc1_rms, "g", 4)) self.ch0_power.setText(QtCore.QString.number(self.p.dc_ch0_power, "g", 4)) self.ch1_power.setText(QtCore.QString.number(self.p.dc_ch1_power, "g", 4)) self.corr_phase.setText(QtCore.QString.number(self.p.dc_phase, "g", 4)) self.corr_mag.setText(QtCore.QString.number(self.p.dc_mag, "g", 4)) #And ppmac values self.ppmac_tick.setText(QtCore.QString.number(self.p.ppmac.tick)) self.ppmac_xpos.setText(QtCore.QString.number(self.p.ppmac.motor_one_pos/1000.0)) self.ppmac_ypos.setText(QtCore.QString.number(self.p.ppmac.motor_two_pos/1000.0))
UTF-8
Python
false
false
2,014
13,099,650,261,210
7771cc84f8f2b0e9c102c7c35a88dc21fd736cbd
b39d9ef9175077ac6f03b66d97b073d85b6bc4d0
/Dorzolamide_Timolol_Teva_eye_drops,_solution_in_single-dose_container_SmPC.py
002f0e81e507e5944564adcc50df91163362198e
[]
no_license
urudaro/data-ue
https://github.com/urudaro/data-ue
2d840fdce8ba7e759b5551cb3ee277d046464fe0
176c57533b66754ee05a96a7429c3e610188e4aa
refs/heads/master
2021-01-22T12:02:16.931087
2013-07-16T14:05:41
2013-07-16T14:05:41
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
{'_data': [['Very common', [['Eye', u'br\xe4nnande och stickande k\xe4nsla '], ['GI', u'smakf\xf6r\xe4ndringar ']]], ['Common', [['Nervous system', u'huvudv\xe4rk* '], ['Eye', u'konjunktival injektion, dimsyn, korneal erosion, \xf6gonkl\xe5da, \xf6kat t\xe5rfl\xf6de '], ['Eye', u'\xf6gonlocksinflammation*, \xf6gonlocksirritation* '], ['Eye', u'tecken och symtom p\xe5 \xf6gonirritation (t ex br\xe4nnande, stickande, kliande k\xe4nsla, \xf6kat t\xe5rfl\xf6de, r\xf6da \xf6gon) blefarit*, keratit*, nedsatt korneal k\xe4nslighet och torra \xf6gon* '], ['Respiratory', u'sinuit '], ['GI', u'illam\xe5ende* '], ['Reproductive system', u'asteni/tr\xf6tthet* ']]], ['Uncommon', [['Eye', u'iridocyklit* '], ['Eye', u'synst\xf6rningar inklusive refraktionsf\xf6r\xe4ndringar (i vissa fall p\xe5 grund av '], ['Ear', u'bradykardi* '], ['Reproductive system', u'asteni/tr\xf6tthet* ']]], ['Rare', [['Immune system', u'tecken och symtom p\xe5 systemiska allergiska reaktioner inklusive angio\xf6dem, urtikaria, pruritus, hudutslag, anafylaktisk reaktion, s\xe4llsynta '], ['Nervous system', u'yrsel*, parestesi* '], ['Eye', u'irritation med rodnad*, sm\xe4rta*, bel\xe4ggningar p\xe5 \xf6gonlocken*, \xf6verg\xe5ende myopati (som f\xf6rsvann vid uts\xe4ttande av behandlingen), korneal\xf6dem*, okul\xe4r hypotoni*, avlossning av koroidea (efter filtrationskirurgi)* '], ['Ear', u'tinnitus* '], ['Respiratory', u'andn\xf6d, respiratorisk insufficiens, rinit '], ['Respiratory', u'epistaxis* '], ['GI', u'halsirritation, muntorrhet* '], ['Skin', u'kontaktdermatit, Stevens-Johnsons sjukdom, toxisk epidermal nekrolys '], ['Reproductive system', u'Peyronies sjukdom* ']]], ['Unknown', [['Reproductive system', u'sexuell dysfunktion, minskad libido ']]]], '_pages': [5, 8], u'_rank': 23, u'_type': u'LSFU3'}
UTF-8
Python
false
false
2,013
17,514,876,660,997
7041de1ec0f0455284dd82004194296de7c71ec2
9ad0be07226a9464ee567eead3197fbfc00ba77c
/python/__init__.py
12ca818124a48a0017cea3b49e1ccf8739798bbb
[]
no_license
gdimperi/DijetAnalysis
https://github.com/gdimperi/DijetAnalysis
3d2f1472534731853a6654a1f8cac33838aaf222
1f5602e0a3d72838cfe8bae3acbf54732a386a73
refs/heads/master
2021-01-24T20:02:23.393220
2014-09-23T13:39:25
2014-09-23T13:39:25
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#Automatically created by SCRAM import os __path__.append(os.path.dirname(os.path.abspath(__file__).rsplit('/CMSROMA/DijetAnalysis/',1)[0])+'/cfipython/slc6_amd64_gcc481/CMSROMA/DijetAnalysis')
UTF-8
Python
false
false
2,014
7,215,545,058,313
a1c585e7799091e9e89b16ae2300753c8bf3299c
7118a5750c033950078945c82ee6dfcaf771a6d1
/serve.py
bee0f4f574d37ae04dbdba6e627c4ea1860e235b
[]
no_license
methane/pyconjp2012-gevent-slide
https://github.com/methane/pyconjp2012-gevent-slide
f477c58d7fbd7e94454a2239109667e59c11fd10
2cfc537c7707b5c6c6befe0aacc0d188652cfe10
refs/heads/master
2020-06-08T04:10:50.431020
2012-09-10T09:05:39
2012-09-10T09:05:39
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from gevent.pywsgi import WSGIServer import render def app(env, start_response): content = render.render() content = content.encode('utf-8') L = len(content) start_response('200 OK', [ ('Content-Length', str(L)), ('Content-Type', 'text/html; charset=utf-8'), ]) return [content] WSGIServer(('127.0.0.1', 8001), app).serve_forever()
UTF-8
Python
false
false
2,012
5,291,399,737,453
0e9b37ca6b95e2782510bca791d8bc64d01dee4f
51296939be59e12d5f9cf6c55ab75c757d437f4e
/resetdb.py
1bafcd06154e72e44bf9ac2ac0b769634efcb0c8
[]
no_license
nathan7/blagware.py
https://github.com/nathan7/blagware.py
2ca8f0afed0715f437ca25ce18f7e211b0895f21
8a5fc3630327bd50610e400d23ab76c69cf0ef51
refs/heads/master
2016-09-16T14:00:17.871021
2012-01-02T00:18:44
2012-01-02T00:18:44
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from db import * import datetime try: Article.dropTable() except: pass try: Article.createTable() except: pass try: Tag.dropTable() except: pass try: Tag.createTable() except: pass try: Project.dropTable() except: pass try: Project.createTable() except: pass try: Comment.dropTable() except: pass try: Comment.createTable() except: pass ax=Article(title='one',text='test',datemade=datetime.timedelta(0,10)+datetime.datetime.now()) tx=Tag(name='articles') ty=Tag(name='things') ax.addTag(tx) ax.addTag(ty) ay=Article(title='two',text='test') tz=Tag(name='stuff') ay.addTag(tx) ay.addTag(tz) px=Project(name='blagware',tag=tx,desc='blag software') px.addTag(Tag(name='software'))
UTF-8
Python
false
false
2,012
11,355,893,539,385
d42ef6aedd8c0557a0ef18e88a6e7a16876b7ac2
160ba957f7064c39873dfc244f7c58e345675ef2
/feature_selection/test_gap_forward.py
1f7caeb55111323a2e99923ed761d4d27e3dea84
[]
no_license
samithaj/COPDGene
https://github.com/samithaj/COPDGene
dc6aeb4cec1c6fe14e523c1c51526fdf59068c9a
945af35c5ec4c92c58f16ad1a3dbca477a2babae
refs/heads/master
2020-04-22T14:53:05.670885
2014-01-23T20:51:10
2014-01-23T20:51:10
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import csv import matplotlib.pyplot as plt file_sel = open("data_3/features_sel_forward_gap_run_1.csv","wb") file_writer = csv.writer(file_sel) file_writer.writerow(["Loop Index","GAP Value","Added Feature Index",\ "Added Feature Name"]) for i in range(len(features_add)): file_writer.writerow([i,score_best[i],features_add[i],\ features_name_use[features_add[i]]]) file_sel.close() tmp = [] for i in range(len(features_add)): tmp.append(features_name_use[features_add[i]]) plt.figure() plt.plot(range(1,64),score_best) plt.xlabel("The Number of Features") plt.title("Forward Search With GAP Statistic") plt.show()
UTF-8
Python
false
false
2,014
7,945,689,511,001
7ca0b4e69836ad4a44aaebab4b56772c9a2bbd74
029b64da9eddd9e9240408b0afbdef76f46c9c6e
/Monitor/monitor_server.py
a11315c1b876ee8b7d3dde204f44da388fc64f9e
[]
no_license
bossjones/Jarvis
https://github.com/bossjones/Jarvis
637e6917ab9398687315f1c648a9f22e5d671f37
bdee46b81bacf00afbae2312ad2fb5d9a36425af
refs/heads/master
2021-01-16T19:22:39.547442
2014-09-09T13:55:56
2014-09-09T13:55:56
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import SocketServer import logging import socket import threading import json UDP_PORT = 12304 PORT = 9195 class MonitorRequestHandler(SocketServer.StreamRequestHandler): timeout = 5 def handle(self): self.logger = logging.getLogger(self.__class__.__name__) self.logger.setLevel(logging.DEBUG) self.logger.debug('Got connection') self.server.userlist.append(self.request) while True: try: msg = json.loads(self.request.recv(16384).strip()) self.logger.debug("Got %s, ignoring" % str(msg)) self.server.handle(msg['data']) except socket.timeout: if not self.server.running: break else: continue except: break self.close() def close(self): if self.request in self.server.userlist: self.server.userlist.remove(self.request) self.logger.warn("Closing") self.request.close() class MonitorServer(SocketServer.ThreadingTCPServer, object): daemon_threads = True allow_reuse_address = True def __init__(self, server_address=None): self.logger = logging.getLogger(self.__class__.__name__) self.logger.setLevel(logging.DEBUG) self.userlist = [] if not server_address: server_address = (socket.gethostname(), PORT) self.logger.debug("Using address "+str(server_address)) SocketServer.ThreadingTCPServer.__init__(self, server_address, MonitorRequestHandler) self.running = True self.logger.debug("Initialized") def broadcast(self, data): #print json.loads(data)['msg'] print "Broadcast: %s" % (data if len(data) <= 40 else data[:37]+"...") for user in self.userlist: user.send(data+"\n") if __name__ == "__main__": logging.basicConfig() srv = MonitorServer() print "Starting up server" t = threading.Thread(target = srv.serve_forever) t.start() try: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind((socket.gethostname(), UDP_PORT)) while True: # TODO: Timeouts? Count number of requests in time period data, addr = sock.recvfrom(4096) # buffer size is 1024 bytes print data srv.broadcast(data) finally: srv.shutdown() srv.server_close() srv.running = False
UTF-8
Python
false
false
2,014
12,524,124,657,379
e4827fbf47f088ef173a533ef27d53e1435c518f
8f03e61091abfc15386a9e7e280d99b9fd319327
/pd2-progress.py
372286bef719dd541a473afa60f6e4a3576f9105
[ "MIT" ]
permissive
BlizzardFenrir/pd2-progress
https://github.com/BlizzardFenrir/pd2-progress
355a5b9e4f1a1a3f97baa02d7dcc06a7ae568441
d093702c69586062fc127ee2f863e65ca93f30a7
refs/heads/master
2021-01-22T07:18:11.152646
2014-04-17T14:48:31
2014-04-17T14:48:31
18,878,405
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import StringIO import csv def printsums(names, row, column): result = '=SUM(' for idx, name in enumerate(names): if idx != 0: result = result + ', ' result = result + name + '!' + column + str(row) result = result + ')' return result def main(): print '--- PAYDAY 2 PROGRESS SPREADSHEET GENERATOR ---' names = [] letters = ['B', 'C', 'D', 'E'] skip = [12, 18, 23] missions = ['Bank Heist PRO', 'Bank Heist: Cash', 'Bank Heist: Deposit', 'Bank Heist: Gold PRO',\ 'Diamond Store', 'Go Bank', 'Jewelry Store', 'Transport: Crossroads',\ 'Transport: Downtown', 'Transport: Harbor', 'Transport: Park', 'Transport: Underpass',\ 'Firestarter', 'Firestarter PRO', 'Rats', 'Rats PRO', 'Watchdogs',\ 'Watchdogs PRO', 'Big Oil PRO', 'Election Day', 'Election Day PRO',\ 'Framing Frame', 'Framing Frame PRO', 'Four Stores', 'Mallcrasher',\ 'Nightclub', 'Ukrainian Job PRO'] # Get names from input print 'Enter the list of names you want to use, separated by commas:' rawnameslist = raw_input() for name in rawnameslist.split(','): names.append(name.strip()) # Generate spreadsheet csv files # Main sheet print 'Writing data to \'spreadsheet.txt\'...' fmain = open('spreadsheet.txt', 'w') fmain.write('---------------------------------------------------------------------\n'\ 'MAIN SHEET\n'\ 'Copy everything below this line to the main sheet of the spreadsheet:\n'\ '---------------------------------------------------------------------\n') fmain.write('Mission\tHard\tVery Hard\tOverkill\tDeath Wish\n') fmain.write('Progress:\t=SUM(B3:B32)/(' + str(len(names)) + '*27)\t'\ '=SUM(C3:C32)/(' + str(len(names)) + '*27)\t'\ '=SUM(D3:D32)/(' + str(len(names)) + '*27)\t'\ '=SUM(E3:E32)/(' + str(len(names)) + '*27)\n') for number in range(0, len(missions)): # Blank line to separate missions from different contractors if number in skip: fmain.write('\t\t\t\t\n') # Print row finalstring = str(missions[number]) for letter in letters: finalstring = finalstring + '\t' + printsums(names, number+3, letter) finalstring = finalstring + '\n' fmain.write(finalstring) # Write individual sheet fmain.write('\n\n---------------------------------------------------------------------\n'\ 'INDIVIDUAL SHEET\n'\ 'Copy everything below this line to the sheets for each name:\n'\ '---------------------------------------------------------------------\n') fmain.write('Mission\tHard\tVery Hard\tOverkill\tDeath Wish\n') fmain.write('Progress:\t=SUM(B3:B32)/27\t'\ '=SUM(C3:C32)/27\t'\ '=SUM(D3:D32)/27\t'\ '=SUM(E3:E32)/27\n') for number in range(0, len(missions)): # Blank line to separate missions from different contractors if number in skip: fmain.write('\t\t\t\t\n') finalstring = str(missions[number]) + '\t0\t0\t0\t0\n' fmain.write(finalstring) fmain.close() if __name__ == "__main__": main()
UTF-8
Python
false
false
2,014
10,247,792,013,930
3e659c4557ae67f8862855dddfe32386f2fca7a3
5a0515e7de72149930890d16961a9a301da07871
/settings.py
7411dd5a011152af947a04e5209c5efc06f8d0c3
[]
no_license
lluiscastrejonsubira/Turker
https://github.com/lluiscastrejonsubira/Turker
62e0f7e6d1a7f5dd855dd909d38a20849ec72eeb
d5f7f7a88717390a31918d74a8b895f616d52541
refs/heads/master
2021-01-10T23:15:10.854545
2014-12-06T23:06:26
2014-12-06T23:06:26
27,650,419
6
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import ConfigParser # Globlals settings_path ="settings.conf" def create_default_settings(): # Create config config = ConfigParser.RawConfigParser() config.add_section('Settings') config.set('Settings', 'access_key', 'put here your access key') config.set('Settings', 'secret_key', 'put here your secret key') config.set('Settings', 'server_url', 'put here your server url') config.set('Settings', 'db', 'insert db engine for sqlalchemy') config.set('Settings', 'images_path', 'insert path to the images folder') # Write settings with open(settings_path, 'w') as configfile: config.write(configfile) def create_new_batch(path): # Create config config = ConfigParser.RawConfigParser() config.add_section('Batch') config.set('Batch', 'name', 'insert name for the batch') config.set('Batch', 'sandbox', 'True to use sandbox, False otherwise') config.set('Batch', 'title', 'insert task title') config.set('Batch', 'description', 'insert task description') config.set('Batch', 'keywords', 'insert one keyword') config.set('Batch', 'question', 'insert the suburl of your server') config.set('Batch', 'amount', 0.00) config.set('Batch', 'duration', 3600) config.set('Batch', 'lifetime', 604800) config.set('Batch', 'auto_approve', 604800) config.set('Batch', 'max_assigs', 5) config.set('Batch', 'images_per_task', 1) config.set('Batch', 'height', 800) config.set('Batch', 'number_of_images', 0) config.set('Batch', 'image_group', 'Enter the desired image_group here') config.set('Batch', 'batch_tasks', 'Enter the batch for which to validate tasks') config.set('Batch', 'min_approved_amount', 100) config.set('Batch', 'min_approved_percent', 90) config.set('Batch', 'country_code', 'None') # Write settings with open(path, 'w') as configfile: config.write(configfile) def get_settings(path=settings_path): config = ConfigParser.RawConfigParser() dataset = config.read(path) if len(dataset) == 0: return None else: return config def get_batch_settings(path): batch = ConfigParser.RawConfigParser() batch.read(path) return batch
UTF-8
Python
false
false
2,014
13,125,420,090,900
cf5f827f10569368a921ba0501dee388b8d5de17
5ffd668baa59dbb7055060581914d0f8314d2315
/ext/qtrade_quotes_vienna.py
40c714c85d948803acfa487b2decf7efc3b0cfa4
[]
no_license
poeticcapybara/qTrade
https://github.com/poeticcapybara/qTrade
ba14f011fb7c59647096223c36ed0c108b7c741b
ac117c0e222df757d79face3f301f29eb5fb1005
refs/heads/master
2016-02-28T06:06:31.236230
2011-11-21T23:41:12
2011-11-21T23:41:12
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # -*- coding: iso-8859-1 -*- # ============================================================================ # Project Name : qTrade # Module Name : qtrade_quotes_vienna.py # # Description: List of quotes from http://en.wienerborse.at/ # # ============================================================================ # ============================================================================ # Imports # ============================================================================ # python system import logging import re import thread import time import string # qTrade system import qtrade_config from qtrade_logging import * from qtrade_defs import * from qtrade_ext import * from qtrade_connection import QTradeConnection # ============================================================================ # Import_ListOfQuotes_WBO() # # ============================================================================ def Import_ListOfQuotes_WBO(quotes,market='WIENER BORSE',dlg=None,x=0): print 'Update %s list of symbols' % market connection=QTradeConnection(cookies=None, proxy=qtrade_config.proxyHostname, proxyAuth=qtrade_config.proxyAuthentication) if market=='WIENER BORSE': url = "http://en.wienerborse.at/marketplace_products/trading/auction/?query=&markets=A_G_D&market=all" else: return False def splitLines(buf): lines = string.split(buf, '\n') lines = filter(lambda x:x, lines) def removeCarriage(s): if s[-1]=='\r': return s[:-1] else: return s lines = [removeCarriage(l) for l in lines] return lines info('Import_ListOfQuotes_WBO_%s:connect to %s' % (market,url)) try: data=connection.getDataFromUrl(url) except: debug('Import_ListOfQuotes_WBO:unable to connect :-(') return False # returns the data lines = splitLines(data) count = 0 n = 1 i = 0 for line in lines: #typical lines: #<td class="left">AT00000ATEC9</td> #<td class="left">ATEC</td> #<td class="left">A-TEC INDUSTRIES AG</td> #<td class="left">08:55</td> #<td class="left">12:00</td> #<td class="left">17:30</td> # extract data if '<th colspan="6"><b>Prime Market.at</b></th>' in line : n = 0 if n == 0 : if '<td class="left">' in line : i = i + 1 ch = line[(line.find('>')+1):(line.find ('</td>'))] if i == 1 : isin = ch elif i == 2 : ticker = ch elif i == 3 : name = ch name = name.replace('ä','a')#\xe4 name = name.replace('ö','o')#\xf6 name = name.replace('Ö','O')#\xd6 name = name.replace('ü','u')#\xfc name = name.replace('ß','?')#\xdf elif i == 6 : i = 0 #print isin, name, ticker # ok to proceed quotes.addQuote(isin = isin,name = name, \ ticker = ticker,market= market,currency = 'EUR', \ place = 'WBO',country = 'AT') count = count + 1 print 'Imported %d lines from WIENER BORSE' % (count) return True # ============================================================================ # Export me # ============================================================================ registerListSymbolConnector('WIENER BORSE','WBO',QLIST_ANY,QTAG_LIST,Import_ListOfQuotes_WBO) # ============================================================================ # Test ME # ============================================================================ if __name__=='__main__': setLevel(logging.INFO) from qtrade_quotes import quotes Import_ListOfQuotes_WBO(quotes,'WIENER BORSE') quotes.saveListOfQuotes() # ============================================================================ # That's all folks ! # ============================================================================
WINDOWS-1252
Python
false
false
2,011
5,557,687,685,038
6efe1abab7ba7257b7be73fec0fb57899bc48fc8
3ba8dedf7bf029b02799fdf823d13b8fdbcfb654
/particle_filter.py
4a20ee8f7a334a07bdca3135f1fd279769d42cfd
[ "MIT" ]
permissive
nihakue/pykhepera
https://github.com/nihakue/pykhepera
eb2dea0ac8a8124d554a7b933932cf11fc87895d
1a5cebd701f36efba45c42e8fa5682bd5ed79f9c
refs/heads/master
2016-09-05T14:05:58.483944
2014-03-11T15:00:39
2014-03-11T15:00:39
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import numpy as np from scipy.stats import norm from utils import Pose from utils import Particle import utils import time from multiprocessing import Pool from collections import namedtuple import raycasting import itertools class ParticleFilter(object): """PARTICLE FILTER YAY""" def __init__(self, n, start_pose, data, scale=1, theta_scale=np.pi/16): self.n = n self.data = data self.start_pose = start_pose self.scale = scale self.theta_scale = theta_scale # self.particles = self.random_particles() self.particles = self.rand_gaussian_particles(start_pose, n, 1./n) self.likliest = self.particles[0] self.pool = Pool(processes=2) def get_x(self): return [pose.x for pose in self.particles] def get_y(self): return [pose.y for pose in self.particles] def get_theta(self): return [pose.theta for pose in self.particles] def weighted_choice(self, choices): total = sum(p.w for p in choices) r = np.random.uniform(0, total) upto = 0 for p in choices: if upto + p.w > r: return p upto += p.w assert False, "Shouldn't get here" def update(self, dt): t0 = time.time() particles = self.sample_particles(len(self.particles)) #Move sampled particles based on a motor control new_particles = [ utils.update_pose(p, self.data.wheel_speeds, dt, noisy=True) for p in particles if p.x < 1469 and p.x > 0 and p.y < 962 and p.y > 0] # odo_pose = self.data.pose # odo_pose.w = 1./len(new_particles)*10 # new_particles.append(odo_pose) if len(new_particles) < self.n: shortage = self.n - len(new_particles) likliest_particle = self.most_likely() new_particles += (self.rand_gaussian_particles(likliest_particle, shortage, likliest_particle.w)) eta = 0 #Calculate the sensor probabilities (weights) for each particle #Use a pool of workers to utilize multiple cores exp_readings = self.pool.map(raycasting.exp_readings_for_pose_star, itertools.izip(new_particles, itertools.repeat(self.data.distance_thresholds))) if len(exp_readings) != len(new_particles): assert False, "Array of expected readings must have the same size as the array of new particles. exp_readings: %d new_particles: %d" % (len(exp_readings), len(new_particles)) #Sum sensor probabilites (assumption is that they are independent) for i, p in enumerate(new_particles): weight = self.probability_sum(4, exp_readings[i]) eta += weight p.w = weight #Normalize weights print 'eta: ', eta new_particles = self.normalize(eta, new_particles) while len(new_particles) > len(self.particles): new_particles.remove(self.least_likely(new_particles)) self.particles = new_particles duration = time.time() - t0 # print 'update took %.4f seconds' % duration def sample_particles(self, num): return [self.weighted_choice(self.particles) for i in xrange(num)] def likely_pose(self): particles = tuple((p.x, p.y, p.theta) for p in self.particles) xs, ys, thetas = zip(*particles) mean_x = np.mean(xs) mean_y = np.mean(ys) mean_theta = np.mean(thetas) return Pose(mean_x, mean_y, mean_theta) def random_particles(self, quantity): tup = ((np.random.uniform(0, 1469), np.random.uniform(0,962), np.random.uniform(0, 2*np.pi)) for i in xrange(quantity)) particles = [Particle(x, y, theta, self.least_likely(self.particles).w) for (x, y, theta) in tup] return particles def rand_gaussian_particles(self, loc, num, weight): particles_x = np.random.normal(loc.x, self.scale, num) particles_y = np.random.normal(loc.y, self.scale, num) particles_theta = np.random.normal(loc.theta, self.theta_scale, num) return [Particle(x, y, z, weight) for x, y, z in zip(particles_x, particles_y, particles_theta)] def normalize(self, eta, particles): if eta > 0: for p in particles: p.w = p.w/eta if p.w > self.likliest.w: self.likliest = p #now likliest return particles else: self.particles = self.random_particles(self.n) return def least_likely(self, particles): return min(particles, key=weight_key) def most_likely(self, particles=None): if not particles: particles = self.particles return max(particles, key=weight_key) def probability_sum(self, scale, exp_reading): prob_sum = sum(norm.pdf(pr, r, 60) for pr, r in zip(exp_reading, self.data.sensor_values)) # print 'probability of: %s when reading: %s' % (exp_reading, self.data.sensor_values) # print prob_sum return prob_sum def tear_down(self): self.pool.terminate() self.pool.join() def weight_key(particle): return particle.w
UTF-8
Python
false
false
2,014
3,676,492,010,772
2eb34b52b20f1f1c08808eaa88e8e38699bf10aa
04afedaa658c61f463d7f79cad2376d1b1652115
/corehq/apps/app_manager/suite_xml.py
17a8caaa8ebfee7a33aeedb4ded4399cbf6e14bf
[]
no_license
comm-scriptek/commcare-hq
https://github.com/comm-scriptek/commcare-hq
1897c86a8cce7422018b8be9bdcae76a6403a28a
a818a704e2439f7c0d66b432d052db909a97064d
refs/heads/master
2020-12-24T09:44:46.036215
2013-07-02T07:52:37
2013-07-02T07:52:37
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.core.urlresolvers import reverse from lxml import etree from eulxml.xmlmap import StringField, XmlObject, IntegerField, NodeListField, NodeField from corehq.apps.app_manager.util import split_path from corehq.apps.app_manager.xform import SESSION_CASE_ID from dimagi.utils.decorators.memoized import memoized from dimagi.utils.web import get_url_base class IdNode(XmlObject): id = StringField('@id') class XpathVariable(XmlObject): ROOT_NAME = 'variable' name = StringField('@name') locale_id = StringField('locale/@id') class Xpath(XmlObject): ROOT_NAME = 'xpath' function = StringField('@function') variables = NodeListField('variable', XpathVariable) class Text(XmlObject): """ <text> <!----------- Exactly one. Will be present wherever text can be defined. Contains a sequential list of string elements to be concatenated to form the text body.--> <xpath function=""> <!------------ 0 or More. An xpath function whose result is a string. References a data model if used in a context where one exists. --> <variable name=""/> <!------------ 0 or More. Variable for the localized string. Variable elements can support any child elements that <body> can. --> </xpath> <locale id=""> <!------------ 0 or More. A localized string. id can be referenced here or as a child--> <id/> <!------------ At Most One. The id of the localized string (if not provided as an attribute --> <argument key=""/> <!------------ 0 or More. Arguments for the localized string. Key is optional. Arguments can support any child elements that <body> can. --> </locale> </text> """ ROOT_NAME = 'text' xpath = NodeField('xpath', Xpath) xpath_function = StringField('xpath/@function') locale_id = StringField('locale/@id') class AbstractResource(XmlObject): LOCATION_TEMPLATE = 'resource/location[@authority="%s"]' local = StringField(LOCATION_TEMPLATE % 'local', required=True) remote = StringField(LOCATION_TEMPLATE % 'remote', required=True) version = IntegerField('resource/@version') id = StringField('resource/@id') def __init__(self, id=None, version=None, local=None, remote=None, **kwargs): super(AbstractResource, self).__init__(**kwargs) self.id = id self.version = version self.local = local self.remote = remote class XFormResource(AbstractResource): ROOT_NAME = 'xform' class LocaleResource(AbstractResource): ROOT_NAME = 'locale' language = StringField('@language') class MediaResource(AbstractResource): ROOT_NAME = 'media' path = StringField('@path') class Display(XmlObject): ROOT_NAME = 'display' text = NodeField('text', Text) media_image = StringField('media/@image') media_audio = StringField('media/@audio') def __init__(self, text=None, media_image=None, media_audio=None, **kwargs): super(Display, self).__init__(text=text, **kwargs) self.media_image = media_image self.media_audio = media_audio class DisplayNode(XmlObject): """Any node that has the awkward text-or-display subnode, like Command or Menu""" text = NodeField('text', Text) display = NodeField('display', Display) def __init__(self, locale_id=None, media_image=None, media_audio=None, **kwargs): super(DisplayNode, self).__init__(**kwargs) if locale_id is None: text = None else: text = Text(locale_id=locale_id) if media_image or media_audio: self.display = Display(text=text, media_image=media_image, media_audio=media_audio) else: self.text = text class Command(DisplayNode, IdNode): ROOT_NAME = 'command' relevant = StringField('@relevant') class Instance(IdNode): ROOT_NAME = 'instance' src = StringField('@src') def __init__(self, id=None, src=None, **kwargs): super(Instance, self).__init__(id=id, **kwargs) self.src = src class SessionDatum(IdNode): ROOT_NAME = 'datum' nodeset = StringField('@nodeset') value = StringField('@value') detail_select = StringField('@detail-select') detail_confirm = StringField('@detail-confirm') class Entry(XmlObject): ROOT_NAME = 'entry' form = StringField('form') command = NodeField('command', Command) instance = NodeField('instance', Instance) instances = NodeListField('instance', Instance) datums = NodeListField('session/datum', SessionDatum) datum = NodeField('session/datum', SessionDatum) class Menu(DisplayNode, IdNode): ROOT_NAME = 'menu' commands = NodeListField('command', Command) class AbstractTemplate(XmlObject): form = StringField('@form', choices=['image', 'phone', 'address']) width = IntegerField('@width') text = NodeField('text', Text) class Template(AbstractTemplate): ROOT_NAME = 'template' class Header(AbstractTemplate): ROOT_NAME = 'header' class Field(XmlObject): ROOT_NAME = 'field' sort = StringField('@sort') header = NodeField('header', Header) template = NodeField('template', Template) class DetailVariable(XmlObject): ROOT_NAME = '_' function = StringField('@function') def get_name(self): return self.node.tag def set_name(self, value): self.node.tag = value name = property(get_name, set_name) class Detail(IdNode): """ <detail id=""> <title><text/></title> <variables> <__ function=""/> </variables> <field sort=""> <header form="" width=""><text/></header> <template form="" width=""><text/></template> </field> </detail> """ ROOT_NAME = 'detail' title = NodeField('title/text', Text) variables = NodeListField('variables/*', DetailVariable) fields = NodeListField('field', Field) class Fixture(IdNode): ROOT_NAME = 'fixture' user_id = StringField('@user_id') def set_content(self, xml): for child in self.node: self.node.remove(child) self.node.append(xml) class Suite(XmlObject): ROOT_NAME = 'suite' version = IntegerField('@version') xform_resources = NodeListField('xform', XFormResource) locale_resources = NodeListField('locale', LocaleResource) media_resources = NodeListField('locale', MediaResource) details = NodeListField('detail', Detail) entries = NodeListField('entry', Entry) menus = NodeListField('menu', Menu) fixtures = NodeListField('fixture', Fixture) class IdStrings(object): def homescreen_title(self): return 'homescreen.title' def app_display_name(self): return "app.display.name" def xform_resource(self, form): return form.unique_id def locale_resource(self, lang): return u'app_{lang}_strings'.format(lang=lang) def media_resource(self, multimedia_id, name): return u'media-{id}-{name}'.format(id=multimedia_id, name=name) def detail(self, module, detail): return u"m{module.id}_{detail.type}".format(module=module, detail=detail) def detail_title_locale(self, module, detail): return u"m{module.id}.{detail.type}.title".format(module=module, detail=detail) def detail_column_header_locale(self, module, detail, column): return u"m{module.id}.{detail.type}.{d.model}_{d.field}_{d_id}.header".format( detail=detail, module=module, d=column, d_id=column.id + 1 ) def detail_column_enum_variable(self, module, detail, column, key): return u"m{module.id}.{detail.type}.{d.model}_{d.field}_{d_id}.enum.k{key}".format( module=module, detail=detail, d=column, d_id=column.id + 1, key=key, ) def menu(self, module): return u"m{module.id}".format(module=module) def module_locale(self, module): return module.get_locale_id() def form_locale(self, form): return form.get_locale_id() def form_command(self, form): return form.get_command_id() def case_list_command(self, module): return module.get_case_list_command_id() def case_list_locale(self, module): return module.get_case_list_locale_id() def referral_list_command(self, module): """1.0 holdover""" return module.get_referral_list_command_id() def referral_list_locale(self, module): """1.0 holdover""" return module.get_referral_list_locale_id() class MediaResourceError(Exception): pass class SuiteGenerator(object): def __init__(self, app): self.app = app # this is actually so slow it's worth caching self.modules = list(self.app.get_modules()) self.id_strings = IdStrings() @property def xform_resources(self): first = [] last = [] for form_stuff in self.app.get_forms(bare=False): if form_stuff['type'] == 'module_form': path = './modules-{module.id}/forms-{form.id}.xml'.format(**form_stuff) this_list = first else: path = './user_registration.xml' this_list = last this_list.append(XFormResource( id=self.id_strings.xform_resource(form_stuff['form']), version=form_stuff['form'].get_version(), local=path, remote=path, )) for x in first: yield x for x in last: yield x @property def locale_resources(self): for lang in ["default"] + self.app.build_langs: path = './{lang}/app_strings.txt'.format(lang=lang) yield LocaleResource( language=lang, id=self.id_strings.locale_resource(lang), version=self.app.version, local=path, remote=path, ) @property def media_resources(self): PREFIX = 'jr://file/' # you have to call remove_unused_mappings # before iterating through multimedia_map self.app.remove_unused_mappings() for path, m in self.app.multimedia_map.items(): if path.startswith(PREFIX): path = path[len(PREFIX):] else: raise MediaResourceError('%s does not start with jr://file/commcare/' % path) path, name = split_path(path) # CommCare assumes jr://media/, # which is an alias to jr://file/commcare/media/ # so we need to replace 'jr://file/' with '../../' # (this is a hack) path = '../../' + path multimedia_id = m.multimedia_id yield MediaResource( id=self.id_strings.media_resource(multimedia_id, name), path=path, version=1, local=None, remote=get_url_base() + reverse( 'hqmedia_download', args=[m.media_type, multimedia_id] ) + name ) @property @memoized def details(self): r = [] from corehq.apps.app_manager.detail_screen import get_column_generator if not self.app.use_custom_suite: for module in self.modules: for detail in module.get_details(): detail_columns = detail.get_columns() if detail_columns and detail.type in ('case_short', 'case_long'): d = Detail( id=self.id_strings.detail(module, detail), title=Text(locale_id=self.id_strings.detail_title_locale(module, detail)) ) for column in detail_columns: fields = get_column_generator(self.app, module, detail, column).fields d.fields.extend(fields) try: d.fields[0].sort = 'default' except IndexError: pass else: # only yield the Detail if it has Fields r.append(d) return r def get_filter_xpath(self, module, delegation=False): from corehq.apps.app_manager.detail_screen import Filter short_detail = module.details[0] filters = [] for column in short_detail.get_columns(): if column.format == 'filter': filters.append("(%s)" % Filter(self.app, module, short_detail, column).filter_xpath) if filters: xpath = '[%s]' % (' and '.join(filters)) else: xpath = '' if delegation: xpath += "[index/parent/@case_type = '%s']" % module.case_type xpath += "[start_date = '' or double(date(start_date)) <= double(now())]" return xpath @property def entries(self): def add_case_stuff(module, e, use_filter=False): def get_instances(): yield Instance(id='casedb', src='jr://instance/casedb') if any([form.form_filter for form in module.get_forms()]) and \ module.all_forms_require_a_case(): yield Instance(id='commcaresession', src='jr://instance/session') e.instances.extend(get_instances()) # I'm setting things individually instead of in the constructor # so that they appear in the correct order e.datum = SessionDatum() e.datum.id='case_id' e.datum.nodeset="instance('casedb')/casedb/case[@case_type='{module.case_type}'][@status='open']{filter_xpath}".format( module=module, filter_xpath=self.get_filter_xpath(module) if use_filter else '' ) e.datum.value="./@case_id" detail_ids = [detail.id for detail in self.details] def get_detail_id_safe(detail_type): detail_id = self.id_strings.detail( module=module, detail=module.get_detail(detail_type) ) return detail_id if detail_id in detail_ids else None e.datum.detail_select = get_detail_id_safe('case_short') e.datum.detail_confirm = get_detail_id_safe('case_long') for module in self.modules: for form in module.get_forms(): e = Entry() e.form = form.xmlns e.command=Command( id=self.id_strings.form_command(form), locale_id=self.id_strings.form_locale(form), media_image=form.media_image, media_audio=form.media_audio, ) if form.requires == "case": add_case_stuff(module, e, use_filter=True) yield e if module.case_list.show: e = Entry( command=Command( id=self.id_strings.case_list_command(module), locale_id=self.id_strings.case_list_locale(module), ) ) add_case_stuff(module, e, use_filter=False) yield e @property def menus(self): for module in self.modules: menu = Menu( id='root' if module.put_in_root else self.id_strings.menu(module), locale_id=self.id_strings.module_locale(module), media_image=module.media_image, media_audio=module.media_audio, ) def get_commands(): for form in module.get_forms(): command = Command(id=self.id_strings.form_command(form)) if module.all_forms_require_a_case() and \ not module.put_in_root and \ getattr(form, 'form_filter', None): command.relevant = form.form_filter.replace('.', SESSION_CASE_ID.case() ) yield command if module.case_list.show: yield Command(id=self.id_strings.case_list_command(module)) menu.commands.extend(get_commands()) yield menu @property def fixtures(self): if self.app.case_sharing: f = Fixture(id='user-groups') f.user_id = 'demo_user' groups = etree.fromstring(""" <groups> <group id="demo_user_group_id"> <name>Demo Group</name> </group> </groups> """) f.set_content(groups) yield f def generate_suite(self, sections=None): sections = sections or ( 'xform_resources', 'locale_resources', 'details', 'entries', 'menus', 'fixtures', ) suite = Suite() suite.version = self.app.version def add_to_suite(attr): getattr(suite, attr).extend(getattr(self, attr)) map(add_to_suite, sections) return suite.serializeDocument(pretty=True)
UTF-8
Python
false
false
2,013
14,972,256,042,046
f7a681cd36d4937c8ae1483d3e7cafd6e9c5906b
bd1362c60313784c90013dfc9f0169e64389bf27
/scripts/feature/monthly_bar.py
cd5ba3ddb51d2eed04c43cdf104e4b9b0a23cdb7
[]
no_license
ForceCry/iem
https://github.com/ForceCry/iem
391aa9daf796591909cb9d4e60e27375adfb0eab
4b0390d89e6570b99ca83a5fa9b042226e17c1ad
refs/heads/master
2020-12-24T19:04:55.517409
2013-04-09T14:25:36
2013-04-09T14:25:36
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import iemdb COOP = iemdb.connect('coop', bypass=True) ccursor = COOP.cursor() data = [] ccursor.execute(""" SELECT month, avg(sum) from (SELECT year, month, sum(precip) from alldata where stationid = 'ia8755' and year < 2001 and year > 1970 GROUP by month, year) as foo GROUP by month ORDER by month ASC """) for row in ccursor: data.append( row[1] ) data2 = [] ccursor.execute(""" SELECT month, avg(sum) from (SELECT year, month, sum(precip) from alldata where stationid = 'ia8755' and year < 2011 and year > 2000 GROUP by month, year) as foo GROUP by month ORDER by month ASC """) for row in ccursor: data2.append( row[1] ) import numpy import matplotlib.pyplot as plt fig = plt.figure() ax = fig.add_subplot(111) ax.bar(numpy.arange(1,13) - 0.4, data, width=0.4, facecolor='r', label='1970-2000') ax.bar(numpy.arange(1,13), data2, width=0.4, facecolor='b', label='2000-2010') ax.set_xlim(0.5,12.5) ax.set_xticks( numpy.arange(1,13) ) ax.set_xticklabels( ('Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec') ) ax.set_ylabel("Monthly Precipitation [inch]") ax.set_title("Waukon Monthly Precipitation") ax.legend() ax.grid(True) fig.savefig('test.png')
UTF-8
Python
false
false
2,013
6,700,149,021,646
af6c12b0e1dec1e9b14991ad5952d7b347a0f877
306001d1599a3112cce021c42c0df66a6b93d12a
/yaml2csv.py
dcc74b50c6c87ff91df2ae69632ba74db0abab6f
[ "GPL-2.0-only" ]
non_permissive
shimarin/discourse-ja-translation
https://github.com/shimarin/discourse-ja-translation
e2eb402bc0f58bbd28149fad6b963ca0c285d92d
795e9c719a04d0ddd35cf66ecfbbdb700ecb1521
refs/heads/master
2016-08-03T19:49:34.312190
2014-05-03T02:38:11
2014-05-03T02:38:11
18,515,658
2
1
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python2.7 import sys import csv import yaml import codecs TO_BE_TRANSLATED_MARK = "***TO BE TRANSLATED***" def collect(result, node, prefix=None): for key,value in node.items(): new_prefix = (key if prefix == None else prefix + "." + key) if isinstance(value, dict): collect(result, value, new_prefix) else: result[new_prefix] = value def collect_old_csv(filename): result = {} reader = csv.reader(open(filename)) for row in reader: if TO_BE_TRANSLATED_MARK not in row[1]: result[row[0]] = row[1].decode("utf-8") return result def flatten(namespace=None,old_csv=None): namespace = "" if namespace == None else namespace + "." en_src = yaml.load(open("%sen.yml" % namespace)) ja_src = yaml.load(open("%sja.yml" % namespace)) en = {} collect(en, en_src["en"]) ja = {} collect(ja, ja_src["ja"]) ja_old = collect_old_csv(old_csv) if old_csv else {} writer = csv.writer(sys.stdout) for key,value in sorted(en.items()): val = TO_BE_TRANSLATED_MARK + value if key in ja: val = ja[key] elif key in ja_old: val = ja_old[key] writer.writerow([key, val.encode("UTF-8")]) if __name__ == '__main__': if len(sys.argv) < 2: print "Usage: yaml2csv.py namespace('server'|'client') [old-translated-csv-file]" sys.exit(1) flatten(sys.argv[1], None if len(sys.argv) < 3 else sys.argv[2])
UTF-8
Python
false
false
2,014
10,625,749,127,718
1a1171c4735ff18e5fe1269035cd8fdecf76eb14
dbea1b80ca761395338798f0a89afa9445414639
/guassian_data.py
3ce522e3f16466f75331d23c2b8d16a2954ffedd
[]
no_license
taralparker/python-kmeans-testing
https://github.com/taralparker/python-kmeans-testing
855968aee38aad2209c9e47ba5c98f30ff232d17
56f8cc042b8f8a45a7409fa4a2150810b9dfbda1
refs/heads/master
2021-03-12T19:41:06.140019
2014-07-04T01:33:58
2014-07-04T01:33:58
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from numpy.random import multivariate_normal from numpy import vstack import numpy as np from matplotlib import pyplot as plt import prettyplotlib as ppl center1 = np.array([1,1]) center2 = np.array([-1,-1]) center3 = np.array([-1,1]) var_scale = 0.01 sample_size = 1000 x1 = multivariate_normal(center1,var_scale*np.eye(2),sample_size) x2 = multivariate_normal(center2,var_scale*np.eye(2),sample_size) x3 = multivariate_normal(center3,var_scale*np.eye(2),sample_size) X = vstack((x1,x2,x3)) #np.savetxt("./test_clusters/guassian3.csv",X,delimiter=",") ppl.scatter(X[:,0],X[:,1]) plt.show()
UTF-8
Python
false
false
2,014
3,418,793,997,719
c797e4f63d32e17ca6e0e81e1fbc95cf274ce1b8
cf057975f05b7d5043bf8bbaf451760f46140ee3
/flashtest.py
13ec039fa44401fb702e3ec0ca5aa8c23ad6aaf5
[]
no_license
joy0us/Hero-Quest
https://github.com/joy0us/Hero-Quest
f340982b104cddf824d03fdaa19ea9a584c66dcf
52cf4644ddc4bf8bdd82bb5b8e492a6d283594cd
refs/heads/master
2016-09-06T21:41:39.170981
2014-09-01T19:15:02
2014-09-01T19:15:02
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import pygcurse,pygame,time win = pygcurse.PygcurseWindow(40,25) while True: win.fill('#', fgcolor='red',bgcolor='black') time.sleep(0.05) win.fill('#',fgcolor='green',bgcolor='white') time.sleep(0.05)
UTF-8
Python
false
false
2,014
6,949,257,123,008
9385310c30743bcef5c2b260fa03b4ed0cd38f37
5667944cdd23f0d3df9020afea293a5de358c97b
/libs/myprime.py
cdb906f0844a34908b184de699fd46730daeb151
[]
no_license
InFog/PyProjectEuler
https://github.com/InFog/PyProjectEuler
bb0c04d630fe50a0521b191bff875dc7a2fa4dd0
5e7fed46dac9dc96fbb699f9e3fa0bcd3eeb0e60
refs/heads/master
2018-05-18T22:44:46.157950
2012-08-09T23:29:50
2012-08-09T23:29:50
605,893
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- from math import sqrt, ceil def isPrime(number): if (number < 2): return False elif (number == 2): return True elif (number % 2 == 0): return False # If number is even else: for i in range(3, int(ceil(sqrt(number)) + 1), 2): if (number % i == 0): return False return True
UTF-8
Python
false
false
2,012
2,920,577,778,512
18ad5e3769cf5c6bb7bd1a20ea955fb16be974ab
376aeb979a47eb68ba227b1f22f2c113ecf9c671
/coffeesite/featuredcoffee/admin.py
d532d4a30cc5154662fabafc0ccf1a94e19a4aff
[]
no_license
yxin2k/coffee
https://github.com/yxin2k/coffee
2e754f0ce396911562411a595e45264300a4e11a
7836f1f392d52d623383daf72325906aa05448bc
refs/heads/master
2016-09-16T09:46:56.679309
2014-09-05T19:08:10
2014-09-05T19:08:10
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.contrib import admin from featuredcoffee.models import Month from featuredcoffee.models import Coffee admin.site.register(Month) admin.site.register(Coffee)
UTF-8
Python
false
false
2,014
6,451,040,923,087
5eda46e352cbf703de41e020f168f30943352276
9d85f532e1c372570672f1367a75a86375b9f211
/csubgweb/models.py
d0d92a5cf4af1178cbc08fd7a85c4229adf76cfc
[]
no_license
liqi328/CSUBG
https://github.com/liqi328/CSUBG
3c10adf13bef4ba1b7e909b472b6b2cd72105e64
a57d9236d8b7ca4b0d1d76a43e3dbf8c1dc4ec57
refs/heads/master
2016-09-05T19:13:32.091989
2012-09-01T08:23:17
2012-09-01T08:23:17
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: cp936 -*- from django.db import models # Create your models here. GENDER_CHOICE = ( ('Female','Female'), ('Male','Male'), ) DEGREE_CHOICE = ( ('PhD','PhD'), ('Master','Master'), ('Bachelor','Bachelor'), ) CATEGORY_CHOICE = ( ('Professor', 'Professor'), ('VisitingScholar', 'Visiting Scholar'), ('PostDoctorate', 'Post Doctorate'), ('PhDCandidate', 'PhD Candidate'), ('PhDGraduate', 'PhD(Graduate)'), ('MasterCandidate', 'Master Candidate'), ('MasterGraduate', 'Master(Graduate)'), ('ForeignStudent', 'Foreign Student'), ) ''' TITLE_CHOICE = ( ('Professor', 'Professor'), ('PostPhD', 'PostPhD'), ('PhD', 'PhD'), ('Master', 'Master'), ) ''' class Member(models.Model): name = models.CharField(max_length = '30') englishName = models.CharField(max_length = '30') gender = models.CharField(max_length = '10', choices = GENDER_CHOICE) birthday = models.DateField(auto_now = False, auto_now_add = False, blank = True, null=True) title = models.CharField(max_length = '300') degree = models.CharField(max_length = '20', choices = DEGREE_CHOICE) category = models.CharField(max_length = '30',choices = CATEGORY_CHOICE) enrollmentDate = models.DateField(auto_now = False, auto_now_add = False, blank = True, null=True) headshot = models.ImageField(upload_to = 'upload/headshot', blank = True, null=True, default='upload/headshot/0_0.jpg') email = models.EmailField(blank = True, null=True) homepage = models.CharField(max_length = '60', blank = True, null=True) def __unicode__(self): return self.name class Project(models.Model): SOURCE_CHOICE = ( ('From NFSC', 'From NFSC'), ('From Ministry Education', 'From Ministry Education'), ('From Hu Nan Province', 'From Hu Nan Province'), ('From CSU', 'From CSU'), ('From 973', 'From 973'), ('Cooperation Project', 'Cooperation Project'), ) title = models.CharField(max_length = '200') name = models.CharField(max_length = '200') number = models.CharField(max_length = '30') fund = models.CharField(max_length = '10') time = models.CharField(max_length = '40') manager = models.CharField(max_length = '20') introduction = models.TextField(max_length = '6000') source = models.CharField(max_length = '50', choices = SOURCE_CHOICE) def __unicode__(self): return self.name class Paper(models.Model): TYPE_CHOICE = ( ('JournalPaper','Journal Paper'), ('ConferencePaper','Conference Paper'), ) title = models.TextField(max_length = '300') authors = models.CharField(max_length = '200') publication = models.TextField(max_length = '200') publisher = models.CharField(max_length = '100',null = True, blank = True) volume = models.CharField(max_length = '20',null = True, blank = True) number = models.CharField(max_length = '20',null = True, blank = True) pages = models.CharField(max_length = '20',null = True, blank = True) publishDate = models.DateField(auto_now = False, auto_now_add = False) link = models.CharField(max_length = '100', blank = True, null = True) type = models.CharField(max_length = '40', choices = TYPE_CHOICE) def __unicode__(self): return self.title class Patent(models.Model): TYPE_CHOICE = ( ('Patent', 'Patent'), ('Award', 'Award'), ('SoftwareCopyright', 'Software Copyright'), ) name = models.CharField(max_length = '200') year = models.CharField(max_length = '10') owner = models.CharField(max_length = '60') applicationNumber = models.CharField(max_length = '50',null = True, blank = True) type = models.CharField(max_length = '30', choices = TYPE_CHOICE) def __unicode__(self): return self.name class Software(models.Model): SOFTWARE_CHOICE = ( ('Protein_Complex_Mining', 'Protein_Complex_Mining'), ('Key_Protein_Predict','Key_Protein_Predict'), ) name = models.CharField(max_length = '100') functionDescription = models.TextField(max_length = '6000') instruction = models.TextField(max_length = '600',null = True, blank = True) browseCount = models.IntegerField(editable = False, default = 0) downloadCount = models.IntegerField(editable =False, default = 0) image = models.ImageField(upload_to = 'upload/software/%Y%m',blank = True, null = True) #link = models.FileField(upload_to = 'upload/software/homepage/%Y%m') link = models.CharField(max_length = '100', default='software/') category = models.CharField(max_length = '80', choices = SOFTWARE_CHOICE) class Contact(models.Model): name = models.CharField(max_length = '100', blank = False) email = models.EmailField(editable = True) messages = models.TextField(max_length = '6000',) time = models.DateTimeField(auto_now_add = True) isReply = models.BooleanField(default = False) def __unicode__(self): return self.name class News(models.Model): title = models.CharField(max_length = '300') content = models.TextField(max_length = '60000') publishDate = models.DateField(auto_now_add = True) def __unicode__(self): return self.title class FriendLink(models.Model): name = models.CharField(max_length = '200') english_name = models.CharField(max_length = '200') link = models.CharField(max_length = '200') def __unicode__(self): return self.name
UTF-8
Python
false
false
2,012
10,316,511,473,393
7cf8c9fcf6b581f10d8f92393f52e8b3e7809743
fb05fc424012eb92fcaeb806964324a091ed2342
/cola-ops-service/colalib/vappscb.py
6224833b89f59de318bf4ba7275e89774ea45c6f
[]
no_license
ruivapps/rli
https://github.com/ruivapps/rli
aedb1f7359ed46fab3dad372d2b6f8b76a17888a
7da510a671eb1b3908d50ede1eda878081182883
refs/heads/master
2016-08-06T08:15:13.360364
2014-04-15T20:51:30
2014-04-15T20:51:30
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python ''' this module will query vappscb database ''' import pg, colaException class Vappscb(object): ''' top vappscb class for database query''' def __init__(self): self.db = None #if conferenceId = primaryConferenceId, chassis is primary self.conferenceId = '' self.primaryConferenceId = '' self.info = [] self.debug = [] self.error = [] self.result = [] self.mediaport = False self.localMediaPorts = [] self.g2w = False self.passcode = '' self.chassis = '' self.callids = [] self.primary = False self.haveSnapShot = False self.snapshotCalls= [] self.snapshotConferences = [] self.snapshotConferencestate = [] def connect(self, connection): ''' connect to database ''' try: self.db = pg.connect(connection['dbname'], connection['host'], int(connection['port']), None, None, connection['user'], connection['password']) except (pg.ProgrammingError, pg.InternalError), error: raise colaException.DatabaseError('can not connect to database\n%s' %(error)) def close(self): ''' close database connection ''' assert self.db, 'clost db. do not have database object' try: self.db.close() except (pg.InternalError, pg.ProgrammingError), error: raise colaException.DatabaseError ('error close database\n%s' %(error)) def getConferenceId(self, passcode = None, chassis = None): '''get conferenceid from conferences table requrie passcode and chasssis ''' if passcode: self.passcode = passcode if chassis: self.chassis = chassis assert self.passcode, 'passcode are required for getConferenceId()' assert self.chassis, 'chassis are required for getConferenceId()' query=("""select conferenceid from conferences where passcode='%s' and chassis='%s' """ %(self.passcode, self.chassis)) self.conferenceId = self.execute(query) if self.conferenceId: self.conferenceId = self.conferenceId[0][0] #we have conference, we must have primary confernce self.getPrimaryConferenceId() else: self.debug.append ('conference not found from bridge. passcode: %s' %(self.passcode)) def getPrimaryConferenceId(self, passcode = None): ''' get primary conference id from conferencestate table ''' if passcode: self.passcode = passcode if not self.passcode: return None assert self.passcode, 'passcode are required for getPrimaryConferenceId()' assert self.chassis, 'chassis are required for getPrimaryConferenceId()' query=("""select primaryconferenceid from conferencestate where passcode='%s' """ %(self.passcode)) self.primaryConferenceId = self.execute(query) if self.primaryConferenceId: self.primaryConferenceId = self.primaryConferenceId[0][0] #is ours client holding primary conference? self.isPrimary() else: self.primaryConferenceId=None self.debug.append('primary conference not found from database. passcode: %s' %(self.passcode)) def getLocalMediaPorts(self): ''' get local media ports from db ''' if not self.conferenceId: self.localMediaPorts = [] return None ports = {} self.localMediaPorts = [] query = ("""SELECT localmedia from calls where conferenceid = '%s' """ %(self.conferenceId)) res = self.execute(query) if not res: self.localMediaPorts = [] return None for port in res: try: ports[port[0].split(':')[1]]=None except AttributeError, error: self.debug.append('attribute error on split port (usually on primarys cascade).\n%s' %error) if ports: self.localMediaPorts=[x for x in ports.keys()] self.localMediaPorts.sort() if self.localMediaPorts[0]=='0': self.localMediaPorts.pop(0) return ports else: return None def isPrimary(self): ''' check if the call is primary ''' if self.conferenceId and self.primaryConferenceId: if self.conferenceId == self.primaryConferenceId: self.primary = True return True return False def execute(self, query): ''' execute query ''' assert self.db, 'do not have database object' try: res = self.db.query(query).getresult() except (pg.InternalError, pg.ProgrammingError), error: raise colaException.DatabaseError ('error execute query\n%s\n%s' %(query, error)) return res def snapShot(self): ''' take database snapshot of of the conference ''' #takes calls, conferences and conferencestate table. self.haveSnapShot = True query = (""" select * from calls """) self.snapshotCalls = self.execute(query) query = (""" select * from conferences """) self.snapshotConferences = self.execute(query) query = (""" select * from conferencestate """) self.snapshotConferencestate = self.execute(query) def checkSchema(self): ''' run schema check ''' #find if we have media port column in database res = None query = (""" select a.attname from pg_class c inner join pg_attribute a on a.attrelid = c.oid where c.relname = 'calls' and a.attname='localmedia' """) res = self.execute(query) if res: self.mediaport = True #find if the schema has accesscode #we should have at least this many rows in accesscode table. res = None g2wThrehod = 10000 query = ("""SELECT reltuples from pg_class where relname='accesscodes' and reltuples > '%s' """ %(g2wThrehod)) res = self.execute(query) if res: self.g2w = True def getCallId(self, passcode = None, chassis = None): ''' get call id from vappscb ''' if not self.conferenceId: self.callids=[] return None if passcode: self.passcode = passcode if chassis: self.chassis = chassis assert self.passcode, 'passcode are required for getCallId()' assert self.chassis, 'chassis are required for getCallId()' callid = {} query = (""" select callid from calls where destinationip = '%s' and conferenceid = '%s' """ %(self.chassis, self.conferenceId)) res = self.execute(query) if not res: return None for x in res: callid[x[0]] = None self.callids=[str(x) for x in callid.keys()] return callid def getPasscode(self, accesscode): '''for g2w database. get passcode from accesscode table''' query = (""" select passcode from accesscodes where accesscode = '%s' """ %(accesscode)) self.passcode = self.execute(query) if not self.passcode: raise colaException.DataError ('access code not found in g2w database\n%s' %(accesscode)) self.passcode = self.passcode[0][0] return self.passcode
UTF-8
Python
false
false
2,014
14,405,320,328,402
43f89f08104326279c5a8b23f9ba2801f4b25ac7
f5e20ff753bafed8ad987d61f137bbaa8e5aa4d9
/mahotas/tests/test_convolve.py
d115b2535f68a73edb9ee5b9aa8ff31f2f48d3b6
[ "BSL-1.0", "GPL-2.0-only" ]
non_permissive
tfmoraes/mahotas
https://github.com/tfmoraes/mahotas
234749cb08f04964849d9ac7a5e40907ba684cfc
0c030db8aaf8b30642ebd9f9339b52190a3d38f6
refs/heads/master
2020-11-30T16:21:32.397354
2011-06-12T21:20:35
2011-06-12T21:20:45
1,894,465
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import numpy as np import mahotas.convolve import mahotas._filters def test_compare_w_ndimage(): from scipy import ndimage A = np.arange(34*340).reshape((34,340))%3 B = np.ones((3,3), A.dtype) for mode in mahotas._filters.modes: assert np.all(mahotas.convolve(A, B, mode=mode) == ndimage.convolve(A, B, mode=mode)) def test_22(): A = np.arange(1024).reshape((32,32)) B = np.array([ [0,1], [2,3], ]) C = np.array([ [0,1,0], [2,3,0], [0,0,0], ]) AB = mahotas.convolve(A,B) AC = mahotas.convolve(A,C) assert AB.shape == AC.shape assert np.all(AB == AC)
UTF-8
Python
false
false
2,011
13,477,607,390,783
83591a2a7888cde7d93563ce0e0e92196dc96672
3350a4f14e83e400376fc26a7b2573e37c4b375c
/Solutions/Problems_1-9/Problem_4.py
6d22b87b55d1f61c631c9606811e99d35b64b0dd
[]
no_license
tehs0ap/Project-Euler-Python
https://github.com/tehs0ap/Project-Euler-Python
512406538a03d53d94a319390d6f459a5e92987f
d181aec1d79da73cb9ae7f34e129c2abdd4a309e
refs/heads/master
2021-01-01T16:44:53.069615
2012-12-26T13:39:39
2012-12-26T13:39:39
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
''' Created on 2012-12-06 @author: Marty ''' ''' A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 99. Find the largest palindrome made from the product of two 3-digit numbers. ''' import time def isPalindrome(number): numberString = str(number) if numberString == numberString[::-1]: return True else: return False startTime = time.time() largestPalindrome = 0 for a in reversed(range(100, 1000)): for b in reversed(range(100, 1000)): if isPalindrome(a * b) and (a * b) > largestPalindrome: largestPalindrome = a * b print largestPalindrome print "Time Elapsed: " + str(time.time() - startTime)
UTF-8
Python
false
false
2,012
7,670,811,595,486
2b9c0574dfecd51b6431c838ea709394feff6f4d
69b1f87cd1fef02fc8c9c227200f391f1c39324a
/librarian/models.py
a024495de6f518e06bd0badbd69e8607d7ac2eea
[ "BSD-2-Clause" ]
permissive
dbikard/SynBioWorld
https://github.com/dbikard/SynBioWorld
ca151e3798defa260bdd455fe4f203cc3ef0d9d6
7f56d1305f1c7c9953d51b3516a073b7bce8e357
refs/heads/master
2021-01-16T19:33:59.673976
2011-08-12T18:34:34
2011-08-12T18:34:34
1,372,056
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from .utils import slugify from datetime import datetime from django.contrib.auth.models import User from django.contrib.sitemaps import Sitemap from django.db import models from django.db.models import permalink from minicms.models import BaseContent from random import choice from string import ascii_letters, digits from django.core.exceptions import ObjectDoesNotExist from google.appengine.api import taskqueue import re import urllib2 def get_page(url): request = urllib2.Request(url) response = urllib2.urlopen(request) html = response.read() response.close() return html def uniquify(seq, idfun=None): # order preserving if idfun is None: def idfun(x): return x seen = {} result = [] for item in seq: marker = idfun(item) if marker in seen: continue seen[marker] = 1 result.append(item) return result class Country(models.Model): name=models.CharField(max_length=200) sbw=models.IntegerField(null=True) has_SB=models.BooleanField() def __unicode__(self): return '%s' % (self.name) def get_institutions(self): return Institution.objects.filter(country=self).order_by('-sbw') def get_SBW(self): self.sbw=sum([inst.sbw for inst in self.get_institutions()]) self.save() return self.sbw class Town(models.Model): name=models.CharField(max_length=200) country=models.ForeignKey(Country) def __unicode__(self): return '%s' % (self.name) class Institution(models.Model): name=models.CharField(max_length=200) country=models.ForeignKey(Country,null=True) sbw=models.IntegerField(null=True) def __unicode__(self): return '%s' % (self.name) def get_present_employee(self): aff=Affiliation.objects.filter(institution=self).order_by('-year') people=uniquify([x.employee for x in aff if x.employee.get_institution() == self]) people.sort(cmp=lambda x,y: cmp(y.sbw,x.sbw)) return people def get_past_employee(self): aff=Affiliation.objects.filter(institution=self).order_by('-year') people=uniquify([x.employee for x in aff if x.employee.get_institution() != self]) people.sort(cmp=lambda x,y: cmp(y.sbw,x.sbw)) return people def get_SBW(self): self.sbw=sum([scientist.sbw for scientist in self.get_present_employee()]) self.save() return self.sbw #~ class Keyword(models.Model): #~ name=models.CharField(max_length=200) #~ count=models.IntegerField(null=True) #~ def __unicode__(self): #~ return 'Keyword: %s' % (self.name) class Person(models.Model): first_name=models.CharField(max_length=200,null=True) initials=models.CharField(max_length=10) last_name=models.CharField(max_length=200) email=models.CharField(max_length=200,null=True) sbw=models.IntegerField(null=True) def __unicode__(self): return "%s %s" % (self.last_name, self.initials) def get_papers(self): authorship_set=Authorship.objects.filter(author=self) paper_set=[x.paper for x in authorship_set] paper_set.sort(cmp=lambda x,y: cmp(y.pubYear,x.pubYear)) return paper_set def get_institution(self): try: aff=Affiliation.objects.filter(employee=self).order_by('-year')[0] inst=aff.institution except: inst=None return inst def get_SBW(self): self.sbw=sum([pub.sbw for pub in self.get_papers()]) self.save() return self.sbw @permalink def get_absolute_url(self): return ('librarian.views.show_person', (), {'person_id': self.pk}) class Journal(models.Model): name=models.CharField(max_length=200,null=True) short_name=models.CharField(max_length=100,null=True) sbw=models.IntegerField(null=True) def get_name(self): if self.name and not self.short_name: L=self.name[0] num=re.match("[0-9]",L) if num: L="0-9" html = get_page("http://images.isiknowledge.com/WOK46/help/WOS/"+L+"_abrvjt.html") q=re.findall("<DT>(?P<name>.+)\n<B><DD>\t(?P<short_name>.+)\n</B>",html) names={} for n in q: name=n[0].capitalize() # It happens often that several journals have the same name and different short_names if name==self.name: self.short_name=n[1].capitalize() break if not self.short_name: self.short_name=self.name self.save() elif self.short_name and not self.name: L=self.short_name[0] num=re.match("[0-9]",L) if num: L="0-9" html = get_page("http://images.isiknowledge.com/WOK46/help/WOS/"+L+"_abrvjt.html") q=re.findall("<DT>(?P<name>.+)\n<B><DD>\t(?P<short_name>.+)\n</B>",html) short_names={} for n in q: short_name=n[1].capitalize() if short_name==self.short_name: self.name=n[0].capitalize() if not self.name: self.name=self.short_name self.save() def __unicode__(self): return '%s' % (self.name) def get_papers(self): return Paper.objects.filter(journal=self).order_by('-pubYear') def get_SBW(self): self.sbw=sum([pub.sbw for pub in self.get_papers()]) self.save() return self.sbw class Raw_cit(models.Model): cit=models.CharField(max_length=200,null=True) last_name=models.CharField(max_length=50, null=True) init=models.CharField(max_length=5, null=True) year=models.IntegerField(null=True) jour=models.ForeignKey(Journal, null=True) vol=models.CharField(max_length=20, null=True) page=models.CharField(max_length=20, null=True) doi=models.CharField(max_length=100, null=True) def get_paper(self): cited_paper=Paper.objects.filter(raw_cit=self) if len(cited_paper)==1: return cited_paper[0] else: if self.doi: cited_paper=Paper.objects.filter(doi=self.doi) else: try: cited_first_au=Person.objects.get(last_name=self.last_name, initials=self.init) except ObjectDoesNotExist: cited_first_au=None cited_paper=Paper.objects.filter(journal=self.jour) if cited_first_au: cited_paper=cited_paper.filter(first_au=cited_first_au) if self.year: cited_paper=cited_paper.filter(pubYear=self.year) if self.vol: cited_paper=cited_paper.filter(volume=self.vol) cited_paper=cited_paper.filter(volume=self.vol) cited_paper=list(cited_paper) if len(cited_paper)==1: cited_paper=cited_paper[0] cited_paper.raw_cit=self cited_paper.save() return cited_paper class Paper(models.Model): first_au = models.ForeignKey(Person,null=True) title = models.CharField(max_length=200, null=True) journal = models.ForeignKey(Journal,null=True) volume=models.CharField(max_length=10,null=True) page=models.CharField(max_length=10,null=True) pubMonth=models.IntegerField(null=True) pubYear=models.IntegerField(null=True) abstract=models.TextField(null=True) doi=models.CharField(max_length=200,null=True) sbw=models.IntegerField(null=True) raw_citations=models.TextField(max_length=100000,null=True) raw_cit=models.ForeignKey(Raw_cit,null=True) added=models.DateField(null=True) def __unicode__(self): return self.title def get_authors(self): authorship_set=Authorship.objects.filter(paper=self).order_by('position') author_set=[x.author for x in authorship_set] return author_set def get_SBW(self): self.sbw=Citation.objects.filter(cited_paper=self).count() self.save() return self.sbw def update_citations(self): CR=self.raw_citations CR=CR.split(";") for cit in CR: cit=cit.strip() taskqueue.add(url='/librarian/update_raw_cit/', params={'pk': self.pk,'raw_cit':cit}, queue_name='citationsUpdate') class Citation(models.Model): citing_paper=models.ForeignKey(Paper, related_name="citing_set") cited_paper=models.ForeignKey(Paper, related_name="cited_set",null=True) def __unicode__(self): return '%s-%s cited by %s-%s' % (self.cited_paper.first_au.last_name, self.cited_paper.pubYear, self.citing_paper.first_au.last_name,self.citing_paper.pubYear) class Authorship(models.Model): author = models.ForeignKey(Person) paper = models.ForeignKey(Paper) position = models.IntegerField(null=True) def __unicode__(self): return "%s-%s" % (self.author.last_name,self.paper.title) class Affiliation(models.Model): institution=models.ForeignKey(Institution) employee=models.ForeignKey(Person) year=models.IntegerField(null=True) #~ class Person_has_keyword(models.Model): #~ person=models.ForeignKey(Person) #~ keyword=models.ForeignKey(Keyword) #~ count=models.IntegerField(null=True) class ISI_data(models.Model): data=models.TextField(null=True) class PeopleSitemap(Sitemap): changefreq = "never" def items(self): return Person.objects.all()
UTF-8
Python
false
false
2,011
19,524,921,334,792
718e8a1f18b2d0a17c39efb56599888ff60e662a
44064ed79f173ddca96174913910c1610992b7cb
/Second_Processing_app/temboo/Library/NYTimes/MovieReviews/__init__.py
cdeed78c8566d996eeb4268b7373fb410077558b
[]
no_license
dattasaurabh82/Final_thesis
https://github.com/dattasaurabh82/Final_thesis
440fb5e29ebc28dd64fe59ecd87f01494ed6d4e5
8edaea62f5987db026adfffb6b52b59b119f6375
refs/heads/master
2021-01-20T22:25:48.999100
2014-10-14T18:58:00
2014-10-14T18:58:00
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from GetReviewer import * from SearchByKeyword import * from SearchByReviewer import * from GetPicks import *
UTF-8
Python
false
false
2,014
10,900,627,025,593
da628fcfe6a942f074eaaa77f8ce9ae24b9fd911
0363373da960570fe76d88e518e3151cafa61b0a
/ds18s20.py
a2d0c654eddc1c725d36c11d85d95967b8763422
[]
no_license
Sicness/SmartHome
https://github.com/Sicness/SmartHome
6dc69d18ddafa334301825e48de2d111e22fc68c
286cc3393e26a035e1bff0ac486ac29ea778d4a9
refs/heads/master
2020-05-20T01:36:58.898251
2013-05-06T06:42:55
2013-05-06T06:42:55
6,040,924
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# idea and same code I took from: # http://learn.adafruit.com/adafruits-raspberry-pi-lesson-11-ds18b20-temperature-sensing/software # Dependencies: # sudo modprobe w1-gpio # sudo modprobe w1-therm from time import sleep class ds18b20: def __init__(self, adr): self.__dev_adr = adr def read_temp_raw(self): try: f = open(self.__dev_adr, 'r') except: raise lines = f.readlines() f.close() return lines def read_c(self): lines = self.read_temp_raw() while lines[0].strip()[-3:] != 'YES': time.sleep(0.2) lines = read_temp_raw() equals_pos = lines[1].find('t=') if equals_pos != -1: temp_string = lines[1][equals_pos+2:] temp_c = float(temp_string) / 1000.0 return temp_c def read_f(self): lines = self.read_temp_raw() while lines[0].strip()[-3:] != 'YES': time.sleep(0.2) lines = read_temp_raw() equals_pos = lines[1].find('t=') if equals_pos != -1: temp_string = lines[1][equals_pos+2:] temp_c = float(temp_string) / 1000.0 temp_f = temp_c * 9.0 / 5.0 + 32.0 return temp_f def read_temp(self): """ Read temperature from sensor and return turpe (c, f) """ lines = self.read_temp_raw() while lines[0].strip()[-3:] != 'YES': time.sleep(0.2) lines = read_temp_raw() equals_pos = lines[1].find('t=') if equals_pos != -1: temp_string = lines[1][equals_pos+2:] temp_c = float(temp_string) / 1000.0 temp_f = temp_c * 9.0 / 5.0 + 32.0 return temp_c, temp_f # same tests if __name__ == '__main__': ds = ds18b20('/sys/bus/w1/devices/10-0008025b6d03/w1_slave') print "read_c returned: ", ds.read_c() print "read_f returned: ", ds.read_f() print "read_temp returned: ", ds.read_temp()
UTF-8
Python
false
false
2,013
3,521,873,218,423
c4d6a7517baefdbcac586cebd2158be40a02c9bc
e3df0e321e8bcf6e7d70644dccf5ea4f109580e8
/byteprint/bp/core/scraps/urls/admin.py
4ad8a5ec52d331fc06495fe7e9d917b81ccf046f
[ "MIT" ]
permissive
colingourlay/byteprint
https://github.com/colingourlay/byteprint
65af7b9e01299c1e62f4cb03c5641e12fcf39860
184ddb5eac48a48507e20553f82b2f16c1a29fda
refs/heads/master
2021-01-13T02:25:56.834846
2010-05-20T14:47:53
2010-05-20T14:47:53
677,022
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.conf.urls.defaults import * urlpatterns = patterns('bp.core.scraps.views.admin', url(r'^blueprints/$', 'blueprints', name='scraps_admin_blueprints' ), url(r'^piles/create/$', 'pile_create', name='scraps_admin_pile_create' ), url(r'^piles/(?P<pile_id>\d+)/delete/$', 'pile_delete', name='scraps_admin_pile_delete' ), url(r'^piles/(?P<pile_id>\d+)/disable/$', 'pile_toggle', {'status': False}, name='scraps_admin_pile_disable' ), url(r'^piles/(?P<pile_id>\d+)/enable/$', 'pile_toggle', {'status': True}, name='scraps_admin_pile_enable' ), url(r'^piles/(?P<pile_id>\d+)/rename/$', 'pile_rename', name='scraps_admin_pile_rename' ), url(r'^create/(?P<blueprint_name>([\w-])+)/$', 'scrap_create', name='scraps_admin_scrap_create' ), url(r'^create/$', 'scrap_create', name='scraps_admin_scrap_create_httppost' ), url(r'^(?P<scrap_id>\d+)/delete/$', 'scrap_delete', name='scraps_admin_scrap_delete' ), url(r'^(?P<scrap_id>\d+)/reposition/(?P<position>\d+)/$', 'scrap_reposition', name='scraps_admin_scrap_reposition' ), url(r'^(?P<scrap_id>\d+)/repile/(?P<pile_id>\d+)/$', 'scrap_repile', name='scraps_admin_scrap_repile' ), url(r'^(?P<scrap_id>\d+)/unpile/$', 'scrap_repile', name='scraps_admin_scrap_unpile' ), url(r'^(?P<scrap_id>\d+)/disable/$', 'scrap_toggle', {'status': False}, name='scraps_admin_scrap_disable' ), url(r'^(?P<scrap_id>\d+)/enable/$', 'scrap_toggle', {'status': True}, name='scraps_admin_scrap_enable' ), url(r'^(?P<scrap_id>\d+)/$', 'scrap_edit', name='scraps_admin_scrap_edit' ), url(r'^$', 'manage', name='scraps_admin_manage' ), )
UTF-8
Python
false
false
2,010
10,110,353,019,136
b9082a3b9ad567517681044000cc16f3a4d832f1
1e6ec0eaaa29f12af5db10b1f0f0758e3b66f00c
/paypaladaptive/admin.py
1b7ce456b460d9e0093fb1c013eec8f08b8bbcb0
[ "CC-BY-3.0" ]
non_permissive
flc/django-paypal-adaptive
https://github.com/flc/django-paypal-adaptive
0f93fa8720a4179632db066a37369b0b226e4ede
20228ad023574b35268efe9958dd7926134570e7
refs/heads/master
2021-01-15T18:50:34.211358
2014-11-06T23:23:03
2014-11-06T23:23:03
21,063,742
2
2
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.contrib import admin from . import models def update_adaptive_instance(modeladmin, request, queryset): for instance in queryset: instance.update(save=True) update_adaptive_instance.short_description = u"Update" class PaymentAdmin(admin.ModelAdmin): actions = [update_adaptive_instance] list_display = ('created_date', 'pay_key', 'status',) list_filter = ('status',) search_fields = ('=id', '=pay_key') class PreapprovalAdmin(admin.ModelAdmin): list_display = ('preapproval_key', 'valid_until_date', 'status') actions = [update_adaptive_instance] class RefundAdmin(admin.ModelAdmin): pass class IPNLogAdmin(admin.ModelAdmin): list_display = ( 'created_date', 'path', 'verify_request_response', 'return_status_code', 'duration', ) list_filter = ('verify_request_response', 'return_status_code') admin.site.register(models.Payment, PaymentAdmin) admin.site.register(models.Preapproval, PreapprovalAdmin) admin.site.register(models.Refund, RefundAdmin) admin.site.register(models.IPNLog, IPNLogAdmin)
UTF-8
Python
false
false
2,014
19,550,691,171,223
6dca66687636564f55c340725ba631934160476b
0043c4469e3d77a1a97e6db563683b7057442ad8
/examples/zwaveCommander.py
ccb8458d5518898c6c693c7eed466e0253b1e0fd
[]
no_license
BrianODell/py-openzwave
https://github.com/BrianODell/py-openzwave
bb00f9744e1e5d52400b74dc82539d37a68159fa
3682bd131eef94cec8288f14e62b4f4b8dd055e8
refs/heads/master
2021-01-17T22:39:58.319426
2011-08-26T17:44:21
2011-08-26T17:44:21
2,144,686
2
1
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- from collections import namedtuple import curses import curses.panel import logging import threading import time from louie import dispatcher, All from common.ozwWrapper import ZWaveWrapper padcoords = namedtuple('padcoords', ['sminrow','smincol','smaxrow','smaxcol']) colorlevels = namedtuple('colorlevels', ['error','warning']) class ZWaveCommander: def __init__(self, stdscr): self._curAlert = False self._alertStack = list() self._driverInitialized = False self._wrapper = None self._listMode = True self._screen = stdscr self._version = '0.1 Beta 1' self._listtop = 0 self._listindex = 0 self._listcount = 0 self._selectedNode = None self._stop = threading.Event() self._keys = { 'A' : 'Add', 'B' : 'About', 'D' : 'Delete', 'R' : 'Refresh', 'S' : 'Setup', '+' : 'Increase', '-' : 'Decrease', '1' : 'On', '0' : 'Off', 'Q' : 'Quit' } self._config = { 'device': '/dev/keyspan-2', 'config': '../openzwave/config/', } # TODO: add log level to config # TODO: add log enable/disable to config # TODO: logging - can ozw log be redirected to file? If so, we can add ability to view/tail log FORMAT='%(asctime)s\t%(levelname)s\t%(name)s\t%(message)s' logging.basicConfig(filename='test.log', level=logging.DEBUG, format=FORMAT) self._log = logging.getLogger('ZWaveCommander') self._logbar ='\n%s\n' % ('-'*60) def main(self): '''Main run loop''' self._log.info('%sZWaveCommander Version %s Starting%s', self._logbar, self._version, self._logbar) self._initCurses(self._screen) try: self._checkConfig() self._checkInterface() self._runLoop() finally: self._shutdown() def _delayloop(self, context, duration, callback): self._log.debug('thread %s sleeping...', context) time.sleep(duration) self._log.debug('timer %s expired, executing callback %s', context, callback) if context == 'alert': self._curAlert = False if self._alertStack: self._alert(self._alertStack.pop()) if callback is not None: callback() def _handleQuit(self): # TODO: exit confirmation dialog self._log.info('Stop requested') self._stop.set() def _handleRefresh(self): if self._selectedNode: self._wrapper.refresh(self._selectedNode) def _handleOn(self): if self._selectedNode: self._wrapper.setNodeOn(self._selectedNode) def _handleOff(self): if self._selectedNode: self._wrapper.setNodeOff(self._selectedNode) def _handleIncrease(self): if self._selectedNode: curLevel = self._selectedNode.level newLevel = curLevel + 10 if newLevel > 99: newLevel = 99 self._wrapper.setNodeLevel(self._selectedNode, newLevel) def _handleDecrease(self): if self._selectedNode: curLevel = self._selectedNode.level newLevel = curLevel - 10 if newLevel < 0: newLevel = 0 self._wrapper.setNodeLevel(self._selectedNode, newLevel) def _setTimer(self, context, duration, callback): newTimer = threading.Thread(None, self._delayloop, 'cb-thread-%s' % context, (context, duration, callback), {}) newTimer.setDaemon(True) newTimer.start() def _alert(self, text): '''perform program alert''' if not self._curAlert: self._curAlert = True curses.flash() self._screen.addstr(self._screensize[0] - 1, 0, ' {0:{width}}'.format(text, width=self._screensize[1] - 2), curses.color_pair(self.COLOR_ERROR)) self._screen.refresh() self._setTimer('alert', 1, self._redrawMenu) else: self._alertStack.append(text) def _layoutScreen(self): # TODO: handle screen resize on curses.KEY_RESIZE in loop (tear down, re-calculate, and re-build) # top 5 lines (fixed): system info (including list header) # bottom line (fixed): menu/status # remaining top half: item list (scrolling) # remaining bottom half: split - left half=static info, right half=detail (scrolling) # item list: 8 columns. All column widths here are padded with 1 char space (except col 0, which is always 1 char) # c0=1 char fixed (select indicator) # c1=4 char fixed (id) # c2=10 char min (name) # c3=10 char min (location) # c4=20 char min (type) # c5=9 char fixed (state) # c6=7 char fixed (batt) # c7=7 char fixed (signal) # last three columns: 23 chars: are optional and can fall off if space requires it (min width 45) # "min" columns expand evenly to fit remaining space self._screen.clear() self._log.debug("Laying out screen") self._colwidths=[1,4,10,10,15,12,8,8] self._colheaders=['','ID','Name','Location','Type','State','Batt','Signal'] self._detailheaders=['Info','Values','Classes','Groups','Events'] self._flexcols=[2,3,4] self._rowheights=[5,5,10,1] self._flexrows=[1,2] self._deviceValueColumns=['id','commandClass','instance','index','type','label','value','units'] self._deviceValueWidths= [10,20,9,6,10,20,10,10] self._sortcolumn = self._colheaders[1] self._detailview = self._detailheaders[0] self._screensize = self._screen.getmaxyx() width = self._screensize[1] height = self._screensize[0] self._log.debug('Screen is %d wide by %d high', width, height) # Update dynamic column widths for device list self._log.debug('Initial column widths are: %s', self._colwidths) cwid = 0 for i in self._colwidths: cwid += i flexwidth = width - cwid if flexwidth > 0: adder = divmod(flexwidth, len(self._flexcols)) for i in self._flexcols: self._colwidths[i] += adder[0] self._colwidths[self._flexcols[-1]] += adder[1] self._log.debug('Adjusted column widths are: %s' ,self._colwidths) # Update dynamic row heights for screen sections self._log.debug('Initial row heights are: %s' , self._rowheights) cht = 0 for i in self._rowheights: cht += i flexheight = height - cht if flexheight > 0: adder = divmod(flexheight, len(self._flexrows)) for i in self._flexrows: self._rowheights[i] += adder[0] self._rowheights[self._flexrows[-1]] += adder[1] self._log.debug('Adjusted row heights are: %s' , self._rowheights) if curses.has_colors(): self._log.debug('Curses initialized: %d colors and %d color pairs available', curses.COLORS, curses.COLOR_PAIRS) else: self._log.debug('Curses initialized, but no colors are available') self._listpad = curses.newpad(256,256) self._detailpads = { 'Info': curses.newpad(self._rowheights[2], self._screensize[1]), 'Values': curses.newpad(128, self._screensize[1]), 'Classes': curses.newpad(128, self._screensize[1]), 'Groups': curses.newpad(self._rowheights[2], self._screensize[1]), 'Events': curses.newpad(256, self._screensize[1]) } self._detailpos = dict() for k in self._detailpads.iterkeys(): self._detailpos[k] = 0 self._detailtop = self._rowheights[0] + self._rowheights[1] + 2 self._detailbottom = self._detailtop + self._rowheights[2] - 3 self._updateColumnHeaders() def _initCurses(self, stdscr): '''Configure ncurses application-specific environment (ncurses has already been initialized)''' curses.curs_set(0) # Re-define color attributes... self.COLOR_NORMAL=1 self.COLOR_HEADER_NORMAL=2 self.COLOR_HEADER_HI=3 self.COLOR_ERROR=4 self.COLOR_CRITICAL=5 self.COLOR_WARN=6 self.COLOR_OK=7 curses.init_pair(self.COLOR_NORMAL, curses.COLOR_WHITE, curses.COLOR_BLACK) # normal (selected row is inverted, disabled/sleep is dim) curses.init_pair(self.COLOR_HEADER_NORMAL, curses.COLOR_BLACK, curses.COLOR_GREEN) # header normal curses.init_pair(self.COLOR_HEADER_HI, curses.COLOR_WHITE, curses.COLOR_CYAN) # header hi curses.init_pair(self.COLOR_ERROR, curses.COLOR_YELLOW, curses.COLOR_RED) # error text curses.init_pair(self.COLOR_CRITICAL, curses.COLOR_RED, curses.COLOR_BLACK) # critical curses.init_pair(self.COLOR_WARN, curses.COLOR_YELLOW, curses.COLOR_BLACK) # warn curses.init_pair(self.COLOR_OK, curses.COLOR_GREEN, curses.COLOR_BLACK) # ok self._layoutScreen() def _checkConfig(self): # TODO: check if configuration exists and is valid. If not, then go directly to handleSetup(). Loop until user cancels or enters valid config. pass def _handleSetup(self): self._alert('handleSetup not yet implemented') def _checkIfInitialized(self): if not self._driverInitialized: msg = 'Unable to initialize driver - check configuration' self._alert(msg) self._log.warning(msg) self._handleSetup() else: self._log.info('OpenZWave initialized successfully.') def _notifyDriverReady(self, homeId): self._log.info('OpenZWave Driver is Ready; homeid is %0.8x. %d nodes were found.', homeId, self._wrapper.nodeCount) self._driverInitialized = True self._addDialogText(2,'Driver initialized with homeid {0}'.format(hex(homeId))) self._addDialogText(3,'Node Count is now {0}'.format(self._wrapper.nodeCount)) self._readyNodeCount = 0 def _notifyNodeAdded(self, homeId, nodeId): self._addDialogText(3,'Node Count is now {0}'.format(self._wrapper.nodeCount)) self._updateSystemInfo() def _redrawAll(self): self._clearDialog() self._updateSystemInfo() self._updateDeviceList() self._updateColumnHeaders() self._updateDeviceDetail() def _notifySystemReady(self): self._log.info('OpenZWave Initialization Complete.') self._alert('OpenZWave Initialization Complete.') self._redrawAll() def _notifyNodeReady(self, homeId, nodeId): self._readyNodeCount += 1 self._addDialogText(2, 'OpenZWave is querying associated devices') self._addDialogText(3,'Node {0} is now ready'.format(nodeId)) self._addDialogProgress(5, self._readyNodeCount, self._wrapper.nodeCount) self._updateDeviceList() def _notifyValueChanged(self, signal, **kw): nodeId = kw['nodeId'] self._log.debug('Got value changed notification for node {0}'.format(nodeId)) # TODO: this is very heavy handed - just update appropriate elements self._updateDeviceList() self._updateDeviceDetail() def _initDialog(self, height, width, buttons=('OK',), caption=None): self._dialogpad = curses.newpad(height, width) self._dialogpad.bkgd(0x94, curses.color_pair(self.COLOR_HEADER_HI)) self._dialogpad.clear() self._dialogpad.box() if caption: lh = (width / 2) - (len(caption) / 2) - 1 self._dialogpad.addstr(0, lh, ' {0} '.format(caption), curses.color_pair(self.COLOR_NORMAL) | curses.A_STANDOUT) if buttons: if len(buttons) > 1: bwid = 0 for bcap in buttons: if len(bcap) > bwid: bwid = len(bcap) cellwid = (width - 4) / len(buttons) lpad = (cellwid - bwid) / 2 - 1 rpad = cellwid - bwid - lpad - 1 self._dialogpad.move(height - 2, 1) else: bwid = len(buttons[0]) lpad = rpad = 1 self._dialogpad.move(height - 2, (width / 2) - (bwid / 2) - 2) for button in buttons: self._dialogpad.addstr('{0:{wlpad}}<{1:^{wbwid}}>{0:{wrpad}}'.format('',button, wlpad=lpad, wbwid=bwid, wrpad=rpad)) dt = (self._screensize[0] / 2) - (height / 2) dl = (self._screensize[1] / 2) - (width / 2) dc = padcoords(sminrow=dt,smincol=dl,smaxrow=dt+height - 1, smaxcol=dl+width - 1) self._dialogcoords = dc self._dialogpad.overlay(self._screen, 0, 0, dc.sminrow, dc.smincol, dc.smaxrow, dc.smaxcol) self._screen.refresh() def _clearDialog(self): del self._dialogpad self._dialogpad = None self._dialogcoords = None self._screen.touchwin() self._screen.refresh() def _updateDialog(self): if self._dialogpad: self._screen.refresh() dc = self._dialogcoords self._dialogpad.refresh(0,0,dc.sminrow, dc.smincol, dc.smaxrow, dc.smaxcol) def _addDialogText(self, row, text, align='^'): if self._dialogpad: self._dialogpad.addstr(row, 1, '{0:{aln}{wid}}'.format(text, aln=align, wid=self._dialogpad.getmaxyx()[1] - 2)) self._updateDialog() def _addDialogProgress(self, row, current, total, showPercent=True, width=None): if self._dialogpad: dc = self._dialogcoords if width is None: width = (dc.smaxcol - dc.smincol) * 2 / 3 pct = float(current) / float(total) filled = int(pct * float(width)) lh = ((dc.smaxcol - dc.smincol) / 2) - (width / 2) self._dialogpad.addch(row, lh - 1, '[', curses.color_pair(self.COLOR_NORMAL) | curses.A_BOLD) self._dialogpad.addch(row, lh + width, ']', curses.color_pair(self.COLOR_NORMAL) | curses.A_BOLD) self._dialogpad.addstr(row, lh, ' '*width, curses.color_pair(self.COLOR_NORMAL)) self._dialogpad.addstr(row, lh, '|'*filled, curses.color_pair(self.COLOR_OK) | curses.A_BOLD) if showPercent: pctstr = '{0:4.0%}'.format(pct) lh = ((dc.smaxcol - dc.smincol) / 2) - (len(pctstr) / 2) self._dialogpad.addstr(row, lh, pctstr, curses.color_pair(self.COLOR_NORMAL) | curses.A_BOLD) self._updateDialog() def _checkInterface(self): dispatcher.connect(self._notifyDriverReady, ZWaveWrapper.SIGNAL_DRIVER_READY) dispatcher.connect(self._notifySystemReady, ZWaveWrapper.SIGNAL_SYSTEM_READY) dispatcher.connect(self._notifyNodeReady, ZWaveWrapper.SIGNAL_NODE_READY) dispatcher.connect(self._notifyValueChanged, ZWaveWrapper.SIGNAL_VALUE_CHANGED) dispatcher.connect(self._notifyNodeAdded, ZWaveWrapper.SIGNAL_NODE_ADDED) self._initDialog(10,60,['Cancel'],'Progress') self._addDialogText(2,'Initializing OpenZWave') self._log.info('Initializing OpenZWave via wrapper') self._wrapper = ZWaveWrapper.getInstance(device=self._config['device'], config=self._config['config'], log=None) self._setTimer('initCheck', 3, self._checkIfInitialized) while not self._stop.isSet() and not self._wrapper.initialized: time.sleep(0.1) # TODO: handle keys here... cancel/etc def _runLoop(self): while not self._stop.isSet(): key = self._screen.getch() if key == curses.KEY_DOWN: self._switchItem(1) elif key == curses.KEY_UP: self._switchItem(-1) elif key == curses.KEY_LEFT: self._switchTab(-1) elif key == curses.KEY_RIGHT: self._switchTab(1) elif key == 0x09: self._nextMode() elif key is not None: self._handleMnemonic(key) def _handleMnemonic(self, key): for mnemonic, func in self._keys.iteritems(): if key == ord(mnemonic[0].lower()) or key == ord(mnemonic[0].upper()): funcname = '_handle%s' % func try: method = getattr(self, funcname) method() except AttributeError as ex: msg = 'No method named [%s] defined!' % funcname self._log.warn('handleMnemonic: %s', msg) self._log.warn('handleMnemonic Exception Details: %s', str(ex)) self._alert(msg) break def _resetDetailPos(self): for p in self._detailpos.iterkeys(): self._detailpos[p] = 0 def _switchItem(self, delta): if self._listMode: n = self._listindex + delta if n in range(0, self._listcount): self._listindex = n self._updateDeviceList() # TODO: we don't really need to redraw everything when selection changes self._resetDetailPos() self._updateDeviceDetail() else: self._detailpos[self._detailview] += delta self._updateDeviceDetail() def _switchTab(self, delta): if self._listMode: i = self._colheaders.index(self._sortcolumn) i += delta if i > len(self._colheaders) - 1: i = 1 elif i < 1: i = len(self._colheaders) - 1 self._sortcolumn = self._colheaders[i] else: i = self._detailheaders.index(self._detailview) i += delta if i > len(self._detailheaders) - 1: i = 0 elif i < 0: i = len(self._detailheaders) - 1 self._detailview = self._detailheaders[i] self._updateColumnHeaders() self._updateDeviceList() self._updateDeviceDetail() def _nextMode(self): self._listMode = not self._listMode self._updateColumnHeaders() def _shutdown(self): # TODO: handle orderly shutdown pass def _rightPrint(self, row, data, attrs=None): if attrs is None: attrs = curses.color_pair(self.COLOR_NORMAL) self._screen.addstr(row, self._screensize[1] - len(data), data, attrs) def _updateSystemInfo(self): self._screen.addstr(0,1,'{0} on {1}'.format(self._wrapper.controllerDescription, self._config['device']), curses.color_pair(self.COLOR_NORMAL)) self._screen.addstr(1,1,'Home ID 0x%0.8x' % self._wrapper.homeId, curses.color_pair(self.COLOR_NORMAL)) self._screen.move(2,1) self._screen.addstr('{0} Registered Nodes'.format(self._wrapper.nodeCount), curses.color_pair(self.COLOR_NORMAL)) if self._wrapper.initialized: sleepcount = self._wrapper.sleepingNodeCount if sleepcount: self._screen.addstr(' ({0} Sleeping)'.format(sleepcount),curses.color_pair(self.COLOR_NORMAL) | curses.A_DIM) self._rightPrint(0, '{0} Library'.format(self._wrapper.libraryTypeName)) self._rightPrint(1, 'Version {0}'.format(self._wrapper.libraryVersion)) self._screen.refresh() def _updateColumnHeaders(self): self._screen.move(4,0) for text, wid in zip(self._colheaders, self._colwidths): clr = curses.color_pair(self.COLOR_HEADER_NORMAL) if self._listMode else curses.color_pair(self.COLOR_NORMAL) | curses.A_STANDOUT if text == self._sortcolumn: clr = curses.color_pair(self.COLOR_HEADER_HI) | curses.A_BOLD self._screen.addstr('{0:<{width}}'.format(text, width=wid), clr) self._screen.move(self._rowheights[0] + self._rowheights[1] + 1, 0) clr = curses.color_pair(self.COLOR_HEADER_NORMAL) if not self._listMode else curses.color_pair(self.COLOR_NORMAL) | curses.A_STANDOUT self._screen.addstr('{0:{width}}'.format('', width=self._screensize[1]), clr) self._screen.move(self._rowheights[0] + self._rowheights[1] + 1, 0) for text in self._detailheaders: clr = curses.color_pair(self.COLOR_HEADER_NORMAL) if not self._listMode else curses.color_pair(self.COLOR_NORMAL) | curses.A_STANDOUT if text == self._detailview: clr = curses.color_pair(self.COLOR_HEADER_HI) | curses.A_BOLD wid = len(text) self._screen.addstr(' {0:<{width}} '.format(text, width=wid), clr) def _fixColumn(self, text, width, align='<'): retval = '{0:{aln}{wid}}'.format(text, aln=align, wid=width) if len(retval) > width: retval = retval[:width] return retval def _getListItemColor(self, drawSelected): return curses.color_pair(self.COLOR_NORMAL) | curses.A_STANDOUT if drawSelected \ else curses.color_pair(self.COLOR_NORMAL) def _drawMiniBar(self, value, minValue, maxValue, drawWidth, drawSelected, drawPercent=False, colorLevels=None): clr = self._getListItemColor(drawSelected) pct = float(value) / float(maxValue) dw = drawWidth - 2 filled = int(pct * float(dw)) fillcolor = clr if not drawSelected: fillcolor = curses.color_pair(self.COLOR_OK) if colorLevels: if pct <= colorLevels.error: fillcolor = curses.color_pair(self.COLOR_CRITICAL) elif pct <= colorLevels.warning: fillcolor = curses.color_pair(self.COLOR_WARN) self._listpad.addch('[', clr | curses.A_BOLD) self._listpad.addstr('|' * filled, fillcolor) self._listpad.addstr(' ' * (dw - filled), clr) self._listpad.addch(']', clr | curses.A_BOLD) # TODO: draw percent text if requested def _drawNodeStatus(self, node, drawSelected): clr = self._getListItemColor(drawSelected) if node.isSleeping: self._listpad.addstr(self._fixColumn('(sleeping)', self._colwidths[5]), clr | curses.A_LOW) elif node.hasCommandClass(0x76): # lock self._listpad.addstr(self._fixColumn('Locked' if node.isLocked else 'Unlocked', self._colwidths[5]), clr) elif node.hasCommandClass(0x26): # multi-level switch self._drawMiniBar(node.level, 0, 99, self._colwidths[5], drawSelected) elif node.hasCommandClass(0x25): # binary switch self._listpad.addstr(self._fixColumn('ON' if node.isOn else 'OFF', self._colwidths[5]), clr) else: self._listpad.addstr(self._fixColumn('OK', self._colwidths[5]), clr) def _drawBatteryStatus(self, node, drawSelected): clr = self._getListItemColor(drawSelected) if node.hasCommandClass(0x80): self._drawMiniBar(node.batteryLevel, 0, 100, self._colwidths[6], drawSelected, colorLevels=colorlevels(error=0.10,warning=0.40)) else: self._listpad.addstr(self._fixColumn('', self._colwidths[6]), clr) def _drawSignalStrength(self, node, drawSelected): clr = self._getListItemColor(drawSelected) self._listpad.addstr(self._fixColumn('', self._colwidths[7]), clr) def _drawDeviceNodeLine(self, node, drawSelected): clr = self._getListItemColor(drawSelected) self._listpad.addstr(' ', clr) self._listpad.addstr(self._fixColumn(node.id, self._colwidths[1]), clr) self._listpad.addstr(self._fixColumn(node.name, self._colwidths[2]), clr) self._listpad.addstr(self._fixColumn(node.location, self._colwidths[3]), clr) self._listpad.addstr(self._fixColumn(node.productType, self._colwidths[4]), clr) self._drawNodeStatus(node, drawSelected) self._drawBatteryStatus(node, drawSelected) self._drawSignalStrength(node, drawSelected) def _updateDeviceList(self): self._listcount = self._wrapper.nodeCount idx = 0 for node in self._wrapper._nodes.itervalues(): if idx == self._listindex: self._selectedNode = node self._listpad.move(idx,0) self._drawDeviceNodeLine(node, idx == self._listindex) idx += 1 ctop = self._rowheights[0] listheight = self._rowheights[1] if self._listindex - self._listtop > listheight: self._listtop = self._listindex - listheight elif self._listindex < self._listtop: self._listtop = self._listindex self._screen.refresh() self._listpad.refresh(self._listtop, 0, ctop, 0, ctop + listheight, self._screensize[1] - 1) self._updateDialog() def _redrawDetailTab(self, pad): self._screen.refresh() pad.refresh(0, 0, self._detailtop, 0, self._detailbottom, self._screensize[1] - 1) def _updateDetail_Values(self, pad): # Draw column header clr = curses.color_pair(self.COLOR_HEADER_HI) | curses.A_BOLD pad.addstr(0,0,'{0:<{width}}'.format(' ', width=self._screensize[1]), clr) pad.move(0,1) for text, wid in zip(self._deviceValueColumns, self._deviceValueWidths): pad.addstr('{0:<{width}}'.format(text.title(), width=wid), clr) node = self._selectedNode if node and node.values: # Grab all items except for configuration values (they have their own tab) vset = list() for value in node.values.itervalues(): if value.valueData: vset.append(value) # Sort the resulting set: (1) command class, (2) instance, (3) index s = sorted(sorted(sorted(vset, key=lambda value: value.getValue('index')), key=lambda value: value.getValue('instance')), key=lambda value: value.getValue('commandClass')) if self._detailpos[self._detailview] >= len(s): self._detailpos[self._detailview]=len(s)-1 i = 0 for value in s: vdic = value.valueData pad.move(i+1,0) # TODO: reset detail position on parent item change drawSelected = self._detailpos['Values'] == i clr = self._getListItemColor(drawSelected) pad.addstr(' ' * self._screensize[1], clr) pad.move(i+1,1) i += 1 for key, wid in zip(self._deviceValueColumns, self._deviceValueWidths): clr = self._getListItemColor(drawSelected) text = value.getValue(key) # strip 'COMMAND_CLASS_' prefix to save some space if key == 'commandClass' and text.startswith('COMMAND_CLASS_'): text = text[14:] # TODO: value decorators (checkbox for Booleans, edit box for others) # decimal: format to 2 places # bool as checkbox # byte as minibar if editable # ints need to be directly edited... # buttons... ? # Draw editable items differently if key == 'value' and not vdic['readOnly'] and drawSelected: clr = curses.color_pair(self.COLOR_ERROR) pad.addstr(self._fixColumn(text, wid), clr) def _updateDetail_Info(self, pad): node = self._selectedNode if node: #baudRate, basic, generic, specific, version, security self._deviceInfoColumns=['id','name','location','capabilities','neighbors','manufacturer','product','productType'] if self._detailpos[self._detailview] >= len(self._deviceInfoColumns): self._detailpos[self._detailview]=len(self._deviceInfoColumns)-1 editableColumns=['name','location','manufacturer','product'] i = maxwid = 0 for name in self._deviceInfoColumns: maxwid = len(name) if len(name) > maxwid else maxwid colwidth = maxwid + 2 clr = self._getListItemColor(False) clr_rw = curses.color_pair(self.COLOR_ERROR) clr_ro = self._getListItemColor(True) clr_col = curses.color_pair(self.COLOR_OK) # TODO: If editable, should be textpad for column in self._deviceInfoColumns: val = str(getattr(node, column)) pad.move(i + 1, 1) pad.addstr('{0:>{width}}'.format(column.title() + ':', width=colwidth), clr_col) selected = i == self._detailpos[self._detailview] thisclr = clr if selected: thisclr = clr_rw if column in editableColumns else clr_ro i += 1 pad.addstr(' ') pad.addstr('{0:<{width}}'.format(val, width=30), thisclr) def _updateDetail_Classes(self, pad): clr = curses.color_pair(self.COLOR_HEADER_HI) | curses.A_BOLD pad.addstr(0,0,'{0:<{width}}'.format(' CommandClass', width=self._screensize[1]), clr) node = self._selectedNode if node: if self._detailpos[self._detailview] >= len(node.commandClasses): self._detailpos[self._detailview]=len(node.commandClasses)-1 i = 0 for cc in node.commandClasses: pad.addstr(i + 1, 0, ' {0:<{width}}'.format(self._wrapper.getCommandClassName(cc), width=30), self._getListItemColor(i == self._detailpos[self._detailview])) i += 1 def _updateDetail_Groups(self, pad): pad.addstr(3,3,'Group view not yet implemented') # groups tab: # index label maxMembers members # 1 my group 4 1, 2, 4 # Members column is editable - enter comma-separated list? def _updateDetail_Events(self, pad): pad.addstr(3,3,'Event view not yet implemented') # event detail tab: # timestamp commandClass notificationType def _updateDeviceDetail(self): # TODO: detail needs to be scrollable, but to accomplish that a couple of changes need to be made. First, the detail header band needs to be moved into a static shared section (above the detail pad); second, a new dict of 'top' positions needs to be created; finally, positioning code needs to be written to correctly offset the pad. pad = self._detailpads[self._detailview] pad.clear() if self._detailpos[self._detailview] < 0: self._detailpos[self._detailview]=0 funcname = '_updateDetail_{0}'.format(self._detailview) try: method = getattr(self, funcname) method(pad) except AttributeError as ex: msg = 'No method named [%s] defined!' % funcname self._log.warn('_updateDeviceDetail: %s', msg) self._log.warn('_updateDeviceDetail Exception Details: %s', str(ex)) self._alert(msg) self._redrawDetailTab(pad) def _updateMenu(self): menurow = self._screensize[0] - 1 self._screen.addstr(menurow, 0, ' ' * (self._screensize[1] - 1), curses.color_pair(self.COLOR_HEADER_NORMAL)) self._screen.move(menurow,4) for mnemonic, text in self._keys.iteritems(): self._screen.addstr(' {0} '.format(mnemonic), curses.color_pair(self.COLOR_NORMAL) | curses.A_BOLD) self._screen.addstr('{0}'.format(text), curses.color_pair(self.COLOR_HEADER_NORMAL)) def _redrawMenu(self): self._updateMenu() self._screen.refresh() def main(stdscr): # TODO: prune log file commander = ZWaveCommander(stdscr) commander.main() curses.wrapper(main) class DeleteMe: ''' 1 2 3 4 5 6 7 8 12345678901234567890123456789012345678901234567890123456789012345678901234567890 +--------------------------------------------------------------------------------+ | HomeSeer Z-Troller on /dev/keyspan-2 Installer Library | 1 | Home ID 0x003d8522 Version Z-Wave 2.78 | 2 | 7 Registered Nodes (2 Sleeping) | 3 | | 4 | ID Name Location Type State Batt Signal | 5 | 1 Controller Remote Controller OK | 6 | | 2 Sconce 1 Living Room Multilevel Switch [|||| ] [||||] | 7 | |>3 TV Living Room Binary Power Switch on [||| ] | 8 | | 4 Liv Rm Motion Living Room Motion Sensor sleeping [||||] [||||] | 9 | | 5 Sliding Door Family Room Door/Window Sensor ALARM [||| ] [|| ] | 10 +- Scrollable box, lists nodes | 6 Sconce 2 Living Room Multilevel Switch [|||| ] [||||] | 11 | | 7 Bedroom Lamp Master Bed Multilevel Scene Switch on | 12 | | | 13 | | | 14 | | Name: TV | Command Classes | 15 | Location: Living Room | COMMAND_CLASS_BASIC | 16 | | Manufacturer: Aeon Labs | COMMAND_CLASS_HAIL | 17 | | Product: Smart Energy Switch | COMMAND_CLASS_ASSOCIATION | 18 | | Neighbors: 2,4,5,6,7 | COMMAND_CLASS_VERSION | 19 | | Version: 3 | COMMAND_CLASS_SWITCH_ALL | 20 | | State: On | COMMAND_CLASS_MANUFACTURER_SPECIFIC | 21 +- Scrollable box, toggles: | Signal: 3dbA (good) | COMMAND_CLASS_CONFIGURATION | 22 | 1) command classes | | COMMAND_CLASS_SENSOR_MULTILEVEL | 23 | 2) values | | COMMAND_CLASS_METER | 24 | 3) groups | Add Del Edit Refresh + - oN oFf Values Groups Classes Setup Quit | 25 | 4) config params +---------------------------------------+----------------------------------------+ [a]add - associate new node [b]about - show about dialog [c]classes - view command classes [d]delete - remove association [e]edit (COMMAND_CLASS_CONFIGURATION or has editable values) [f]off (command_class_switch_binary,command_class_switch_multilevel,COMMAND_CLASS_SWITCH_TOGGLE_BINARY,COMMAND_CLASS_SWITCH_TOGGLE_MULTILEVEL) [g]groups (COMMAND_CLASS_ASSOCIATION) [n]on (command_class_switch_binary,command_class_switch_multilevel,COMMAND_CLASS_SWITCH_TOGGLE_BINARY,COMMAND_CLASS_SWITCH_TOGGLE_MULTILEVEL) [r]refresh - refresh specified node [s]setup [+]increase (COMMAND_CLASS_SWITCH_MULTILEVEL) [-]decrease (COMMAND_CLASS_SWITCH_MULTILEVEL) '''
UTF-8
Python
false
false
2,011
10,642,929,007,407
74b4aea862e486841ddc4133d150864742994f8a
e6f37e7595977601178482c8edf85e2b5e4095ad
/src/regicide/resources/visual.py
e10f45ffb381dc0c3ed6d06fa67f51a1c1e39bdc
[]
no_license
Ardnived/Regicide
https://github.com/Ardnived/Regicide
71cd39d00b0ad4f2bafe869dd8f8218513b0f602
395330c1cbd76ca2d1255426bb1eb264ed960371
refs/heads/master
2021-01-01T19:50:56.100654
2013-06-06T05:07:14
2013-06-06T05:07:14
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
''' Created on Mar 2, 2013 @author: Devindra This module loads and stores all graphical assets for the game. ''' from pyglet import image class ImageSet(object): def __init__(self, name, rows, columns): self.palette = image.ImageGrid(image.load('resources/visual/'+name+'.png'), rows=rows, columns=columns) def get(self, index): return self.palette[index] class Character(object): ELLIOT = image.load('resources/visual/characters/elliot.png') HELENA = image.load('resources/visual/characters/helena.png') LANCEL = image.load('resources/visual/characters/lancel.png') SHALOTT = image.load('resources/visual/characters/shalott.png') VIVIEN = image.load('resources/visual/characters/vivien.png') YORICK = image.load('resources/visual/characters/yorick.png') class Icon(ImageSet): LVL, NUMBER_1, NUMBER_2, NUMBER_3, NUMBER_4, NUMBER_5, NUMBER_6, NUMBER_7, NUMBER_8, NUMBER_9, NUMBER_10, QUESTION, EXCLAMATION = range(13) GO, CLOCK_LARGE, FLAG, DIRECTIONS, ARROW_RIGHT, ARROW_DOWN_RIGHT, ARROW_UP_RIGHT, ARROW_UP, ARROW_RIGHT_UP, ARROW_LEFT_UP, ARROW_LEFT, ARROW_UP_LEFT, ARROW_DOWN_LEFT, ARROW_DOWN, ARROW_RIGHT_DOWN, ARROW_LEFT_DOWN = range(16, 32) POINTER, CANCEL, CLOCK, BOX, HEART, FIST, SWORD, ARMOUR, SHIELD, STAR, HOURGLASS, COIN, SKULL, KEY, PAGE, BOOK = range(32, 48) instance = ImageSet('icons', 3, 16) @staticmethod def get(index): return Icon.instance.get(index) class Entity(ImageSet): GOBLIN, ZOMBIE, SKELETON, ORC, OGRE, WEREWOLF, GOLEM, DEMON = range(8) SLIME_SWARM, SLIME_LARGE, SCORPION, OCTOPUS, VAMPIRE, MUMMY, WRAITH, BEHOLDER = range(8, 16) MUSHROOMS, RABBIT, BAT_SMALL, BAT_LARGE, SNAKE, WOLF, BOAR, BEAR = range(16, 24) RAT, SPIDER_SWARM, LIZARD, SPIDER_LARGE, FROG, BEETLE, CENTIPEDE, DRAGON = range(24, 32) ELF_MALE, DWARF, BARBARIAN, ELF_FEMALE, RANGER, KNIGHT, WIZARD, SORCEROR = range(32, 40) instance = ImageSet('units', 5, 8) @staticmethod def get(index): return Entity.instance.get(index) class Tile(ImageSet): COLUMNS, COLUMNS_GATE, GRATE, SPIKES, TILES, WAVES, BUBBLES, STARS = range(8) PLINTH_EYE, PLINTH, BRICK_LARGE, BRICK, BRICK_HOLE, GATE_1, GATE_2, GATE_3 = range(8, 16) BLOCK_LIGHT, PLINTH_LIGHT, BLOCK, BLOCK_CRACKED, BLOCK_LOCKED, STAIRS_DOWN, STAIRS_UP, PIT = range(16, 24) SHADE_1, SHADE_2, SHADE_3, SHADE_4, SHADE_5, SHADE_6, SHADE_7, SHADE_8 = range(24, 32) instance = ImageSet('tiles', 4, 8) @staticmethod def get(index): return Tile.instance.get(index) @staticmethod def get_shadow(index): return Tile.get(23 + index) class Misc(ImageSet): SYMBOL_PENTAGRAM, SYMBOL_ANKH, SYMBOL_EYE, SYMBOL_TRI, SYMBOL_DIRECTIONS, SYMBOL_PERSON, BANNER, SIGN = range(8) SKULLS_MANY, SKULL, BONES, GRAVE, ALTAR, FOUNTAIN, GEMS, SPHERE = range(8, 16) COINS_FEW, COINS_MANY, BUBBLES, JEWEL, RING_SMALL, RING_LARGE, AMULET_SMALL, AMULET_LARGE = range(16, 24) SCEPTER_1, SCEPTER_2, SCEPTER_3, SCEPTER_4, SCEPTER_5, SCEPTER_6, SCEPTER_7, SCEPTER_8 = range(24, 32) SHIELD_1, SHIELD_2, SHIELD_3, SHIELD_4, SHIELD_5, SHIELD_6, SHIELD_7, SHIELD_8 = range(32, 40) HELMET_1, HELMET_2, HELMET_3, HELMET_4, HELMET_5, HELMET_6, HELMET_7, HELMET_8 = range(40, 48) TUNIC_1, TUNIC_2, TUNIC_3, TUNIC_4, TUNIC_5, TUNIC_6, TUNIC_7, TUNIC_8 = range(48, 56) BOMBS_1, BOMBS_2, BOMBS_3, ARROWS_1, ARROWS_2, ARROWS_3, BOLTS_1, BOLTS_2 = range(56, 64) SHURIKEN_1, SHURIKEN_2, THROWING_DAGGERS, BOW_1, BOW_2, BOW_3, CROSSBOW_1, CROSSBOW_2 = range(64, 72) AXE_1, AXE_2, AXE_3, AXE_4, AXE_5, AXE_6, AXE_7, AXE_8 = range(72, 80) SWORD_1, SWORD_2, SWORD_3, SWORD_4, SWORD_5, SWORD_6, SWORD_7, SWORD_8 = range(80, 88) SCROLL_1, SCROLL_2, SCROLL_3, SCROLL_4, SCROLL_5, SCROLL_6, SCROLL_7, SCROLL_8 = range(88, 96) POTION_1, POTION_2, POTION_3, POTION_4, POTION_5, POTION_6, POTION_7, POTION_8 = range(96, 104) CHEST, CHEST_OPEN, KEY_1, KEY_2, KEY_3, KEY_4, KEY_5, KEY_6 = range(104, 112) instance = ImageSet('misc', 14, 8) @staticmethod def get(index): return Misc.instance.get(index) class UI(ImageSet): BAR_LEFT, BAR_100, BAR_75, BAR_50, BAR_25, BAR_0, BAR_RIGHT = range(7) HEART_100, HEART_75, HEART_50, HEART_25, HEART_0, HEART_EMPTY = range(7, 13) instance = ImageSet('ui', 2, 7) @staticmethod def get(index): return UI.instance.get(index) class Cursor(ImageSet): SELECT, TARGET, GREEN, YELLOW = range(4) instance = ImageSet('cursor', 1, 4) @staticmethod def get(index): return Cursor.instance.get(index) class ASCII(ImageSet): instance = ImageSet('ascii', 16, 16) @staticmethod def get(index): return ASCII.instance.get(index) @staticmethod def get_index(x, y): return x + y*16 class Pattern(ImageSet): instance = ImageSet('patterns', 1, 8) @staticmethod def get(index): return Pattern.instance.get(index) class Number(object): palette_large = image.ImageGrid(image.load('resources/visual/numbers-large.png'), rows=2, columns=10) palette_small = image.ImageGrid(image.load('resources/visual/numbers-small.png'), rows=2, columns=10) @staticmethod def get(value, large=False, highlight=False): value = value % 10 if (highlight): value += 10 if (large): return Number.palette_large[value] else: return Number.palette_small[value]
UTF-8
Python
false
false
2,013
15,925,738,777,791
122a5719cce7b0b87bf8b5c1b0dc3b4885d4654f
938b3ab871892f7f7a11345b5f135e843407fcaf
/Problem3.py
4c23305be79752c134ef2b15ee447f220a33fe90
[ "MIT" ]
permissive
ledbutter/ProjectEulerPython
https://github.com/ledbutter/ProjectEulerPython
37a87ddebd2656fb0fa5314f5532d2a58ce152de
3c0deddc9c9b5067b811c9892f3369cc063f4c52
refs/heads/master
2020-04-06T07:00:36.683238
2014-11-20T16:46:37
2014-11-20T16:46:37
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#The prime factors of 13195 are 5, 7, 13 and 29. #What is the largest prime factor of the number 600851475143 ? # 6857 # from math import sqrt # primes = set([2]) # value = 3 # number = 600851475143 # sq = sqrt(number) # while value < sq: # isPrime = True # for k in primes: # if value % k == 0: # isPrime = False # value += 2 # why not + 1 ???? # if isPrime: # primes.add(value) # if number % value == 0: # print(value) # number /= value # print(number) number = 600851475143 n = 1 while number > 1: n += 1 if number%n == 0: number /= n print(n)
UTF-8
Python
false
false
2,014
11,106,785,446,885
7c30e6d20f478a096eeb434b5e94ae4945b20f9e
8f529e3525acd3393f9c3b25b5ed1bccee3511e9
/mappertest1.py
5ca3a7ac882813ae88097317115b5a10b7e770ed
[]
no_license
kevinbluett/TopHat-Platform
https://github.com/kevinbluett/TopHat-Platform
ecd7088cdc8c249ef3c6eecb1ce05c9b00518767
87c9925f5172cfd0844a8adbc5d60f76b52f6d1e
refs/heads/master
2021-01-16T21:10:04.757361
2012-08-22T17:02:56
2012-08-22T17:02:56
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#! /usr/bin/python def main(): from Networking.protocolhandler import ProtocolHandler from Common.config import TopHatConfig #setup the config TopHatConfig(path="/home/specialk/Dev/tophat/config.py") # do the other stuff from Model.Mapper import gamemapper as GM Mapper = GM.GameMapper() u = Mapper.find(1) from pprint import pprint pprint(u.dict(2)) if __name__ == "__main__": main()
UTF-8
Python
false
false
2,012
5,214,090,320,478
97fd123d0cefcd0b9a60da1bd1f3a3fe4f515f0d
61522d3a49009f69433389f4b158f185ce787d86
/old/distr_redis_clients.py
5a0471a983404b4bc6320eb6ca8b650e42900882
[]
no_license
crazyideas21/swclone
https://github.com/crazyideas21/swclone
83cc34f4fd00240bc1daabea0a23fcd0ddad00a5
cbe3096004d4ce8dff87ab9d15599b8f6e65854a
refs/heads/master
2020-05-17T08:23:08.334510
2013-03-28T22:38:05
2013-03-28T22:38:05
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python ''' Distributed redis clients across multiple hosts. There are two modes: (1) 'controller': Controls the experiment. Coordinates the clients and collects data. (2) 'redis': Runs the redis clients. Waits for controller's instructions to start sending requests to the redis server. Controller-client protocol: Each client loops forever and tries to unpickle the experiment_state.pickle file, which has the ExperimentState object pickled. Upon success, a client submits requests to the redis server continuously, until instructed to stop by the ExperimentState object. A client saves the result to a unqiue file identified by the experiment ID. Created on Oct 23, 2012 @author: Danny Y. Huang ''' import socket, sys, subprocess, threading, time, traceback, random, os import cPickle as pickle import lib.util as util from lib.parallelize import ThreadPool #=============================================================================== # Client parameters #=============================================================================== REDIS_CLIENT_PROCESS_COUNT = 10 WORKER_THREAD_COUNT = 50 REDIS_PORT = 6379 #=============================================================================== # Controller parameters #=============================================================================== EXPERIMENT_STATE_FILE = 'experiment_state_pickle.tmp' CONFIGURATION = 'mn' if CONFIGURATION == 'hp': # List of hosts available for the experiment, as seen by the experiment's # network (i.e. in-band). REDIS_SERVER_IN_BAND = '10.81.20.1' REDIS_SERVER_OUT_OF_BAND = '172.22.14.213' elif CONFIGURATION == 'tor': # Top-of-rack switch as hardware baseline. REDIS_SERVER_IN_BAND = '172.22.14.213' REDIS_SERVER_OUT_OF_BAND = '172.22.14.213' elif CONFIGURATION == 'mn': # Mininet REDIS_SERVER_IN_BAND = '10.0.0.30' REDIS_SERVER_OUT_OF_BAND = '10.0.0.30' else: assert False # Expected gap in milliseconds between successive requests. Actual value may # differ. EXPECTED_GAP_MS = 50 # Mouse flows #EXPECTED_GAP_MS = 1000 # Elephant flows # How many bytes to put/get on the redis server. DATA_LENGTH = 64 # Mouse flows #DATA_LENGTH = 1*1000*1000 # Elephant flows # How many hosts run the redis clients. REDIS_CLIENT_HOST_COUNT = 8 class ExperimentState: def __init__(self): self.redis_server = None self.gap_ms = None self.data_length = None self.uid = None class ControllerMode: def __init__(self): # Publish the value of x onto the redis server. self.init_redis_server() # Remove previous temp data files. for client_data_file in os.listdir('.'): if client_data_file.endswith('.tmp'): os.remove(client_data_file) # Construct the new experiment state, thereby starting the experiment. experiment_state = ExperimentState() experiment_state.data_length = DATA_LENGTH experiment_state.gap_ms = EXPECTED_GAP_MS * REDIS_CLIENT_PROCESS_COUNT * REDIS_CLIENT_HOST_COUNT experiment_state.redis_server = REDIS_SERVER_IN_BAND experiment_state.uid = str(random.random())[2:6] with open(EXPERIMENT_STATE_FILE, 'w') as f: f.write(pickle.dumps(experiment_state)) # Wait and stop. print 'Experiment State:', experiment_state.__dict__ util.verbose_sleep(130, 'Collecting data...') os.remove(EXPERIMENT_STATE_FILE) # Waiting for all data files to be ready. The number of files to expect # is equal to the number of *.dummy files with the current experiment # UID. data_file_count = 0 for filename in os.listdir('.'): if filename.startswith('dummy-' + experiment_state.uid): data_file_count += 1 client_data_file_list = [] while len(client_data_file_list) < data_file_count: print 'Waiting for all data files to be ready.', print 'Current:', len(client_data_file_list), print 'Expected total:', data_file_count time.sleep(5) client_data_file_list = [] for filename in os.listdir('.'): if filename.startswith('data-' + experiment_state.uid): client_data_file_list += [filename] # Join data. start_end_times = [] for client_data_file in client_data_file_list: print 'Reading', client_data_file with open(client_data_file) as f: client_data_list = pickle.loads(f.read()) start_end_times += [(start_time, end_time, client_data_file) \ for (start_time, end_time) in client_data_list] self.save_data(start_end_times) def save_data(self, start_end_times): """ Calculates the actual request rate and prints as the first line. Saves the CDFs of latency and bandwidth betweeen 1-2 minutes to disk. Assumes that all redis client hosts report correct times. """ # Save the CDF of the start times. TODO: Debug. with open('data/distr_redis_raw_start_time_cdf.txt', 'w') as f: start_time_list = [t for (t, _, _) in start_end_times] for (t, p) in util.make_cdf_table(start_time_list): print >> f, '%.5f' % t, p # Save the start end times list. TODO: Debug. with open('data/distr_redis_raw_start_end_times.txt','w') as f: for (start_time, end_time, data_file) in start_end_times: print >> f, '%.5f' % start_time, end_time, data_file # Filter out irrelevant time values. Focus on 60th-120th seconds. min_time = min([start_time for (start_time, _, _) in start_end_times]) def is_steady_state(start_end_time_tuple): (start_time, _, _) = start_end_time_tuple return min_time + 60 <= start_time <= min_time + 120 filtered_times = filter(is_steady_state, start_end_times) filtered_times.sort() print 'Raw data size:', len(start_end_times), print 'Data between 60-120th seconds:', len(filtered_times) # Figure out the actual gaps in milliseconds. start_time_list = [start for (start, _, _) in filtered_times] gap_list = [] for index in range(0, len(start_time_list) - 1): gap = start_time_list[index + 1] - start_time_list[index] gap_list.append(gap * 1000.0) gap_list.sort() print 'Client gap: (mean, stdev) =', util.get_mean_and_stdev(gap_list), print 'median =', gap_list[len(gap_list)/2] # Calculate latency and bandwidth. latency_list = [] bandwidth_list = [] for (start_time, end_time, _) in filtered_times: if end_time is None: latency = -1 bandwidth = 0 else: latency = end_time - start_time # seconds bandwidth = DATA_LENGTH / latency # Bytes/s latency_list.append(latency * 1000.0) # milliseconds bandwidth_list.append(bandwidth * 8.0 / 1000000.0) # Mbps # Write to file. with open('data/distr_redis_latency.txt', 'w') as f: for (v, p) in util.make_cdf_table(latency_list): print >> f, v, p with open('data/distr_redis_bw.txt', 'w') as f: for (v, p) in util.make_cdf_table(bandwidth_list): print >> f, v, p def init_redis_server(self): """ Sets the variable we're going to get later. """ arg_list = ['*3', '$3', 'set', '$1', 'x', '$%s' % DATA_LENGTH, 'z' * DATA_LENGTH, ''] arg_str = '\r\n'.join(arg_list) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect((REDIS_SERVER_OUT_OF_BAND, REDIS_PORT)) sock.sendall(arg_str) assert sock.recv(1024) == '+OK\r\n' sock.close() class RedisMode: def __init__(self): # Open child processes. is_child = (len(sys.argv) == 3 and sys.argv[2] == 'child') if not is_child: for _ in range(REDIS_CLIENT_PROCESS_COUNT - 1): subprocess.Popen('./distr_redis_clients.py redis child', shell=True) # Attempt to deconstruct the experiment state. experiment_state = None while experiment_state is None: try: with open(EXPERIMENT_STATE_FILE) as f: experiment_state = pickle.loads(f.read()) except (IOError, pickle.UnpicklingError): time.sleep(2) try: self.handle_experiment(experiment_state) except KeyboardInterrupt: return except: with open('run.log', 'a') as f: print >> f, 'Redis client crashed:', traceback.format_exc() def handle_experiment(self, experiment_state): self.lock = threading.Lock() self.last_request_start_time = 0 last_request_start_time = -1 run_id = str(random.random())[2:6] run_start_time = time.time() print 'Experiment', experiment_state.uid, '-', run_id, 'begins.' # Create a dummy file so that the controller knows how many result files # to expect. dummy_filename = 'dummy-' + experiment_state.uid + '-' + run_id + '.tmp' with open(dummy_filename, 'w') as f: print >> f, '0' * 65536 subprocess.call('touch %s; sync' % dummy_filename, shell=True) # List of (start_time, end_time). start_end_times = [] # Bombard the redis server. pool = ThreadPool(max_threads=WORKER_THREAD_COUNT, block_on_busy_workers=True) # Record actual sleep time. TODO: Debug. sleep_f = open('sleep-time-' + run_id + '.tmp', 'w') while True: # The main loop times out after a while. current_time = time.time() if current_time - run_start_time > 140: break # How long to sleep? The time to sleep depends on when the last job # was started. But first, we need to make sure we're not using the # same last_request_start_time value repeatedly. self.lock.acquire() if last_request_start_time == self.last_request_start_time: # Blocks until we obtain an updated value (i.e. a new job # started.) self.lock.release() time.sleep(0.001) continue else: # A fresh new job has indeed started. last_request_start_time = self.last_request_start_time self.lock.release() time_elapsed_ms = (current_time - last_request_start_time) * 1000.0 if time_elapsed_ms < experiment_state.gap_ms: sleep_time_ms = experiment_state.gap_ms - time_elapsed_ms sleep_time_ms = random.uniform(sleep_time_ms * 0.9, sleep_time_ms * 1.1) time.sleep(sleep_time_ms / 1000.0) else: sleep_time_ms = 0 print >> sleep_f, '%.3f' % last_request_start_time, '%.0f' % time_elapsed_ms, '%.0f' % sleep_time_ms # Parallelize the request. This may block if all worker threads are # busy. pool.run(self.send_redis_request, experiment_state, start_end_times) sleep_f.close() # Save results to a randomly named file with a common prefix per experiment. result_file = 'data-' + experiment_state.uid + '-' + run_id + '.tmp' with open(result_file, 'w') as f: f.write(pickle.dumps(start_end_times)) subprocess.call('touch %s; sync' % result_file, shell=True) print 'Experiment', experiment_state.uid, 'ended.' pool.close() def send_redis_request(self, experiment_state, start_end_times): start_time = time.time() end_time = None with self.lock: self.last_request_start_time = start_time try: # Connect to server sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect((experiment_state.redis_server, REDIS_PORT)) sock.sendall('get x\r\n') # Make sure we get all the data. We check this lazily. recv_length = 0 while True: data = sock.recv(32768) recv_length += len(data) if recv_length > experiment_state.data_length and data.endswith('\r\n'): break end_time = time.time() sock.close() except Exception: pass # Submit result. Jobs that didn't finish have None as the end_time. with self.lock: start_end_times += [(start_time, end_time)] def main(): if 'controller' in sys.argv: ControllerMode() elif 'redis' in sys.argv: RedisMode() else: print >> sys.stderr, 'Wrong arguments.' if __name__ == '__main__': main()
UTF-8
Python
false
false
2,013
3,341,484,558,905
98dfbef5c156ab45aafe4cc1512320fd34529f1f
9d5722dbe8cc176c8bf48077a2b439940d45eaf9
/src/cid/forms/siim2/ficha_urbanistica/__init__.py
842df568effb6c5af5e33532335c66a37b310694
[ "AGPL-3.0-only" ]
non_permissive
dunkel13/CaliopeServer
https://github.com/dunkel13/CaliopeServer
4800b719235d8ff334a57684035d1ff7d6334bbd
a71f4e6490ddb6b6ec43e3b71df5b0603a632f37
refs/heads/master
2021-05-27T12:11:36.500244
2014-01-08T15:07:03
2014-01-08T15:07:03
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from cid.forms.siim2.ficha_urbanistica.models import FichaUrbanistica
UTF-8
Python
false
false
2,014
3,650,722,238,737
f1e0b492c9c0a48c9b2220e8e953d11fb3408ef8
3ab08cdbd792d6614fb9b76344a0f44c9c3b8c7a
/trunk/rrule.py
bc50b65ac6b425b3f8207efe927aa1660a57fe0d
[ "LGPL-2.0-or-later", "GPL-1.0-or-later", "GPL-3.0-only" ]
non_permissive
scooby/dailyreport
https://github.com/scooby/dailyreport
7238f6ad0b0ec45362157868c88106e93d9eb2d6
5ebb6263d39853a7ac6fd9807a71719d9b311e17
refs/heads/master
2015-07-09T18:47:17
2013-10-28T23:45:07
2013-10-28T23:45:07
268,136
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" Copyright 2010 Benjamin Samuel. All rights reserved. This file is part of dailyreport, a suite of software to prepare a daily report based on information stored in multiple personal organizer applications. dailyreport is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. dailyreport is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with dailyreport, in the file COPYING in the root directory of the distribution. If not, see <http://www.gnu.org/licenses/>.""" from datetime import datetime, timedelta, date from itertools import chain from calendar import monthrange from timeline import timeline, utc, localtz from icalendar import LocalTimezone import collections import sys """ This module is specifically designed to handle recurring events as found in calendar apps and the like. What it does is sift through events to find those that match the window. Frankly, this is not the model of elegance, but as I worked through the spec, I found myself working around a lot of interactions and special cases. Also, I'm specifically targeting the icalendar parser, but this should bolt on to another parser without too much chicanery. """ WEEKDAYS = { "MO":0, "TU":1, "WE":2, "TH":3, "FR":4, "SA":5, "SU":6 } ONEWEEK = timedelta(weeks = 1) ONEDAY = timedelta(days = 1) class DroppedEntryWarning(Warning): pass def rrule_gen(dt, freq = None, interval = None, until = None, count = None, force_until = None, **odds): """ This function generates dates according to the RRULE specs defined by RFC 2445. I went by the version here: http://www.kanzaki.com/docs/ical/recur.html Most parameters are required, since this is generally intended to take input from a VCal or iCal parser. The byrules can throw errors that are silently ignored (entries are thrown out) so if something's not getting generated and you don't know why, the debugger argument takes a function that will be called with the exception objects. To actually call this function, see expand_recurrence below. """ def freq_iter(freq, interval, dt): def yearly(interval, dt): #debugger("Starting yearly with freq %s interval %s start year %s" % ( # freq, interval, dt.year)) dy = dt.year while True: ndt = dt.replace(year = dy) if force_until is not None and ndt > force_until: return #debugger("Generating year %d" % dy) yield ndt dy += interval def monthly(interval, dt): dy, dm = (dt.year, dt.month) while True: ndt = dt.replace(year = dy, month = dm) if force_until is not None and ndt > force_until: return yield ndt dm += interval if dm > 12: dy += int((dm - 1) / 12) dm = ((dm - 1) % 12) + 1 def periodly(dt, **args): interval = timedelta(**args) while True: if force_until is not None and dt > force_until: return yield dt dt += interval c, args = { 'YEARLY': ( yearly, { 'dt': dt, 'interval': interval }), 'MONTHLY': ( monthly, { 'dt': dt, 'interval': interval }), 'WEEKLY': ( periodly, { 'dt': dt, 'weeks': interval }), 'DAILY': ( periodly, { 'dt': dt, 'days': interval }), 'HOURLY': ( periodly, { 'dt': dt, 'hours': interval }), 'MINUTELY': ( periodly, { 'dt': dt, 'minutes': interval }), 'SECONDLY': ( periodly, { 'dt': dt, 'seconds': interval }) }[freq] return c(**args) def bysecond(dt, value): if 0 <= value <= 60: return dt.replace(second = value) elif -60 <= value < 0: return dt.replace(second = 60 + value) else: raise ValueError("BYSECOND must be -60 to 60") def byminute(dt, value): if 0 <= value <= 60: return dt.replace(minute = value) elif -60 <= value < 0: return dt.replace(minute = 60 + value) else: raise ValueError("BYMINUTE must be -60 to 60") def byhour(dt, value): if 0 <= value <= 24: return dt.replace(hour = value) elif -24 <= value < 0: return dt.replace(hour = 24 + value) else: raise ValueError("BYHOUR must be -24 to 24") def bymonthday(dt, value): if 1 <= value <= 31: return dt.replace(day = value) elif -31 <= value <= -1: return dt.replace(day = monthrange(dt.year, dt.month)[1] + value + 1) # So -1 is the last day of the month else: raise ValueError("BYMONTHDAY must be -31 to -1 or 1 to 31") def bymonth(dt, value): if 1 <= value <= 12: return dt.replace(hour = value) elif -12 <= value <= -1: return dt.replace(hour = 13 + value) else: raise ValueError("BYMONTH must be -12 to -1 or 1 to 12") def byyearday(dt, value): """ Logic here is: get 1 Jan (or 31 Dec) of that year and add days.""" if 1 <= value <= 366: ndt = dt.replace(month = 1, day = 1 ) + timedelta(days = value - 1) elif -366 <= value <= -1: ndt = dt.replace(month = 12, day = 31 ) + timedelta(days = value + 1) else: raise ValueError("BYYEARDAY must be -366 to -1 or 1 to 366.") if ndt.year != dt.year: raise DroppedEntryWarning("BYYEARDAY got a value out of range for " + "that year.") return ndt def weekstarts(dt): """ Find the beginning of the week containing dt. Guaranteed: weekstarts(dt).weekday() == wkst Guaranteed: weekstarts(dt) <= dt Preserves time and other info. """ if wkst > dt.weekday(): return dt + timedelta(days = wkst - dt.weekday() - 7) else: return dt + timedelta(days = wkst - dt.weekday()) def byweekno(dt, value): """ This is straightforward enough if you go by ISO8601, but 8601 assumes that weeks start on Monday. The wkst flag can change that. The ISO definition of the first week is "the week with at least four days in the starting year. Fortunately, no matter what day the week starts on, the first week will contain 4 Jan. And likewise, the last week will contain 28 Dec. """ curwkday1 = weekstarts(dt) if 1 <= value <= 53: week1day1 = weekstarts(dt.replace(month = 1, day = 4)) curweek = (curwkday1 - week1day1).weeks + 1 return dt + timedelta(weeks = value - curweek) elif -53 <= value <= -1: lastweekday1 = weekstarts(dt.replace(month = 12, day = 28)) negweek = (curwkday1 - lastweekday1).weeks - 1 return dt + timedelta(weeks = value - negweek) else: raise ValueError("BYWEEKNO can't take a 0 value") def addmonths(dt, num): y, m = dt.year, dt.month - 1 return dt.replace(year = y + int((m + num) / 12), month = ((m + num) % 12) + 1) def byday(dt, value): """ The value is either a day of the week (as defined by WEEKDAYS) or an integer offset followed by a day of the week. If an offset is specified, it is relative to the interval, which is to say, if freq is MONTHLY and interval is 2, +3MO is the third monday within those two months. -4TU would be the fourth from last tuesday within those two months. 0 is not a valid offset, +1 or 1 is the first, and -1 is the last. If no offset is specified, it implies every weekday within the interval, which can be quite a lot if you're dealing with YEARLY intervals. This function is affected by wkst. """ value = value.strip() if len(value) > 2: offset, wkday = int(value[0:-2]), WEEKDAYS[value[-2:]] # Need to find the first day of the "freq" if freq == 'YEARLY': # Note: interval may be > 1 if 1 <= offset: week1day1 = weekstarts(dt.replace(month = 1, day = 4)) curweek = (weekstarts(dt) - week1day1).weeks + 1 return dt + timedelta(weeks = value - curweek, days = wkday - dt.weekday()) elif offset <= -1: lastweekday1 = weekstarts(dt.replace(month = 12, day = 28, year = dt.year + interval - 1)) # Multi-year intervals negweek = (weekstarts(dt) - lastweekday1).weeks - 1 return dt + timedelta(weeks = value - negweek, days = wkday - dt.weekday()) else: raise ValueError("BYDAY offset must be not be 0.") elif freq == 'WEEKLY': if offset > 0: return dt + timedelta(weeks = offset - 1, days = wkday - dt.weekday()) elif offset < 0: return dt + timedelta(weeks = interval + offset, days = wkday - dt.weekday()) else: raise ValueError("BYDAY offset must not be 0.") elif freq == 'MONTHLY': if offset > 0: # find first week of this month # Note: weekstarts will always return a date whose # weekday is set to wkst. ndt = weekstarts(dt.replace(day = 1) ) + timedelta(days = wkday - wkst) # If we picked a day before the interval, advance a week if ndt.month + ndt.year * 12 < dt.month + dt.year * 12: return ndt + timedelta(weeks = offset) else: return ndt + timedelta(weeks = offset - 1) elif offset < 0: # get the last month of the interval lm = addmonths(dt, interval - 1) # find last week of this month ndt = weekstarts(lm.replace(day = monthrange(lm.year, lm.month)[1])) + timedelta(days = wkday - wkst) # We may pick a day after the end of the interval. if ndt.month + ndt.year * 12 > lm.month + lm.year * 12: return ndt + timedelta(weeks = offset) else: return ndt + timedelta(weeks = offset + 1) else: raise ValueError("BYDAY offset must not be 0.") else: raise ValueError("BYDAY requires freq be WEEKLY, MONTHLY or" + " YEARLY.") else: # Need to do all weekdays within interval. # This is mostly a copy and paste of above logic to find the # first and last weeks. wkday = WEEKDAYS[value] if freq == 'YEARLY': ctr = weekstarts(dt.replace(month = 1, day = 4) ) + timedelta(days = wkday - wkst) term = weekstarts(dt.replace(month = 12, day = 28, year = dt.year + interval - 1)) + timedelta(days = wkday - wkst) elif freq == 'WEEKLY': ctr = dt + timedelta(days = wkday - dt.weekday()) return [ctr + timedelta(weeks = x) for x in range(0, interval)] elif freq == 'MONTHLY': ctr = weekstarts(dt.replace(day = 1)) + timedelta( days = wkday - wkst) # If we picked a day in the previous month, advance a week if ctr.month + ctr.year * 12 < dt.month + dt.year * 12: ctr += ONEWEEK lm = addmonths(dt, interval - 1) # find last week of this month term = weekstarts(lm.replace(day = monthrange(lm.year, lm.month)[1])) + timedelta(days = wkday - wkst) if term.month + ndt.year * 12 > lm.month + lm.year * 12: term -= ONEWEEK else: raise ValueError("BYDAY only works if freq is WEEKLY, MONTHLY " + "or YEARLY.") # If we have a ctr, term pair, handle them: r = [] while ctr <= term: r.append(ctr) ctr = ctr + ONEWEEK # += works but probably shouldn't return r # Actual function starts. assert isinstance(dt, datetime) and dt.tzinfo is not None assert until is None or isinstance(until, date) or (isinstance(until, datetime) and until.tzinfo is not None) if isinstance(until, date): until = datetime(until.year, until.month, until.day, tzinfo = dt.tzinfo) + ONEDAY # These rules will replace their unit of time. Multiple entries # create multiple replacements. No entries are skipped. bymapper = ((func, listify(odds, rule)) for rule, func in (("BYMONTH", bymonth), ("BYWEEKNO", byweekno), ("BYYEARDAY", byyearday), ("BYMONTHDAY", bymonthday), ("BYDAY", byday), ("BYHOUR", byhour), ("BYMINUTE", byminute), ("BYSECOND", bysecond))) bymapper = [x for x in bymapper if x[1]] #print >>sys.stderr, "freq: %r interval: %r dts: %r" % (freq, interval, dt) #print >>sys.stderr, "bymapper set: %r" % bymapper bysetpos = listify(odds, "BYSETPOS") if "WKST" in odds: wkst = WEEKDAYS[unlistify(odds, "WKST")] else: wkst = 0 for ent in freq_iter(freq, interval, dt): retlist = [ent] for func, dtr_args in bymapper: newlist = [] for dt, val in product(retlist, dtr_args): try: a = func(dt, val) newlist.extend(a if isinstance(a, (list, tuple)) else [a]) except Exception as e: logging.exception(e) retlist = newlist if bysetpos: retlist = [retlist[x] for x in bysetpos] retlist.sort() for x in retlist: if until is not None and x > until: return yield x if count is not None: count -= 1 if count < 0: return def listify(h, k): """ Utility function to make sure hash values are lists of atoms. """ if k not in h: return () v = h[k] if isinstance(v, (basestring, collections.Mapping)): return (v,) elif isinstance(v, collections.Iterable): return v elif v is None: return () else: return (v,) def unlistify(h, k, d = None): """ The parser seems to return lists in some cases where it doesn't make sense. This forces a single item. """ #print "unlistify: %r [ %r ] %r" % (h, k, d) try: if k not in h: return d except TypeError: print >>sys.stderr, "Can't get %s in %r" % (k, h) raise v = h[k] if isinstance(v, (basestring, collections.Mapping)): return v elif isinstance(v, collections.Iterable): r = tuple(v) assert len(r) < 2 return r[0] if r else d else: return v def make_timeline(comp, force_until = None): def call_rule_gen(rule): return rrule_gen(dt = dts, force_until = force_until, freq = unlistify(rule, 'FREQ'), interval = unlistify(rule, 'INTERVAL', 1), until = unlistify(rule, 'UNTIL'), count = unlistify(rule, 'COUNT'), **dict(comp.items()) ) #print >>sys.stderr, "make_timeline called with %s" % comp.summary dts = unlistify(comp, 'DTSTART') dur = unlistify(comp, 'DURATION') dte = unlistify(comp, 'DTEND') if dur and dte: raise ValueError("Can't have both duration and end time") if isinstance(dts, datetime): if dte is None: dur = dur or timedelta() elif isinstance(dte, date): dur = dte - dts.astimezone(localtz).date() + ONEDAY else: dur = dte - dts dts = dts.astimezone(utc) elif isinstance(dts, date): if dte is None: dts = datetime(dts.year, dts.month, dts.day, tzinfo=localtz) dur = dur if dur is not None and dur > ONEDAY else ONEDAY elif isinstance(dte, date): dts = datetime(dts.year, dts.month, dts.day, tzinfo=localtz) dur = dte - dts + ONEDAY else: # Use the end time as the time to start dts dts = datetime.combine(dts, dte.time()) dur = dte - dts dts = dts.astimezone(utc) else: pass rtl = timeline.default(None) extl = timeline.default(None) if dts: rtl.set(dts, dts + dur, 'event') else: print >>sys.stderr, "DTS not set!" if dts: for rulelist, tl in (('RRULE', rtl), ('EXRULE', extl)): for rule in listify(comp, rulelist): #print >>sys.stderr, "Running rule_gen against %r" % rule for x in call_rule_gen(rule): tl.set(x, x + dur, 'event') else: print >>sys.stderr, "DTS not set!" for attr, tl in (('RDATE', rtl), ('EXDATE', extl)): for i in listify(comp, attr) or []: try: idt, idur = i.start, i.duration except: idt, idur = i, dur if isinstance(idt, date): idt = datetime(idt.year, idt.month, idt.day, tzinfo = localtz).astimezone(utc) idur = idur if idur > ONEDAY else ONEDAY else: idt = idt.astimezone(utc) tl.set(idt, idt + idur, 'event') #print "make timeline: %r" % rtl.tl return timeline.remove(rtl, extl) if __name__ == '__main__': for i in rrule_gen("MONTHLY", 3, datetime(2010, 7, 1, tzinfo=localtz), None, 5, { "BYDAY": ["3MO", "-1WE"], "WKST" : "TU" } ): print i.year, i.month, i.day
UTF-8
Python
false
false
2,013
13,434,657,708,086
be379ef9a5b5298b7bba9499d9755b88786a6596
ab4950480565b91fec3e8269a4e99b5a7ade3055
/src/EHF/libs/ehfmaths/types.py
f1b45d282c948140fa314e4b751b480fc597347e
[]
no_license
enomineCN/EHF
https://github.com/enomineCN/EHF
7443db0614e0dc5a7bc3a056dfb66593761a7d73
96b532e14669505fe6ab3c690168a346e370f774
refs/heads/master
2020-03-12T00:08:07.854286
2013-10-08T10:52:49
2013-10-08T10:52:49
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import math from ctypes import Structure from ctypes import c_float from ctypes import c_int from EHF.core import win32types class COORD(Structure): """ represents a coord """ _fields_ = [ ("x", c_float), ("y", c_float) ] class RECT(Structure): """ represents a rect """ _fields_ = [ ("left", c_int), ("top", c_int), ("right", c_int), ("bottom", c_int) ] class VECTOR(Structure): """ a C-Vector class, can be used directly in rpm(), it's not suggested to pass this type of object in and out of the pipeline, use the pure python version VECTOR3 instead. call toPyVector3() to convert this type to the pure python type """ _fields_ = [ ("x", c_float), ("y", c_float), ("z", c_float) ] def length(self): return math.sqrt(self.x*self.x + self.y*self.y + self.z*self.z) def dotProduct(self, dot): return self.x*dot.x + self.y*dot.y + self.z*dot.z; def scalar_mul(self, multiplier): return VECTOR(self.x * multiplier, self.y * multiplier, self.z * multiplier) def __add__(self, other): return VECTOR(self.x+other.x, self.y+other.y, self.z+other.z) def __sub__(self, other): return VECTOR(self.x-other.x, self.y-other.y, self.z-other.z) def __str__(self): return "<%f, %f, %f>" % (self.x, self.y, self.z) def __repr__(self): return self.__str__() def toPyVector3(self): return VECTOR3(self.x, self.y, self.z) def toPyVector4P(self): return VECTOR4(self.x, self.y, self.z, 1.0) def toPyVector4D(self): return VECTOR4(self.x, self.y, self.z, 0.0) class VECTOR3(object): """ represents a 3-dimensional vector """ def __init__(self, x=0.0, y=0.0, z=0.0): self.x = x self.y = y self.z = z def length(self): return math.sqrt(self.x*self.x + self.y*self.y + self.z*self.z) def dotProduct(self, dot): return self.x*dot.x + self.y*dot.y + self.z*dot.z; def crossProduct(self, other): nx = self.y*other.z - other.y*self.z ny = self.z*other.x - other.z*self.x nz = self.x*other.y - other.x*self.y return VECTOR3(nx, ny, nz) def multToMat(self, mat): nx = self.x*mat.getM(0,0) + self.y*mat.getM(1,0) + self.z*mat.getM(2,0) + mat.getM(3,0) ny = self.x*mat.getM(0,1) + self.y*mat.getM(1,1) + self.z*mat.getM(2,1) + mat.getM(3,1) nz = self.x*mat.getM(0,2) + self.y*mat.getM(1,2) + self.z*mat.getM(2,2) + mat.getM(3,2) return VECTOR3(nx, ny, nz) def scalar_mul(self, multiplier): return VECTOR3(self.x * multiplier, self.y * multiplier, self.z * multiplier) def normalize(self): length = self.length() nx = self.x/length ny = self.y/length nz = self.z/length return VECTOR3(nx, ny, nz) def toPointVector4(self): return VECTOR4(self.x, self.y, self.z, 1.0) def toDirectionVector4(self): return VECTOR4(self.x, self.y, self.z, 0.0) def __add__(self, other): return VECTOR3(self.x+other.x, self.y+other.y, self.z+other.z) def __sub__(self, other): return VECTOR3(self.x-other.x, self.y-other.y, self.z-other.z) def __str__(self): return "<%f, %f, %f>" % (self.x, self.y, self.z) def __repr__(self): return self.__str__() class VECTOR4(object): """ represents a 4-dimensional vector. this type is required for frostbite engine games """ def __init__(self, x=0.0, y=0.0, z=0.0, w=0.0): self.x = x self.y = y self.z = z self.w = w def __str__(self): return "<%f, %f, %f, %f>" % (self.x, self.y, self.z, self.w) def __repr__(self): return self.__str__() def multToMat(self, mat): nx = self.x*mat.getM(0,0) + self.y*mat.getM(1,0) + self.z*mat.getM(2,0) + self.w*mat.getM(3,0) ny = self.x*mat.getM(0,1) + self.y*mat.getM(1,1) + self.z*mat.getM(2,1) + self.w*mat.getM(3,1) nz = self.x*mat.getM(0,2) + self.y*mat.getM(1,2) + self.z*mat.getM(2,2) + self.w*mat.getM(3,2) nw = self.x*mat.getM(0,3) + self.y*mat.getM(1,3) + self.z*mat.getM(2,3) + self.w*mat.getM(3,3) return VECTOR4(nx, ny, nz, nw) def length(self): return math.sqrt(self.x*self.x + self.y*self.y + self.z*self.z + self.w*self.w) def _length(self): return math.sqrt(self.x*self.x + self.y*self.y + self.z*self.z) def dotProduct(self, dot): return self.x*dot.x + self.y*dot.y + self.z*dot.z + self.w*dot.w; def crossProduct(self, other): """ NOTE cross product should ONLY apply to directional vector not Points!! and since it only applies to vector, we can safely set nw to 0.0 """ nx = self.y*other.z - other.y*self.z ny = self.z*other.x - other.z*self.x nz = self.x*other.y - other.x*self.y nw = 0.0 return VECTOR4(nx, ny, nz, nw) def scalar_mul(self, multiplier): return VECTOR4(self.x * multiplier, self.y * multiplier, self.z * multiplier, self.w * multiplier) def normalize(self): """ NOTE normalization should only apply to directional vector """ length = self.length() nx = self.x/length ny = self.y/length nz = self.z/length nw = self.w/length return VECTOR4(nx, ny, nz, nw) def toPointVector4(self): return VECTOR4(self.x, self.y, self.z, 1.0) def toDirectionVector4(self): return VECTOR4(self.x, self.y, self.z, 0.0) def __add__(self, other): return VECTOR4(self.x+other.x, self.y+other.y, self.z+other.z, self.w+other.w) def __sub__(self, other): return VECTOR4(self.x-other.x, self.y-other.y, self.z-other.z, self.w-other.w) class CameraTransform(Structure): """ represents a complete camera(view) transform. the C/C++ version of this structure implements a union of 4 sets of VECTOR4 and an array of 16 floats it can be used directly in rpm to get the *linerTransform* (BF3/ MOHW) the 4 sets of VECTOR4 are right, up, forward, trans * i think trans is the eye position """ _fields_ = [ ("arr", c_float * 16) ] def getRightVec4(self): return VECTOR4(self.arr[0], self.arr[1], self.arr[2], self.arr[3]) def getRightVec3(self): return VECTOR3(self.arr[0], self.arr[1], self.arr[2]) def getUpVec4(self): return VECTOR4(self.arr[4], self.arr[5], self.arr[6], self.arr[7]) def getUpVec3(self): return VECTOR3(self.arr[4], self.arr[5], self.arr[6]) def getForwardVect4(self): return VECTOR4(self.arr[8], self.arr[9], self.arr[10], self.arr[11]) def getForwardVect3(self): return VECTOR3(self.arr[8], self.arr[9], self.arr[10]) def getTransVect4(self): return VECTOR4(self.arr[12], self.arr[13], self.arr[14], self.arr[15]) def getTransVect3(self): return VECTOR3(self.arr[12], self.arr[13], self.arr[14]) class MATRIX44(Structure): """ simulates a D3DXMATRIX class. it's not suggested to pass this type of object in and out of the pipeline, use the pure python version SimpleMatrix instead. """ _fields_ = [ ("arr", win32types.c_float * 16) ] def toPySimpleMatrix(self): return SimpleMatrix(self) def toArray(self): array = [ [0.0 for i in range(4)] for j in range(4) ] count = 0 for i in range(4): for j in range(4): array[j][i] = self.arr[count] count += 1 return array def toList(self): mat = [] count = 0 row = [] for entry in self.arr: if count and count % 4 == 0: # start a new row mat.append(row) row = [] row.append(entry) count += 1 mat.append(row) return mat def m(self, row, column): return self.arr[row*4+column] class SimpleMatrix(object): """ represents a 4x4 matrix use getM() and setM() to access to the individual entry """ def __init__(self, cMatrix=None): """ initialize from a MATRIX44 structure or use the default constructor """ if cMatrix: self.data = [entry for entry in cMatrix.arr] else: self.data = [0.0 for i in range(16)] def getM(self, row, column): return self.data[row*4+column] def setM(self, row, column, value): self.data[row*4+column] = value def toString(self): return '\n'.join( [ ' '.join( ["%.4f"%self.getM(i,j) for j in range(4)] ) for i in range(4) ] ) def __str__(self): return self.toString() def __repr__(self): return self.toString() def multTo(self, other): result = SimpleMatrix() for row_index in range(4): for column_index in range(4): newEntry = sum( [self.getM(row_index, _k)*other.getM(_k, column_index) for _k in range(4)] ) result.setM(row_index, column_index, newEntry) return result def multToVec4(self, vec4): x = self.getM(0,0) * vec4.x + self.getM(0,1) * vec4.y + self.getM(0,2) * vec4.z + self.getM(0,3) * vec4.z y = self.getM(1,0) * vec4.x + self.getM(1,1) * vec4.y + self.getM(1,2) * vec4.z + self.getM(1,3) * vec4.z z = self.getM(2,0) * vec4.x + self.getM(2,1) * vec4.y + self.getM(2,2) * vec4.z + self.getM(2,3) * vec4.z w = self.getM(3,0) * vec4.x + self.getM(3,1) * vec4.y + self.getM(3,2) * vec4.z + self.getM(3,3) * vec4.z return VECTOR4(x, y, z, w)
UTF-8
Python
false
false
2,013
7,533,372,659,631
38bca62abd5ca5e9c8699c90cca267cb780f4ffd
4b1761177fc0428416a2c9801496ccabea1cb9e2
/dowant/lib/logs/logs.py
46649d2d0d2f9b79a75718c57b70b99b3e1feb01
[]
no_license
harry81/notipub-backend
https://github.com/harry81/notipub-backend
6f3870e0322f59f19c631b488a64a35822d04c07
fc71d4c2473c06f5f1acaeaf92d24f7879ab7302
refs/heads/master
2020-04-23T05:03:46.866414
2014-08-11T04:12:12
2014-08-11T04:12:12
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import custom def get_logger(name=''): """Get a named logger using python's logging module. This serves as a lightweight wrapper around python's logging module. The named logger may have a different configuration set in 'get_named_logger', additional stdout handler returned by 'get_stdout_handler' and formatters of all handlers changed to a verbose one returned by 'get_verbose_formatter'. Look in 'settings.py' for available settings You can put in the project's settings and that influence the above mentioned features. Example use in the code: from lib.logs import logs as logging logger = logging.get_logger(__name__) logger.critical('Hit!') :param name: logger's name :type name: string :rtype: Logger """ # get a defined named logger or a one using basic conf logger = custom.get_named_logger(name) # checking for an additional stdout handler stdout_handler = custom.get_stdout_handler(name) if stdout_handler: # adding a stdout handler logger.addHandler(stdout_handler) # checking for an change to a verbose formatter verbose_formatter = custom.get_verbose_formatter(name) if verbose_formatter: # setting up verbose formatter for all handlers for handler in logger.handlers: handler.setFormatter(verbose_formatter) logger = custom.get_verbose_LoggerAdapter(logger) return logger
UTF-8
Python
false
false
2,014
11,991,548,729,020
aa10491b563f6ec7a1b0f8f19583e2c267dffdc8
df5696e9f72fceea1b30d1c29d28e428af4910d1
/test/src/sort.py
8a0cf990b788777fa1704f64069aeabd71525e73
[]
no_license
erichuang2012/test
https://github.com/erichuang2012/test
d3385b62e41f625f3661b2ab6b43b20e4dc1e06a
f28870b1b52a86b18775adf9741a61d82fc15304
refs/heads/master
2016-09-05T18:41:27.477767
2013-11-28T07:43:53
2013-11-28T07:43:53
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
''' Created on Aug 14, 2013 @author: eric.huang ''' li = [{'a':1, 'c':3, 'd':2},{'a':2, 'c':2, 'b':4}] li2 = [{'a':1, 'c':3, 'd':2},{'a':2, 'c':2, 'b':4}] li.sort() print li #TODO: a li2.sort(cmp=None, key=lambda x: x['c'], reverse=False) print li2
UTF-8
Python
false
false
2,013
12,919,261,658,229
d4aee2496efc09b769afd0abdf4877256064c8e4
969d3c0abb93032378469d72a89ee0019fbb8135
/formskit/__init__.py
4b4fb2306fe5ea8822c7e53cc7983818d50c45ac
[]
no_license
gitter-badger/formskit
https://github.com/gitter-badger/formskit
781f94b6a972ae0c35279c5b4fa18a5992d2c249
6faa770d0dabd5e0747830563037248daba9751a
refs/heads/master
2021-01-17T21:12:26.591041
2014-08-12T23:51:51
2014-08-12T23:51:51
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from .form import Form from .field import Field, Button
UTF-8
Python
false
false
2,014
1,503,238,590,434
a0aceacd376a1782bbf8486bf0edb0f599ee3ae8
e2e3dd5a238383adeea41e32e2c4b5cf71c04b62
/app/accdata/Ressources/_help.py
7c83d76757844d681023bbe30fb36964488322fd
[]
no_license
alexandrepoirier/ACC-Data-Analyzer
https://github.com/alexandrepoirier/ACC-Data-Analyzer
dc9c34848af38aa540e3a69c65205af6be1aa496
bf7d18d16aba5717378b52a6984bc3b05ca90f8d
refs/heads/master
2020-03-29T09:39:12.659075
2014-04-10T18:32:37
2014-04-10T18:32:37
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # encoding: utf-8 HELP_TEXT = {'general': "This application takes the signal from the accelerometer \ of your phone and converts the data to three meta-variables. These \ variables represent the overall activity (how much you move), the \ roughness of your movements (how fast you move) and the magnitude \ of your movements (how large and big your movements are)." }
UTF-8
Python
false
false
2,014
3,556,232,940,721
b6243ff949bf5d2008d4f082971db1a6895d3ead
954e22d389c064bd214afed56f71c41e0123986e
/website/pages/models.py
f27e3f1bbd6fbba156c0090e3fdf4578bb446af5
[]
no_license
qod7/sahayak
https://github.com/qod7/sahayak
f084db497a2dfb914aeeb06323dfe37b9d697e42
680ccfd020b887a7ec0457c5df83383a3ed7fc86
refs/heads/master
2021-01-18T11:25:18.112759
2014-12-22T05:43:34
2014-12-22T05:43:34
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.db import models from django.contrib.auth.models import User from PIL import Image class Media(models.Model): ''' Represents the image file as media It processes and saves the media file in fixed size upon saving. ''' image = models.ImageField() description = models.CharField(max_length=1000, default=' ') # def save(self, size=(200, 200)): # """ # Save Photo after ensuring it is not blank. Resize as needed. # """ # from sahayak import settings # if not self.image: # return # super(Media, self).save() # filename = settings.MEDIA_ROOT+"/"+self.image.name # image = Image.open(filename) # width, height = image.size # if height > width: # ratio = height/width # image.thumbnail((200, 200*ratio), Image.ANTIALIAS) # else: # ratio = width/height # image.thumbnail((200*ratio, 200), Image.ANTIALIAS) # image.save(filename) def showimage(self): return '<img src="/media/'+self.image.name+'"/>' showimage.allow_tags = True def geturl(self): ''' Returns the URL of the media file ''' pass def __str__(self): return self.description class Field(models.Model): name = models.CharField(max_length=100) slug = models.CharField(max_length=100) image = models.ForeignKey(Media, null=True, blank=True) workername = models.CharField(max_length=100, default='') def __str__(self): return self.name class WorkerInfo(models.Model): user = models.ForeignKey(User) image = models.ForeignKey(Media, null=True, blank=True) field = models.ManyToManyField(Field, null=True, blank=True, related_name="workerinfo" , related_query_name="worker") totalrating = models.IntegerField(default=0) rating = models.FloatField(default=0) ratingcount = models.IntegerField(default=0) phonenumber = models.CharField(max_length=100) about = models.TextField(default='') def __str__(self): return "Worker info for "+self.user.first_name+" "+self.user.last_name def getname(self): return self.user.first_name+" "+self.user.last_name def jobscompleted(self): return Job.objects.filter(worker=self, status=Job.COMPLETED).count() def jobspending(self): return Job.objects.filter(worker=self, status=Job.ACCEPTED).count() def isbusy(self): # If the accepted jobs count of a user exceeds 3, then he is busy return self.jobspending() > 1 def isexperienced(self): return self.jobscompleted() > 2 def availability(self): if self.isbusy(): return "Busy" return "Free" def addrating(self, ratingpoint): self.ratingcount += 1 self.totalrating += ratingpoint self.rating = self.totalrating / (self.ratingcount * 1.0) self.save() def gettags(self): tags = '' if self.isbusy(): tags += ' busy' else: tags += ' free' if self.rating > 4.0: tags += ' toprated' if self.isexperienced(): tags += ' experienced' return tags def skills(self): list = [] for field in self.field.all(): list.append(field.name) return ",".join(list) class Job(models.Model): customer = models.ForeignKey(User) worker = models.ForeignKey(WorkerInfo) title = models.CharField(max_length=1000, default='') description = models.TextField(default='') AWAITING = "AR" REJECTED = "RJ" ACCEPTED = "AC" COMPLETED = "CP" JOB_STATUS = ( (AWAITING, "Awaiting Response"), (REJECTED, "Rejected"), (ACCEPTED, "Accepted"), (COMPLETED, "Completed") ) status = models.CharField(max_length=2, choices=JOB_STATUS, default=AWAITING) rating = models.IntegerField(null=True, blank=True) ratingtext = models.CharField(max_length=1000, blank=True, default='') def __str__(self): return self.title+" for customer "+self.customer.first_name+" "+self.customer.last_name+" by "+self.worker.getname() def customerinfo(self): return UserInfo.objects.get(user=self.customer) class UserInfo(models.Model): user = models.OneToOneField(User, related_name="userinfo", related_query_name="info") latitude=models.FloatField(default=0) longitude=models.FloatField(default=0)
UTF-8
Python
false
false
2,014
2,894,807,966,696
639e15b2a2e82a1d69eac79802f6b761518f2167
cf5077d06c5145d93b44c0c00bb93f93fbf4d59d
/post/forms.py
968b1a83f097b4d5c7b8eb76ea1a9d1afb2069a2
[]
no_license
su-danny/famdates
https://github.com/su-danny/famdates
16a9ee01d259c9978278415943d918fd47bdfc9e
301cf997985172c146d917c832390e0db57c03c5
refs/heads/master
2016-08-06T18:17:30.345319
2014-03-11T10:34:31
2014-03-11T10:34:31
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django import forms from famdates.post.models import Post, Comment import json class PostForm(forms.ModelForm): body = forms.CharField(required=False) # json field uploaded_files = forms.CharField(required=False) class Meta: model = Post exclude = ('author', 'feed', 'location', 'wall', 'is_sticky') def clean(self): if not self.cleaned_data.get('body') and not json.loads(self.cleaned_data.get('uploaded_files')): self.errors['body'] = "Post content is required if no uploaded file" return self.cleaned_data class CommentForm(forms.ModelForm): class Meta: model = Comment
UTF-8
Python
false
false
2,014
2,018,634,642,243
308da00e928fbd1025568f5f8520bb64ad7cd5ef
5409b3a5b0d2a1f2635c1a15fb0da4a1edc674fe
/bin/nii2mean.py
a8b3687583591c44e494354b4a4e1c9f82b8921c
[]
no_license
parenthetical-e/objectify
https://github.com/parenthetical-e/objectify
1984dd9b8943bbd56bcf88ce6cb3cf638cb3907e
c19626be2675bfcad69ef656c5f574285a595720
refs/heads/master
2020-04-09T06:15:42.881263
2014-04-23T15:08:12
2014-04-23T15:08:12
160,105,137
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import argparse import numpy as np import nibabel as nb from skimage import io from skimage import filter as filt import matplotlib.pyplot as plt parser = argparse.ArgumentParser( description=("Average images"), formatter_class=argparse.ArgumentDefaultsHelpFormatter ) parser.add_argument( "nii", help="A Nifti1 data file" ) parser.add_argument( "name", help="Save mean as (extension defines image type)" ) parser.add_argument( "--no_smooth", action="store_false", dest='smooth', help="Don't apply a Gaussian smooth after averaging?" ) parser.add_argument( "--sigma", default=12, type=int, help="Width of gaussian smooth" ) parser.set_defaults(smooth=True) args = parser.parse_args() nii = nb.load(args.nii).get_data() meanimg = nii.mean(axis=nii.ndim - 1) ## Time/TR/vol is the last axis... if args.smooth: meanimg = filt.gaussian_filter(meanimg, args.sigma) plt.imsave(args.name, meanimg, cmap=plt.cm.gray)
UTF-8
Python
false
false
2,014
438,086,695,957
291d03b6887d20e870b088d61920001bdee059c1
286db5aee514c6b9f9eeac7860ba5d4e6a027749
/src/GoogleAppsAccountManager/frontend/_messages.py
ebc25e8992ad90c72b3b9fa4d559746307a14152
[ "GPL-2.0-only" ]
non_permissive
patrickathompson/GoogleAppsAccountManager
https://github.com/patrickathompson/GoogleAppsAccountManager
aafb337fe2c659402630c6d5f95e43cf22f21626
228deb3e9a498bc20502986ec4ea5e0f119df717
refs/heads/master
2021-01-21T16:15:42.917268
2013-04-26T16:39:58
2013-04-26T16:39:58
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# # GoogleAppsAccountManager: frontend/_messages # Copyright (C) 2012-2013 KAMEI Yutaka # # License: GNU General Public License version 2 or later # Date: 2012-12-28, since 2012-12-28 # ########################### Result messages ########################### SUCCESS_RESULT = ( """[{date}],{target},{operation} succeeded """ ) FAILED_RESULT = ( """[{date}],{target},{operation} failed,{detail} """ ) ########################### Error messages ########################### ADMIN_OR_DOMIN_NOT_SPECIFIED = ( """Admin name or domain is not specified in command-line or config file. """ ) INVALID_CONFIG_FILE = ( """Invalid parameter is specified in Configuration file. """ ) ADMIN_LOGIN_FAILED = ( """Failed to login by administrator's account. Check password. """ ) USER_LOCK_UNLOCK_BOTH = ( """Lock and unlock options exist at the same time. """ ) CONFLICTED_OPTIONS = ( """Conflicted options specified. """ ) OU_HAS_NOT_FULL_PATH = ( """ou_path prefix does not have "/" character. Retry with "/". """ ) OU_BLOCK_UNBLOCK_BOTH = ( """block_inheritance and unblock_inheritance options exist at the same time. """ ) GROUP_ASSIGNED_OWNER = ( """Cannot assign group id to owner. Specify user name. """ )
UTF-8
Python
false
false
2,013
601,295,449,935
d5790eaf1f0005a67bf1493e4c52447dfe26fe4f
a935828d667056e7b118ad0b496e1e662c5da591
/django/mac-eater/views.py
022b88c2455cd136435456497348226eebdbd332
[ "MIT" ]
permissive
whardier/MAC-Eater
https://github.com/whardier/MAC-Eater
54812b7ee22b7396ded4d5853720c4f60e0d392c
d533275c05069a14a66aa046ddf8d5f6c7798eea
refs/heads/master
2021-01-02T23:12:26.347239
2011-01-02T20:32:35
2011-01-02T20:32:35
1,213,713
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.http import HttpResponse from locations.models import Location def version(request): import platform return HttpResponse(platform.python_version()) def locationrandom(request): l = Location.objects.all()[0] for i in range(1000): l2 = l l2.id = None l2.save() return HttpResponse("OK")
UTF-8
Python
false
false
2,011
18,519,898,998,706
76550258f9a2287d2c1e75e6d8f0632d34bc5c6d
0fa3ade263cd304c43d826ce4b60ec35de52b546
/ps-4-6/find-centrality.py
8a810a7178fb4c7cc1e8962c33558db6c17f7afd
[]
no_license
xix017/udacity-cs215
https://github.com/xix017/udacity-cs215
7fa9683e98ce017be19910a31da3495b7d8f7be8
40404e27ae6a8290835ecd283fdd496f403e7ca3
refs/heads/master
2021-01-18T01:04:06.081231
2013-02-02T23:02:30
2013-02-02T23:02:30
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import random import Queue import time movies = set() def parse_graph(filepath): G = {} global movies with open(filepath, 'r') as f: for line in f: entry = line.split("\t") # [ACTOR][MOVIE][YEAR] actor = entry[0] movie = entry[1] + entry[2] if actor not in G: G[actor] = [] if movie not in G: movies.add(movie) G[movie] = [] G[actor].append(movie) G[movie].append(actor) return G, movies def parse_cardinalities(filepath): card = [] with open(filepath, 'r') as f: for line in f: entry = line.split("\t") card.append((entry[0], float(entry[1]))) return card cnt = 0 start = time.clock() def centrality(G, node): ## average shortest distance path distance to each reachable node #print(node) global start global cnt cnt += 1 if (cnt % 100 == 0): newClock = time.clock() print(cnt, newClock - start) start = newClock global movies if (node in movies): return 99 queue = Queue.Queue() queue.put(node) marked_distance = {} marked_distance[node] = 0 while (not queue.empty()): current = queue.get() for neighbor in G[current]: if (not neighbor in marked_distance): marked_distance[neighbor] = marked_distance[current] + 1 queue.put(neighbor) # only keeps actors marked_distance = filter(lambda x: x[0] in movies, marked_distance.iteritems()) marked_distance = map(lambda x: x[1], marked_distance) found_centrality = sum(marked_distance) * 1.0 / len(marked_distance) return found_centrality def topK(aList, k, smaller=lambda x, y: x < y): top = aList[:k] for elem in aList[k:]: for i in range(len(top)): if smaller(top[i], elem): top[i], elem = elem, top[i] return top G, movies = parse_graph("imdb-1.tsv") print(len(G)) cardinalities = map(lambda x: (x, centrality(G, x)), G) with open("card_saved", "w") as f: for elem in cardinalities: f.write(str(elem[0]) + "\t" + str(elem[1]) + "\n") #cardinalities = parse_cardinalities("card_saved") cardinalities = filter(lambda x: x[0] not in movies, cardinalities) top = topK(cardinalities, 25, lambda x, y: y in movies or x[1] > y[1]) for i, t in enumerate(top): print (i, t)
UTF-8
Python
false
false
2,013
2,671,469,703,440
975137cf993429cdade5e7323a0e592ce1451976
4911c68f0bc33c6f08ee4e4cb23718d7b0cca9e0
/genobox_exchangeids.py
8c807f2e4dd8ba7855a7e77be1e8c301223fb77f
[ "GPL-3.0-or-later" ]
non_permissive
srcbs/GenoBox
https://github.com/srcbs/GenoBox
7febcb53319d69d937d9ba1617fc53d9807ff8e9
683ea3ec1914dfcb3645fceed9ac11f32b0540c3
refs/heads/master
2020-05-28T08:14:32.363601
2013-05-24T13:08:43
2013-05-24T13:08:43
1,475,535
3
1
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/panvol1/simon/bin/python2.7 import argparse import sys def exchange_id(handle, outhandle, x, b, d, f, s): ''' Exchange id of input line, column x ''' for line in handle: if line.startswith('#'): outhandle.write(line) continue # filter on f (if set) if f: if not line.startswith(f): outhandle.write(line) continue # filter on s (if set) if s: if line.startswith(s): outhandle.write(line) continue # do the exchange line = line.rstrip() if line == '': continue fields = line.split('\t') fields[x] = d[fields[x]] newline = ('\t').join(fields) + '\n' outhandle.write(newline) # create the parser parser = argparse.ArgumentParser(description=''' Exchange ids in file -a column X (0-based) based on mapping provided in file b. File b must have common ids in column 0 and new id in column 1 ''') # add the arguments parser.add_argument('--a', help='file to exchange ids [stdin]', default=None) parser.add_argument('--x', help='column to swap ids in [0]', default=0, type=int) parser.add_argument('--b', help='file containing annotation') parser.add_argument('--f', help='only if line starts with []', default=None) parser.add_argument('--s', help='skip line if starts with []', default=None) parser.add_argument('--o', help='output file [stdout]', default=None) parser.add_argument('--log', help='log level [INFO]', default='info') # parse the command line args = parser.parse_args() #args = parser.parse_args('--a rmsk_build37.sort.genome --x 0 --b chr2gi.build37 --o rmsk_build37.gi.genome'.split()) #args = parser.parse_args(' --a hs_ref_GRCh37_all_gatk_tmp.fa --x 0 --b gi_fa2number.build37_rCRS --o hs_ref_GRCh37_all_gatk_number.fa --f ">"'.split()) #args = parser.parse_args(' --x 0 --b gi_fa2number.build37_rCRS --f ">"'.split()) # read annotation to dict d = {} bhandle = open(args.b, 'r') for line in bhandle: line = line.rstrip() fields = line.split('\t') d[fields[0]] = fields[1] bhandle.close() # set outhandle if args.o: outhandle = open(args.o, 'w') else: outhandle = sys.stdout # set inhandle if args.a: inhandle = open(args.a, 'r') else: inhandle = sys.stdin # parse and exchange ids exchange_id(inhandle, outhandle, args.x, args.b, d, args.f, args.s) outhandle.close() inhandle.close()
UTF-8
Python
false
false
2,013
14,474,039,794,832
59af3390a8b8d37a7511bbe9a6b3adac4744a134
7e4e32fa59c63b53fc1977529f9e8460b4ff5e14
/words_service.py
c81215f357e5cc0ddc4385fbcb2d54457663e185
[]
no_license
lynxor/words
https://github.com/lynxor/words
4dd7bfe141496c07b3c9ab765691821e829b9051
691579d355a6872cb3b29ec4c5712a21175eb120
refs/heads/master
2020-05-28T14:16:41.159753
2013-07-01T10:21:28
2013-07-01T10:21:28
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from pymongo import * import datetime import sys import re class WordsService(): ALL = "All languages" def __init__(self, client): db = client.words self.word = db.word def insert(self, word): return self.word.insert(word) def retrieveAll(self, lang): query = {"language" : lang} if(lang == self.ALL): query = {} return list(self.word.find(query)) def search(self, keyword, lang): regex = re.compile( ".*" + ".*".join(keyword.split(",")) + ".*", re.IGNORECASE) query = {"$or" : [{"value" : regex}, {"definition": regex}, {"examples": regex}, {"tags": regex}]} if(lang != self.ALL): query["language"] = lang; return list(self.word.find(query))
UTF-8
Python
false
false
2,013
10,728,828,329,097
e6022163e62e3ea71ec13654f88655fc14d3d59d
0e4d73c871e33d09bb5c59bda3438459e06cb174
/src/collective/solr/skins/solr_site_search/solr_boost_index_values.py
b27b2bbdbbf282e9fd368d98d940c4018d0fcc01
[]
no_license
Jarn/collective.solr
https://github.com/Jarn/collective.solr
2b6180ffa35d514bd7761277875dc86ca9f369bd
723a314b71de3ac25d25ed7cb11c706c3e47367e
refs/heads/master
2020-12-25T11:05:58.733239
2012-06-20T22:23:25
2012-06-20T22:23:25
1,487,685
5
8
null
false
2012-09-17T12:47:02
2011-03-16T15:35:00
2012-09-10T13:25:04
2012-06-20T22:37:34
328
null
11
7
Python
null
null
## Script (Python) "solr_boost_index_values" ##bind container=container ##bind context=context ##bind namespace= ##bind script=script ##bind subpath=traverse_subpath ##parameters=data ##title=Calculate field and document boost values for Solr # this script is meant to be customized according to site-specific # search requirements, e.g. boosting certain content types like "news items", # ranking older content lower, consider special important content items, # content rating etc. # # the indexing data that will be sent to Solr is passed in as the `data` # parameter, the indexable object is available via the `context` binding. # the return value should be a dictionary consisting of field names and # their respecitive boost values. use an empty string as the key to set # a boost value for the entire document/content item. return {}
UTF-8
Python
false
false
2,012
15,255,723,835,706
dc59fc57981ada2ca1b1d9c565208b39f1068475
ba03712710c0f924bb110a108e34ca75e24b5e9e
/paasmaker/pacemaker/controller/configuration.py
a99518d1884d410d63751ff67a96e79289b81435
[ "MPL-2.0" ]
non_permissive
paasmaker/paasmaker
https://github.com/paasmaker/paasmaker
99cc84c25de7a4726408c1bc241863e9cb9d06c2
ea3061d3d5e13b6c17b6dc804fb3c717b7b90e98
refs/heads/master
2021-01-15T18:53:36.128004
2013-12-09T22:46:48
2013-12-09T22:46:48
9,067,577
0
1
null
false
2013-09-19T22:44:42
2013-03-28T01:27:25
2013-09-19T22:39:59
2013-09-19T22:39:49
3,911
null
1
0
Python
null
null
# # Paasmaker - Platform as a Service # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # import json import time import paasmaker from paasmaker.common.controller import BaseController, BaseControllerTest from paasmaker.common.core import constants import tornado import colander class ConfigurationDumpController(BaseController): AUTH_METHODS = [BaseController.SUPER, BaseController.USER] def get(self): self.require_permission(constants.PERMISSION.SYSTEM_ADMINISTRATION) # Dump out the configuration. self.add_data('configuration', self.configuration) self.add_data_template('json', json) self.client_side_render() @staticmethod def get_routes(configuration): routes = [] routes.append((r"/configuration/dump", ConfigurationDumpController, configuration)) return routes class PluginInformationController(BaseController): AUTH_METHODS = [BaseController.SUPER, BaseController.USER] def get(self): self.require_permission(constants.PERMISSION.SYSTEM_ADMINISTRATION) # Dump out the configuration. plugin_data = self.configuration.plugins.plugin_information() self.add_data('plugins', plugin_data) self.add_data_template('json', json) self.client_side_render() @staticmethod def get_routes(configuration): routes = [] routes.append((r"/configuration/plugins", PluginInformationController, configuration)) return routes class ConfigurationDumpControllerTest(BaseControllerTest): config_modules = ['pacemaker'] def get_app(self): self.late_init_configuration(self.io_loop) routes = ConfigurationDumpController.get_routes({'configuration': self.configuration}) routes.extend(paasmaker.pacemaker.controller.login.LoginController.get_routes({'configuration': self.configuration})) application = tornado.web.Application(routes, **self.configuration.get_tornado_configuration()) return application def test_simple(self): request = self.fetch_with_user_auth('http://localhost:%d/configuration/dump?format=json') response = self.wait() self.failIf(response.error) self.assertIn(str(self.configuration.get_flat('redis.table.port')), response.body) class PluginInformationControllerTest(BaseControllerTest): config_modules = ['pacemaker'] def get_app(self): self.late_init_configuration(self.io_loop) routes = PluginInformationController.get_routes({'configuration': self.configuration}) routes.extend(paasmaker.pacemaker.controller.login.LoginController.get_routes({'configuration': self.configuration})) application = tornado.web.Application(routes, **self.configuration.get_tornado_configuration()) return application def test_simple(self): request = self.fetch_with_user_auth('http://localhost:%d/configuration/plugins?format=json') response = self.wait() self.failIf(response.error) self.assertIn('Placement', response.body)
UTF-8
Python
false
false
2,013
12,695,923,353,339
7f6ab0e9d2fb95f4648a1295f4193ee5bd8f122e
b82f78d05d4b7cdbc5b0e4dd9f97e452c3b55989
/fusionbox/core/management/commands/backupdb.py
601261b2b9db0839746a9e697aebfd684873d1af
[ "BSD-2-Clause" ]
permissive
gihandesilva/django-fusionbox
https://github.com/gihandesilva/django-fusionbox
c5ef16ee3cb5a201513b7931ba5baa740c7d2a36
62f3f1a8a95626bef81aefb3de3d2810ef3aeb2c
refs/heads/master
2021-01-18T23:32:33.522961
2013-03-29T16:20:30
2013-03-29T16:20:30
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" Adapted from http://djangosnippets.org/snippets/823/ """ import os import pipes from subprocess import Popen, PIPE import time from django.core.management.base import BaseCommand BACKUP_DIR = 'backups' class Command(BaseCommand): help = 'Backs up each database in settings.DATABASES.' can_import_settings = True class BackupError(Exception): pass def handle(self, *args, **options): from django.conf import settings current_time = time.strftime('%F-%s') if not os.path.exists(BACKUP_DIR): os.makedirs(BACKUP_DIR) # Loop through databases and backup for database_name in settings.DATABASES: config = settings.DATABASES[database_name] # MySQL command and args if config['ENGINE'] == 'django.db.backends.mysql': backup_cmd = self.do_mysql_backup backup_kwargs = { 'timestamp_file': os.path.join(BACKUP_DIR, '{0}-{1}.mysql.gz'.format(database_name, current_time)), 'db': config['NAME'], 'user': config['USER'], 'password': config.get('PASSWORD', None), 'host': config.get('HOST', None), 'port': config.get('PORT', None), } # PostgreSQL command and args elif config['ENGINE'] in ('django.db.backends.postgresql_psycopg2', 'django.contrib.gis.db.backends.postgis'): backup_cmd = self.do_postgresql_backup backup_kwargs = { 'timestamp_file': os.path.join(BACKUP_DIR, '{0}-{1}.pgsql.gz'.format(database_name, current_time)), 'db': config['NAME'], 'user': config['USER'], 'password': config.get('PASSWORD', None), 'host': config.get('HOST', None), 'port': config.get('PORT', None), } # SQLite command and args elif config['ENGINE'] == 'django.db.backends.sqlite3': backup_cmd = self.do_sqlite_backup backup_kwargs = { 'timestamp_file': os.path.join(BACKUP_DIR, '{0}-{1}.sqlite.gz'.format(database_name, current_time)), 'db_file': config['NAME'], } # Unsupported else: backup_cmd = None # Run backup command with args print '========== Backing up \'{0}\'...'.format(database_name) if backup_cmd: try: backup_cmd(**backup_kwargs) print '========== ...done!' except self.BackupError as e: print e.message print '========== ...skipped.' else: print 'Backup for {0} engine not implemented.'.format(config['ENGINE']) print '========== ...skipped.' print '' def do_mysql_backup(self, timestamp_file, db, user, password=None, host=None, port=None): # Build args to dump command dump_args = [] dump_args += ['--user={0}'.format(pipes.quote(user))] if password: dump_args += ['--password={0}'.format(pipes.quote(password))] if host: dump_args += ['--host={0}'.format(pipes.quote(host))] if port: dump_args += ['--port={0}'.format(pipes.quote(port))] dump_args += [pipes.quote(db)] dump_args = ' '.join(dump_args) # Build filenames timestamp_file = pipes.quote(timestamp_file) # Build command cmd = 'mysqldump {dump_args} | gzip > {timestamp_file}'.format( dump_args=dump_args, timestamp_file=timestamp_file, ) # Execute self.do_command(cmd, db) print 'Backed up {db}; Load with `cat {timestamp_file} | gunzip | mysql {dump_args}`'.format( db=db, timestamp_file=timestamp_file, dump_args=dump_args, ) def do_postgresql_backup(self, timestamp_file, db, user, password=None, host=None, port=None): # Build args to dump command dump_args = [] dump_args += ['--username={0}'.format(pipes.quote(user))] if password: dump_args += ['--password'] if host: dump_args += ['--host={0}'.format(pipes.quote(host))] if port: dump_args += ['--port={0}'.format(pipes.quote(port))] dump_args += [pipes.quote(db)] dump_args = ' '.join(dump_args) # Build filenames timestamp_file = pipes.quote(timestamp_file) # Build command cmd = 'pg_dump {dump_args} | gzip > {timestamp_file}'.format( dump_args=dump_args, timestamp_file=timestamp_file, ) # Execute self.do_command(cmd, db, password) print 'Backed up {db}; Load with `cat {timestamp_file} | gunzip | psql {dump_args}`'.format( db=db, timestamp_file=timestamp_file, dump_args=dump_args, ) def do_sqlite_backup(self, timestamp_file, db_file): # Build filenames db_file = pipes.quote(db_file) timestamp_file = pipes.quote(timestamp_file) # Build command cmd = 'gzip < {db_file} > {timestamp_file}'.format( db_file=db_file, timestamp_file=timestamp_file, ) # Execute self.do_command(cmd, db_file) print 'Backed up {db_file}; Load with `cat {timestamp_file} | gunzip > {db_file}`'.format( db_file=db_file, timestamp_file=timestamp_file, ) def do_command(cls, cmd, db, password=None): """ Executes a command and prints a status message. """ print 'executing:' print cmd with open('/dev/null', 'w') as FNULL: process = Popen(cmd, stdin=PIPE, stdout=FNULL, stderr=FNULL, shell=True) # Enter a password through stdin if required if password: process.communicate(input='{0}\n'.format(password)) else: process.wait() if process.returncode != 0: raise cls.BackupError('Error code {code} while backing up database \'{db}\'!'.format( code=process.returncode, db=db, ))
UTF-8
Python
false
false
2,013
10,711,648,467,930
9fc572dd2d135843493e9b6c043df10dbe6e9e17
93ef5b03b4ef080df8785a62340bae6e8ebbe3dd
/zonekort.py
a0b21466759146c201a860083e43786e91e68c1f
[]
no_license
kaaelhaa/OIOREST-zone-miner
https://github.com/kaaelhaa/OIOREST-zone-miner
345ae7270a9aa9e3dba187aa732fc6a21f43e226
9f79a3f8e841ea1828a9b7e30f09aa73d5423d97
refs/heads/master
2021-01-16T00:57:19.017858
2012-04-17T12:15:04
2012-04-17T12:15:04
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python2.7 # -*- coding: UTF-8 -*- import urllib2 import urlparse, urllib import json import networkx as nx def fixurl(url): # turn string into unicode if not isinstance(url,unicode): url = url.decode('utf8') # parse it parsed = urlparse.urlsplit(url) # divide the netloc further userpass,at,hostport = parsed.netloc.partition('@') user,colon1,pass_ = userpass.partition(':') host,colon2,port = hostport.partition(':') # encode each component scheme = parsed.scheme.encode('utf8') user = urllib.quote(user.encode('utf8')) colon1 = colon1.encode('utf8') pass_ = urllib.quote(pass_.encode('utf8')) at = at.encode('utf8') host = host.encode('idna') colon2 = colon2.encode('utf8') port = port.encode('utf8') path = '/'.join( # could be encoded slashes! urllib.quote(urllib.unquote(pce).encode('utf8'),'') for pce in parsed.path.split('/') ) query = urllib.quote(urllib.unquote(parsed.query).encode('utf8'),'=&?/') fragment = urllib.quote(urllib.unquote(parsed.fragment).encode('utf8')) # put it back together netloc = ''.join((user,colon1,pass_,at,host,colon2,port)) return urlparse.urlunsplit((scheme,netloc,path,query,fragment)) def get_parsed_json(url): req = urllib2.urlopen(fixurl(url)) encoding=req.headers['content-type'].split('charset=')[-1] content = unicode(req.read(), encoding) return json.loads(content) operators = get_parsed_json(u"http://geo.oiorest.dk/takstzoner/operatører.json") zone_url = u"http://geo.oiorest.dk/takstzoner.json?operatørnr={0}" for operator in operators: print "Getting info for", operator['navn'] zoner = get_parsed_json(zone_url.format(operator['nr'])) G = nx.Graph() for zone in zoner: naboer = get_parsed_json(zone['naboer']) for nabo in naboer: G.add_edge(int(zone['nr']), int(nabo['nr'])) f = open(u"{0}.xml".format(operator['navn']),"w+") nx.write_graphml(G, f) f.close() zoner = None
UTF-8
Python
false
false
2,012
18,081,812,343,422
de6ea557c7173d1eb7c87485ed954c5797e79ec6
8007155727d6a19b16c37208bca2ef72f01b9684
/parser/srt.py
ba35bb6cecbd1f7c4c9a56a1bc8f7c5192054a5c
[ "MIT" ]
permissive
francis-shuoch/PHD
https://github.com/francis-shuoch/PHD
f7cb4e90d8389165c01b472f94cb697a8d88f0ca
3f911887bbfa79769a720b0b75e49c9a4269ab78
refs/heads/master
2021-05-27T16:38:38.799575
2013-09-02T13:46:50
2013-09-02T13:46:50
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""Parse out text body from srt files. according to http://en.wikipedia.org/wiki/SubRip 1.A numeric counter identifying each sequential subtitle 2.The time that the subtitle should appear on the screen, followed by " --> " and the time it should disappear 3.Subtitle text itself on one or more lines 4.A blank line containing no text indicating the end of this subtitle[10] """ import re def parse(file_path): """ Args ---- file_path: string Returns ------- sents: list list of strings. each string is a sentence. Note ---- Examples -------- """ return parse_file(file_path) def parse_file(file_path): string = "" with open(file_path, 'r') as f: string = f.read() return parse_string(string) def parse_string(string): # each segment separate by a blank line segs = re.split('\n\n', string) texts = [] for seg in segs: # ignore # and time lines = seg.split('\n') text = ' '.join(lines[2:]) # strip <> markups text = re.sub(r'<[^>]*>', '', text) texts.append(text) texts = '\n'.join(texts) return texts # sents = texts.split('.\n') # # return sents
UTF-8
Python
false
false
2,013
661,424,988,219
70288ddcc6dfe349ee68785610f4bca48605940f
6c373efac8f1e4c38badbae9df187bd023161102
/modularizr.py
a2b75317173184ad70d80a861016832a702ebd7d
[ "GPL-3.0-only", "LicenseRef-scancode-unknown-license-reference" ]
non_permissive
nfvs/backbonejs-modularizr
https://github.com/nfvs/backbonejs-modularizr
a3055092adb04c9e81ae85cbba04ce200324c9be
35dc06dda08c778425678c2741249450c4b1711c
refs/heads/master
2019-07-14T17:17:43.782748
2013-06-18T17:51:18
2013-06-18T17:51:18
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python import sys import os from string import Template import urllib self = __import__(__name__) VERSION = '0.0.1' HEADER = """\ ---------------------- Backbone.js Modularizr ----------------------""" ABOUT = """\ (C) 2012-2013 Nuno Santos http://nunofvsantos.com Based on the tutorial by Jakub Kozisek. http://backbonetutorials.com/organizing-backbone-using-modules/ https://github.com/dzejkej/modular-backbone Released under the GPL v3 License. http://www.gnu.org/licenses/gpl-3.0.html Source code available on: http://github.com/nfvs/backbonejs-modularizr """ APP_JS = """\ define([ 'jquery', 'underscore', 'backbone', 'router' ], function($, _, Backbone, Router) { var initialize = function(){ // Pass in our Router module and call it's initialize function Router.initialize(); } return { initialize: initialize }; });""" MAIN_JS = """\ requirejs.config({ paths: { jquery: 'libs/jquery/jquery', underscore: 'libs/underscore/underscore', backbone: 'libs/backbone/backbone', text: 'libs/require/text', templates: '../templates' }, shim: { jquery : { exports : 'jQuery' }, underscore : { exports : '_' }, backbone: { deps: ['underscore', 'jquery'], exports: 'Backbone' }, } }); require([ 'app', 'backbone' ], function(App, backbone) { App.initialize(); });""" ROUTER_JS = """\ define([ 'jquery', 'underscore', 'backbone' ], function($, _, Backbone) { var AppRouter = Backbone.Router.extend({ routes: { "*actions": "defaultAction" } }); var initialize = function() { var app_router = new AppRouter; app_router.on('route:defaultAction', function(actions){ // We have no matching route, lets just log what the URL was console.log('No route:', actions); }); Backbone.history.start(); }; return { initialize: initialize }; });""" TEMPLATE_JS = """\ define([ 'jquery', 'underscore', 'backbone' ], function($, _, Backbone) { return {}; });""" INDEX_HTML_TEMPLATE = """\ <!doctype html> <html lang="en"> <head> <title></title> <!-- Load the script "$main_js" as our entry point --> <script data-main="$main_js" src="$require_js"></script> </head> <body> </body> </html>""" MODEL_TEMPLATE = """\ define([ 'underscore', 'backbone' ], function(_, Backbone){ var $model_variable = Backbone.Model.extend({ defaults: $defaults }); // You usually don't return a model instantiated return $model_variable; });""" COLLECTION_TEMPLATE = """\ define([ 'underscore', 'backbone', // Model dependencies $model_dir ], function(_, Backbone$model_parameter){ var $collection_variable = Backbone.Collection.extend({ $model_content }); // You don't usually return a collection instantiated return new $collection_variable; });""" DIRS = { 'css': 'css', 'templates': 'templates', 'js': 'js', 'jslibs': 'js' + os.sep + 'libs', 'models': 'js' + os.sep + 'models', 'collections': 'js' + os.sep + 'collections', 'views': 'js' + os.sep + 'views' } FILES = { DIRS['js'] + os.sep + 'app.js': APP_JS, DIRS['js'] + os.sep + 'main.js': MAIN_JS, DIRS['js'] + os.sep + 'router.js': ROUTER_JS, DIRS['js'] + os.sep + 'template.js': TEMPLATE_JS, } LIBS = { 'require': 'http://requirejs.org/docs/release/2.1.5/minified/require.js', 'jquery': 'http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js', 'underscore': 'http://documentcloud.github.com/underscore/underscore-min.js', 'backbone': 'http://documentcloud.github.com/backbone/backbone-min.js' } def LIB_FILENAME(name, foldername=None): if not foldername: foldername = name return DIRS['jslibs'] + os.sep + foldername + os.sep + name + '.js' REQUIREJS_LIBS = { 'text': 'https://raw.github.com/requirejs/text/latest/text.js' } # for every command 'c' there needs to be a 'process_c()' function COMMANDS = { 'check': [], 'about': [], 'init': [], 'generate': [ 'model', 'view', 'collection' ] } def initialize_project(): if already_initialized(): print 'Project already initialized.' return # create dir structure print '' print 'Creating directory structure' for directory in sorted(DIRS.values()): try: os.mkdir(directory) print " '%s' created." % directory except OSError: print " '%s' already exists" % directory # download required libs print '\nDownloading required libs' for name,url in LIBS.items(): libfile = LIB_FILENAME(name) if os.path.exists(libfile): print " File '%s' already exists, skipping." % os.path.basename(libfile) else: try: os.mkdir(os.path.dirname(libfile)) except OSError as e: None print " Downloading '%s' from %s" % (name,url) urllib.urlretrieve(url, libfile) # download require.js libs print '\nDownloading libs for require.js' for name, url in REQUIREJS_LIBS.items(): libfile = LIB_FILENAME(name, 'require') if os.path.exists(libfile): print " File '%s' already exists, skipping." % os.path.basename(libfile) else: try: os.mkdir(os.path.dirname(libfile)) except OSError as e: None print " Downloading '%s' from %s" % (name,url) urllib.urlretrieve(url, libfile) # create base .js files print '\nCreating base .js files' for file, content in FILES.items(): if os.path.exists(file): print " File '%s' already exists, skipping." % file else: print " Creating '%s'" % file f = open(file, 'w') f.write(content) f.close() # create base index.html print '' if os.path.exists('index.html'): print "Index.html already exists, skipping." else: print 'Creating base index.html' f = open('index.html', 'w') template = Template(INDEX_HTML_TEMPLATE) content = template.substitute( main_js = DIRS['js'] + os.sep + 'main', require_js = DIRS['jslibs'] + os.sep + 'require' + os.sep + 'require.js' ) f.write(content) f.close() print def create_model(name, model_defaults): print "Creating Model '%s'" % name # enforce format of 'key:value' of the module_attrs for model_default in model_defaults: if not ':' in model_default: print "Error - Wrong format of model defaults." print " Example: 'name:Harry age:11'" return 1 filename = DIRS['models'] + os.sep + name.lower() + '.js' try: f = open(filename, 'r') print "File '%s' already exists, overwrite? [y/N] " % filename, overwrite = "n" try: overwrite = sys.stdin.readline().strip() except KeyboardInterrupt: None print '\r', # remove space in beginning of line when reading from stdin if overwrite.lower() != 'y': return print "Overwriting file '%s'" % filename f = open(filename, 'w') except IOError: f = open(filename, 'w') print "File '%s' created." % filename # build 'defaults' string with the defaults passed as parameter l = len(model_defaults) defaults = "{\n" for attr in model_defaults: l -= 1 attr_name,attr_val = attr.split(':') defaults += " "*6 + "'" + attr_name + "': '" + attr_val if l==0: defaults += "'\n" else: defaults += "',\n" defaults += " "*4 + "}" template = Template(MODEL_TEMPLATE) content = template.substitute( # lowercase 1st letter of model_variable #model_variable = name[0].lower() + name[1:] + "Model", model_variable = name + "Model", defaults = defaults ) f.write(content) f.close() print "Model '%s' created." % (name,) def create_collection(name, model): # variables format for the collection template: # model_dir: 'models/model_name' # model_parameter: ', modelnameModel' # mode_content: 'model: modelnameModel' # collection_variable: 'collecionnameCollection' model_dir = '' model_parameter = '' model_content = '' # lowercase 1st letter collection_variable = name[0].lower() + name[1:] + 'Collection' # first check that the model exists if model: model_filename = DIRS['models'] + os.sep + model.lower() + '.js' try: model_file = open(model_filename, 'r') except IOError: print "Model '%s' not found, aborting." % model return # format of model_dir is 'models/modelname' and not # 'js/models/modelname', therefore we need to remove the initial 'js/' trim = len(DIRS['js'] + os.sep) model_dir = "'%s'" % (DIRS['models'] + os.sep + model.lower())[trim:] model_parameter = ', %sModel' % model model_content = 'model: %sModel' % model # check if collection exists filename = DIRS['collections'] + os.sep + name.lower() + '.js' try: f = open(filename, 'r') print "File '%s' already exists, overwrite? [y/N] " % filename, overwrite = "n" try: overwrite = sys.stdin.readline().strip() except KeyboardInterrupt: None print '\r', # remove space in beginning of line when reading from stdin if overwrite.lower() != 'y': return print "Overwriting file '%s'" % filename f = open(filename, 'w') except IOError: f = open(filename, 'w') print "File '%s' created." % filename template = Template(COLLECTION_TEMPLATE) content = template.substitute( model_dir = model_dir, model_parameter = model_parameter, model_content = model_content, collection_variable = collection_variable ) f.write(content) f.close() print "Collection '%s' created." % name def create_view(name): None """ Check if the project was already initialized. Checks directories in DIRS, files in FILES and LIBS, and finally 'index.html'. Returns True only if all directories and files are present. """ def already_initialized(): for directory in DIRS.values(): if not os.path.exists(directory): return False for f in FILES.keys() + [LIB_FILENAME(l) for l in LIBS.keys()]: if not os.path.exists(f): return False if not os.path.exists('index.html'): return False return True """ Processors For every command 'c' found in COMMAND, there needs to be a function 'process_c(args)' to process it """ def process_about(args): print '' print ABOUT def process_check(args): if already_initialized(): print 'Project already initialized.' else: print "Project not yet initialized. Use 'init' to initialize." def process_init(args): print 'Init project' initialize_project() """ args array: [0] type of module to create (Model, View, ...) [1] name of module [2:] attributes """ def process_generate(args): if not args: print 'Missing type and/or name of module.' print ' Syntax: generate [model|view|collection]' sys.exit(2) if len(args) < 2: if len(args) == 1 and args[0] == 'model': print 'Error: missing name of the new Model' print print ' Syntax: generate model <model name> [defaults]' print ' Examples: generate model Person' print ' generate model Person name:Harry age:11 isWizard:true' elif len(args) == 1 and args[0] == 'collection': print 'Error: missing name of the new Collection' print print ' Syntax: generate collection <collection name> [model dependency name]' print ' Examples: generate collection Persons' print ' generate collection Persons Person' sys.exit(2) allowed_types = COMMANDS['generate'] module_type = args[0] module_name = args[1] module_attrs = args[2:] if module_type not in allowed_types: print 'Unable to create module with type %s.' % module_type sys.exit(2) if module_type == 'model': create_model(module_name, module_attrs) elif module_type == 'view': create_view(module_name, module_attrs) elif module_type == 'collection': if module_attrs and isinstance(module_attrs, list): module_attrs = module_attrs[0] create_collection(module_name, module_attrs) """ Returns operation_name, operation_args """ def parse_args(args): def print_available_commands(): for c in COMMANDS.keys(): print ' %s' % c, if COMMANDS[c]: print '[', for arg in COMMANDS[c]: print arg, print ']', print print HEADER if not args: print 'No command given. Available commands:' print_available_commands() sys.exit(2) elif args[0] not in COMMANDS.keys(): print "Unknown command '%s'. Available commands:" % args[0] print_available_commands() sys.exit(2) # pass remaining command line arguments as parameters operation_name = args[0] operation_args = args[1:] or None return operation_name, operation_args def main(): command, command_args = parse_args(sys.argv[1:]) processor = getattr(self, 'process_' + command) processor(command_args) #print '\nDone' if __name__ == "__main__": main()
UTF-8
Python
false
false
2,013
9,998,683,909,699
b710f4433174664c19400df864cee321a081fddb
bb0bcccef27f815cba02399bcd157c5cb46f2a18
/codility/brick.py
5849fb772e1b5c1410d713dc1e84e901f6a45816
[]
no_license
nkatre/technical_interviews
https://github.com/nkatre/technical_interviews
4880a6d89afb5b6926e018fcd6db3f3f27d9d432
0df97ad2e2b3abdf7d221c4e9a5acd90789cf74a
refs/heads/master
2020-02-26T13:14:55.764785
2013-08-07T09:35:38
2013-08-07T09:35:38
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
def solution(A,B): list=[0 for i in range(0,len(A))] count=len(A)-1 count_f=0 for i in B: #print i if((i<=A[count]) & (count>=0)): count_f+=1 count-=1 #print "count" , count else: found=False #print "here" while(count>=0): if((i<=A[count])& (count>=0)): count_f+=1 count-=1 #print "count" ,count break count-=1 return count_f if __name__ == '__main__': A=[5,6,4,3,6,2,3] B=[2,3,5,2,4] print solution(A,B)
UTF-8
Python
false
false
2,013
16,956,530,903,549
3744bc85e6c0bb41291fb78827276f94eafa3444
e17bc92c0ca25f5953b0d2e8c7aec5d7974fbb1c
/lib/summon/multiwindow.py
edb4a539d1d216e1b2a9c2d87d072a92913f0fc2
[ "LGPL-2.0-or-later", "GPL-2.0-only", "LGPL-2.1-only" ]
non_permissive
mdrasmus/summon
https://github.com/mdrasmus/summon
ad8aea266f01b871f160ee45d76ae82badf4ee69
16bd8cbd952d4462232c4a199677021649ad35d4
refs/heads/master
2021-01-10T21:38:39.320110
2014-06-06T15:54:08
2014-06-06T15:54:08
1,784,067
3
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" SUMMON - Multiple Window Management """ import time from summon.core import * from summon import util import summon class WindowEnsemble: """This class coordinates the position, size, translation, and zoom of multiple SUMMON Windows. """ def __init__(self, windows, stackx=False, stacky=False, samew=False, sameh=False, tiex=False, tiey=False, pinx=False, piny=False, coordsx=None, coordsy=None, master=None, close_with_master=None): """windows -- windows to coordinate stackx -- (bool) windows should stack with same x-coordinate stacky -- (bool) windows should stack with same y-coordinate samew -- (bool) windows should have same width sameh -- (bool) windows should have same height tiex -- (bool) translation along x-axis should be coordinated tiey -- (bool) translation along y-axis should be coordinated pinx -- (bool) translation along x-axis should be offset by window position piny -- (bool) translation along x-axis should be offset by window position coordsx -- a list of x-offsets for translation coordsy -- a list of y-offsets for translation master -- master window close_with_master -- (bool) if true, all windows close with master """ self.windows = windows[:] self.pos = {} self.sizes = {} self.stackx = stackx self.stacky = stacky self.samew = samew self.sameh = sameh self.listeners = {} self.ties = {} self.lock = False self.recentPos = util.Dict(default=[]) self.recentSize = util.Dict(default=[]) self.tiex = tiex self.tiey = tiey self.pinx = pinx self.piny = piny self.coordsx = coordsx self.coordsy = coordsy # setup master window if master != None: self.master = master # close_with_master defaults to True if master is given if close_with_master == None: self.close_with_master = True else: self.close_with_master = close_with_master else: self.master = windows[0] # close_with_master defaults to False if master is not given if close_with_master == None: self.close_with_master = False else: self.close_with_master = close_with_master # record window positions and sizes for win in windows: self.pos[win] = win.get_position() self.sizes[win] = win.get_size() # setup window listeners for win in windows: self.init_listeners(win) # setup window stacking if stackx or stacky: self.stack(self.master) # setup scrolling ties if tiex or tiey: self.tie(windows, tiex=tiex, tiey=tiey, pinx=pinx, piny=piny, coordsx=coordsx, coordsy=coordsy, master=master) def add_window(self, win, index=-1, coordx=0, coordy=0): """add a window to the existing ensemble""" if self.tiex or self.tiey: self.untie() if index == -1: index = len(self.windows) self.windows.insert(index, win) self.pos[win] = win.get_position() self.sizes[win] = win.get_size() self.init_listeners(win) self.recentPos.clear() self.recentSize.clear() # setup window stacking if self.stackx or self.stacky: self.stack(self.master) if self.coordsx != None: self.coordsx.insert(index, coordx) if self.coordsy != None: self.coordsy.insert(index, coordy) # setup scrolling ties if self.tiex or self.tiey: self.tie(self.windows, tiex=self.tiex, tiey=self.tiey, pinx=self.pinx, piny=self.piny, coordsx=self.coordsx, coordsy=self.coordsy, master=self.master) def init_listeners(self, win): """initialize listeners for a window managed by the ensemble""" self.listeners[win] = util.Bundle( close=lambda: self._on_window_close(win), resize=lambda w, h: self._on_window_resize(win, w, h), move=lambda x, y: self._on_window_move(win, x, y)) win.add_close_listener(self.listeners[win].close) win.add_resize_listener(self.listeners[win].resize) win.add_move_listener(self.listeners[win].move) def stop(self): """stop the window ensemble from coordinating window movements""" # pretend all the windows have closed for win in list(self.windows): self._on_window_close(win) def _on_window_close(self, win): """callback for when a window in the ensemble closes""" self.remove_window(win) # close all windows if master closes if self.close_with_master and win == self.master: for win2 in self.windows: win2.close() def remove_window(self, win): """removes a window from the ensemble""" # do nothing if window is not in ensemble if win not in self.windows: return self.windows.remove(win) # remove all callbacks win.remove_close_listener(self.listeners[win].close) win.remove_resize_listener(self.listeners[win].resize) win.remove_move_listener(self.listeners[win].move) del self.listeners[win] self.untie(win) def _on_window_resize(self, win, width, height): """callback for when a window resizes""" # ignore windows that have been changed by the ensemble size = (width, height) if size in self.recentSize[win]: ind = self.recentSize[win].index(size) self.recentSize[win] = self.recentSize[win][ind+1:] # process windows that have been changed by outside forces elif self.sizes[win] != (width, height): if self.stackx or self.stacky: self.stack(win) else: self.align(win) self.raise_windows(win) def _on_window_move(self, win, x, y): """callback for when a window moves""" # ignore windows that have been changed by the ensemble pos = (x, y) if pos in self.recentPos[win]: ind = self.recentPos[win].index(pos) self.recentPos[win] = self.recentPos[win][ind+1:] # process windows that have been changed by outside forces elif self.pos[win] != (x, y): if self.stackx or self.stacky: self.stack(win) else: self.align(win) self.raise_windows(win) def stack(self, win): """restack windows together""" target_pos = win.get_position() target_size = win.get_size() self.pos[win] = target_pos self.sizes[win] = target_size # get window sizes widths = [] heights = [] x = [] y = [] totalx = 0 totaly = 0 target = [] for win2 in self.windows: # update size if win2 == win: w, h = target_size # determine destination positions target = [totalx, totaly] else: w2, h2 = win2.get_size() if self.samew: w = target_size[0] else: w = w2 if self.sameh: h = target_size[1] else: h = h2 if (w,h) != (w2, h2): self.recentSize[win2].append((w,h)) self.sizes[win2] = (w, h) win2.set_size(w, h) widths.append(w) heights.append(h) x.append(totalx) y.append(totaly) deco = win2.get_decoration() totalx += w + deco[0] totaly += h + deco[1] # set window positions for i, win2 in enumerate(self.windows): if win == win2: continue if self.stackx: newx = target_pos[0] newy = target_pos[1] + y[i] - target[1] elif self.stacky: newx = target_pos[0] + x[i] - target[0] newy = target_pos[1] oldpos = self.pos[win2] #win2.get_position() self.pos[win2] = (newx, newy) if (newx, newy) != oldpos: win2.set_position(newx, newy) self.recentPos[win2].append((newx, newy)) def align(self, win): """move all windows the same amount window 'win' has moved""" now = win.get_position() now = [now[0], now[1]] pos1 = self.pos[win] # move all other windows to match moved window for win2 in self.windows: if win2 != win: pos2 = self.pos[win2] pos3 = [now[0] + pos2[0] - pos1[0], now[1] + pos2[1] - pos1[1]] win2.set_position(*pos3) self.recentPos[win2].append(tuple(pos3)) self.pos[win2] = pos3 # record new position for main window self.pos[win] = now def tie(self, windows, tiex=False, tiey=False, pinx=False, piny=False, coordsx=None, coordsy=None, master=None): """ties the scrolling and zooming of multiple windows together""" if len(windows) < 2: return self.tiex = tiex self.tiey = tiey self.pinx = pinx self.piny = piny self.coordsx = coordsx self.coordsy = coordsy if master == None: master = windows[0] if coordsx == None: coordsx = [0] * len(windows) if coordsy == None: coordsy = [0] * len(windows) # make coordinate lookup self.coords = {} for win, x, y in zip(windows, coordsx, coordsy): self.coords[win] = util.Bundle(x=x, y=y) # set callbacks for each window for win in windows: others = util.remove(windows, win) tie = WindowTie(win, others, self) self.ties[win] = tie win.add_view_change_listener(tie.update_scroll) win.add_focus_change_listener(tie.update_focus) if master == win: master_trans = tie.update_scroll master_focus = tie.update_focus master_focus() master_trans() def untie(self, win=None): """remove a window from any ties""" if win == None: # untie all windows for win2 in self.windows: self.untie(win2) else: if win not in self.ties: return win.remove_view_change_listener(self.ties[win].update_scroll) win.remove_focus_change_listener(self.ties[win].update_focus) del self.ties[win] # make sure window ties remove their callbacks for tie in self.ties.itervalues(): tie.remove_window(win) def raise_windows(self, top=None): """raises all windows in ensemble above other windows on the desktop""" for win in self.windows: win.raise_window(True) if top != None: top.raise_window(True) class WindowTie: """This class coordinates the translation and zoom of multiple SUMMON Windows. """ def __init__(self, win, others, ensemble): self.win = win self.others = others self.ensemble = ensemble def remove_window(self, win): """removes a window from the list of tied windows""" if win in self.others: self.others.remove(win) def update_scroll(self): """call back that sets translation and zoom""" # prevent infinite loops if self.ensemble.lock: return self.ensemble.lock = True w1 = self.win others = self.others coords = self.ensemble.coords needpin = self.ensemble.pinx or self.ensemble.piny if needpin: pos1 = w1.get_position() trans1 = w1.get_trans() zoom1 = w1.get_zoom() for w2 in others: if needpin: pos2 = w2.get_position() oldtrans2 = list(w2.get_trans()) oldzoom2 = list(w2.get_zoom()) trans2 = oldtrans2[:] zoom2 = oldzoom2[:] if self.ensemble.tiex: trans2[0] = trans1[0] - coords[w2].x + coords[w1].x zoom2[0] = zoom1[0] if self.ensemble.pinx: trans2[0] += pos1[0] - pos2[0] if self.ensemble.tiey: trans2[1] = trans1[1] - coords[w2].y + coords[w1].y zoom2[1] = zoom1[1] if self.ensemble.piny: trans2[1] -= pos1[1] - pos2[1] # check to see if there is a change (prevents infinite loops) if trans2 != oldtrans2: w2.set_trans(*trans2) if zoom2 != oldzoom2: w2.set_zoom(*zoom2) self.ensemble.lock = False def update_focus(self): """callback that sets focus""" # prevent infinite loops if self.ensemble.lock: return self.ensemble.lock = True coords = self.ensemble.coords fx1, fy1 = self.win.get_focus() fx1 -= coords[self.win].x fy1 -= coords[self.win].y for w2 in self.others: newpos = (fx1 + coords[w2].x, fy1 + coords[w2].y) oldpos = w2.get_focus() if newpos != oldpos: w2.set_focus(* newpos) self.ensemble.lock = False
UTF-8
Python
false
false
2,014
12,773,232,781,656
bbfd81ec40f71bee2c8882a5f72c0e993c8f4d1d
26591af2901a305e5082ef07fec15ceb54ed886f
/python_exercise.py
cdca4b0137128f2daffd2c275098387085fe21b2
[]
no_license
rappiah/EXERCISE-SESSION
https://github.com/rappiah/EXERCISE-SESSION
017ff4b088ade8754ba24103a6b59a463bfc29f2
a113843c84d4c68d67fcbd5e359d6ec85cfae2a9
refs/heads/master
2020-09-21T15:29:52.163316
2012-02-21T21:12:53
2012-02-21T21:12:53
3,505,535
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import os import glob def files(path): listing = os.listdir(path) return listing def filestring(line) search='N' s=line.find(search) if s>=0 k=True else: k=False return b; def replace(line) nline = line.replace("N','M') return nline; path = 'cleandata' filelist=files(path) for infile in filelist: for currentFile in glob.glob( os.path.join(path, '*') ): print("current file is: " + infile)
UTF-8
Python
false
false
2,012
14,010,183,334,637
eac4289ac67417be0cd470ed2232c373162ea05f
ca20e366c9cb1b0c92f355e58a36456bd0f4c5ac
/packnaturals.py
6d81b51f34a95d990766519a57294745d0b2c77f
[]
no_license
tumist/packnaturals
https://github.com/tumist/packnaturals
0b3d94c073949ea91c76b2c96688ca2bc7a2c2dc
6b76b7c62d04d1069a179c1f43319467ba94196a
refs/heads/master
2016-09-10T19:15:06.728046
2014-01-04T01:39:17
2014-01-04T01:39:17
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python """URL-safe natural-number packer. This module extends the packnaturals_ordered implementation to achieve more compression (especially when you have clusters of numbers) at the expense of ordering. """ from __future__ import print_function import packnaturals_ordered to_list_decor = lambda func: lambda arg: list(func(arg)) def pack(numbers): s = sorted(numbers) rel = s[:1] + [a-b for a, b in zip(s[1:], s)] return packnaturals_ordered.pack(rel) @to_list_decor def unpack(string): rel = packnaturals_ordered.unpack(string) incr = 0 for n in rel: incr += n yield incr if __name__ == "__main__": import sys try: numbers = [int(num) for num in sys.argv[1:]] if not numbers or not all([num >= 0 for num in numbers]): raise ValueError except ValueError: print("Usage: {0} <num1> [num2 num3 ...]".format(sys.argv[0])) sys.exit(1) packed = pack(numbers) print("packed :", packed) unpacked = unpack(packed) print("unpacked :", ' '.join([str(n) for n in unpacked]))
UTF-8
Python
false
false
2,014
16,819,091,931,318
446099c78b2c208c7cf62123b2149adbda349427
7dcdd5de0640f07b01b1707c134ec0bd168f641d
/fedora_college/modules/content/media.py
c37f7dbc768d7084060d61a95647ccc2f647cbac
[ "BSD-3-Clause" ]
permissive
MSheezan/fedora-college
https://github.com/MSheezan/fedora-college
8e3e741f6ddac481c2bb7bbcde1e70e2b4b56774
07dbce3652c6c1796fb0f7b208a706c9e9d90dc1
refs/heads/master
2021-01-15T22:38:16.831830
2014-06-26T07:04:33
2014-06-26T07:04:33
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- from flask import Blueprint, render_template from flask import url_for, g from fedora_college.modules.content.forms import * # noqa from fedora_college.core.models import * # noqa bundle = Blueprint('content', __name__, template_folder='templates') @bundle.route('/media/view') @bundle.route('/media/view/') @bundle.route('/media/view/<mediaid>') @bundle.route('/media/view/<mediaid>/') def displaymedia(mediaid=None): url = url_for('content.displaymedia') if mediaid is not None: media = Media.query.filter_by(media_id=mediaid).all() return render_template('media/index.html', data=media, url=url) else: media = Media.query.all() return render_template('media/index.html', data=media, url=url) @bundle.route('/media/add/', methods=['GET', 'POST']) @bundle.route('/media/add', methods=['GET', 'POST']) def uploadmedia(): user = UserProfile.query. \ filter_by(username=g.fas_user['username']).first_or_404() token = user.token form_action = url_for('api.uploadvideo', token=token) return render_template('media/uploadmedia.html', form_action=form_action, title="add media" ) @bundle.route('/media/view/<mediaid>/revise') @bundle.route('/media/view/<mediaid>/revise/') def revisemedia(mediaid=None): user = UserProfile.query. \ filter_by(username=g.fas_user['username']).first_or_404() token = user.token form_action = url_for('api.revisevideo', videoid=mediaid, token=token) return render_template('media/revise.html', form_action=form_action, title="add media")
UTF-8
Python
false
false
2,014
5,729,486,390,751
9184556e6342adbb6f2a5aed258c74725ec769bc
0e4ea484cd62854f691aabab86323708f5f951a8
/libgsync/drive/mimetypes.py
2984b2c7824027fd035577e0bf758dd5f06aa7a9
[ "BSD-3-Clause" ]
permissive
omriiluz/gsync
https://github.com/omriiluz/gsync
767ede8782b39263c1038553220027a6e9f74f1a
fa33bce69d0b9988345b1df3d821f7e3c027d758
refs/heads/master
2020-05-31T06:01:30.773956
2013-10-22T22:21:53
2013-10-22T22:21:53
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Copyright (C) 2013 Craig Phillips. All rights reserved. from __future__ import absolute_import class MimeTypes(object): NONE = "none/unknown-mimetype" FOLDER = "application/vnd.google-apps.folder" BINARY_FILE = "application/octet-stream" @staticmethod def get(path): mimeType = None try: import magic if callable(magic.from_file): mimeType = magic.from_file(path, mime = True) except Exception, e: import mimetypes mimeType = mimetypes.guess_type(path)[0] if mimeType is not None: return mimeType return MimeTypes.NONE
UTF-8
Python
false
false
2,013
15,204,184,272,664
5a38543cc6d842598a0ccd7509e822400aa3e77c
48249ea1614676d292df4d029686efb5d4c91bf9
/devilry/apps/core/tests/basenode.py
a867a3a34916ab9a8aca6757ca2ec7ce3d4a9afc
[]
no_license
erlendve/devilry-django
https://github.com/erlendve/devilry-django
f99f73abbd313621f5dfbd4ea2fccd9d5a9fd826
3a4caf2433ee7eeceac83a09002563a03a14ea7e
refs/heads/master
2020-04-08T11:04:21.573151
2011-09-04T17:54:34
2011-09-04T17:54:34
2,072,574
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.contrib.auth.models import User from django.test import TestCase from ..models import Node, Subject from ..testhelper import TestHelper class TestBaseNode(TestCase, TestHelper): def setUp(self): self.add(nodes="uio:admin(uioadmin).ifi:admin(ifiadmin,ifitechsupport)") self.add(nodes="uio.deepdummy1") self.thesuperuser = User.objects.create(username='thesuperuser', is_superuser=True) def test_is_admin(self): self.assertTrue(self.uio.is_admin(self.uioadmin)) self.assertFalse(self.uio.is_admin(self.ifiadmin)) self.assertTrue(self.uio_ifi.is_admin(self.uioadmin)) self.assertTrue(self.uio_ifi.is_admin(self.ifiadmin)) def test_get_admins(self): def split_and_sort(admins): l = admins.split(', ') l.sort() return ', '.join(l) self.assertEquals(self.uio.get_admins(), 'uioadmin') self.assertEquals(split_and_sort(self.uio_ifi.get_admins()), 'ifiadmin, ifitechsupport') def test_can_save(self): self.assertTrue(self.uio.can_save(self.uioadmin)) self.assertFalse(self.uio.can_save(self.ifiadmin)) self.assertTrue(self.uio_ifi.can_save(self.ifiadmin)) self.assertTrue(self.uio_ifi.can_save(self.uioadmin)) self.assertTrue(Node().can_save(self.thesuperuser)) self.assertFalse(Node(parentnode=None).can_save(self.uioadmin)) self.assertTrue(Node(parentnode=self.uio).can_save(self.uioadmin)) self.assertFalse(Node(parentnode=self.uio).can_save(self.ifiadmin)) def test_can_save_id_none(self): self.assertTrue(Subject(parentnode=self.uio_deepdummy1).can_save(self.uioadmin)) self.assertFalse(Subject(parentnode=self.uio_deepdummy1).can_save(self.ifiadmin))
UTF-8
Python
false
false
2,011
5,231,270,173,042
da34399c47b54ef969009dd5ae3b9a3b2d23597d
339744d9a1816f4338f38043fb946e8387432cf2
/functions.py
d4a68ab0b1aca6fb624e193c81e0d2fe55975dc2
[]
no_license
Vertrexia/ccs
https://github.com/Vertrexia/ccs
20539aaac2c79e66833aea2bd70c404afc151599
6661d47f3850a250016b84cd912cce1c0b707771
refs/heads/master
2021-01-02T12:57:27.716018
2013-01-08T09:48:28
2013-01-08T09:48:28
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import classes import math # checks if players exist in the list def playerExists(name): for key in classes.players_: player = classes.players_[key] if player.name == name: return True return False # player entered the server def playerEntered(name): if playerExists(name) == False: classes.players_[classes.pCounter] = classes.Player() player = classes.players_[classes.pCounter] player.name = name classes.pCounter += 1 def getPlayer(name): if name != "": for key in classes.players_: player = classes.players_[key] if player.name == name: return player return False # player left the server def playerLeft(name): if playerExists(name) == True: player = getPlayer(name) if player != False: player.isAlive = False
UTF-8
Python
false
false
2,013
11,991,548,728,659
6f0448ee8b7855a8cb835f3bc43849c0123b26cf
1f1c9bb2c09652925952e191544c5e3dff727b67
/yuce/settings.py
e439c5763cf8ad3d5caf7a899284b1c6be44f461
[]
no_license
julyzergcn/yuce
https://github.com/julyzergcn/yuce
0627cff7961454eac191376917d659e3ceeff0a9
5e85c2a44cb7015bf1f0838d298a47a13868ce30
refs/heads/master
2016-09-10T15:06:53.680184
2013-09-20T23:15:36
2013-09-20T23:15:36
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#coding=utf-8 BITCOIN_SERVER_ADDR = 'localhost:8333' BITCOIN_SERVER_USER = 'ee' BITCOIN_SERVER_PASS = 'ee33' BITCOIN_SERVER_URL = 'http://%s:%s@%s' % (BITCOIN_SERVER_USER, BITCOIN_SERVER_PASS, BITCOIN_SERVER_ADDR) BITCOIN_WITHDRAW = True # can withdraw #~ BITCOIN_WITHDRAW = False # cannot withdraw import djcelery djcelery.setup_loader() BROKER_URL = 'django://' CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler' TOPIC_START_WEIGHT = 10**5 TOPIC_END_WEIGHT = 10**4 TOPIC_POST_COST = 10 TOPIC_EVENT_CLOSED_EMAILS = [] import decimal TOPIC_SUBMITTED_COST = decimal.Decimal(0.0000001) # max bet score per topic, in one or more times TOPIC_MAX_BET_SCORE = 1 # when topic is completed, divide the profit to site and the topic submitter SITE_WIN_RATE = 0.1 SUBMITTER_WIN_RATE = 0.1 DATE_FORMAT = 'n/j/y' DATETIME_FORMAT = 'n/j/y H:i' EMAIL_HOST = 'smtp.yeah.net' EMAIL_HOST_USER = '[email protected]' EMAIL_HOST_PASSWORD = 'Yuce321' EMAIL_PORT = 25 DEFAULT_FROM_EMAIL = '[email protected]' from os.path import dirname, join, abspath ROOT = dirname(abspath(__file__)) LOCALE_PATHS = ( join(dirname(ROOT), 'conf', 'locale'), ) AUTH_USER_MODEL = 'core.User' DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', '[email protected]'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': join(ROOT, 'dev.db'), 'USER': '', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } import dj_database_url DATABASES = {'default': dj_database_url.config()} # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts ALLOWED_HOSTS = [] # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # In a Windows environment this must be set to your system time zone. TIME_ZONE = 'Asia/Shanghai' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'zh-cn' gettext_noop = lambda s: s LANGUAGES = ( ('en', gettext_noop('English')), ('zh-cn', gettext_noop(u'中文')), ) SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = True # If you set this to False, Django will not use timezone-aware datetimes. USE_TZ = True MEDIA_ROOT = join(ROOT, 'media') MEDIA_URL = '/media/' STATIC_ROOT = join(ROOT, 'static') STATIC_URL = '/static/' STATICFILES_DIRS = ( ) STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # Make this unique, and don't share it with anybody. SECRET_KEY = 'g*i8%1++w4qbhd&qtl^(hjw_w8x6yq5^cct6v1k)4t)_yq_g9y' # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.locale.LocaleMiddleware', ) TEMPLATE_CONTEXT_PROCESSORS = ( 'django.contrib.auth.context_processors.auth', 'django.core.context_processors.debug', 'django.core.context_processors.i18n', 'django.core.context_processors.media', 'django.core.context_processors.static', 'django.core.context_processors.tz', 'django.core.context_processors.request', 'django.contrib.messages.context_processors.messages', ) ROOT_URLCONF = 'yuce.urls' # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = 'yuce.wsgi.application' TEMPLATE_DIRS = ( join(ROOT, 'templates'), ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.humanize', 'django.contrib.admin', 'gunicorn', 'django_reset', 'endless_pagination', 'bootstrapform', 'south', 'registration', 'captcha', 'djcelery', 'kombu.transport.django', 'core', 'task_tracker', ) # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } try: from settings_local import * except ImportError: pass
UTF-8
Python
false
false
2,013
7,610,682,080,931
366d11e5d32e72527c3132b2ecd56933a6453e8d
36118546fce229f6e31cb83ee9f6c5131cdeade9
/double_linked/dll.py
7e6cb120ddcdbb44ac6cfd44cff950ee31b5dbe8
[]
no_license
markableidinger/data_structures
https://github.com/markableidinger/data_structures
54e09ae1a06bd00eae69baf9902714f2d184b6cc
b485132945ecf08ad921d2477b93744cb4498524
refs/heads/master
2021-01-19T00:47:15.571258
2014-10-22T04:35:42
2014-10-22T04:35:42
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
class Node: def __init__(self, value, previous, next): self.value = value self.previous = previous self.next = next class Doubly_linked_list: def __init__(self): self.tail = Node(None, None, None) self.head = Node(None, None, self.tail) self.tail.previous = self.head def insert(self, val): new = Node(val, self.head, self.head.next) self.head.next.previous = new self.head.next = new def append(self, val): new = Node(val, self.tail.previous, self.tail) self.tail.previous.next = new self.tail.previous = new def pop(self): if self.head.next.value is None: return None else: return_item = self.head.next self.head.next = return_item.next return_item.next.previous = self.head return return_item.value def shift(self): if self.tail.previous.value is None: return None else: return_item = self.tail.previous self.tail.previous = return_item.previous return_item.previous.next = self.tail return return_item.value def remove(self, val): currently_selected = self.head.next previously_selected = self.head while currently_selected.value is not None: if currently_selected.value == val: previously_selected.next = currently_selected.next currently_selected.next.previous = previously_selected break else: previously_selected = currently_selected currently_selected = currently_selected.next
UTF-8
Python
false
false
2,014
12,137,577,594,189
50985fe143f4b2b5e8a21f2c82dd4c061f518ea7
9df60e44b27d57e72cd7da551527379c4e8ce6d1
/data_loader/WUSCHEDParser.py
7874bd90bae80c824d4d1bf4892004087bbe0c6e
[]
no_license
ngermer/wusched
https://github.com/ngermer/wusched
19f67c53f69386fae29f0546d444dad94afd52fb
42648e2a4b525a49693da12a6ca5b2b6189488e8
refs/heads/master
2021-01-21T13:49:01.359312
2013-05-16T03:00:44
2013-05-16T03:00:44
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from HTMLParser import HTMLParser class WUSCHEDParser(HTMLParser): def __init__(self, course_handler): HTMLParser.__init__(self) self.course_handler = course_handler self.found_table = False self.in_data = False self.course = None self.cell_num = -1 self.title_caught = False print "Parser initialized." def handle_starttag(self,tag,attrs): tag = tag.lower() # wait until we find the table. if tag == "thead": self.found_table = True return if not self.found_table: return # wait until we reach the appropriate location in the data. if tag == "tbody": self.in_data = True return if not self.in_data: return # begin parsing a row of data. if tag == "tr": self.course = {} self.cell_num = -1 self.title_caught = False elif tag == "td": self.cell_num += 1 elif tag == "a": if self.cell_num == 0: pass elif self.cell_num == 1: pass elif self.cell_num == 2: if not self.title_caught: self.title_caught = True else: #found a syllabus link. for k,v in attrs: if k == "href": self.course["syl"]=v elif self.cell_num == 3: pass elif self.cell_num == 4: pass elif self.cell_num == 5: pass elif self.cell_num == 6: pass elif self.cell_num == 7: pass elif self.cell_num == 8: pass elif self.cell_num == 9: pass elif self.cell_num == 10: pass elif self.cell_num == 11: pass elif self.cell_num == 12: pass def handle_endtag(self,tag): if not self.found_table: return if not self.in_data: return tag = tag.lower() if tag=="tbody": self.in_data = False self.found_table = False elif tag=="tr": self.cell_num = -1 if len(self.course)!=0: self.course_handler.add_course(self.course) def handle_data(self,data): # wait until we find the table. if (not self.found_table) or (not self.in_data): return #strip whitespace. data = data.strip() # begin parsing a row of data. if self.cell_num == 0: self.course["dept"],data = data.split(" ",1) data,self.course["num"] = data.rsplit(" ",1) elif self.cell_num == 1: self.course["sec"] = data elif self.cell_num == 2: #avoid overwriting names with "syllabus" if "name" not in self.course: self.course["name"] = data elif self.cell_num == 3: pass elif self.cell_num == 4: self.course["days"] = data elif self.cell_num == 5: self.course["begin"] = data elif self.cell_num == 6: self.course["end"] = data elif self.cell_num == 7: self.course["inst"] = data elif self.cell_num == 8: pass elif self.cell_num == 9: self.course["seats"] = int(data) elif self.cell_num == 10: self.course["enrolled"] = int(data) elif self.cell_num == 11: self.course["waits"] = int(data) elif self.cell_num == 12: self.course["attr"] = data.split(", ") if __name__ == '__main__': print "WUSCHEDParser provider running at", asctime() course_handler = CourseHandler() parser = WUSCHEDParser(course_handler) with open("../../wu_l_list.html") as f: for line in f: parser.feed(line) print "Done."
UTF-8
Python
false
false
2,013
12,824,772,357,860
1644b29f6d8190be5e2585feee94f0a70a47cbb0
5717e45d653a675a749dbd496b62c9852bef0cd2
/chef-repo/cookbooks/ycsb/files/default/generateChart.py
d6bfb9d832a41d02cad3f4d87b28e8fcca04d1da
[ "Apache-2.0" ]
permissive
myownthemepark/csde
https://github.com/myownthemepark/csde
b7dab355adaa7d2a54c01e5ca33035b8446021dc
11bc441b4e34fe24d76d357317f0736b5e7d350d
refs/heads/master
2017-04-28T22:07:30.204559
2013-03-25T22:48:32
2013-03-25T22:48:32
8,025,192
0
1
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # columns = ["Type", "Elapsed Time (seconds)", "Operations", "Ops/Sec", "Average Latency"] import sys if len(sys.argv) > 1: filename = sys.argv[1] else: print "Usage: generateChart.py <input-filename>" print "Produces: <input-filename>.html" print sys.exit() opsCols = ["Elapsed Time (seconds)", "Ops/Sec"] opsColsString = "" for heading in opsCols: opsColsString += " opsData.addColumn('number', '" + heading + "');\n" latencyCols = ["Elapsed Time (seconds)", "Average Latency"] latencyColsString = "" for heading in latencyCols: latencyColsString += " latencyData.addColumn('number', '" + heading + "');\n" opsData = "" latencyData = "" with open(filename, 'r') as f: read_data = f.readlines() for line in read_data: if "sec" in line and "operations" in line and "current ops/sec" in line: line = line.strip().split() try: dataType = line[7].strip('[') dataTime = line[0] dataOps = str(int(line[2])) dataOpsSec = line[4] dataLatency = line[8].strip("]").split("=")[1] # dataString += " ['" + dataType + "', " + dataTime + ", " + dataOps + ", " + dataOpsSec + ", " + dataLatency + "],\n" opsData += " [" + dataTime + ", " + dataOpsSec + "],\n" latencyData += " [" + dataTime + ", " + dataLatency + "],\n" except Exception: pass html = """ <html> <head> <!--Load the AJAX API--> <script type="text/javascript" src="https://www.google.com/jsapi"></script> <script type="text/javascript"> // Load the Visualization API and the piechart package. google.load('visualization', '1.0', {'packages':['corechart']}); // Set a callback to run when the Google Visualization API is loaded. google.setOnLoadCallback(drawOps); google.setOnLoadCallback(drawLatency); function drawOps() { // Create the data table. var opsData = new google.visualization.DataTable(); """ + opsColsString + """ opsData.addRows([ """ + opsData[:-2] + """ ]); // Set chart options var options = {'title':'Operations per Second', 'width':1920, 'height':600, 'curveType': 'function', 'pointSize': 3, 'lineWidth': 1 }; // Instantiate and draw our chart, passing in some options. var opsChart = new google.visualization.ScatterChart(document.getElementById('chart_ops')); opsChart.draw(opsData, options); } function drawLatency() { // Create the data table. var latencyData = new google.visualization.DataTable(); """ + latencyColsString + """ latencyData.addRows([ """ + latencyData[:-2] + """ ]); // Set chart options var options = {'title':'Average Latency', 'width':1920, 'height':600, 'curveType': 'function', 'pointSize': 3, 'lineWidth': 1 }; // Instantiate and draw our chart, passing in some options. var latencyChart = new google.visualization.ScatterChart(document.getElementById('chart_latency')); latencyChart.draw(latencyData, options); } </script> </head> <body> <!--Div that will hold the pie chart--> <div id="chart_ops"></div> <div id="chart_latency"></div> </body> </html> """ with open(filename + '.html', 'w') as f: f.write(html) print filename + ".html has been created."
UTF-8
Python
false
false
2,013
4,587,025,122,072
89c6df8cf8fe1da8aeba9a919d81bf5c77911c95
1d7ca3b94912b4159e7aac76bd3e1fedfc703094
/syncthing_gtk/__init__.py
c3d04feedc1756880018c1a6309de7ec61b41ad4
[ "GPL-2.0-only" ]
non_permissive
sandeepone/syncthing-gtk
https://github.com/sandeepone/syncthing-gtk
8ac17eeda0336be78358a7ae5922a6d0af26e56a
97c80a697b7bc22aa3134760ef7e5b8b5d613a1d
refs/heads/master
2021-01-17T21:02:36.600848
2014-12-21T20:37:38
2014-12-21T20:37:38
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python2 import tools from timermgr import TimerManager from daemonprocess import DaemonProcess from daemon import Daemon, InvalidConfigurationException, \ TLSUnsupportedException, ConnectionRestarted from watcher import Watcher, HAS_INOTIFY from notifications import Notifications, HAS_DESKTOP_NOTIFY from infobox import InfoBox from editordialog import EditorDialog from deviceeditor import DeviceEditorDialog from foldereditor import FolderEditorDialog from daemonsettings import DaemonSettingsDialog from statusicon import StatusIcon, HAS_INDICATOR from uisettings import UISettingsDialog from configuration import Configuration from iddialog import IDDialog from about import AboutDialog from ignoreeditor import IgnoreEditor from ribar import RIBar from daemonoutputdialog import DaemonOutputDialog from stdownloader import StDownloader from wizard import Wizard from finddaemondialog import FindDaemonDialog from app import App
UTF-8
Python
false
false
2,014
17,635,135,729,421
fbde8a2e6a779ea9f6ce237fe1f082f11190e046
e91b9ae12b4d52c37985bc62c1d29500595393b8
/src/game/swarm/Swarm.py
e0c253e861446f580f165f293295a1b602062855
[]
no_license
Wuji/wuSwarm
https://github.com/Wuji/wuSwarm
243a3527a9c42acac57e8a86044eee6b1cf84c99
6bd2aa4a537a77366061c03ce308ecf56a7c9629
refs/heads/master
2016-09-06T20:16:44.908498
2011-08-05T22:29:44
2011-08-05T22:29:44
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
''' Created on 31 Jul 2011 @author: wuji ''' from game.environment.Simple2dEnv import NumberTile from game.swarm import ai class Swarm(object): ''' This class represents a swarm ''' def __init__(self, environment, start_coordinates = (0, 0), entities = []): ''' Constructor ''' self.entities = entities if len(self.entities) == 0: self.entities.append(SwarmEntity(self, environment)) self.real_env = environment self.known_universe = [] self.unknown_tiles = set() for x in range(self.real_env.length_x): column = [] for y in range(self.real_env.length_y): tile = NumberTile(x, y, 0) column.append(tile) self.unknown_tiles.add(tile.coordinates) self.known_universe.append(column) for entity in entities: entity.position = (0, 0) self.assign_new_leader() self.leader.scan() def number_of_entities(self): return len(self.entities) def assign_new_leader(self): self.leader = self.entities[0] def next_turn(self): for entity in self.entities: new_territory = entity.scan() for tile in new_territory: x = tile.x y = tile.y self.known_universe[x][y] = tile if tile.coordinates in self.unknown_tiles: self.unknown_tiles.remove(tile.coordinates) if not len(entity.path) == 0: entity.move() if not len(self.unknown_tiles) == 0: dest = self.unknown_tiles.pop() self.unknown_tiles.add(dest) entity.move_to(dest) class SwarmEntity(object): ''' This class represents on entity of a swarm ''' def __init__(self, swarm, environment): ''' Constructor ''' self.env = environment self.swarm = swarm self.path = list() self.position = (0, 0) def scan(self): return self.env.scan(self) def move(self): self.position = self.path.pop() def move_to(self, destination): origin = self.env.get_tile_at_position(self.position[0], self.position[1]) dest = self.env.get_tile_at_position(destination[0], destination[1]) path = ai.a_star_2d(origin, dest, self.env) if not path == None and not len(path) == 0: self.path = path self.move()
UTF-8
Python
false
false
2,011
11,948,599,058,091
fedff6d3dbcb8c4d95628c0b099b3da3c6df58f9
ff48e587ae0005d327ffd1dc2531362785bbade7
/judge/models.py
352113517ca3321c9a85308fd9a6048dea47a910
[]
no_license
westandskif/swiss_elo_system
https://github.com/westandskif/swiss_elo_system
f33d16fe60760688924b0317b73f358238bf8ddc
4bb9d91c773a5b98a244faee2bc5c607defe2e21
refs/heads/master
2015-08-08T15:53:11.827784
2013-08-13T10:47:41
2013-08-13T10:47:41
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.db import models # Create your models here. class Player(models.Model): first_name = models.CharField(max_length=30) last_name = models.CharField(max_length=30) elo = models.FloatField() white_played = models.IntegerField(default=0) black_played = models.IntegerField(default=0) score = models.FloatField(default=0) opponents = models.ManyToManyField('self') def __unicode__(self): return u" ".join([unicode(self.first_name), unicode(self.last_name)]) class Meta: ordering = ('first_name',) class PlayerReport(models.Model): player = models.OneToOneField(Player, related_name='report') rank = models.IntegerField(default=0) score = models.FloatField(default=0) new_elo = models.FloatField(default=0) opponents_score = models.FloatField(default=0) def __unicode__(self): return u"Rank {0} player info.".format(self.rank) class Game(models.Model): """ Creates a game between 2 players; None is possible (for odd number of players), such game is played and opponent of None is winner. """ def __init__(self, *args, **kwargs): super(Game, self).__init__(*args, **kwargs) if 'white' in kwargs: self.save() self.white = kwargs['white'] self.black = kwargs['black'] if 'tour' in kwargs: self.tour = kwargs['tour'] if not (self.white is None or self.black is None): self.members.add(kwargs['white'], kwargs['black']) self.white.white_played += 1 self.black.black_played += 1 self.white.save(update_fields=['white_played']) self.black.save(update_fields=['black_played']) else: if self.white is None: self.black.score += 1 self.black.black_played += 1 self.winner = self.black else: self.white.score += 1 self.white.white_played += 1 self.winner = self.white self.winner.save() self.members.add(self.winner) self.loser = None self.played = True self.save() members = models.ManyToManyField(Player, related_name='games', null=True) tour = models.IntegerField(default=1, unique=False) white = models.ForeignKey(Player, related_name='white_games', null=True, unique=False) black = models.ForeignKey(Player, related_name='black_games', null=True, unique=False) winner = models.ForeignKey(Player, related_name='wins', null=True, unique=False) loser = models.ForeignKey(Player, related_name='losings', null=True, unique=False) played = models.BooleanField(default=False) def __unicode__(self): if self.played: if self.winner is None: return "{0} vs {1}. The game has been finished with draw.".format(self.white, self.black) return "{0} vs {1}. {2} has won.".format(self.white, self.black, self.winner) return "{0} vs {1}. The game hasn't been played yet.".format(self.white, self.black) class Meta: ordering = ('tour',) class Tournament(models.Model): active = models.BooleanField(default=True) current_tour = models.IntegerField(default=1) max_tour = models.IntegerField(default=-1) players = models.ManyToManyField(Player, related_name='tournament', null=True) games = models.ManyToManyField(Game, related_name='tournament', null=True) def __unicode__(self): return u"Tournament #{0}".format(unicode(self.id))
UTF-8
Python
false
false
2,013
5,119,601,045,248
c0763de1e7e7574377e5872df08a0888414926ec
65c2ee2aa77587268ab7e621bd94c52feeea43f2
/project/apps/blog/forms.py
059938556381232849b57b168923482e5e84d177
[]
no_license
bornleft/django-simple_blog
https://github.com/bornleft/django-simple_blog
187d6568043bcaa69a943c0a363bd75829e917c1
fbb78fce54a449aed82ed5fca7e78785daf8cf5e
refs/heads/master
2020-04-06T04:35:57.755273
2011-08-02T21:57:05
2011-08-02T21:57:05
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- from django import forms from django.forms import ModelForm from django.forms import widgets from django.utils.translation import ugettext as _ from project.apps.blog.models import Entry, Tag class CommentForm(forms.Form): entry_pk = forms.CharField(widget= forms.HiddenInput) fname = forms.CharField(label = _(u'Имя')) lname = forms.CharField(label = _(u'Фамилия')) comment = forms.CharField(label = _(u'Комментарий'), widget = forms.Textarea(attrs={'cols': 60, 'rows': 18})) class EntryForm(ModelForm): class Meta: model = Entry #exclude = ('author',) widgets = { 'entry': forms.Textarea(attrs = {'cols': 60, 'rows': 18}), 'author': forms.HiddenInput, } class TagForm(ModelForm): class Meta: model = Tag exclude = ('entrys',)
UTF-8
Python
false
false
2,011
7,859,790,169,573
97822ce370b3886b6e6ac1efb98329dfb407438a
6bf4867b690f59a77f7caddc1238c3bae6b3e1c3
/tests/benchmark/scenarios/vm/test_utils.py
d8f6fbddbbfa9100a8a94d5a9dc376793188c4d7
[ "Apache-2.0" ]
permissive
kambiz-aghaiepour/rally
https://github.com/kambiz-aghaiepour/rally
641c044cc24c10eb15e4d6b4ab3bc4885779e076
be708bacf0bc898a9538b9b6cb0ba4e1c015c1f2
refs/heads/master
2021-01-15T19:35:15.318291
2014-08-18T23:51:30
2014-08-18T23:51:30
23,090,342
3
1
null
null
null
null
null
null
null
null
null
null
null
null
null
# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import subprocess import mock from oslotest import mockpatch from rally.benchmark.scenarios.vm import utils from tests import fakes from tests import test VMTASKS_UTILS = "rally.benchmark.scenarios.vm.utils" class VMScenarioTestCase(test.TestCase): def setUp(self): super(VMScenarioTestCase, self).setUp() self.wait_for = mockpatch.Patch(VMTASKS_UTILS + ".bench_utils.wait_for") self.useFixture(self.wait_for) @mock.patch('__builtin__.open') def test_run_action(self, mock_open): mock_ssh = mock.MagicMock() mock_file_handle = mock.MagicMock() mock_open.return_value = mock_file_handle vm_scenario = utils.VMScenario() vm_scenario.run_action(mock_ssh, 'interpreter', 'script') mock_ssh.execute.assert_called_once_with('interpreter', stdin=mock_file_handle) def test_wait_for_ssh(self): ssh = mock.MagicMock() vm_scenario = utils.VMScenario() vm_scenario.wait_for_ssh(ssh) ssh.wait.assert_called_once_with() @mock.patch(VMTASKS_UTILS + ".VMScenario.ping_ip_address", return_value=True) def test_wait_for_ping(self, mock_ping): vm_scenario = utils.VMScenario() vm_scenario.wait_for_ping("1.2.3.4") self.wait_for.mock.assert_called_once_with("1.2.3.4", is_ready=mock_ping, timeout=120) @mock.patch(VMTASKS_UTILS + ".VMScenario.run_action") @mock.patch(VMTASKS_UTILS + ".VMScenario.wait_for_ping") @mock.patch("rally.sshutils.SSH") def test_run_command(self, mock_ssh_class, mock_wait_ping, mock_run_action): mock_ssh_instance = mock.MagicMock() mock_ssh_class.return_value = mock_ssh_instance vm_scenario = utils.VMScenario() vm_scenario._context = {"user": {"keypair": {"private": "ssh"}}} vm_scenario.run_command("1.2.3.4", 22, "username", "int", "script") mock_wait_ping.assert_called_once_with("1.2.3.4") mock_ssh_class.assert_called_once_with("username", "1.2.3.4", port=22, pkey="ssh") mock_ssh_instance.wait.assert_called_once_with() mock_run_action.assert_called_once_with(mock_ssh_instance, "int", "script") def test_check_network(self): vm_scenario = utils.VMScenario() fake_server = fakes.FakeServer() fake_server.addresses = {} self.assertRaises(ValueError, vm_scenario.check_network, fake_server, "private") fake_server.addresses["private_1"] = { "version": 4, "addr": "1.2.3.4" } vm_scenario.check_network(fake_server, "private_1") @mock.patch("subprocess.Popen") def test_ping_ip_address(self, mock_subprocess): ping_process = mock.MagicMock() ping_process.returncode = 0 mock_subprocess.return_value = ping_process vm_scenario = utils.VMScenario() host_ip = "1.2.3.4" self.assertTrue(vm_scenario.ping_ip_address(host_ip)) mock_subprocess.assert_called_once_with( ['ping', '-c1', '-w1', host_ip], stderr=subprocess.PIPE, stdout=subprocess.PIPE) ping_process.wait.assert_called_once_with()
UTF-8
Python
false
false
2,014
8,057,358,695,124
8d1d7c699b539af23bf170262e6d7193c06fc54e
a31a20618104828b51b78ee2b68b18fbda07c001
/python/0001.py
a49b90f643855843768ec1131a141cf0aa75a738
[]
no_license
lionbee/euler
https://github.com/lionbee/euler
a5dd02575e8a6d5f9e37b83fcd06ab167e1b580a
4342f77ebc83f08c7ae92d2e3c54319ee498dcc6
refs/heads/master
2021-05-26T21:21:47.831318
2014-08-01T12:55:57
2014-08-01T12:55:57
22,506,556
0
1
null
null
null
null
null
null
null
null
null
null
null
null
null
def addFactors(factor, maxvalue): number = 0; total = 0; while number < maxvalue: print number if number % factor == 0: total+=number number+=1 print total return total print addFactors(3, 1000) + addFactors(5, 1000) - addFactors(15, 1000) # now that I know python a little better print reduce(lambda a, b: a + b, (x for x in range(1, 1000) if x % 3 == 0 or x % 5 == 0))
UTF-8
Python
false
false
2,014
4,389,456,607,755
8e90b3d94d7e8758aceec92e8c99d18cbeed944e
9e437371ec09ae830bbcae0076d5d87b523d1d00
/PhyloTreeHeatmapVis.py
fbdc797b2f238b03ae8d69a9706098368c6018ce
[]
no_license
XiaoxiaoLiu/py-arbor
https://github.com/XiaoxiaoLiu/py-arbor
08239fb4b10e323c5da1cd073cdc7c0331831f25
36caaec2dd8f88aea5c65cd3907b9e89292413b9
refs/heads/master
2021-01-25T10:43:57.626415
2013-07-24T18:11:54
2013-07-24T18:11:54
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python from vtk import * #read in a tree treeReader = vtkNewickTreeReader() treeReader.SetFileName('/home/xiaoxiao/work/data/Arbor/anolis.phy') treeReader.Update() tr = treeReader.GetOutput() print(tr.GetNumberOfVertices()) #read in a table tableReader = vtkDelimitedTextReader() tableReader.SetFileName('/home/xiaoxiao/work/data/Arbor/anolisDataAppended.csv') tableReader.Update() table = tableReader.GetOutput() #play with the heatmap vis treeHeatmapItem = vtkTreeHeatmapItem() treeHeatmapItem.SetTree(tr); treeHeatmapItem.SetTable(table); # setup the window view = vtkContextView() view.GetRenderer().SetBackground(1,1,1) view.GetRenderWindow().SetSize(800,600) iren = view.GetInteractor() iren.SetRenderWindow(view.GetRenderWindow()) transformItem = vtkContextTransform() transformItem.AddItem(treeHeatmapItem) transformItem.SetInteractive(1) view.GetScene().AddItem(transformItem) view.GetRenderWindow().SetMultiSamples(0) iren.Initialize() view.GetRenderWindow().Render() iren.Start()
UTF-8
Python
false
false
2,013
1,133,871,372,141
8bdae2184f5e0164284d867349861e4d775b3557
d6566a46d7eac45de6f71a4175c1f4c4b5e5835a
/sorts/Mergesort.py
1fb12b6d7ce694bbe8c21a3f90c19ee839aba013
[]
no_license
jonpo/algo
https://github.com/jonpo/algo
03da18b13373931020dc462097def4b121975709
fdc1ce5a4c7104d17427f2e8dc3db5eba57d263a
refs/heads/master
2016-09-06T08:40:20.729667
2014-11-26T20:18:24
2014-11-26T20:18:24
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python #Filename: Mergesort.py #Author: Jon Poley #Uses merge sort to sort an array given as a comma separated string. #Ex: "4, 3, 5, 1" import sys import math def merge(array, min, mid, max): index = mid - min + 1 index2 = max - mid left = [] right = [] for i in range(0, index): left.append(array[min + i]) for j in range(0, index2): right.append(array[mid + j +1]) left.append(sys.maxint) right.append(sys.maxint) i = 0 j = 0 for k in range(min, max+1): if left[i] <= right[j]: array[k] = left[i] i = i + 1 else: array[k] = right[j] j = j + 1 def mergesort(array, min, max): if min < max: mid = int((min + max)/2) mergesort(array, min, mid) mergesort(array, mid +1, max) merge(array, min, mid, max) array = sys.argv[1] array = map(int, array.split(",")) Min = 0; Max = len(array) -1 Mid = math.floor((Max + Min)/2) mergesort(array, Min, Max) print array
UTF-8
Python
false
false
2,014
17,343,077,960,381
2d14bedd9ffec342fada4acf9a09b8d96f14a1e4
25087d59c4bee1c8a8c3363de71eaa704d628a5a
/test/python/qcqpsolver.py
44b4c256140911b36e1292ffb69fe0c49f4f27e2
[ "LGPL-3.0-only" ]
non_permissive
zhenglei-gao/casadi
https://github.com/zhenglei-gao/casadi
604bf08b92187d3f42f372e5913c76ff5ebf89e4
c01d4951610263db03e5f6363ab0c7259ea13869
refs/heads/master
2021-01-14T13:16:20.594162
2014-04-03T11:48:58
2014-04-03T11:48:58
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# # This file is part of CasADi. # # CasADi -- A symbolic framework for dynamic optimization. # Copyright (C) 2010 by Joel Andersson, Moritz Diehl, K.U.Leuven. All rights reserved. # # CasADi is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 3 of the License, or (at your option) any later version. # # CasADi is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with CasADi; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # from casadi import * import casadi as c from numpy import * import unittest from types import * from helpers import * qcqpsolvers = [] try: qcqpsolvers.append((SOCPQCQPSolver,{"socp_solver": SDPSOCPSolver, "socp_solver_options": {"sdp_solver": DSDPSolver} },False)) except: pass class QCQPSolverTests(casadiTestCase): def testboundsviol(self): H = 1e-6*DMatrix([[1,0],[0,1]]) G = DMatrix([2,1]) A = DMatrix.sparse(0,2) P = 2*DMatrix([[1,0],[0,2]]) Q = DMatrix([2,3]) R = DMatrix([-7]) LBX = DMatrix([ -inf,-3 ]) UBX = DMatrix([ inf, -inf ]) for qcqpsolver, qcqp_options, re_init in qcqpsolvers: solver = qcqpsolver(qcqpStruct(a=A.sparsity(),p=P.sparsity(),h=H.sparsity())) solver.setOption(qcqp_options) solver.init() solver.setInput(H,"h") solver.setInput(G,"g") solver.setInput(A,"a") solver.setInput(P,"p") solver.setInput(Q,"q") solver.setInput(R,"r") solver.setInput(LBX,"lbx") solver.setInput(UBX,"ubx") with self.assertRaises(Exception): solver.solve() def test_bounds(self): # min 1/2 x' H x + 2 x + y # x,y # # s.t. x^2 + 2y^2 + 2*x + 3*y - 7 <= 0 H = 1e-6*DMatrix([[1,0],[0,1]]) G = DMatrix([2,1]) A = DMatrix.sparse(0,2) P = 2*DMatrix([[1,0],[0,2]]) Q = DMatrix([2,3]) R = DMatrix([-7]) LBX = DMatrix([ -inf, -inf ]) UBX = DMatrix([ inf, inf ]) for qcqpsolver, qcqp_options, re_init in qcqpsolvers: self.message("qcqpsolver: " + str(qcqpsolver)) solver = qcqpsolver(qcqpStruct(a=A.sparsity(),p=P.sparsity(),h=H.sparsity())) solver.setOption(qcqp_options) solver.init() solver.setInput(H,"h") solver.setInput(G,"g") solver.setInput(A,"a") solver.setInput(P,"p") solver.setInput(Q,"q") solver.setInput(R,"r") solver.setInput(LBX,"lbx") solver.setInput(UBX,"ubx") solver.solve() socp = solver.getSolver() self.checkarray(solver.getOutput(),DMatrix([-(sqrt(73)+3)/3,-(sqrt(73)+9)/12]),str(qcqpsolver),digits=5) self.checkarray(solver.getOutput("lam_x"),DMatrix([0,0]),str(qcqpsolver),digits=5) self.checkarray(solver.getOutput("lam_a"),DMatrix([]),str(qcqpsolver),digits=5) self.checkarray(solver.getOutput("cost"),mul(G.T,solver.getOutput()),str(qcqpsolver),digits=4) def test_qp(self): # min 1/2 x' H x + 2 x + y # x,y # H = DMatrix([[1,0],[0,1]]) G = DMatrix([2,1]) A = DMatrix.sparse(0,2) P = DMatrix.sparse(2,0) Q = DMatrix.sparse(0,1) R = DMatrix.sparse(0,1) LBX = DMatrix([ -inf, -inf ]) UBX = DMatrix([ inf, inf ]) for qcqpsolver, qcqp_options, re_init in qcqpsolvers: self.message("qcqpsolver: " + str(qcqpsolver)) solver = qcqpsolver(qcqpStruct(a=A.sparsity(),p=P.sparsity(),h=H.sparsity())) solver.setOption(qcqp_options) solver.init() solver.setInput(H,"h") solver.setInput(G,"g") solver.setInput(A,"a") solver.setInput(P,"p") solver.setInput(Q,"q") solver.setInput(R,"r") solver.setInput(LBX,"lbx") solver.setInput(UBX,"ubx") solver.solve() socp = solver.getSolver() self.checkarray(solver.getOutput(),DMatrix([-2,-1]),str(qcqpsolver),digits=5) self.checkarray(solver.getOutput("lam_x"),DMatrix([0,0]),str(qcqpsolver),digits=5) self.checkarray(solver.getOutput("lam_a"),DMatrix([]),str(qcqpsolver),digits=5) self.checkarray(solver.getOutput("cost"),-2.5,str(qcqpsolver),digits=4) if __name__ == '__main__': unittest.main()
UTF-8
Python
false
false
2,014
14,096,082,679,989
4a2ad827b35462efaa2c4028162422314a8ffc82
f6ad34f1eed97340f796ea083a71e6e2d38a3d26
/src/libs/lwip/SConstruct
8b06b31aca08f93f63e7ee4ace2da74fba04e072
[]
no_license
gz/aos10
https://github.com/gz/aos10
606abb223563c4f6df6f163c07b0290ab2d95795
b204e8fc29860ce03155a08f7e8d8748180a4f14
refs/heads/master
2020-03-26T02:38:11.918982
2011-02-10T12:39:06
2011-02-10T12:39:06
1,464,801
0
1
null
null
null
null
null
null
null
null
null
null
null
null
null
Import("env") public_headers = ["#libs/lwip/include", "#libs/lwip/include/ipv4"] srccorelist = """core/inet.c core/ipv4/icmp.c core/ipv4/ip.c core/ipv4/ip_addr.c core/mem.c core/memp.c core/netif.c core/pbuf.c core/stats.c core/stats.c core/sys.c core/tcp.c core/tcp_input.c core/tcp_output.c core/tcp_pcb.c core/udp.c""" srcotherlist = "netif/etharp.c sos/sosif.c" liblist = "c ixp_osal ixp400_xscale_sw" cppdefines = env["CPPDEFINES"] + ["LWIP_DEBUG", "l4aos"] cpppath = env["CPPPATH"] + ["#sos"] # Grab sos headers cc_warnings = env["CC_WARNINGS"] + ["no-redundant-decls", "no-format"] lib = env.MyLibrary("lwip", source = Split(srccorelist) + Split(srcotherlist), public_headers = public_headers, LIBS = Split(liblist), CPPDEFINES = cppdefines, CPPPATH = cpppath, CC_WARNINGS = cc_warnings) Return("lib") # vim: filetype=python
UTF-8
Python
false
false
2,011
17,575,006,207,728
3dfe5ad1bec554b123c2efb06525db1d5e93845b
caf4b86a585138df032dc1f3fc6575bf496647f4
/checkout/session.py
e5d556f85ce158af2447e9f65ce3358556782591
[ "GPL-3.0-only" ]
non_permissive
nka11/store-fr
https://github.com/nka11/store-fr
7e564eae15b0d0822a18e0e856edd82d755fe16b
080782a3030dcaefcf0fde0f8f6823d488932843
refs/heads/master
2016-09-10T22:16:10.659889
2014-08-05T17:03:06
2014-08-05T17:03:06
22,606,342
1
1
null
null
null
null
null
null
null
null
null
null
null
null
null
from django import http from django.contrib import messages from django.core.urlresolvers import reverse from oscar.apps.checkout import exceptions from oscar.apps.checkout.session import CheckoutSessionMixin as OscarCheckoutSessionMixin from django.utils.translation import ugettext as _ from checkout.utils import CheckoutSessionData class CheckoutSessionMixin(OscarCheckoutSessionMixin): def dispatch(self, request, *args, **kwargs): self.checkout_session = CheckoutSessionData(request) try: self.check_preconditions(request) except exceptions.FailedPreCondition as e: for message in e.messages: messages.warning(request, message) return http.HttpResponseRedirect(e.url) # call super() from superclass return super(OscarCheckoutSessionMixin, self).dispatch( request, *args, **kwargs) def check_user_cgu(self, request): if not self.checkout_session.get_cgu_status(): raise exceptions.FailedPreCondition( url=reverse('checkout:index'), message=_("Please accept CGU") )
UTF-8
Python
false
false
2,014
14,499,809,591,629
4f7d91307d8c63fa2de8ff96eb08f57ad2702fa0
c5a7d8b9f813989fbdeb51734e7acafe426cb048
/weibo_test.py
af2cdeed048c098b9b6b2697a591d326bec1944e
[]
no_license
jiafangdi-guang/PHSE-PyCommDete
https://github.com/jiafangdi-guang/PHSE-PyCommDete
f07a8758a7112560663b0ebea43b788936a9e7b9
c4290d5f0481980184dd7d63befce7c0c947bda2
refs/heads/master
2021-12-03T12:10:29.517661
2014-05-31T02:29:31
2014-05-31T02:29:31
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
__author__ = 'nourl' import networkx as nx from inputs.formal_edgelist import * from sys import exit from copy import deepcopy C = nx.DiGraph(formal_edgelist('./benchmark_directed_networks/network.dat')) print C.in_edges(2) print C.out_edges(2) eg = nx.DiGraph() outedges = C.out_edges(2) out_edges_final = deepcopy(outedges) a = len(outedges) print a for x in outedges: out_two = C.out_edges(x[1]) out_edges_final += out_two print len(out_edges_final),"_______",out_edges_final out_edges_weighted = [] for x in out_edges_final: out_edges_weighted.append((x[0],x[1],1)) eg.add_weighted_edges_from(out_edges_weighted) egnodes=eg.nodes() nw = {} for x in egnodes: weight = eg.degree(x,weight=True) * nx.closeness_centrality(C,x) nw[x] = weight #print "node_weighted: ",nw nw_nor = {} for key in nw.keys(): nw_nor[key] = nw[key]/nw[2] #print "nw_nor:",nw_nor nw_sorted = sorted(nw_nor.iteritems(), key=lambda x:x[1],reverse=True) #print "nw_sorted",nw_sorted w_mean = sum(nw_nor.itervalues())/len(eg) w_mean_filter = filter(lambda x:x>w_mean, nw_nor.itervalues()) w_mean_mean = sum(w_mean_filter)/len(w_mean_filter) #print "w_mean: ",w_mean #print "w_mean_mean: ",w_mean_mean nw_percent = [] nw_percent_dic = {} for nwx in nw_sorted: if nwx[0] != 2: nw_percent.append((nwx[0],nwx[1])) nw_percent_dic[2] = nw_percent print "nw_percent_dic",nw_percent_dic
UTF-8
Python
false
false
2,014
2,465,311,232,903
a8758970de058f3aec6eea006d10f798453ce5ac
39759112ee3a84aa78b15be8cc4888ff6a6b1bc0
/webcast/models.py
aa8c9fb86f737620b328851969ae43f9906693d3
[]
no_license
ecolemo/showbox
https://github.com/ecolemo/showbox
bd8b5c8eb30fc3704a7aaf559c0fa0820014a8f7
6cb0f3d6394897ebb34f0602787793c8a49f0953
refs/heads/master
2021-01-22T14:45:38.704992
2011-12-03T05:46:57
2011-12-03T05:46:57
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.db import models import feedparser import time from datetime import datetime, timedelta from webcast.scheduler import Scheduler from django.db import IntegrityError from django.conf import settings from django.db import transaction class Channel(models.Model): seq = models.IntegerField(default=99) name = models.CharField(max_length=100, unique=True) def __unicode__(self): return self.name class Feed(models.Model): url = models.CharField(max_length=500) title = models.CharField(max_length=500, null=True) channel = models.ForeignKey(Channel) def __unicode__(self): return '[' + self.channel.name + '] ' + self.title + ' ---- ' + self.url class Entry(models.Model): feed = models.ForeignKey(Feed) link = models.CharField(max_length=500) title = models.CharField(max_length=500) updated_at = models.DateTimeField() screenshot_path = models.CharField(max_length=500) def __unicode__(self): return self.link class UpdateLog(models.Model): updated_at = models.DateTimeField(auto_now=True) count = models.IntegerField() class CastUpdater(object): instance = None PERIOD = 300 @staticmethod def getInstance(): if not CastUpdater.instance: CastUpdater.instance = CastUpdater() return CastUpdater.instance def __init__(self): self.sched = Scheduler(self.update, CastUpdater.PERIOD) self.recent_entries = [] def start(self): self.sched.start() self.next_update_time = datetime.today() + timedelta(seconds=CastUpdater.PERIOD) def running(self): return self.sched.timer != None def update(self): self.last_update_time = datetime.today() self.next_update_time = datetime.today() + timedelta(seconds=CastUpdater.PERIOD) self.recent_entries = [] feeds = Feed.objects.all() for feed in feeds: d = feedparser.parse(feed.url) print feed.url feed.title = d.feed.title feed.save() for e in d.entries: updated_at = datetime.today() if 'updated_parsed' in e: updated_at = datetime.fromtimestamp((time.mktime(e.updated_parsed))) + timedelta(hours=9) link = e.link if '/http://' in e.link: link = link[link.find('/http://') + 1:] try: Entry.objects.get(link=link) except Entry.MultipleObjectsReturned: pass except Entry.DoesNotExist: entry = Entry.objects.create(feed=feed, title=e.title, link=link, updated_at=updated_at) self.recent_entries.append(entry) UpdateLog.objects.create(count=len(self.recent_entries)) def count(self): if not self.recent_entries: return 0 return len(self.recent_entries)
UTF-8
Python
false
false
2,011
5,239,860,115,272
f3dbd835003ffc8074ab7655631206c205ab2684
2dc33f2fd71c1a0063183f26751a8ef4a2f2cfe9
/backend/utils.py
86eb8df0074ac5daa6ac964e75bc9db538d53903
[]
no_license
ruiaf/sumnews
https://github.com/ruiaf/sumnews
40c6ab773738ec3b75474372d9a8bdab85022a4b
a93e0757046015b5fa785c6fcf95467b505a6912
refs/heads/master
2016-09-08T02:35:21.091167
2014-04-05T17:17:30
2014-04-05T17:17:30
17,527,511
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
def max2(iterable): """ :param iterable: An iterable :return: A list with with the 2 largest elements in the iterable """ first = None second = None for ele in iterable: if first is None or ele > first: second = first first = ele elif second is None or ele > second: second = ele return [first, second]
UTF-8
Python
false
false
2,014
12,859,132,112,253
9647983bbed0bf4a6b5eeec8332b5fbb895eb9f2
1ff6c9a930d94a5e1d536b103a1c3869222d0d56
/main_window.py
739458b888bf64e703cab41cec9e82f8709bf20a
[ "MIT" ]
permissive
ahlfors/yeelink_tester
https://github.com/ahlfors/yeelink_tester
9acfe357195a337c9c1689d4072c56feaed552ef
b2a616e64afcb0d82753059ef43f813dee8b5132
refs/heads/master
2021-01-18T08:26:46.133488
2014-09-22T02:07:39
2014-09-22T02:07:39
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- """ Yeelink Tester by wendal.net """ # 修正windows UTF-8控制台下报错 import codecs codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None) from PyQt4.QtGui import * from PyQt4.QtCore import * from Ui_main_window import Ui_MainWindow import urllib2 import json import traceback import serial.tools.list_ports from threading import Thread import time import paho.mqtt.client as mqtt import socket #日志头部的标签 TAG_SELF = "SELF" TAG_API = "API" TAG_MQTT = "MQTT" TAG_MOCK = "MOCK" #传感器表格每列的含义 SENSOR_COLUMN_ID = 0 SENSOR_COLUMN_NAME = 1 SENSOR_COLUMN_TYPE = 2 SENSOR_COLUMN_VALUE = 3 SENSOR_COLUMN_DATA_WRITE = 4 SENSOR_COLUMN_DATA_READ = 5 SENSOR_COLUMN_UPDATE_TIME = 6 #传感器类型 SENSOR_TYPE_NUMBER = "0" SENSOR_TYPE_GPS = "6" SENSOR_TYPE_IMAGE = "9" SENSOR_TYPE_SWITCH = "5" SENSOR_TYPE_RAW = "8" # 读数据的key READ_KEY = "r_key" # 上传数据的前缀 WRITE_KEY = "w_key" YEELINK = "yeelink" UIOT = "uiot" # 传感器类型的中文对应 sensor_type_map = { SENSOR_TYPE_NUMBER : u"数值型", SENSOR_TYPE_IMAGE : u"图像型", SENSOR_TYPE_SWITCH : u"开关型", SENSOR_TYPE_GPS : u"GPS型", SENSOR_TYPE_RAW : u"泛型", "number" : u"数值型", "gps" : u"地理位置型", "kv" : u"泛型", "onoff" : u"开关型", "image" : u"图像型" } class MainWindow(QMainWindow, Ui_MainWindow): """ Class documentation goes here. """ def __init__(self, parent = None): """ Constructor """ QMainWindow.__init__(self, parent) self.setupUi(self) # 初始化日志输出timer, 因为Qt的UI更新不能在子线程中执行 self.log_timer = QTimer() self.logs = [] self.log_timer.setInterval(1) self.log_timer.start(1) self.connect(self.log_timer, SIGNAL("timeout()"), self.append_log) # 初始化传感器表格更新timer self.table_data = [] self.table_timer = QTimer() self.table_timer.setInterval(1) self.table_timer.start(1) self.connect(self.table_timer, SIGNAL("timeout()"), self.table_update) # 启动完成, 自然卖卖广告咯... self.D(TAG_SELF, u"启动完成 . Power by wendal http://wendal.net") def apikey(self): """全局获取API KEY的帮助方法""" return unicode(self.ui_text_uapikey.text()) def devid(self): """当前的设计只允许一个设备,所以全局来吧""" return unicode(self.ui_combo_devid.currentText()).split(" ")[0] def srv_type(self): return str(self.ui_txt_srv_type.currentText()) def api_url(self, uri): if self.srv_type() == YEELINK : return "http://" + str(self.ui_txt_srv_api_url.text()) + "/v1.1" + uri elif self.srv_type() == UIOT : return "http://" + str(self.ui_txt_srv_api_url.text()) + "/iot" + uri def mqtt_topit(self, sensor_id): if self.srv_type() == YEELINK : return "v1.1/device/%s/sensor/%s/datapoints" % (self.devid(), str(sensor_id)) elif self.srv_type() == UIOT : return "iot/sensor/%s" % (str(sensor_id),) def mqtt_srv(self): if self.srv_type() == YEELINK : return "mqtt.yeelink.net" elif self.srv_type() == UIOT : srv = str(self.ui_txt_srv_api_url.text()) if ":" in srv : return srv[:srv.index(":")] return srv def yeelink_send(self, uri, data): url = self.api_url(uri) req = urllib2.Request(url, data) req.add_header("U-ApiKey", self.apikey()) if data : self.D(TAG_API+".W", u"POST " + url) try : self.D(TAG_API+".W", str(data)) except: self.D(TAG_API+".W", u"...") else : self.D(TAG_API+".W", u"GET " + url) try : resp = urllib2.urlopen(req) self.D(TAG_API + ".R", u"code=%d" % resp.code) return resp.read() except: self.D(TAG_API, u"FAIL" + traceback.format_exc()) raise def D(self, TAG, msg): """日志方法""" self.logs.append(QString("%-5s > %s\r\n" % (TAG, msg))) def append_log(self): tmp = self.logs self.logs = [] if not tmp : return for p in tmp : self.ui_debug_console.moveCursor(QTextCursor.End) self.ui_debug_console.insertPlainText(p) sb = self.ui_debug_console.verticalScrollBar() sb.setValue(sb.maximum()) def table_update(self): tmp = self.table_data self.table_data = [] if not tmp : return for row,column,s in tmp : self.ui_table_sensors.setItem(row, column, QTableWidgetItem(s)) def mqtt_sensor_run(self, sensor): """MQTT监听""" try : mqttc = mqtt.Client() def on_message(client, userdata, msg): self.D(TAG_SELF, "MQTT sensor update %s %s > %s" % (str(sensor["id"]), sensor["title"], str(msg.payload))) try : re = json.loads(msg.payload) s = "%s%s" % (sensor[WRITE_KEY], re["value"]) self.D(self.ser.port, s) self.ser.write(s + "\n") except: traceback.print_exc() def on_connect(client, userdata, flags, rc): self.D(TAG_SELF, "MQTT Connected with result code "+str(rc)) #topic = "u/%s/v1.1/device/%s/sensor/%s/datapoints" % (self.apikey(), self.devid(), str(sensor["id"])) #print topic topic = self.mqtt_topit(sensor["id"]) try : mqttc.subscribe([(str(topic), 0), ]) except: pass mqttc.on_message = on_message mqttc.on_connect = on_connect #mqttc.connect("mqtt.yeelink.net") mqttc.username_pw_set(str(self.ui_txt_username.text()), self.apikey()) mqttc.connect(self.mqtt_srv()) mqttc.loop_forever() except: self.D(TAG_SELF, u"MQTT 启动失败 : " + traceback.format_exc()) @pyqtSignature("") def on_ui_button_help_pressed(self): """ Slot documentation goes here. """ # TODO: not implemented yet QMessageBox.about(self, u"帮助", "http://wendal.net") @pyqtSignature("") def on_ui_button_check_api_pressed(self): """ Slot documentation goes here. """ try : re = json.loads(self.yeelink_send("/devices", None)) if not re : QMessageBox.about(self, u"无可用设备", u"该密钥下的帐号无任何设备") return self.ui_combo_devid.clear() for dev in re : #print dev self.ui_combo_devid.addItem(QString("%s %s" % (dev["id"], dev["title"]))) self.ui_button_get_sensors.setEnabled(True) self.ui_button_start_read.setEnabled(True) self.ui_button_check_api.setEnabled(False) self.ui_text_uapikey.setEnabled(False) if len(re) == 1 : self.D(TAG_SELF, u"只有一个设备,自动加载传感器") self.on_ui_button_get_sensors_pressed() except: traceback.print_exc() QMessageBox.about(self, u"密钥错误", u"密钥不对: " + self.apikey()) @pyqtSignature("") def on_ui_button_get_sensors_pressed(self): """ Slot documentation goes here. """ try : #print self.devid() sensors = json.loads(self.yeelink_send("/device/%s/sensors" % self.devid(), None)) self.ui_table_sensors.setRowCount(len(sensors)) index = 0 for sensor in sensors : sensor["row_index"] = index self.ui_table_sensors.setItem(index, SENSOR_COLUMN_ID, QTableWidgetItem(str(sensor["id"]))) self.ui_table_sensors.setItem(index, SENSOR_COLUMN_NAME, QTableWidgetItem(sensor["title"])) sensor_type = sensor_type_map.get(str(sensor["type"])) if not sensor_type : sensor_type = u"其他类型" self.ui_table_sensors.setItem(index, SENSOR_COLUMN_TYPE, QTableWidgetItem(sensor_type)) if sensor.get("last_data_gen") : self.ui_table_sensors.setItem(index, SENSOR_COLUMN_VALUE, QTableWidgetItem(sensor["last_data_gen"])) elif sensor.get("last_data") : self.ui_table_sensors.setItem(index, SENSOR_COLUMN_VALUE, QTableWidgetItem(sensor["last_data"])) if sensor.get("last_update") : self.ui_table_sensors.setItem(index, SENSOR_COLUMN_UPDATE_TIME, QTableWidgetItem(sensor["last_update"])) it = self.ui_table_sensors.item(index, SENSOR_COLUMN_DATA_WRITE) if not it : self.ui_table_sensors.setItem(index, SENSOR_COLUMN_DATA_WRITE, QTableWidgetItem("w"+str(sensor["id"])+":")) it = self.ui_table_sensors.item(index, SENSOR_COLUMN_DATA_READ) if not it : self.ui_table_sensors.setItem(index, SENSOR_COLUMN_DATA_READ, QTableWidgetItem("r"+str(sensor["id"]))) sensor["w_key"] = str(self.ui_table_sensors.item(index, SENSOR_COLUMN_DATA_WRITE).text()) sensor["r_key"] = str(self.ui_table_sensors.item(index, SENSOR_COLUMN_DATA_READ).text()) index += 1 if sensor["type"] in (SENSOR_TYPE_SWITCH, "onoff") : self.D(TAG_SELF, u"启动MQTT监听 sensor id=%s name=%s" % (str(sensor["id"]), sensor["title"])) t = Thread(target=self.mqtt_sensor_run, name=("Yeelink MQTT id=" + str(sensor["id"])), args=[sensor]) t.setDaemon(True) t.start() self.sensors = sensors #保存起来,这样就能快捷访问了 self.ui_button_get_sensors.setEnabled(False) self.ui_combo_devid.setEnabled(False) coms = sorted(serial.tools.list_ports.comports()) if coms : self.ui_text_com_number.clear() for port, _, _ in coms: self.ui_text_com_number.addItem(QString(port)) except: self.D(TAG_SELF, u"出错啦: " + traceback.format_exc()) @pyqtSignature("") def on_ui_button_clear_debug_pressed(self): """ Slot documentation goes here. """ self.ui_debug_console.clear() def com_run(self, ser): while self.com_reading : try : line = ser.readline() if not line : continue line = str(line).strip() self.D(ser.port+".R", line) self.handle_com_line(ser, line) except: traceback.print_exc() time.sleep(1) try : if ser.isOpen(): ser.close() except: traceback.print_exc() self.ui_button_stop_read.setEnabled(False) self.ui_button_start_read.setEnabled(True) def handle_com_line(self, ser, line): line = str(line).strip() if not line : return if line[0] == '[' or line[0] == '{' : try : try : j = json.loads(line) except: self.D(ser.port, u"非法的json字符串") return if not j : self.D(ser.port, u"没有包含任何数据") return if line[0] == '[' : for d in j : if d.get("sensor_id") : self.D(ser.port, u"数据是列表,且包含sensor_id,所以这是'多数据点(同一设备)', 执行上传") try : self.yeelink_send("/device/%s/datapoints" % self.devid(), line) except: self.D(ser.port, u"上传失败") return self.D(ser.port, u"数据是列表,但不包含sensor_id,所以这是'多数据点(单个设备)', 查找'数据上传'键为空字符的传感器") for sensor in self.sensors : if sensor[WRITE_KEY] == "" : self.D(ser.port, u"作为传感器[id=%s,name=%s]的数据进行上传") self.yeelink_send("/device/%s/sensor/%s/datapoints" % (self.devid(), str(sensor["id"])), line) return self.D(ser.port, u"没有找到'数据上传'键为空字符的传感器,忽略数据") return if len(self.sensors) == 1 : self.D(ser.port, u"只有一个传感器, 而且数据看上去ok, 那就上传吧") self.yeelink_send("/device/%s/sensor/%s/datapoints" % (self.devid(), str(sensor["id"])), line) return for sensor in self.sensors : if sensor[WRITE_KEY] == "" : self.D(ser.port, u"作为传感器[id=%s,name=%s]的数据进行上传") self.yeelink_send("/device/%s/sensor/%s/datapoints" % (self.devid(), str(sensor["id"])), line) return self.D(ser.port, u"没有找到'数据上传'键为空字符的传感器,忽略数据") return except: self.D(ser.port, u"出错了") for sensor in self.sensors : if line.startswith(sensor[READ_KEY]) : self.D(ser.port, u"与传感器[id=%s, name=%s]的'数据读取'键匹配" % (str(sensor["id"]), sensor["title"])) try : re = self.yeelink_send("/device/%s/sensor/%s/datapoints" % (self.devid(), str(sensor["id"])), None) re = json.loads(re) if re.get("key") : msg = sensor[WRITE_KEY] + json.dumps(re) + "\n" else : msg = sensor[WRITE_KEY] + json.dumps(re.get("value")) + "\n" self.D(ser.port + ".W", msg) ser.write(msg) except: self.D(ser.port, u"出错了" + traceback.format_exc()) return for sensor in self.sensors : if line.startswith(sensor[WRITE_KEY]) : data = line[len(sensor[WRITE_KEY]):] if not data : self.D(ser.port, u"没数据") return if data[0] == ":" : data = data[1:] if data[0] == '{' : try : try : re = json.loads(data) except: self.D(ser.port, u"非法的JSON字符串" + traceback.format_exc(2)) return if re and re.get("value") : self.yeelink_send("/device/%s/sensor/%s/datapoints" % (self.devid(), str(sensor["id"])), data) return except: self.D(ser.port, "Bad Bad") self.yeelink_send("/device/%s/sensor/%s/datapoints" % (self.devid(), str(sensor["id"])), """{"value":%s}""" % data) return self.D(ser.port, u"没匹配任何传感器") @pyqtSignature("") def on_ui_button_start_read_pressed(self): """ Slot documentation goes here. """ try : self.D(TAG_SELF, u"尝试打开串口 ... ") ser = serial.Serial() ser.baudrate = int(str(self.ui_text_com_bitrate.currentText())) ser.bytesize = int(str(self.ui_text_com_databit.currentText())) ser.stopbits = int(str(self.ui_text_com_stopbit.currentText())) ser.port = str(self.ui_text_com_number.currentText()) ser.timeout = 3 ser.open() self.D(TAG_SELF, u"打开串口成功") self.ui_button_start_read.setEnabled(False) self.ui_button_stop_read.setEnabled(True) self.ser = ser t = Thread(target=self.com_run, args=[ser], name="Yeelink COM Listener", ) t.setDaemon(True) self.com_reading = True t.start() except: traceback.print_exc() self.D(TAG_SELF, u"串口打开识别!!" + traceback.format_exc()) @pyqtSignature("") def on_ui_button_stop_read_pressed(self): """ Slot documentation goes here. """ self.D(TAG_SELF, u"触发串口关闭") self.com_reading = False @pyqtSignature("") def on_ui_button_mock_start_pressed(self): """ Slot documentation goes here. """ try : t = Thread(name="yeelink api proxy", target=self.yeelink_api_proxy) t.setDaemon(True) t.start() self.ui_button_mock_stop.setEnabled(True) self.ui_button_mock_start.setEnabled(False) self.mock_running = True self.D(TAG_MOCK, u"启动成功") except: self.D(TAG_MOCK, u"启动失败" + traceback.format_exc()) @pyqtSignature("") def on_ui_button_mock_stop_pressed(self): """ Slot documentation goes here. """ # TODO: not implemented yet self.mock_running = False self.D(TAG_MOCK, u"关闭") self.ui_button_mock_start.setEnabled(True) self.ui_button_mock_stop.setEnabled(False) @pyqtSignature("") def on_ui_button_api_test_pressed(self): """ Slot documentation goes here. """ # TODO: not implemented yet import yeelink_api_test t = yeelink_api_test.YeelinkTestDialog(self) t.ui_text_uapikey.setText(self.ui_text_uapikey.text()) try : t.ui_text_url.clear() for sensor in self.sensors : t.ui_text_url.addItem(QString(self.api_url("/device/%s/sensor/%s/datapoints" % (self.devid(), str(sensor["id"]))))) except: pass t.show() def yeelink_api_proxy(self): PORT = int(str(self.ui_spin_mock_port.text())) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #s.settimeout(3) s.bind(("", PORT)) s.listen(PORT) while self.mock_running : conn = None _out = None try: conn, addr = s.accept() self.D(TAG_MOCK, "Connected by " + str(addr)) _in = conn.makefile() _out = conn.makefile("w") try : #读取请求头 data = _in.read(4) if str(data) != "POST" : self.D(TAG_MOCK, u"不是POST请求,拒绝之.") continue data = _in.read(1) if str(data) != " " : self.D(TAG_MOCK, u"POST之后的不是空格,非法请求") continue #开始读取URI data = "" for i in xrange(1024) : d = _in.read(1) if d == ' ' : self.D(TAG_MOCK, u"读取到URI之后的空格, 识别URI为 " + data) break else : data += str(d) if i == 1023 : self.D(TAG_MOCK, u"读取1024字节之后还没结束, URI太长了,拒绝") data = None if data == None : continue if data == "" : self.D(TAG_MOCK, u"URI以空格开头,肯定是多输入了一个空格导致的,拒绝") continue #然后就是HTTP/1.1或者HTTP/1.0,然后接\r\n data = _in.read(len("HTTP/1.0\r\n")) #print data if not str(data).startswith("HTTP/1.0") and not str(data).startswith("HTTP/1.1") : self.D(TAG_MOCK, u"请求行不包含HTTP/1.0或HTTP/1.1,拒绝") continue if not str(data).endswith("\r\n") : self.D(TAG_MOCK, u"请求行不是以\\r\\n结束,拒绝") continue key_ok = False cnt_len = 0 while 1 : header_line = "" while 1 : d = _in.read(1) if d == '\n' : break header_line += str(d) if header_line == "" : self.D(TAG_MOCK, u"检测到非法的Header,拒绝") break if header_line == "\r" : self.D(TAG_MOCK, u"检测到Header结束") break header_line = header_line.strip() self.D(TAG_MOCK, "Read Header --> " + str(header_line)) if header_line.startswith("U-ApiKey: ") : self.D(TAG_MOCK, u"检测到U-ApiKey,对比本地数据中"); _key = header_line.split(" ", 2)[1] if _key == self.apikey() : self.D(TAG_MOCK, u"U-ApiKey合法") key_ok = True else : self.D(TAG_MOCK, u"U-ApiKey不合法 [%s] [%s]" % (_key, self.apikey())) break elif header_line.startswith("Content-Length: ") : self.D(TAG_MOCK, u"检测到Content-Length: ") try : cnt_len = int(header_line.split(" ", 2)[1]) self.D(TAG_MOCK, u"获取到请求主体的长度为" + str(cnt_len)) except: self.D(TAG_MOCK, u"Content-Length 不是合法的整数值") break if not key_ok : self.D(TAG_MOCK, u"没有在Header里面找到合法U-ApiKey,拒绝") continue if cnt_len < 5 : self.D(TAG_MOCK, u"请求体太小,肯定不合法") continue #开始读取body try : body = _in.read(cnt_len) j = json.loads(body) self.D(TAG_MOCK, u"请求中的JSON数据(经过格式化) --> " + json.dumps(j)) if not j.get("value") : self.D(TAG_MOCK, u"数据里面没有名为value的键,肯定非法") break if j.get("key") : self.D(TAG_MOCK, u"看来是泛型数据,放行") elif j.get("value") : if json.dumps(j.get("value")).startswith("{") : self.D(TAG_MOCK, u"看上去是GPS数据,分析里面的key") gps = j.get("value") if not gps.get("lat") : self.D(TAG_MOCK, u"缺失lan值") continue if not gps.get("lng") : self.D(TAG_MOCK, u"缺失lng值") continue if str(gps.get("speed")) == "None" : self.D(TAG_MOCK, u"缺失speed值") continue self.D(TAG_MOCK, u"GPS数据 看上去合法") else : self.D(TAG_MOCK, u"看来不是GPS,那只能是数值型数据了,校验之") if isinstance(j.get("value"), float) : self.D(TAG_MOCK, u"看来是合法的数值") else : self.D(TAG_MOCK, u"不是JSON格式中的数值,拒绝") break else : self.D(TAG_MOCK, u"数据里面没有名为key或timestamp的键,肯定非法") break # 看来是合法的哦, 返回个赞 _out.write("HTTP/1.1 200 OK\r\nPower: wendal\r\nContent-Length: 0\r\n\r\n") _out.flush() conn.shutdown(1) conn.close() conn = None except: self.D(TAG_MOCK, u"yeelink上传的数据必然是json格式,然后它报错了,所以,你的数据不是合法JSON!!" + traceback.format_exc()) break except: self.D(TAG_MOCK, u"出错了!!" + traceback.format_exc()) except: traceback.print_exc() finally: if conn != None : try : self.D(TAG_MOCK, u"关闭连接 " + str(conn)) _out.write("HTTP/1.1 403 Error\r\nPower: wendal\r\nContent-Length: 0\r\n\r\n") _out.flush() conn.shutdown(1) conn.close() except: self.D(TAG_MOCK, u"关闭连接失败!!" + traceback.format_exc()) s.close() @pyqtSignature("QTableWidgetItem*") def on_ui_table_sensors_itemChanged(self, item): """ Slot documentation goes here. """ try : _ = self.sensors except: return sensor = self.sensors[item.row()] if item.column() == SENSOR_COLUMN_DATA_READ : sensor[READ_KEY] = str(item.text()) self.D(TAG_SELF, u"传感器[id=%s, name=%s]的'数据读取'键修改为%s" % (str(sensor["id"]), sensor["title"], sensor[READ_KEY])) return if item.column() == SENSOR_COLUMN_DATA_WRITE : sensor[WRITE_KEY] = str(item.text()) self.D(TAG_SELF, u"传感器[id=%s, name=%s]的'数据上传'键修改为%s" % (str(sensor["id"]), sensor["title"], sensor[WRITE_KEY])) return # TODO 如果修改的是值, 发请求更新服务器的值
UTF-8
Python
false
false
2,014
19,602,230,768,147
03c818bca550b6ef427c3b0ad883f0f4266a08ac
a1892072674ac9adbbf21bc221d3fdfc337a9268
/medium/endianness.py2
96889635ff4d22126342d018c4b1ec68ec357f4d
[]
no_license
Makpoc/codeeval
https://github.com/Makpoc/codeeval
27bc83c1a52203b7e84d5102175e5c0385beaa62
791f7024b09b77e12d315475031b2ea11eb3013f
refs/heads/master
2020-05-17T19:37:52.453726
2014-07-03T07:54:28
2014-07-03T07:54:28
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python2.7 # encoding=utf-8 import sys if __name__ == '__main__': if 'little' in sys.byteorder.lower(): print 'LittleEndian' else: print 'BigEndian'
UTF-8
Python
false
false
2,014
7,722,351,245,653
f2bbb3f700b9ef2da6796a1749f9f66dc8eee490
a29c1ab6370ccd86b87b077294523cb674816ea8
/migrations/versions/creates_craiglist_listing-3d08656dfba2.py
690804cf307bc730539b478a47aebff463eff071
[]
no_license
jessedhillon/bonanza
https://github.com/jessedhillon/bonanza
a677a98e37b827e3dcf8fec903b9cdd288f88543
a20f446884b10ad6ea0596440175b798aac03084
refs/heads/master
2016-09-06T08:36:37.431405
2014-12-09T06:57:13
2014-12-09T06:57:13
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""creates craiglist listing Revision ID: 3d08656dfba2 Revises: 168d2892efbe Create Date: 2014-10-02 16:51:06.228153 """ # revision identifiers, used by Alembic. revision = '3d08656dfba2' down_revision = '168d2892efbe' from alembic import op import sqlalchemy as sa from batteries.model.types import Ascii, UTCDateTime from geoalchemy2 import Geometry from sqlalchemy.dialects import postgresql def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table('craigslist_listing', sa.Column('key', Ascii(length=40), nullable=False), sa.Column('id', sa.Unicode(length=20), nullable=False), sa.Column('title', sa.UnicodeText(), nullable=False), sa.Column('url', sa.Unicode(length=200), nullable=False), sa.Column('image_thumbnail_url', sa.Unicode(length=300), nullable=True), sa.Column('bedrooms', sa.Integer(), nullable=True), sa.Column('posted_date', sa.Date(), nullable=False), sa.Column('ask', sa.Numeric(precision=12, scale=2), nullable=False), sa.Column('location', Geometry(geometry_type='POINT'), nullable=True), sa.Column('ctime', UTCDateTime(), nullable=True), sa.Column('mtime', UTCDateTime(), nullable=True), sa.PrimaryKeyConstraint('key', name=op.f('pk_craigslist_listing')) ) op.drop_table('listing') ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table('listing', sa.Column('key', sa.VARCHAR(length=40), autoincrement=False, nullable=False), sa.Column('location', Geometry(geometry_type=u'POINT'), autoincrement=False, nullable=True), sa.Column('ctime', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), sa.Column('mtime', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), sa.PrimaryKeyConstraint('key', name=u'pk_listing') ) op.drop_table('craigslist_listing') ### end Alembic commands ###
UTF-8
Python
false
false
2,014
6,751,688,590,674
cd4847edb6ab198e65b3bcc32942a6194bc0be5b
018a5c8bfeb0e010deb67a6dfeeacce414fb3dc5
/script-events/matrix_io.py
2063f33d8baf3600c190e1e061863ef6922d0c91
[]
no_license
tiberiu-popa/historic-events-research
https://github.com/tiberiu-popa/historic-events-research
4ea9f127d02f9dd03ef27d32430fd442bb72cdb1
661932d22bf64605737103ddbdb9a7308e5347a2
refs/heads/master
2020-05-04T15:54:31.585130
2013-06-25T05:54:23
2013-06-25T05:54:23
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python from __future__ import print_function import csv import numpy as np import os from scipy.sparse import csr_matrix def save_csr_matrix(filename, matrix): np.savez(filename, data=matrix.data, indices=matrix.indices, indptr=matrix.indptr, shape=matrix.shape) def load_csr_matrix(filename): zmat = np.load(filename) return csr_matrix((zmat['data'], zmat['indices'], zmat['indptr']), zmat['shape'], dtype=np.uint8) def save_csv_as_csr_matrix(csv_filename, matrix_filename): values = [ ] row_indices = [ ] column_indices = [ ] m = 0 n = 0 with open(csv_filename, 'r') as f: reader = csv.reader(f) for i, row in enumerate(reader): if i % 10000 == 0: print('Progress:', i) for j, elem in enumerate(map(int, row)): if elem != 0: values.append(elem) row_indices.append(i) column_indices.append(j) n = max(n, len(row)) m += 1 indices = (row_indices, column_indices) matrix = csr_matrix((values, indices), (m, n), dtype=np.uint8) save_csr_matrix(matrix_filename, matrix) def transforms_csvs(in_directory, out_directory): filenames = os.listdir(in_directory) filenames.sort() for filename in filenames: base_filename = os.path.splitext(filename)[0] full_filename = os.path.join(in_directory, filename) print('Processing', full_filename) matrix_filename = os.path.join(out_directory, base_filename + '.npz') if not os.path.isfile(matrix_filename): save_csv_as_csr_matrix(full_filename, matrix_filename) def read_matrices(directory): filenames = os.listdir(directory) matrices = { } for filename in filenames: base_filename = os.path.splitext(filename)[0] full_filename = os.path.join(directory, filename) matrix = load_csr_matrix(full_filename) matrices[base_filename] = matrix return matrices def main(): csv_directory = os.path.join('data', 'relevance') sparse_directory = os.path.join('data', 'sparse_relevance') transforms_csvs(csv_directory, sparse_directory) matrices = read_matrices(sparse_directory) print(matrices) if __name__ == '__main__': main()
UTF-8
Python
false
false
2,013
17,428,977,309,518
25ceb40c8b3145a9f3071fafb383c08dd7817783
d407f3bdbcdf70920bb8f0790c401dfb023af5de
/sound/sound.gypi
9a20d55d0b502dfbcf6e022162b7e3518c88f6c2
[ "LicenseRef-scancode-unknown-license-reference", "BSD-2-Clause" ]
non_permissive
mathall/nanaka
https://github.com/mathall/nanaka
3f02ffb4f2e19af3446d43af61226c122b18498c
0304f444702318a83d221645d4e5f3622082c456
refs/heads/master
2016-09-11T04:01:22.986788
2014-04-16T20:31:46
2014-04-26T12:56:01
11,401,646
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
{ 'sources': [ '../sound/OggDecoder.cpp', '../sound/OggDecoder.h', '../sound/Sound.cpp', '../sound/Sound.h', '../sound/SoundDecoder.h', '../sound/SoundLoader.cpp', '../sound/SoundLoader.h', '../sound/SoundResource.h', ], }
UTF-8
Python
false
false
2,014
3,358,664,466,613
372d7b270fc8afec6279b0ad7be9dcd495529a27
f60898b49d9b6b1d71da954313bb4962f1201a4b
/flacon/config.py
076652ac80fd66644a2f43d7531db07b67b63200
[ "BSD-3-Clause" ]
permissive
bayazee/flacon
https://github.com/bayazee/flacon
6cb0067a9762b89274c5e43083186daf31416f7d
2e5833cf98a137df968a9257467a1a041ce66de9
refs/heads/master
2021-01-01T15:55:06.120732
2013-04-28T10:02:12
2013-04-28T10:02:12
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
class DefaultConfig(object): # Essentials DEBUG = True DEPLOYMENT = False SECRET_KEY = 'SECRET_KEY' MAIN_URL = 'http://127.0.0.1:5000' MAIN_STATIC_URL = 'http://static.127.0.0.1:5000' INSTALLED_EXTENSIONS = [] INSTALLED_BLUEPRINTS = [] if DEBUG: LOG_FORMAT = '\033[1;35m[%(asctime)s]\033[1;m [\033[1;31m %(levelname)s \033[1;m] \033[1;32m[%(logger_name)s]\033[1;m: \ \033[1;33m %(message)s \033[1;m' else: LOG_FORMAT = '[%(asctime)s] %(levelname)s [%(logger_name)s]: %(message)s'
UTF-8
Python
false
false
2,013