        path = urllib.parse.urlparse(self.path)\n        fs_path = pathlib.Path(\"{}{}\".format(self.cwd, path.path))\n        prefix_ref = \"{}/\".format(path.path)\n        if fs_path.is_file():\n            body = body_file_cat\n            content = \"\"\n            with fs_path.open() as f:\n                content = \"\".join(f.readlines())\n            content = \"<pre>{}</pre>\".format(content)\n            body = body.substitute(content=content)\n\n        else:\n            body = body_dir_list\n            items = list()\n            item_template = string.Template('<li><a href=\"$item_path\">$item_name</a></li>')\n            for p in fs_path.iterdir():\n                item_path = urllib.parse.urljoin(prefix_ref, p.name)\n                item_name = p.name\n                if os.path.isdir(p):\n                    item_name = \"{}/\".format(item_name)\n                items.append(item_template.substitute(item_path=item_path, item_name=item_name))\n            body = body.substitute(cwd=fs_path, items=\"\\n\".join(items))\n\n        page = page.substitute(cwd=fs_path, body=body)\n\n        self.send_response(200)\n        self.send_header(\"Content-type\", \"text/html\")\n        self.end_headers()\n        self.wfile.write(page.encode(\"UTF-8\"))\n\n\nif __name__ == '__main__':\n    main()\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2228,"cells":{"commit":{"kind":"string","value":"8fa81263cfcc63f6bf22ed2ad50103f91bc43b21"},"subject":{"kind":"string","value":"Create hira.py"},"repos":{"kind":"string","value":"tomohiko/8946"},"old_file":{"kind":"string","value":"hira.py"},"new_file":{"kind":"string","value":"hira.py"},"new_contents":{"kind":"string","value":"#coding:utf-8\nimport hashlib\n\nstart = ord(u'あ')\nend = ord(u'ん')\n\nhira = []\nprint \"Create hiragana\"\nfor i in range(start, end+1, 1):\n    hira.append(unichr(i).encode('utf-8'))\n\nnum = len(hira)\nfor i4 in range(num):\n    for i3 in range(num):\n        for i2 in range(num):\n            for i1 in range(num):\n                msg = hira[i1] + hira[i2] + hira[i3] + hira[i4]\n                print msg,\n                print hashlib.md5(msg).hexdigest()\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2229,"cells":{"commit":{"kind":"string","value":"92bc1ad22b6147f61ef4b51b16e115109bc04596"},"subject":{"kind":"string","value":"add build.gyp"},"repos":{"kind":"string","value":"256481788jianghao/opengl_test,256481788jianghao/opengl_test,256481788jianghao/opengl_test"},"old_file":{"kind":"string","value":"build.gyp"},"new_file":{"kind":"string","value":"build.gyp"},"new_contents":{"kind":"string","value":"{\n  'targets':[\n    {\n      'target_name':'start_first',\n      'type':'executable',\n      'dependencies':[],\n      'defines':[],\n      'include_dirs':[],\n      'sources':[\n        'start_first/opengl_first.c',\n      ],\n      'libraries':[\n        '-lGLU -lGL -lglut'\n      ],\n      'conditions':[]\n    }\n  ],\n}\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2230,"cells":{"commit":{"kind":"string","value":"45a0b65106f665872f14780e93ab9f09e65bbce3"},"subject":{"kind":"string","value":"add genRandomGraph.py"},"repos":{"kind":"string","value":"zhfkt/ComplexCi,zhfkt/ComplexCi,zhfkt/ComplexCi,zhfkt/ComplexCi,zhfkt/ComplexCi"},"old_file":{"kind":"string","value":"ComplexCiPython/genRandomGraph.py"},"new_file":{"kind":"string","value":"ComplexCiPython/genRandomGraph.py"},"new_contents":{"kind":"string","value":"import networkx\nimport sys\n\nif len(sys.argv) < 2:\n\n\tprint (\"python genRandomGraph.py [output folder]\");\n\tinput()\n\tsys.exit(0);\n\noutputPath = sys.argv[1]\n\nG=networkx.erdos_renyi_graph(100000,3/100000)\nnetworkx.write_edgelist(G, outputPath + \"/genRandomGraph.csv\", data=False , delimiter=',')\n\n\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2231,"cells":{"commit":{"kind":"string","value":"3b15fb1d43bad6d6cf2112538d1de8c1710d0272"},"subject":{"kind":"string","value":"add test for 
within_page_range"},"repos":{"kind":"string","value":"ScorpionResponse/freelancefinder,ScorpionResponse/freelancefinder,ScorpionResponse/freelancefinder"},"old_file":{"kind":"string","value":"freelancefinder/freelancefinder/tests/test_within_page_range_templatetag.py"},"new_file":{"kind":"string","value":"freelancefinder/freelancefinder/tests/test_within_page_range_templatetag.py"},"new_contents":{"kind":"string","value":"\"\"\"Test the within_page_range function.\"\"\"\n\nfrom ..templatetags.within_page_range import within_filter\n\n\ndef test_in_range_above():\n \"\"\"One page above current should be displayed.\"\"\"\n test_page = 5\n current_page = 4\n\n result = within_filter(test_page, current_page)\n assert result\n\n\ndef test_in_range_below():\n \"\"\"One page below current should be displayed.\"\"\"\n test_page = 3\n current_page = 4\n\n result = within_filter(test_page, current_page)\n assert result\n\n\ndef test_out_of_range_above():\n \"\"\"20 pages above current should not be displayed.\"\"\"\n test_page = 74\n current_page = 54\n\n result = within_filter(test_page, current_page)\n assert not result\n\n\ndef test_out_of_range_below():\n \"\"\"20 pages below current should not be displayed.\"\"\"\n test_page = 34\n current_page = 54\n\n result = within_filter(test_page, current_page)\n assert not result\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"bsd-3-clause"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2232,"cells":{"commit":{"kind":"string","value":"0c315f766b31c105c60b39746db977d6702955ca"},"subject":{"kind":"string","value":"Remove unneeded model attributes"},"repos":{"kind":"string","value":"manhhomienbienthuy/pythondotorg,manhhomienbienthuy/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,python/pythondotorg,Mariatta/pythondotorg,manhhomienbienthuy/pythondotorg,manhhomienbienthuy/pythondotorg,python/pythondotorg,python/pythondotorg,python/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,proevo/pythondotorg"},"old_file":{"kind":"string","value":"successstories/views.py"},"new_file":{"kind":"string","value":"successstories/views.py"},"new_contents":{"kind":"string","value":"from django.contrib import messages\nfrom django.core.urlresolvers import reverse\nfrom django.utils.decorators import method_decorator\nfrom django.views.generic import CreateView, DetailView, ListView\n\nfrom honeypot.decorators import check_honeypot\n\nfrom .forms import StoryForm\nfrom .models import Story, StoryCategory\n\n\nclass ContextMixin:\n\n def get_context_data(self, **kwargs):\n ctx = super().get_context_data(**kwargs)\n ctx['category_list'] = StoryCategory.objects.all()\n return ctx\n\n\nclass StoryCreate(ContextMixin, CreateView):\n model = Story\n form_class = StoryForm\n template_name = 'successstories/story_form.html'\n success_message = (\n 'Your success story submission has been recorded. 
'\n 'It will be reviewed by the PSF staff and published.'\n )\n\n @method_decorator(check_honeypot)\n def dispatch(self, *args, **kwargs):\n return super().dispatch(*args, **kwargs)\n\n def get_success_url(self):\n return reverse('success_story_create')\n\n def form_valid(self, form):\n messages.add_message(self.request, messages.SUCCESS, self.success_message)\n return super().form_valid(form)\n\nclass StoryDetail(ContextMixin, DetailView):\n template_name = 'successstories/story_detail.html'\n context_object_name = 'story'\n\n def get_queryset(self):\n if self.request.user.is_staff:\n return Story.objects.select_related()\n return Story.objects.select_related().published()\n\n\nclass StoryList(ListView):\n template_name = 'successstories/story_list.html'\n context_object_name = 'stories'\n\n def get_queryset(self):\n return Story.objects.select_related().published()\n\n\nclass StoryListCategory(ContextMixin, DetailView):\n model = StoryCategory\n"},"old_contents":{"kind":"string","value":"from django.contrib import messages\nfrom django.core.urlresolvers import reverse\nfrom django.utils.decorators import method_decorator\nfrom django.views.generic import CreateView, DetailView, ListView\n\nfrom honeypot.decorators import check_honeypot\n\nfrom .forms import StoryForm\nfrom .models import Story, StoryCategory\n\n\nclass ContextMixin:\n\n def get_context_data(self, **kwargs):\n ctx = super().get_context_data(**kwargs)\n ctx['category_list'] = StoryCategory.objects.all()\n return ctx\n\n\nclass StoryCreate(ContextMixin, CreateView):\n model = Story\n form_class = StoryForm\n template_name = 'successstories/story_form.html'\n success_message = (\n 'Your success story submission has been recorded. '\n 'It will be reviewed by the PSF staff and published.'\n )\n\n @method_decorator(check_honeypot)\n def dispatch(self, *args, **kwargs):\n return super().dispatch(*args, **kwargs)\n\n def get_success_url(self):\n return reverse('success_story_create')\n\n def form_valid(self, form):\n messages.add_message(self.request, messages.SUCCESS, self.success_message)\n return super().form_valid(form)\n\n model = Story\nclass StoryDetail(ContextMixin, DetailView):\n template_name = 'successstories/story_detail.html'\n context_object_name = 'story'\n\n def get_queryset(self):\n if self.request.user.is_staff:\n return Story.objects.select_related()\n return Story.objects.select_related().published()\n\n\nclass StoryList(ListView):\n model = Story\n template_name = 'successstories/story_list.html'\n context_object_name = 'stories'\n\n def get_queryset(self):\n return Story.objects.select_related().published()\n\n\nclass StoryListCategory(ContextMixin, DetailView):\n model = StoryCategory\n"},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2233,"cells":{"commit":{"kind":"string","value":"9abb8108f62451fb993a398c8165a4605e40ec4a"},"subject":{"kind":"string","value":"Add tests for JSONPResponseMiddleware"},"repos":{"kind":"string","value":"Code4SA/mapit,chris48s/mapit,Sinar/mapit,Code4SA/mapit,chris48s/mapit,opencorato/mapit,chris48s/mapit,Sinar/mapit,opencorato/mapit,opencorato/mapit,Code4SA/mapit"},"old_file":{"kind":"string","value":"mapit/tests/test_middleware.py"},"new_file":{"kind":"string","value":"mapit/tests/test_middleware.py"},"new_contents":{"kind":"string","value":"from django.test import TestCase\nfrom django.test.client import RequestFactory\nfrom django.http import HttpResponse, HttpResponsePermanentRedirect\n\nfrom ..middleware import 
JSONPMiddleware\n\n\nclass JSONPMiddlewareTest(TestCase):\n\n    def setUp(self):\n        self.middleware = JSONPMiddleware()\n        self.factory = RequestFactory()\n\n    def test_process_response_ignores_302_redirects(self):\n        request = self.factory.get(\"/dummy_url\", {\"callback\": \"xyz\"})\n        response = HttpResponsePermanentRedirect(\"/new_url\")\n        middleware_response = self.middleware.process_response(request, response)\n        self.assertEqual(middleware_response, response)\n\n    def test_process_response_uses_callback(self):\n        request = self.factory.get(\"/dummy_url\", {\"callback\": \"xyz\"})\n        response = HttpResponse(content=\"blah\")\n        middleware_response = self.middleware.process_response(request, response)\n        self.assertEqual(middleware_response.content, u'xyz(blah)')\n\n    def test_process_response_uses_ignores_requests_without_callback(self):\n        request = self.factory.get(\"/dummy_url\")\n        response = HttpResponse(content=\"blah\")\n        middleware_response = self.middleware.process_response(request, response)\n        self.assertEqual(middleware_response, response)\n\n    def test_process_response_callback_allowed_characters(self):\n        request = self.factory.get(\"/dummy_url\", {\"callback\": \"xyz123_$.\"})\n        response = HttpResponse(content=\"blah\")\n        middleware_response = self.middleware.process_response(request, response)\n        self.assertEqual(middleware_response.content, u'xyz123_$.(blah)')\n\n        # Try with a character not allowed in the callback\n        request = self.factory.get(\"/dummy_url\", {\"callback\": \"xyz123_$.[\"})\n        response = HttpResponse(content=\"blah\")\n        middleware_response = self.middleware.process_response(request, response)\n        self.assertEqual(middleware_response, response)\n\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"agpl-3.0"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2234,"cells":{"commit":{"kind":"string","value":"e20d3ff6147b857cb9a8efa32bfb4ee80610dd34"},"subject":{"kind":"string","value":"Revert \"dump\""},"repos":{"kind":"string","value":"assassinen/python_training"},"old_file":{"kind":"string","value":"dump/fastMessageReaderOriginal.py"},"new_file":{"kind":"string","value":"dump/fastMessageReaderOriginal.py"},"new_contents":{"kind":"string","value":"#!/usr/bin/python\n\nimport sys\nimport re\n\n# ============================================================================ \n\nclass MessageReader:\n\n    messageRegexp = r\"\s*(\w+)\[\d+\]=(.*?)(?=\s\w+\[\d+\]|$)\";\n\n    def __init__(self, fileName):\n        self.fileName = fileName\n        #self.file = open(fileName, encoding=\"utf8\")\n        self.file = open(fileName)\n\n        self.carryover = \"\";\n\n    def __del__(self):\n        self.file.close()\n\n    def getMessage(self):\n        if (self.carryover != \"\"):\n            line = self.carryover\n            self.carryover = \"\"\n        else:\n            line = self.file.readline()\n\n        while (line.startswith('ApplVerID') is not True):\n            if not line: return {}\n            line = self.file.readline()\n        message = dict(re.findall(self.messageRegexp, line))\n        message['entries'] = []\n\n        line = self.file.readline();\n        noEntries = re.sub(\".*?NoMDEntries\[268\]\s*=\s*(\d+)[^\d]*\", r'\1', line)\n        if (noEntries == line):\n            self.carryover = line;\n            return message\n\n        for i in range(int(noEntries)):\n            line = self.file.readline().split(':')[1].strip()\n            entry = dict(re.findall(self.messageRegexp, line))\n            message[\"entries\"].append(entry)\n\n        return message\n\n# 
============================================================================\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2235,"cells":{"commit":{"kind":"string","value":"f917c7ccfbe22a50049e76957a05f35eaaa46b2a"},"subject":{"kind":"string","value":"migrate child table"},"repos":{"kind":"string","value":"DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations"},"old_file":{"kind":"string","value":"polling_stations/apps/addressbase/migrations/0010_remove_onsud_ctry_flag.py"},"new_file":{"kind":"string","value":"polling_stations/apps/addressbase/migrations/0010_remove_onsud_ctry_flag.py"},"new_contents":{"kind":"string","value":"# -*- coding: utf-8 -*-\n# Generated by Django 1.11.20 on 2019-02-15 14:12\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [(\"addressbase\", \"0009_onsud_ced\")]\n\n operations = [migrations.RemoveField(model_name=\"onsud\", name=\"ctry_flag\")]\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"bsd-3-clause"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2236,"cells":{"commit":{"kind":"string","value":"1553cdda2edc16368ba2281616923e849f09bdee"},"subject":{"kind":"string","value":"Create matching_{x,y}.py"},"repos":{"kind":"string","value":"JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking"},"old_file":{"kind":"string","value":"hacker_rank/regex/repetitions/matching_{x,y}.py"},"new_file":{"kind":"string","value":"hacker_rank/regex/repetitions/matching_{x,y}.py"},"new_contents":{"kind":"string","value":"Regex_Pattern = r'^\\d{1,2}[a-zA-Z]{3,}\\W{0,3}$'\t# Do not delete 'r'.\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2237,"cells":{"commit":{"kind":"string","value":"527a53ee1e43f59462b94b50ea997058836a7031"},"subject":{"kind":"string","value":"Create voicersss-inmoovservice-test.py"},"repos":{"kind":"string","value":"MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab"},"old_file":{"kind":"string","value":"home/moz4r/Test/voicersss-inmoovservice-test.py"},"new_file":{"kind":"string","value":"home/moz4r/Test/voicersss-inmoovservice-test.py"},"new_contents":{"kind":"string","value":"i01 = Runtime.createAndStart(\"i01\", \"InMoov\")\ni01.mouth = Runtime.createAndStart(\"i01.mouth\", \"voiceRSS\")\n\npython.subscribe(i01.mouth.getName(),\"publishStartSpeaking\")\npython.subscribe(i01.mouth.getName(),\"publishEndSpeaking\")\n\ndef onEndSpeaking(text):\n\tprint \"end speak\"\ndef onStartSpeaking(text):\n\tprint \"start speak\"\n\ni01.mouth.setKey(\"6b714718f09e48c9a7f260e385ca99a4\")\ni01.mouth.setVoice(\"fr-fr\");\ni01.mouth.speakBlocking(u\"test accent utf8 : éléphant\")\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2238,"cells":{"commit":{"kind":"string","value":"75980fc2e2f63e210f1e58e9a1d56c09072aa04e"},"subject":{"kind":"string","value":"add 
play_camera.py"},"repos":{"kind":"string","value":"physacco/cv-test,physacco/cv-test,physacco/cv-test,physacco/cv-test"},"old_file":{"kind":"string","value":"python/video/play_camera.py"},"new_file":{"kind":"string","value":"python/video/play_camera.py"},"new_contents":{"kind":"string","value":"#!/usr/bin/env python3\n# encoding: utf-8\n# pylint: disable=no-member\n\n\"\"\"Play a video with OpenCV.\"\"\"\n\nimport sys\nimport cv2\n\ndef main():\n \"\"\"The main function of this module.\"\"\"\n cv2.namedWindow('video', cv2.WINDOW_AUTOSIZE)\n\n cap = cv2.VideoCapture(0)\n i = 0\n while cap.isOpened():\n ret, frame = cap.read()\n if not ret: # done\n break\n\n i += 1\n if i == 1:\n print frame.shape, frame.dtype, frame.size\n\n cv2.imshow('video', frame)\n\n key = cv2.waitKey(30)\n if key & 0xFF == ord('q'): # quit\n break\n\n cap.release()\n cv2.destroyAllWindows()\n\nif __name__ == '__main__':\n main()\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"unlicense"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2239,"cells":{"commit":{"kind":"string","value":"6dfc5a3d7845633570b83aac06c47756292cf8ac"},"subject":{"kind":"string","value":"Add tests for get_uid() method for common DB models."},"repos":{"kind":"string","value":"dennybaa/st2,StackStorm/st2,pixelrebel/st2,Itxaka/st2,Plexxi/st2,pixelrebel/st2,nzlosh/st2,punalpatel/st2,nzlosh/st2,Itxaka/st2,emedvedev/st2,dennybaa/st2,tonybaloney/st2,Plexxi/st2,punalpatel/st2,peak6/st2,dennybaa/st2,StackStorm/st2,tonybaloney/st2,peak6/st2,StackStorm/st2,StackStorm/st2,armab/st2,alfasin/st2,nzlosh/st2,emedvedev/st2,Itxaka/st2,punalpatel/st2,armab/st2,pixelrebel/st2,alfasin/st2,lakshmi-kannan/st2,tonybaloney/st2,lakshmi-kannan/st2,emedvedev/st2,armab/st2,peak6/st2,nzlosh/st2,Plexxi/st2,lakshmi-kannan/st2,alfasin/st2,Plexxi/st2"},"old_file":{"kind":"string","value":"st2common/tests/unit/test_db_model_uids.py"},"new_file":{"kind":"string","value":"st2common/tests/unit/test_db_model_uids.py"},"new_contents":{"kind":"string","value":"# contributor license agreements. See the NOTICE file distributed with\n# this work for additional information regarding copyright ownership.\n# The ASF licenses this file to You under the Apache License, Version 2.0\n# (the \"License\"); you may not use this file except in compliance with\n# the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport unittest2\n\nfrom st2common.models.db.pack import PackDB\nfrom st2common.models.db.sensor import SensorTypeDB\nfrom st2common.models.db.action import ActionDB\nfrom st2common.models.db.rule import RuleDB\nfrom st2common.models.db.trigger import TriggerTypeDB\nfrom st2common.models.db.trigger import TriggerDB\n\n__all__ = [\n 'DBModelUIDFieldTestCase'\n]\n\n\nclass DBModelUIDFieldTestCase(unittest2.TestCase):\n def test_get_uid(self):\n pack_db = PackDB(ref='ma_pack')\n self.assertEqual(pack_db.get_uid(), 'pack:ma_pack')\n\n sensor_type_db = SensorTypeDB(name='sname', pack='spack')\n self.assertEqual(sensor_type_db.get_uid(), 'sensor_type:spack:sname')\n\n action_db = ActionDB(name='aname', pack='apack', runner_info={})\n self.assertEqual(action_db.get_uid(), 'action:apack:aname')\n\n rule_db = RuleDB(name='rname', pack='rpack')\n self.assertEqual(rule_db.get_uid(), 'rule:rpack:rname')\n\n trigger_type_db = TriggerTypeDB(name='ttname', pack='ttpack')\n self.assertEqual(trigger_type_db.get_uid(), 'trigger_type:ttpack:ttname')\n\n trigger_db = TriggerDB(name='tname', pack='tpack')\n self.assertTrue(trigger_db.get_uid().startswith('trigger:tpack:tname:'))\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2240,"cells":{"commit":{"kind":"string","value":"5d64acfd475ca0bb0db2ef7c032fc4ee16df4f75"},"subject":{"kind":"string","value":"remove highlight table"},"repos":{"kind":"string","value":"pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot,pajlada/tyggbot,pajlada/tyggbot"},"old_file":{"kind":"string","value":"alembic/versions/186928676dbc_remove_highlights.py"},"new_file":{"kind":"string","value":"alembic/versions/186928676dbc_remove_highlights.py"},"new_contents":{"kind":"string","value":"\"\"\"remove_highlights\n\nRevision ID: 186928676dbc\nRevises: f163a00a02aa\nCreate Date: 2019-06-01 15:14:13.999836\n\n\"\"\"\n\n# revision identifiers, used by Alembic.\nrevision = '186928676dbc'\ndown_revision = 'f163a00a02aa'\nbranch_labels = None\ndepends_on = None\n\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects import mysql\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n op.drop_table('tb_stream_chunk_highlight')\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! 
###\n    op.create_table('tb_stream_chunk_highlight',\n    sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False),\n    sa.Column('stream_chunk_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),\n    sa.Column('created_at', mysql.DATETIME(), nullable=False),\n    sa.Column('highlight_offset', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),\n    sa.Column('description', mysql.VARCHAR(length=128), nullable=True),\n    sa.Column('override_link', mysql.VARCHAR(length=256), nullable=True),\n    sa.Column('thumbnail', mysql.TINYINT(display_width=1), autoincrement=False, nullable=True),\n    sa.Column('created_by', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),\n    sa.Column('last_edited_by', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),\n    sa.ForeignKeyConstraint(['stream_chunk_id'], ['tb_stream_chunk.id'], name='tb_stream_chunk_highlight_ibfk_1'),\n    sa.PrimaryKeyConstraint('id'),\n    mysql_default_charset='utf8mb4',\n    mysql_engine='InnoDB'\n    )\n    # ### end Alembic commands ###\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2241,"cells":{"commit":{"kind":"string","value":"8658ad72c74306617e58ca82ff0f3fdba35bd353"},"subject":{"kind":"string","value":"implement auto build database interface"},"repos":{"kind":"string","value":"free-free/pyblog,free-free/pyblog,free-free/pyblog,free-free/pyblog"},"old_file":{"kind":"string","value":"app/tools/dbautocreat.py"},"new_file":{"kind":"string","value":"app/tools/dbautocreat.py"},"new_contents":{"kind":"string","value":"#-*- coding:utf-8 -*-\nimport asyncio\nimport aiomysql\nfrom tools.config import Config\n\n\nclass AutoCreate(object):\n\tdef __init__(self):\n\t\tpass\n\tdef _create_db(self):\n\t\tpass\n\tdef _create_field_type(self):\n\t\tpass\n\tdef _create_field_primary_key(self):\n\t\tpass\n\tdef _create_field_unique_key(self):\n\t\tpass\n\tdef _create_auto_increment(self):\n\t\tpass\n\tdef _create_default(self):\n\t\tpass\n\tdef _create_table(self):\n\t\tpass\n\tdef run(self):\n\t\tpass\n\n@asyncio.coroutine\ndef auto_create():\n\tconn=yield from aiomysql.connect(db=Config.database.database,\n\t\t\t\t\thost=Config.database.host,\n\t\t\t\t\tpassword=Config.database.password,\n\t\t\t\t\tuser=Config.database.user)\n\tcursor =yield from conn.cursor()\n\tyield from cursor.execute('show databases;')\n\tret=yield from cursor.fetchall()\n\tprint(ret)\n\nif __name__=='__main__':\n\tloop=asyncio.get_event_loop()\n\tloop.run_until_complete(asyncio.wait([auto_create()]))\n\tloop.close()\n\n\t\n\n\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2242,"cells":{"commit":{"kind":"string","value":"36b8c44f8c2554109ab4ab09add9ac10fae20781"},"subject":{"kind":"string","value":"add entities orm"},"repos":{"kind":"string","value":"clicheio/cliche,item4/cliche,item4/cliche,clicheio/cliche,clicheio/cliche"},"old_file":{"kind":"string","value":"cliche/services/tvtropes/entities.py"},"new_file":{"kind":"string","value":"cliche/services/tvtropes/entities.py"},"new_contents":{"kind":"string","value":"from sqlalchemy import Column, DateTime, ForeignKey, String\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import relationship\n\n\nBase = declarative_base()\n\n\n__all__ = 'Entity'\n\n\nclass Entity(Base):\n    namespace = Column(String, primary_key=True)\n    name = 
Column(String, primary_key=True)\n url = Column(String)\n last_crawled = Column(DateTime)\n type = Column(String)\n\n relations = relationship('Relation', foreign_keys=[namespace, name],\n primaryjoin='and_(Entity.namespace == \\\n Relation.origin_namespace, \\\n Entity.name == Relation.origin)',\n collection_class=set)\n\n def __init__(self, namespace, name, url, last_crawled, type):\n self.namespace = namespace\n self.name = name\n self.url = url\n self.last_crawled = last_crawled\n self.type = type\n\n def __repr__(self):\n return \" stale_age_in_seconds:\n # Yes! delete it\n print 'Removing: ', info_object\n info_object.delete()\n\"\"\"\nfrom apps.gis_tabular.utils_stale_data import *\nremove_stale_map_data()\n\"\"\"\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2253,"cells":{"commit":{"kind":"string","value":"cb56e0151b37a79e2ba95815555cde0633e167e7"},"subject":{"kind":"string","value":"add client subscribe testing"},"repos":{"kind":"string","value":"beerfactory/hbmqtt"},"old_file":{"kind":"string","value":"samples/client_subscribe.py"},"new_file":{"kind":"string","value":"samples/client_subscribe.py"},"new_contents":{"kind":"string","value":"import logging\nfrom hbmqtt.client._client import MQTTClient\nimport asyncio\n\nlogger = logging.getLogger(__name__)\n\nC = MQTTClient()\n\n@asyncio.coroutine\ndef test_coro():\n yield from C.connect(uri='mqtt://iot.eclipse.org:1883/', username=None, password=None)\n yield from C.subscribe([\n {'filter': '$SYS/broker/uptime', 'qos': 0x00},\n ])\n logger.info(\"Subscribed\")\n\n yield from asyncio.sleep(60)\n yield from C.disconnect()\n\n\nif __name__ == '__main__':\n formatter = \"[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s\"\n logging.basicConfig(level=logging.DEBUG, format=formatter)\n asyncio.get_event_loop().run_until_complete(test_coro())"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2254,"cells":{"commit":{"kind":"string","value":"c422b5019c6e638bce40a7fecef6977aa5e63ce0"},"subject":{"kind":"string","value":"add __init__.py"},"repos":{"kind":"string","value":"weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016"},"old_file":{"kind":"string","value":"python/18-package/parent/__init__.py"},"new_file":{"kind":"string","value":"python/18-package/parent/__init__.py"},"new_contents":{"kind":"string","value":"#!/usr/bin/env python\n#-*- coding=utf-8 -*-\n\nif __name__ == \"__main__\":\n print \"Package parent running as main program\"\nelse:\n print \"Package parent initializing\"\n\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"bsd-2-clause"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2255,"cells":{"commit":{"kind":"string","value":"8d6a5c4092d4f092416fc39fc7faa8bb20e701c3"},"subject":{"kind":"string","value":"Add a manage command to sync reservations from external hook .. 
hard coded first product only atm (cherry picked from commit 63a80b711e1be9a6047965b8d0061b676d8c50ed)"},"repos":{"kind":"string","value":"jaywink/cartridge-reservable,jaywink/cartridge-reservable,jaywink/cartridge-reservable"},"old_file":{"kind":"string","value":"cartridge/shop/management/commands/syncreshooks.py"},"new_file":{"kind":"string","value":"cartridge/shop/management/commands/syncreshooks.py"},"new_contents":{"kind":"string","value":"from django.core.management.base import BaseCommand\nfrom django.core.management.base import CommandError\nfrom mezzanine.conf import settings\n\nfrom cartridge.shop.models import *\n\nclass Command(BaseCommand):\n    help = 'Sync reservations from external hook'\n\n    def handle(self, *args, **options):\n        p = ReservableProduct.objects.all()[0]\n        p.update_from_hook()\n\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"bsd-2-clause"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2256,"cells":{"commit":{"kind":"string","value":"afab4bcd795da4395920eab6107bc33e401ed86a"},"subject":{"kind":"string","value":"Create PiWS.py"},"repos":{"kind":"string","value":"llamafarmer/Pi_Weather_Station,llamafarmer/Pi_Weather_Station,llamafarmer/Pi_Weather_Station"},"old_file":{"kind":"string","value":"PiWS.py"},"new_file":{"kind":"string","value":"PiWS.py"},"new_contents":{"kind":"string","value":"import time\nimport datetime\nimport csv\nfrom math import log\nfrom flask import Flask, render_template\nfrom sense_hat import SenseHat\n\napp = Flask(__name__)\n\ndef weather():\n    sense = SenseHat()\n    sense.clear()\n\n    celcius = round(sense.get_temperature(), 1)\n    fahrenheit = round(1.8 * celcius + 32, 1)\n    humidity = round(sense.get_humidity(), 1)\n    pressure = round(sense.get_pressure(), 1)\n    dewpoint = round(243.04 * (log(humidity / 100) + ((17.625 * celcius) / (243.04 + celcius))) / (17.625 - log(humidity / 100) - (17.625 * celcius) / (243.04 + celcius)), 1)\n\n    acceleration = sense.get_accelerometer_raw()\n    x = round(acceleration['x'], 0)\n    y = round(acceleration['y'], 0)\n    z = round(acceleration['z'], 0)\n\n    if x == -1:\n        sense.set_rotation(90)\n    elif y == 1:\n        sense.set_rotation(0)\n    elif y == -1:\n        sense.set_rotation(180)\n    else:\n        sense.set_rotation(180)\n\n    if fahrenheit > 20 and fahrenheit < 80:\n        bg_color = [0, 0, 155] # blue\n    elif fahrenheit > 81 and fahrenheit < 90:\n        bg_color = [0, 155, 0] # Green\n    elif fahrenheit > 91 and fahrenheit < 100:\n        bg_color = [155, 155, 0] # Yellow\n    elif fahrenheit > 101 and fahrenheit < 102:\n        bg_color = [255, 127, 0] # Orange\n    elif fahrenheit > 103 and fahrenheit < 104:\n        bg_color = [155, 0, 0] # Red\n    elif fahrenheit > 105 and fahrenheit < 109:\n        bg_color = [255, 0, 0] # Bright Red\n    elif fahrenheit > 110 and fahrenheit < 120:\n        bg_color = [155, 155, 155] # White\n    else:\n        bg_color = [0, 155, 0] # Green\n\n    result = ' Temp. F ' + str(fahrenheit) + ' Temp. C ' + str(celcius) + ' Hum. ' + str(humidity) + ' Press. 
' + str(pressure) + ' DewPoint ' + str(dewpoint)\n print(result)\n result_list = [(datetime.datetime.now(), celcius, fahrenheit, humidity, pressure, dewpoint)]\n with open('weather_logs.csv', 'a', newline='') as csv_file:\n writer = csv.writer(csv_file)\n writer.writerows(result_list)\n for x in range(5):\n sense.show_message(result, scroll_speed=0.10, back_colour=bg_color, text_colour=[155, 155, 155])\n\n@app.route('/')\n\ndef index():\n sense = SenseHat()\n sense.clear()\n\n celcius = round(sense.get_temperature(), 1)\n fahrenheit = round(1.8 * celcius + 32, 1)\n humidity = round(sense.get_humidity(), 1)\n pressure = round(sense.get_pressure(), 1)\n dewpoint = round(243.04 * (log(humidity / 100) + ((17.625 * celcius) / (243.04 + celcius))) / (17.625 - log(humidity / 100) - (17.625 * celcius) / (243.04 + celcius)), 1)\n\n acceleration = sense.get_accelerometer_raw()\n x = round(acceleration['x'], 1)\n y = round(acceleration['y'], 1)\n z = round(acceleration['z'], 1)\n\n return render_template('weather.html', celcius=celcius, fahrenheit=fahrenheit, humidity=humidity, pressure=pressure, dewpoint=dewpoint, x=x, y=y, z=z)\n\nwhile __name__ == '__main__':\n weather()\n #app.run(host='0.0.0.0')\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2257,"cells":{"commit":{"kind":"string","value":"7e71b21f655ec35bd5ebd79aeb5dbec6945a77a7"},"subject":{"kind":"string","value":"Add purdue harvester"},"repos":{"kind":"string","value":"erinspace/scrapi,fabianvf/scrapi,erinspace/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,CenterForOpenScience/scrapi"},"old_file":{"kind":"string","value":"scrapi/harvesters/purdue.py"},"new_file":{"kind":"string","value":"scrapi/harvesters/purdue.py"},"new_contents":{"kind":"string","value":"'''\nHarvester for the Purdue University Research Repository for the SHARE project\n\nExample API call: http://purr.purdue.edu/oaipmh?verb=ListRecords&metadataPrefix=oai_dc\n'''\nfrom __future__ import unicode_literals\n\nfrom scrapi.base import OAIHarvester\n\n\nclass PurdueHarvester(OAIHarvester):\n short_name = 'purdue'\n long_name = 'PURR - Purdue University Research Repository'\n url = 'http://purr.purdue.edu'\n\n base_url = 'http://purr.purdue.edu/oaipmh'\n property_list = ['date', 'relation', 'identifier', 'type', 'setSpec']\n timezone_granularity = True\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2258,"cells":{"commit":{"kind":"string","value":"7ecfe7d20f8708a1dada5761cdc02905b0e370e5"},"subject":{"kind":"string","value":"use correct 
separator"},"repos":{"kind":"string","value":"owlabs/incubator-airflow,opensignal/airflow,hamedhsn/incubator-airflow,DinoCow/airflow,jiwang576/incubator-airflow,rishibarve/incubator-airflow,MetrodataTeam/incubator-airflow,andrewmchen/incubator-airflow,yoziru-desu/airflow,malmiron/incubator-airflow,nathanielvarona/airflow,r39132/airflow,Fokko/incubator-airflow,r39132/airflow,preete-dixit-ck/incubator-airflow,nathanielvarona/airflow,dud225/incubator-airflow,yati-sagade/incubator-airflow,sergiohgz/incubator-airflow,airbnb/airflow,wndhydrnt/airflow,mrkm4ntr/incubator-airflow,gilt/incubator-airflow,brandsoulmates/incubator-airflow,skudriashev/incubator-airflow,Acehaidrey/incubator-airflow,storpipfugl/airflow,jesusfcr/airflow,AllisonWang/incubator-airflow,zodiac/incubator-airflow,mtdewulf/incubator-airflow,lyft/incubator-airflow,jiwang576/incubator-airflow,modsy/incubator-airflow,kerzhner/airflow,zoyahav/incubator-airflow,dgies/incubator-airflow,cfei18/incubator-airflow,airbnb/airflow,cjqian/incubator-airflow,sekikn/incubator-airflow,yoziru-desu/airflow,jfantom/incubator-airflow,Acehaidrey/incubator-airflow,artwr/airflow,cademarkegard/airflow,aminghadersohi/airflow,moritzpein/airflow,Fokko/incubator-airflow,lxneng/incubator-airflow,malmiron/incubator-airflow,stverhae/incubator-airflow,griffinqiu/airflow,jfantom/incubator-airflow,janczak10/incubator-airflow,mattuuh7/incubator-airflow,alexvanboxel/airflow,btallman/incubator-airflow,DinoCow/airflow,nathanielvarona/airflow,sdiazb/airflow,r39132/airflow,opensignal/airflow,easytaxibr/airflow,sid88in/incubator-airflow,DEVELByte/incubator-airflow,apache/airflow,ty707/airflow,biln/airflow,andrewmchen/incubator-airflow,vineet-rh/incubator-airflow,forevernull/incubator-airflow,spektom/incubator-airflow,dhuang/incubator-airflow,mtagle/airflow,apache/incubator-airflow,yati-sagade/incubator-airflow,kerzhner/airflow,NielsZeilemaker/incubator-airflow,jhsenjaliya/incubator-airflow,spektom/incubator-airflow,dmitry-r/incubator-airflow,easytaxibr/airflow,malmiron/incubator-airflow,holygits/incubator-airflow,mtustin-handy/airflow,wolfier/incubator-airflow,MortalViews/incubator-airflow,cfei18/incubator-airflow,holygits/incubator-airflow,aminghadersohi/airflow,preete-dixit-ck/incubator-airflow,opensignal/airflow,adamhaney/airflow,N3da/incubator-airflow,jhsenjaliya/incubator-airflow,brandsoulmates/incubator-airflow,dhuang/incubator-airflow,Acehaidrey/incubator-airflow,vijaysbhat/incubator-airflow,Tagar/incubator-airflow,sid88in/incubator-airflow,jgao54/airflow,moritzpein/airflow,Chedi/airflow,OpringaoDoTurno/airflow,mrares/incubator-airflow,owlabs/incubator-airflow,mylons/incubator-airflow,caseyching/incubator-airflow,asnir/airflow,wxiang7/airflow,plypaul/airflow,preete-dixit-ck/incubator-airflow,RealImpactAnalytics/airflow,yiqingj/airflow,edgarRd/incubator-airflow,andyxhadji/incubator-airflow,hgrif/incubator-airflow,nathanielvarona/airflow,Tagar/incubator-airflow,wileeam/airflow,akosel/incubator-airflow,yk5/incubator-airflow,ronfung/incubator-airflow,edgarRd/incubator-airflow,zodiac/incubator-airflow,rishibarve/incubator-airflow,vijaysbhat/incubator-airflow,Twistbioscience/incubator-airflow,cjqian/incubator-airflow,subodhchhabra/airflow,d-lee/airflow,danielvdende/incubator-airflow,jwi078/incubator-airflow,mattuuh7/incubator-airflow,malmiron/incubator-airflow,yoziru-desu/airflow,ty707/airflow,apache/airflow,mrkm4ntr/incubator-airflow,Acehaidrey/incubator-airflow,N3da/incubator-airflow,DEVELByte/incubator-airflow,subodhchhabra/airflow,vineet-rh/incubator-airflow,CloverHe
alth/airflow,easytaxibr/airflow,adamhaney/airflow,janczak10/incubator-airflow,storpipfugl/airflow,gtoonstra/airflow,cfei18/incubator-airflow,biln/airflow,andyxhadji/incubator-airflow,MortalViews/incubator-airflow,griffinqiu/airflow,mrkm4ntr/incubator-airflow,mistercrunch/airflow,aminghadersohi/airflow,wndhydrnt/airflow,fenglu-g/incubator-airflow,wileeam/airflow,AllisonWang/incubator-airflow,yati-sagade/incubator-airflow,mrares/incubator-airflow,Tagar/incubator-airflow,Chedi/airflow,mattuuh7/incubator-airflow,OpringaoDoTurno/airflow,akosel/incubator-airflow,d-lee/airflow,andyxhadji/incubator-airflow,edgarRd/incubator-airflow,mtdewulf/incubator-airflow,mrkm4ntr/incubator-airflow,cfei18/incubator-airflow,spektom/incubator-airflow,ledsusop/airflow,jgao54/airflow,brandsoulmates/incubator-airflow,sid88in/incubator-airflow,plypaul/airflow,DEVELByte/incubator-airflow,biln/airflow,ledsusop/airflow,skudriashev/incubator-airflow,owlabs/incubator-airflow,jhsenjaliya/incubator-airflow,mattuuh7/incubator-airflow,DEVELByte/incubator-airflow,wooga/airflow,KL-WLCR/incubator-airflow,danielvdende/incubator-airflow,jiwang576/incubator-airflow,mistercrunch/airflow,jiwang576/incubator-airflow,OpringaoDoTurno/airflow,adrpar/incubator-airflow,jfantom/incubator-airflow,janczak10/incubator-airflow,mylons/incubator-airflow,jlowin/airflow,bolkedebruin/airflow,zodiac/incubator-airflow,sid88in/incubator-airflow,lxneng/incubator-airflow,caseyching/incubator-airflow,kerzhner/airflow,zack3241/incubator-airflow,lyft/incubator-airflow,opensignal/airflow,Chedi/airflow,apache/incubator-airflow,wileeam/airflow,criccomini/airflow,stverhae/incubator-airflow,vijaysbhat/incubator-airflow,jwi078/incubator-airflow,mtdewulf/incubator-airflow,subodhchhabra/airflow,wileeam/airflow,bolkedebruin/airflow,hamedhsn/incubator-airflow,spektom/incubator-airflow,dhuang/incubator-airflow,sergiohgz/incubator-airflow,ProstoMaxim/incubator-airflow,juvoinc/airflow,KL-WLCR/incubator-airflow,hamedhsn/incubator-airflow,airbnb/airflow,fenglu-g/incubator-airflow,dgies/incubator-airflow,airbnb/airflow,fenglu-g/incubator-airflow,r39132/airflow,wooga/airflow,mistercrunch/airflow,Tagar/incubator-airflow,CloverHealth/airflow,janczak10/incubator-airflow,lxneng/incubator-airflow,alexvanboxel/airflow,MetrodataTeam/incubator-airflow,wolfier/incubator-airflow,caseyching/incubator-airflow,storpipfugl/airflow,btallman/incubator-airflow,NielsZeilemaker/incubator-airflow,wxiang7/airflow,NielsZeilemaker/incubator-airflow,hgrif/incubator-airflow,Fokko/incubator-airflow,nathanielvarona/airflow,jason-z-hang/airflow,neovintage/airflow,zoyahav/incubator-airflow,MetrodataTeam/incubator-airflow,mrares/incubator-airflow,skudriashev/incubator-airflow,dmitry-r/incubator-airflow,yoziru-desu/airflow,wolfier/incubator-airflow,RealImpactAnalytics/airflow,btallman/incubator-airflow,holygits/incubator-airflow,neovintage/airflow,mtagle/airflow,jhsenjaliya/incubator-airflow,zoyahav/incubator-airflow,gritlogic/incubator-airflow,RealImpactAnalytics/airflow,griffinqiu/airflow,cademarkegard/airflow,wndhydrnt/airflow,MortalViews/incubator-airflow,jbhsieh/incubator-airflow,hgrif/incubator-airflow,lxneng/incubator-airflow,mylons/incubator-airflow,biln/airflow,jfantom/incubator-airflow,apache/incubator-airflow,ty707/airflow,adrpar/incubator-airflow,MortalViews/incubator-airflow,juvoinc/airflow,sdiazb/airflow,yiqingj/airflow,holygits/incubator-airflow,cademarkegard/airflow,ProstoMaxim/incubator-airflow,wooga/airflow,edgarRd/incubator-airflow,adrpar/incubator-airflow,apache/airflow,andyxhadji/incu
bator-airflow,forevernull/incubator-airflow,dud225/incubator-airflow,dhuang/incubator-airflow,mtustin-handy/airflow,wxiang7/airflow,btallman/incubator-airflow,N3da/incubator-airflow,lyft/incubator-airflow,akosel/incubator-airflow,danielvdende/incubator-airflow,vijaysbhat/incubator-airflow,d-lee/airflow,nathanielvarona/airflow,plypaul/airflow,criccomini/airflow,saguziel/incubator-airflow,zack3241/incubator-airflow,andrewmchen/incubator-airflow,AllisonWang/incubator-airflow,dgies/incubator-airflow,KL-WLCR/incubator-airflow,lyft/incubator-airflow,mtagle/airflow,saguziel/incubator-airflow,dmitry-r/incubator-airflow,aminghadersohi/airflow,gritlogic/incubator-airflow,easytaxibr/airflow,adrpar/incubator-airflow,jason-z-hang/airflow,ronfung/incubator-airflow,zack3241/incubator-airflow,jlowin/airflow,CloverHealth/airflow,danielvdende/incubator-airflow,adamhaney/airflow,wolfier/incubator-airflow,neovintage/airflow,modsy/incubator-airflow,jason-z-hang/airflow,KL-WLCR/incubator-airflow,dgies/incubator-airflow,danielvdende/incubator-airflow,ronfung/incubator-airflow,vineet-rh/incubator-airflow,moritzpein/airflow,jgao54/airflow,Chedi/airflow,bolkedebruin/airflow,Twistbioscience/incubator-airflow,asnir/airflow,hamedhsn/incubator-airflow,RealImpactAnalytics/airflow,MetrodataTeam/incubator-airflow,adamhaney/airflow,Acehaidrey/incubator-airflow,cademarkegard/airflow,wooga/airflow,apache/incubator-airflow,cfei18/incubator-airflow,mistercrunch/airflow,wxiang7/airflow,artwr/airflow,akosel/incubator-airflow,dud225/incubator-airflow,N3da/incubator-airflow,jlowin/airflow,sekikn/incubator-airflow,neovintage/airflow,jbhsieh/incubator-airflow,rishibarve/incubator-airflow,Twistbioscience/incubator-airflow,sekikn/incubator-airflow,gtoonstra/airflow,jlowin/airflow,cjqian/incubator-airflow,yk5/incubator-airflow,sdiazb/airflow,yiqingj/airflow,Fokko/incubator-airflow,brandsoulmates/incubator-airflow,storpipfugl/airflow,Acehaidrey/incubator-airflow,NielsZeilemaker/incubator-airflow,apache/airflow,danielvdende/incubator-airflow,modsy/incubator-airflow,caseyching/incubator-airflow,yiqingj/airflow,gilt/incubator-airflow,saguziel/incubator-airflow,cjqian/incubator-airflow,gritlogic/incubator-airflow,gilt/incubator-airflow,owlabs/incubator-airflow,ProstoMaxim/incubator-airflow,criccomini/airflow,jwi078/incubator-airflow,gritlogic/incubator-airflow,OpringaoDoTurno/airflow,jesusfcr/airflow,sergiohgz/incubator-airflow,bolkedebruin/airflow,skudriashev/incubator-airflow,sdiazb/airflow,zoyahav/incubator-airflow,jason-z-hang/airflow,jgao54/airflow,mtustin-handy/airflow,mrares/incubator-airflow,stverhae/incubator-airflow,vineet-rh/incubator-airflow,plypaul/airflow,alexvanboxel/airflow,asnir/airflow,DinoCow/airflow,alexvanboxel/airflow,ronfung/incubator-airflow,andrewmchen/incubator-airflow,griffinqiu/airflow,Twistbioscience/incubator-airflow,AllisonWang/incubator-airflow,dmitry-r/incubator-airflow,sergiohgz/incubator-airflow,sekikn/incubator-airflow,forevernull/incubator-airflow,mtustin-handy/airflow,mtagle/airflow,gtoonstra/airflow,ledsusop/airflow,gtoonstra/airflow,wndhydrnt/airflow,hgrif/incubator-airflow,jwi078/incubator-airflow,jbhsieh/incubator-airflow,CloverHealth/airflow,asnir/airflow,juvoinc/airflow,mylons/incubator-airflow,jbhsieh/incubator-airflow,yk5/incubator-airflow,rishibarve/incubator-airflow,ty707/airflow,apache/airflow,zack3241/incubator-airflow,fenglu-g/incubator-airflow,cfei18/incubator-airflow,artwr/airflow,gilt/incubator-airflow,criccomini/airflow,dud225/incubator-airflow,juvoinc/airflow,zodiac/incubator-airflow,le
dsusop/airflow,modsy/incubator-airflow,mtdewulf/incubator-airflow,jesusfcr/airflow,DinoCow/airflow,apache/airflow,yk5/incubator-airflow,bolkedebruin/airflow,moritzpein/airflow,kerzhner/airflow,saguziel/incubator-airflow,artwr/airflow,forevernull/incubator-airflow,d-lee/airflow,jesusfcr/airflow,subodhchhabra/airflow,ProstoMaxim/incubator-airflow,preete-dixit-ck/incubator-airflow,yati-sagade/incubator-airflow,stverhae/incubator-airflow"},"old_file":{"kind":"string","value":"scripts/ci/wheel_factory.py"},"new_file":{"kind":"string","value":"scripts/ci/wheel_factory.py"},"new_contents":{"kind":"string","value":"#!/usr/bin/env python\nimport requirements\nimport argparse\nimport glob\nimport os\n\nparser = argparse.ArgumentParser()\nparser.add_argument('file', help=\"requirements.txt\", type=str)\nparser.add_argument('wheeldir', help=\"wheeldir location\", type=str)\n\nargs = parser.parse_args()\n\nreq_file = open(args.file, 'r')\n\nfor req in requirements.parse(req_file):\n print \"Checking \" + args.wheeldir + os.path.sep + req.name + \"*.whl\"\n if not glob.glob(args.wheeldir + os.path.sep + req.name + \"*.whl\"):\n os.system(\"pip wheel --wheel-dir=\" + args.wheeldir + \" \" + req.name + \"\".join(req.specs) + \"\".join(req.extras))\n"},"old_contents":{"kind":"string","value":"#!/usr/bin/env python\nimport requirements\nimport argparse\nimport glob\nimport os\n\nparser = argparse.ArgumentParser()\nparser.add_argument('file', help=\"requirements.txt\", type=str)\nparser.add_argument('wheeldir', help=\"wheeldir location\", type=str)\n\nargs = parser.parse_args()\n\nreq_file = open(args.file, 'r')\n\nfor req in requirements.parse(req_file):\n print \"Checking \" + args.wheeldir + os.path.pathsep + req.name + \"*.whl\"\n if not glob.glob(args.wheeldir + os.path.pathsep + req.name + \"*.whl\"):\n os.system(\"pip wheel --wheel-dir=\" + args.wheeldir + \" \" + req.name + \"\".join(req.specs) + \"\".join(req.extras))\n"},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2259,"cells":{"commit":{"kind":"string","value":"027a199924ee256170a2e369733a57fcc7483c88"},"subject":{"kind":"string","value":"Add missing numeter namespace in poller"},"repos":{"kind":"string","value":"enovance/numeter,redhat-cip/numeter,enovance/numeter,enovance/numeter,redhat-cip/numeter,redhat-cip/numeter,redhat-cip/numeter,enovance/numeter"},"old_file":{"kind":"string","value":"poller/numeter/__init__.py"},"new_file":{"kind":"string","value":"poller/numeter/__init__.py"},"new_contents":{"kind":"string","value":"__import__('pkg_resources').declare_namespace(__name__)\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"agpl-3.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2260,"cells":{"commit":{"kind":"string","value":"7420f49f8e1508fa2017c629d8d11a16a9e28c4a"},"subject":{"kind":"string","value":"add abstract biobox class"},"repos":{"kind":"string","value":"pbelmann/command-line-interface,bioboxes/command-line-interface,michaelbarton/command-line-interface,michaelbarton/command-line-interface,pbelmann/command-line-interface,bioboxes/command-line-interface"},"old_file":{"kind":"string","value":"biobox_cli/biobox.py"},"new_file":{"kind":"string","value":"biobox_cli/biobox.py"},"new_contents":{"kind":"string","value":"from abc import ABCMeta, abstractmethod\nimport biobox_cli.container as ctn\nimport biobox_cli.util.misc as util\nimport tempfile as tmp\n\nclass Biobox:\n __metaclass__ = ABCMeta\n\n @abstractmethod\n def 
prepare_volumes(self, opts, host_dst_dir):\n        pass\n\n    @abstractmethod\n    def get_doc(self):\n        pass\n\n    @abstractmethod\n    def after_run(self, output, host_dst_dir):\n        pass\n\n    def run(self, argv):\n        opts = util.parse_docopt(self.get_doc(), argv, False)\n        task = opts['--task']\n        image = opts['<image>']\n        output = opts['--output']\n        host_dst_dir = tmp.mkdtemp()\n        volumes = self.prepare_volumes(opts, host_dst_dir)\n        ctn.exit_if_no_image_available(image)\n        ctnr = ctn.create(image, task, volumes)\n        ctn.run(ctnr)\n        self.after_run(output, host_dst_dir)\n        return ctnr\n\n    def remove(self, container):\n        \"\"\"\n        Removes a container\n        Note this method is not tested due to limitations of circle ci\n        \"\"\"\n        ctn.remove(container)"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2261,"cells":{"commit":{"kind":"string","value":"4d1b006e5ba559715d55a88528cdfc0bed755182"},"subject":{"kind":"string","value":"add import script for Weymouth"},"repos":{"kind":"string","value":"chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations"},"old_file":{"kind":"string","value":"polling_stations/apps/data_collection/management/commands/import_weymouth.py"},"new_file":{"kind":"string","value":"polling_stations/apps/data_collection/management/commands/import_weymouth.py"},"new_contents":{"kind":"string","value":"from data_collection.management.commands import BaseXpressDCCsvInconsistentPostcodesImporter\n\nclass Command(BaseXpressDCCsvInconsistentPostcodesImporter):\n    council_id = 'E07000053'\n    addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017WPBC.TSV'\n    stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017WPBC.TSV'\n    elections = ['parl.2017-06-08']\n    csv_delimiter = '\\t'\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"bsd-3-clause"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2262,"cells":{"commit":{"kind":"string","value":"2ce80e667de438fca20de7b4ab6847751b683e33"},"subject":{"kind":"string","value":"Add digikey command."},"repos":{"kind":"string","value":"kivhift/qmk,kivhift/qmk"},"old_file":{"kind":"string","value":"src/commands/digikey.py"},"new_file":{"kind":"string","value":"src/commands/digikey.py"},"new_contents":{"kind":"string","value":"#\n# Copyright (c) 2013 Joshua Hughes \n#\nimport urllib\nimport webbrowser\n\nimport qmk\n\nclass DigikeyCommand(qmk.Command):\n    \"\"\"Look up a part on Digi-Key.\n\n    A new tab will be opened in the default web browser that contains the\n    search results.\n    \"\"\"\n    def __init__(self):\n        self._name = 'digikey'\n        self._help = self.__doc__\n        self.__baseURL = 'http://www.digikey.com/product-search/en?KeyWords={}'\n\n    @qmk.Command.actionRequiresArgument\n    def action(self, arg):\n        webbrowser.open_new_tab(self.__baseURL.format(urllib.quote_plus(\n            ' '.join(arg.split()).encode('utf-8'))))\n\ndef commands(): return [ DigikeyCommand() ]\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},
{"rowIdx":2263,"cells":{"commit":{"kind":"string","value":"8b4bbd23bf37fb946b664f5932e4903f802c6e0d"},"subject":{"kind":"string","value":"Add first pass at integration style 
tests"},"repos":{"kind":"string","value":"wdv4758h/flake8,lericson/flake8"},"old_file":{"kind":"string","value":"flake8/tests/test_integration.py"},"new_file":{"kind":"string","value":"flake8/tests/test_integration.py"},"new_contents":{"kind":"string","value":"from __future__ import with_statement\n\nimport os\nimport unittest\ntry:\n from unittest import mock\nexcept ImportError:\n import mock # < PY33\n\nfrom flake8 import engine\n\n\nclass IntegrationTestCase(unittest.TestCase):\n \"\"\"Integration style tests to exercise different command line options.\"\"\"\n\n def this_file(self):\n \"\"\"Return the real path of this file.\"\"\"\n this_file = os.path.realpath(__file__)\n if this_file.endswith(\"pyc\"):\n this_file = this_file[:-1]\n return this_file\n\n def check_files(self, arglist=[], explicit_stdin=False, count=0):\n \"\"\"Call check_files.\"\"\"\n if explicit_stdin:\n target_file = \"-\"\n else:\n target_file = self.this_file()\n argv = ['flake8'] + arglist + [target_file]\n with mock.patch(\"sys.argv\", argv):\n style_guide = engine.get_style_guide(parse_argv=True)\n report = style_guide.check_files()\n self.assertEqual(report.total_errors, count)\n return style_guide, report\n\n def test_no_args(self):\n # assert there are no reported errors\n self.check_files()\n\n def _job_tester(self, jobs):\n # mock stdout.flush so we can count the number of jobs created\n with mock.patch('sys.stdout.flush') as mocked:\n guide, report = self.check_files(arglist=['--jobs=%s' % jobs])\n self.assertEqual(guide.options.jobs, jobs)\n self.assertEqual(mocked.call_count, jobs)\n\n def test_jobs(self):\n self._job_tester(2)\n self._job_tester(10)\n\n def test_stdin(self):\n self.count = 0\n\n def fake_stdin():\n self.count += 1\n with open(self.this_file(), \"r\") as f:\n return f.read()\n\n with mock.patch(\"pep8.stdin_get_value\", fake_stdin):\n guide, report = self.check_files(arglist=['--jobs=4'],\n explicit_stdin=True)\n self.assertEqual(self.count, 1)\n\n def test_stdin_fail(self):\n def fake_stdin():\n return \"notathing\\n\"\n with mock.patch(\"pep8.stdin_get_value\", fake_stdin):\n # only assert needed is in check_files\n guide, report = self.check_files(arglist=['--jobs=4'],\n explicit_stdin=True,\n count=1)\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2264,"cells":{"commit":{"kind":"string","value":"0d2adfcce21dd2efb5d781babec3e6b03464b6d5"},"subject":{"kind":"string","value":"Add basic tests"},"repos":{"kind":"string","value":"gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin"},"old_file":{"kind":"string","value":"tests/app/main/test_request_header.py"},"new_file":{"kind":"string","value":"tests/app/main/test_request_header.py"},"new_contents":{"kind":"string","value":"from tests.conftest import set_config_values\n\n\ndef test_route_correct_secret_key(app_, client):\n with set_config_values(app_, {\n 'ROUTE_SECRET_KEY_1': 'key_1',\n 'ROUTE_SECRET_KEY_2': '',\n 'DEBUG': False,\n }):\n\n response = client.get(\n path='/_status',\n headers=[\n ('X-Custom-forwarder', 'key_1'),\n ]\n )\n assert response.status_code == 200\n\n\ndef test_route_incorrect_secret_key(app_, client):\n with set_config_values(app_, {\n 'ROUTE_SECRET_KEY_1': 'key_1',\n 'ROUTE_SECRET_KEY_2': '',\n 'DEBUG': False,\n 
}):\n\n response = client.get(\n path='/_status',\n headers=[\n ('X-Custom-forwarder', 'wrong_key'),\n ]\n )\n assert response.status_code == 403\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2265,"cells":{"commit":{"kind":"string","value":"77af87198d1116b77df431d9139b30f76103dd64"},"subject":{"kind":"string","value":"Add migration for latitute and longitude of event"},"repos":{"kind":"string","value":"softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat"},"old_file":{"kind":"string","value":"fellowms/migrations/0023_auto_20160617_1350.py"},"new_file":{"kind":"string","value":"fellowms/migrations/0023_auto_20160617_1350.py"},"new_contents":{"kind":"string","value":"# -*- coding: utf-8 -*-\n# Generated by Django 1.9.5 on 2016-06-17 13:50\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('fellowms', '0022_event_report_url'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='event',\n name='lat',\n field=models.FloatField(blank=True, null=True),\n ),\n migrations.AddField(\n model_name='event',\n name='lon',\n field=models.FloatField(blank=True, null=True),\n ),\n ]\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"bsd-3-clause"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2266,"cells":{"commit":{"kind":"string","value":"fb07837db870a5fdea3a98aa1381793b1b20d2c0"},"subject":{"kind":"string","value":"Create main.py"},"repos":{"kind":"string","value":"jbaum517/jcb2tp-grocery"},"old_file":{"kind":"string","value":"main.py"},"new_file":{"kind":"string","value":"main.py"},"new_contents":{"kind":"string","value":"import webapp2\nimport jinja2\nimport os\nimport urllib\n\nfrom google.appengine.api import users\nfrom google.appengine.ext import ndb\n\n\nJINJA_ENVIRONMENT = jinja2.Environment(\n loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),\n extensions=['jinja2.ext.autoescape'],\n autoescape=True)\n\n\ndef user_key(id):\n return ndb.Key('GroceryList',id)\n\n\nclass GroceryItem(ndb.Model):\n name = ndb.StringProperty()\n cost = ndb.FloatProperty()\n quantity = ndb.IntegerProperty()\n total = ndb.FloatProperty()\n picture = ndb.BlobProperty()\n time = ndb.DateTimeProperty(auto_now_add=True)\n\n\nclass MainHandler(webapp2.RequestHandler):\n\n def get(self):\n user = users.get_current_user()\n items_query = GroceryItem.query(\n ancestor=user_key(users.get_current_user().user_id())).order(-GroceryItem.time)\n items = items_query.fetch(10)\n if user:\n url = users.create_logout_url(self.request.uri)\n url_linktext = 'Logout'\n else:\n url = users.create_login_url(self.request.uri)\n url_linktext = 'Login'\n template_values = {\n 'user':users.get_current_user(),\n 'items':items,\n 'url':url,\n 'url_linktext':url_linktext,\n }\n template = JINJA_ENVIRONMENT.get_template('index.html')\n self.response.write(template.render(template_values))\n\n\nclass GroceryList(webapp2.RequestHandler):\n\n def post(self):\n user = users.get_current_user();\n item = GroceryItem(parent=user_key(user.user_id()))\n item.name = self.request.get('name')\n item.cost = self.request.get('cost')\n item.quantity = self.request.get('quantity')\n item.picture = self.request.get('img')\n item.total = item.cost * item.quantity\n item.put()\n\n query_params = {'user': user_key(user.user_id())}\n self.redirect('/?' 
+ urllib.urlencode(query_params))\n\n\napp = webapp2.WSGIApplication([\n ('/', MainHandler)\n ('/add', GroceryList)\n], debug=True)\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"unlicense"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2267,"cells":{"commit":{"kind":"string","value":"b920f5aeecf7843fcc699db4a70a9a0f124fa198"},"subject":{"kind":"string","value":"Add unit test for protonate.py"},"repos":{"kind":"string","value":"jensengroup/propka"},"old_file":{"kind":"string","value":"tests/test_protonate.py"},"new_file":{"kind":"string","value":"tests/test_protonate.py"},"new_contents":{"kind":"string","value":"import propka.atom\nimport propka.protonate\n\n\ndef test_protonate_atom():\n atom = propka.atom.Atom(\n \"HETATM 4479 V VO4 A1578 -19.097 16.967 0.500 1.00 17.21 V \"\n )\n assert not atom.is_protonated\n p = propka.protonate.Protonate()\n p.protonate_atom(atom)\n assert atom.is_protonated\n assert atom.number_of_protons_to_add == 6\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"lgpl-2.1"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2268,"cells":{"commit":{"kind":"string","value":"2bf763e39e91ef989c121bba420e4ae09ea0a569"},"subject":{"kind":"string","value":"Add Diagonal Difference HackerRank Problem"},"repos":{"kind":"string","value":"PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank"},"old_file":{"kind":"string","value":"algorithms/diagonal_difference/kevin.py"},"new_file":{"kind":"string","value":"algorithms/diagonal_difference/kevin.py"},"new_contents":{"kind":"string","value":"#!/usr/bin/env python\n\n\ndef get_matrix_row_from_input():\n return [int(index) for index in input().strip().split(' ')]\n\n\nn = int(input().strip())\nprimary_diag_sum = 0\nsecondary_diag_sum = 0\nfor row_count in range(n):\n row = get_matrix_row_from_input()\n primary_diag_sum += row[row_count]\n secondary_diag_sum += row[-1 - row_count]\n\nprint(abs(primary_diag_sum - secondary_diag_sum))\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2269,"cells":{"commit":{"kind":"string","value":"9098904ffcd47c4327594f8fc6ce8ce8694e5422"},"subject":{"kind":"string","value":"Create getsubinterfaces.py"},"repos":{"kind":"string","value":"infobloxopen/netmri-toolkit,infobloxopen/netmri-toolkit,infobloxopen/netmri-toolkit"},"old_file":{"kind":"string","value":"python/getsubinterfaces.py"},"new_file":{"kind":"string","value":"python/getsubinterfaces.py"},"new_contents":{"kind":"string","value":"#Device subinterface data retrieval script. Copyright Ingmar Van Glabbeek ingmar@infoblox.com\n#Licensed under Apache-2.0\n\n#This script will pull all devices of a given device group and then list the devices management ip as well as the available management ips.\n#By default it saves the output to \"deviceinterfacedump.json\"\n#Tested on NetMRI 7.3.1 and 7.3.2\n\n#Modules required:\nimport getpass\nimport requests\nimport json\nimport urllib3\nfrom requests.auth import HTTPBasicAuth\nfrom http.client import responses\nimport time\n\n#You can hardcode credentials here, it's not safe. 
Don't do it.\n#hostname = \"netmri.infoblox.com\"\n#username = \"admin\"\n#password = \"infoblox\"\n#urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)\n\ndef main():\n\n cookie_host = wapi_connect()\n #print(cookie_host)\n devicelist = getdevices(cookie_host)\n filtered_data = devicedata(devicelist)\n #uncomment next line if you want to write to console\n #print(json.dumps(filtered_data,indent=4, sort_keys=True))\n filename = open(\"deviceinterfacedump.json\",\"w\")\n filename.write(json.dumps(filtered_data,indent=4))\n filename.close()\n\n print(\"Data retrieved successfully\")\n\ndef devicedata(devicelist):\n listload = json.loads(devicelist)\n data = []\n for e in listload['rows']:\n if not e[\"if_addrs\"]:\n device = {\"DeviceID\":e[\"DeviceID\"],\"DeviceName\":e[\"DeviceName\"],\"DeviceType\":e[\"DeviceType\"],\"DeviceIPDotted\":e[\"DeviceIPDotted\"],\"Other InterfaceIP\":[\"none\"]}\n data.append(device)\n else:\n device = {\"DeviceID\": e['DeviceID'], \"DeviceName\": e[\"DeviceName\"], \"DeviceType\": e[\"DeviceType\"],\n \"DeviceIPDotted\": e[\"DeviceIPDotted\"], \"Other InterfaceIP\":[]}\n for f in e[\"if_addrs\"]:\n i=1\n interface = {\"InterfaceIP\":f[\"ifIPDotted\"], \"Interfacename\":f[\"ifName\"]}\n device[\"Other InterfaceIP\"].insert(i,interface)\n data.append(device)\n i=i+1\n dataftw=json.dumps(data)\n returndata=json.loads(dataftw)\n return returndata\n\n\ndef getdevices(cookie_host):\n if not cookie_host:\n print(\"No connection established.\")\n return 0\n #get current time\n ts = time.time()\n hostname=cookie_host[1]\n #limits number of results\n limit = input(\"Limit to this number of devices: \")\n get_url = \"https://\" + hostname + \"/api/3.3/device_groups/index\"\n response = requests.get(get_url, cookies=cookie_host[0], verify=False)\n d=response.text\n dl=json.loads(d)\n print(\"List of DeviceGroups\")\n for e in dl[\"device_groups\"]:\n dglist={\"GroupName\":e[\"GroupName\"],\"GroupID\":e[\"GroupID\"]}\n print(dglist)\n\n devicegroup = input(\"Based on the output specify the devicegroup ID by its ID: \")\n\n get_url = \"https://\" + hostname + \"/api/3.3/discovery_statuses/static/current.extjs\"\n querystring = {\"_dc\": ts, \"filename\": \"recent_activity.csv\", \"filter\": \"null\", \"limit\": limit,\n \"GroupID\": devicegroup}\n\n response = requests.get(get_url, cookies=cookie_host[0], verify=False, params=querystring)\n t=response.text\n print(\"We are fetching a list of \" + str(limit) +\n \" devices for devicegroup \" + str(devicegroup) + \".\")\n return(t)\n\n\n\ndef wapi_connect():\n hostname = input(\"Enter the NetMRI hostname or IP: \")\n username = input(\"Enter your NetMRI username: \")\n password = getpass.getpass(\"Enter your Password: \")\n https_val = input(\"Disable SSL validations?(y/n) \")\n if https_val in (\"y\", \"Y\"):\n urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)\n print(\"SSL validation disabled\")\n if https_val in (\"n\", \"N\"):\n print(\"SSL validation enabled\")\n\n login_url = \"https://\" + hostname + \"/api/3.3/device_groups/index\"\n print(\"logging in to \" + hostname)\n try:\n login_result = requests.get(\n login_url,\n auth=HTTPBasicAuth(username, password),\n timeout=5,\n verify=False)\n except requests.exceptions.ConnectTimeout as e:\n print(\"Connection time out after 5 seconds.\")\n exit(1)\n except requests.exceptions.ConnectionError as e:\n print(\"No route to host \" + hostname)\n exit(1)\n\n if has_error(login_result):\n exit(1)\n else:\n print(\"Login OK\")\n 
return(login_result.cookies,hostname)\n\n\ndef has_error(_result):\n if _result.status_code == 200:\n return 0\n elif _result.status_code == 201:\n return 0\n\n try:\n err_text = _result.json()['text']\n except KeyError as e:\n err_text = \"Response contains no error text\"\n except json.decoder.JSONDecodeError as e:\n err_text = \"No JSON Response\"\n\n # print out the HTTP response code, description, and error text\n http_code = _result.status_code\n http_desc = responses[http_code]\n print(\"HTTP Code [%3d] %s. %s\" % (http_code, http_desc, err_text))\n return 1\n\n\nif __name__ == \"__main__\":\n main()\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2270,"cells":{"commit":{"kind":"string","value":"54a8a77c75660eeae314c410685243e2b5bc59ca"},"subject":{"kind":"string","value":"add sw infer wrapper"},"repos":{"kind":"string","value":"DLTK/DLTK"},"old_file":{"kind":"string","value":"dltk/core/utils.py"},"new_file":{"kind":"string","value":"dltk/core/utils.py"},"new_contents":{"kind":"string","value":"import numpy as np\nfrom dltk.core.io.sliding_window import SlidingWindow\n\ndef sliding_window_segmentation_inference(session, ops_list, sample_dict, batch_size=1):\n \"\"\"\n\n Parameters\n ----------\n session\n ops_list\n sample_dict\n\n Returns\n -------\n\n \"\"\"\n\n # TODO: asserts\n\n pl_shape = list(sample_dict.keys()[0].get_shape().as_list())\n\n pl_bshape = pl_shape[1:-1]\n\n inp_shape = list(sample_dict.values()[0].shape)\n inp_bshape = inp_shape[1:-1]\n\n out_dummies = [np.zeros([inp_shape[0], ] + inp_bshape + [op.get_shape().as_list()[-1]]\n if len(op.get_shape().as_list()) == len(inp_shape) else []) for op in ops_list]\n\n out_dummy_counter = [np.zeros_like(o) for o in out_dummies]\n\n op_shape = list(ops_list[0].get_shape().as_list())\n op_bshape = op_shape[1:-1]\n\n out_diff = np.array(pl_bshape) - np.array(op_bshape)\n\n padding = [[0, 0]] + [[diff // 2, diff - diff // 2] for diff in out_diff] + [[0, 0]]\n\n padded_dict = {k: np.pad(v, padding, mode='constant') for k,v in sample_dict.items()}\n\n f_bshape = padded_dict.values()[0].shape[1:-1]\n\n striding = list(np.array(op_bshape) // 2) if all(out_diff == 0) else op_bshape\n\n sw = SlidingWindow(f_bshape, pl_bshape, striding=striding)\n out_sw = SlidingWindow(inp_bshape, op_bshape, striding=striding)\n\n if batch_size > 1:\n slicers = []\n out_slicers = []\n\n done = False\n while True:\n try:\n slicer = next(sw)\n out_slicer = next(out_sw)\n except StopIteration:\n done = True\n\n if batch_size == 1:\n sw_dict = {k: v[slicer] for k,v in padded_dict.items()}\n op_parts = session.run(ops_list, feed_dict=sw_dict)\n\n for idx in range(len(op_parts)):\n out_dummies[idx][out_slicer] += op_parts[idx]\n out_dummy_counter[idx][out_slicer] += 1\n else:\n slicers.append(slicer)\n out_slicers.append(out_slicer)\n if len(slicers) == batch_size or done:\n slices_dict = {k: np.concatenate([v[slicer] for slicer in slicers], 0) for k,v in padded_dict.items()}\n\n all_op_parts = session.run(ops_list, feed_dict=slices_dict)\n\n zipped_parts = zip(*[np.array_split(part, len(slicers)) for part in all_op_parts])\n\n for out_slicer, op_parts in zip(out_slicers, zipped_parts):\n for idx in range(len(op_parts)):\n out_dummies[idx][out_slicer] += op_parts[idx]\n out_dummy_counter[idx][out_slicer] += 1\n\n slicers = []\n out_slicers = []\n\n if done:\n break\n\n return [o / c for o, c in zip(out_dummies, 
out_dummy_counter)]\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2271,"cells":{"commit":{"kind":"string","value":"c262cc4cc18336257972105c1cd6c409da8ed5cd"},"subject":{"kind":"string","value":"Create mcmc.py"},"repos":{"kind":"string","value":"RonsenbergVI/trendpy,RonsenbergVI/trendpy"},"old_file":{"kind":"string","value":"mcmc.py"},"new_file":{"kind":"string","value":"mcmc.py"},"new_contents":{"kind":"string","value":"# MIT License\n\n# Copyright (c) 2017 Rene Jean Corneille\n\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n\n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nfrom numpy import zeros, reshape\nfrom scipy.stats import rv_continuous\n\n__all__ = ['Parameter','Parameters','MCMC']\n\nclass Parameter(object):\n\n\tdef __init__(self, name, distribution, size, current_value=None):\n\t\tself.name = str(name)\n\t\tself.distribution = distribution\n\t\tself.size = size\n\t\tself.current_value = current_value\n\n\t@property\n\tdef current_value(self):\n\t\treturn self.__current_value\n\n\t@current_value.setter\n\tdef current_value(self, current_value):\n\t\tself.__current_value = current_value\n\n\tdef __str__(self):\n\t\treturn \"\"\"\n\t\t\tparameter name : %s\n\t\t\tparameter distribution : %s\n\t\t\"\"\" % (self.name, self.distribution.__str__())\n\n\tdef __len__(self):\n\t\treturn 1\n\t\t\n\tdef is_multivariate(self):\n\t\treturn self.size == (1,1)\n\nclass Parameters(object):\n\n\tdef __init__(self, list={}, hierarchy=[]):\n\t\tself.list = list\n\t\tself.hierarchy = hierarchy\n\n\t@property\n\tdef parameters(self):\n\t\treturn self.__list\n\n\t@parameters.setter\n\tdef parameters(self, list):\n\t\tif not (list=={}):\n\t\t\tself.__list = list\n\t\telse:\n\t\t\tself.__list = {}\n\n\t@property\n\tdef hierarchy(self):\n\t\treturn self.__hierarchy\n\n\t@hierarchy.setter\n\tdef hierarchy(self, hierarchy):\n\t\tself.__hierarchy = hierarchy\n\n\tdef __len__(self):\n\t\treturn len(self.list)\n\n\tdef __str__(self):\n\t\tdescr = '(parameters: ----------------------- \\n'\n\t\tdescr += ', \\n'.join(['name: %s, distribution: %s, size: %s' % (str(l.name), l.distribution.__str__(), l.size) for l in self.list.values()])\n\t\tdescr += '\\n ----------------------- )'\n\t\treturn descr\n\n\tdef append(self, parameter):\n\t\tself.list[parameter.name] = parameter\n\t\tself.hierarchy.append(parameter.name)\n\t\t\nclass Distribution(rv_continuous):\n\tpass\n\nclass MCMC(object):\n\n\tdef __init__(self, data, strategy):\n\t\tself.data = 
data\n\t\tself.strategy = strategy\n\t\tself.simulations = None\n\n\tdef summary(self):\n\t\tsmry = \"\"\n\t\treturn smry\n\n\tdef distribution_parameters(self, parameter_name, *args, **kwargs):\n\t\treturn self.strategy.distribution_parameters(parameter_name, *args, **kwargs) # returns a dictionary\n\n\tdef generate(self, parameter_name):\n\t\treturn self.strategy.generate(parameter_name)\n\n\tdef output(self, burn, parameter_name):\n\t\treturn self.strategy.output(self.simulations, burn, parameter_name)\n\t\t\n\tdef define_parameters(self):\n\t\treturn self.strategy.define_parameters()\n\n\tdef initial_value(self,parameter_name):\n\t\treturn self.strategy.initial_value(parameter_name)\n\n\tdef run(self, number_simulations=100):\n\t\tself.simulations = {key : zeros((param.size[0],param.size[1],number_simulations)) for (key, param) in self.strategy.parameters.list.items()}\n\n\t\tfor name in self.strategy.parameters.hierarchy:\n\t\t\tself.strategy.parameters.list[name].current_value = self.initial_value(name)\n\t\t\t\n\t\tfor i in range(number_simulations):\n\t\t\tprint(\"== step %i ==\" % (int(i+1),))\n\t\t\trestart_step = True\n\t\t\twhile restart_step:\n\t\t\t\tfor name in self.strategy.parameters.hierarchy:\n\t\t\t\t\tprint(\"== parameter %s ==\" % name)\n\t\t\t\t\ttry:\n\t\t\t\t\t\tself.strategy.parameters.list[name].current_value = self.generate(name)\n\t\t\t\t\t\tself.simulations[name][:,:,i] = self.strategy.parameters.list[name].current_value.reshape(self.strategy.parameters.list[name].size)\n\t\t\t\t\t\trestart_step = False\n\t\t\t\t\texcept:\n\t\t\t\t\t\tprint(\"== restart step %i ==\" % i)\n\t\t\t\t\t\trestart_step = True\n\t\t\t\t\t\tbreak\n\nclass ConvergenceAnalysis(object):\n\t\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2272,"cells":{"commit":{"kind":"string","value":"4b561d710e9ad72ad94ffb1ff3ae37db668899e4"},"subject":{"kind":"string","value":"Add generate_examples script"},"repos":{"kind":"string","value":"chunfengh/seq2seq,kontact-chan/seq2seq,shashankrajput/seq2seq,chunfengh/seq2seq,chunfengh/seq2seq,chunfengh/seq2seq,kontact-chan/seq2seq,google/seq2seq,shashankrajput/seq2seq,google/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,google/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,shashankrajput/seq2seq,kontact-chan/seq2seq,google/seq2seq,liyi193328/seq2seq,kontact-chan/seq2seq,shashankrajput/seq2seq"},"old_file":{"kind":"string","value":"seq2seq/scripts/generate_examples.py"},"new_file":{"kind":"string","value":"seq2seq/scripts/generate_examples.py"},"new_contents":{"kind":"string","value":"#! 
/usr/bin/env python\n\n\"\"\"\nGenerates a TFRecords file given sequence-aligned source and target files.\n\nExample Usage:\n\npython ./generate_examples.py --source_file \\\n --target_file \\\n --output_file \n\"\"\"\n\n\n\nimport tensorflow as tf\n\ntf.flags.DEFINE_string('source_file', None,\n 'File containing content in source language.')\ntf.flags.DEFINE_string(\n 'target_file', None,\n 'File containing content in target language, parallel line by line to the'\n 'source file.')\ntf.flags.DEFINE_string('output_file', None,\n 'File to output tf.Example TFRecords.')\n\nFLAGS = tf.flags.FLAGS\n\n\ndef build_example(pair_id, source, target):\n \"\"\"Transforms pair of 'source' and 'target' strings into a tf.Example.\n\n Assumes that 'source' and 'target' are already tokenized.\n\n Args:\n pair_id: id of this pair of source and target strings.\n source: a pretokenized source string.\n target: a pretokenized target string.\n\n Returns:\n a tf.Example corresponding to the 'source' and 'target' inputs.\n \"\"\"\n pair_id = str(pair_id)\n source_tokens = source.strip().split(' ')\n target_tokens = target.strip().split(' ')\n ex = tf.train.Example()\n\n ex.features.feature['pair_id'].bytes_list.value.append(pair_id.encode('utf-8'))\n ex.features.feature['source_len'].int64_list.value.append(len(source_tokens))\n ex.features.feature['target_len'].int64_list.value.append(len(target_tokens))\n\n source_tokens = [t.encode('utf-8') for t in source_tokens]\n target_tokens = [t.encode('utf-8') for t in target_tokens]\n\n ex.features.feature['source_tokens'].bytes_list.value.extend(source_tokens)\n ex.features.feature['target_tokens'].bytes_list.value.extend(target_tokens)\n\n return ex\n\n\ndef write_tfrecords(examples, output_file):\n \"\"\"Writes a list of tf.Examples to 'output_file'.\n\n Args:\n examples: An iterator of tf.Example records\n outputfile: path to the output file\n \"\"\"\n writer = tf.python_io.TFRecordWriter(output_file)\n print('Creating TFRecords file at {}...'.format(output_file))\n for row in examples:\n writer.write(row.SerializeToString())\n writer.close()\n print('Wrote to {}'.format(output_file))\n\n\ndef generate_examples(source_file, target_file):\n \"\"\"Creates an iterator of tf.Example records given aligned source and target files.\n\n Args:\n source_file: path to file with newline-separated source strings\n target_file: path to file with newline-separated target strings\n\n Returns:\n An iterator of tf.Example objects.\n \"\"\"\n with open(source_file) as source_records:\n with open(target_file) as target_records:\n for i, (source, target) in enumerate(zip(source_records, target_records)):\n if i % 10000 == 0:\n print('Processed {} records'.format(i))\n yield build_example(i, source, target)\n\ndef main(unused_argv):\n #pylint: disable=unused-argument\n examples = generate_examples(\n FLAGS.source_file, FLAGS.target_file)\n write_tfrecords(examples, FLAGS.output_file)\n\n\nif __name__ == '__main__':\n tf.app.run()\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2273,"cells":{"commit":{"kind":"string","value":"edb28fffe19e2b0de3113b43aeb075119c9e5830"},"subject":{"kind":"string","value":"Work in progress. 
Creating new data migration."},"repos":{"kind":"string","value":"EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi"},"old_file":{"kind":"string","value":"emgapi/migrations/0019_auto_20200110_1455.py"},"new_file":{"kind":"string","value":"emgapi/migrations/0019_auto_20200110_1455.py"},"new_contents":{"kind":"string","value":"# -*- coding: utf-8 -*-\n# Generated by Django 1.11.24 on 2020-01-10 14:55\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations\n\n\ndef create_download_description(apps, schema_editor):\n DownloadDescriptionLabel = apps.get_model(\"emgapi\", \"DownloadDescriptionLabel\")\n downloads = (\n (\"Phylum level taxonomies UNITE (TSV)\", \"Phylum level taxonomies UNITE\"),\n (\"Phylum level taxonomies ITSoneDB (TSV)\", \"Phylum level taxonomies ITSoneDB\"),\n (\"Taxonomic assignments UNITE (TSV)\", \"Taxonomic assignments UNITE\"),\n (\"Taxonomic assignments ITSoneDB (TSV)\", \"Taxonomic assignments ITSoneDB\"),\n )\n _downloads = list()\n for d in downloads:\n _downloads.append(\n DownloadDescriptionLabel(\n description=d[0],\n description_label=d[1]\n )\n )\n DownloadDescriptionLabel.objects.bulk_create(_downloads)\n\n\ndef create_group_types(apps, schema_editor):\n DownloadGroupType = apps.get_model(\"emgapi\", \"DownloadGroupType\")\n group_types = (\n \"Taxonomic analysis ITS\",\n \"Taxonomic analysis ITSoneDB\",\n \"Taxonomic analysis UNITE\",\n \"Pathways and Systems\",\n # TODO: Do we need sub groups for the function and pathways\n )\n _groups = list()\n for group_type in group_types:\n _groups.append(\n DownloadGroupType(group_type=group_type)\n )\n DownloadGroupType.objects.bulk_create(_groups)\n\n\nclass Migration(migrations.Migration):\n dependencies = [\n ('emgapi', '0018_auto_20191105_1052'),\n ]\n\n operations = [\n migrations.RunPython(create_download_description),\n migrations.RunPython(create_group_types)\n ]\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"apache-2.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2274,"cells":{"commit":{"kind":"string","value":"d41274ce2a54d37c35f23c8c78de196e57667b0a"},"subject":{"kind":"string","value":"add google translate plugin"},"repos":{"kind":"string","value":"fridim/cabot,fridim/cabot,fridim/cabot,fridim/cabot,fridim/cabot,fridim/cabot"},"old_file":{"kind":"string","value":"plugins_examples/translate.py"},"new_file":{"kind":"string","value":"plugins_examples/translate.py"},"new_contents":{"kind":"string","value":"#!/usr/bin/env python\nimport sys\nimport re\nfrom googletrans import Translator\ntranslator = Translator()\n\nline = sys.stdin.readline()\nwhile line:\n match = re.search('^:([^\\s]+) PRIVMSG (#[^\\s]+) :(.+)', line)\n if not match:\n line = sys.stdin.readline()\n continue\n\n who = match.group(1)\n chan = match.group(2)\n what = match.group(3).strip().strip('\\r\\n')\n\n def reply(text):\n print(\"PRIVMSG %s :%s\" % (chan, text))\n sys.stdout.flush()\n\n if what[:10] == ':translate':\n m2 = re.search('^:translate (.*)', what)\n if not m2:\n line = sys.stdin.readline()\n continue\n try:\n reply(translator.translate(m2.group(1), dest='fr').text)\n except:\n reply('Oups!')\n elif what[:4] == ':tr ':\n m2 = re.search('^:tr (\\w+) (\\w+) (.+)', what)\n if not m2:\n line = sys.stdin.readline()\n continue\n try:\n reply(translator.translate(m2.group(3), src=m2.group(1), dest=m2.group(2)).text)\n except:\n reply('Oups!')\n line = 
sys.stdin.readline()\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2275,"cells":{"commit":{"kind":"string","value":"b450734eea74f5f3536a44ed40c006c3da13656c"},"subject":{"kind":"string","value":"Add diff.py"},"repos":{"kind":"string","value":"jhogan/commonpy,jhogan/epiphany-py"},"old_file":{"kind":"string","value":"diff.py"},"new_file":{"kind":"string","value":"diff.py"},"new_contents":{"kind":"string","value":"# vim: set et ts=4 sw=4 fdm=marker\n\"\"\"\nMIT License\n\nCopyright (c) 2016 Jesse Hogan\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\"\"\"\nfrom diff_match_patch import diff_match_patch\nfrom entities import entity\nfrom pdb import set_trace; B=set_trace\n\n# TODO Write test\nclass diff(entity):\n def __init__(self, data1, data2):\n self._data1 = data1\n self._data2 = data2\n self._ps = None\n self._dmp = None\n\n @property\n def _diff_match_patch(self):\n if not self._dmp:\n self._dmp = diff_match_patch()\n return self._dmp;\n\n @property\n def _patches(self):\n if self._ps == None:\n dmp = self._diff_match_patch\n diffs = dmp.diff_main(self._data1, self._data2)\n dmp.diff_cleanupSemantic(diffs)\n self._ps = dmp.patch_make(diffs)\n return self._ps\n\n def apply(self, data):\n return patch_apply(self._patches, data)[0]\n\n def __str__(self):\n dmp = self._diff_match_patch\n return dmp.patch_toText(self._patches) \n\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2276,"cells":{"commit":{"kind":"string","value":"176af82121da5282842fd7e77809da9780ac57a5"},"subject":{"kind":"string","value":"implement server pool."},"repos":{"kind":"string","value":"tonyseek/rsocks,tonyseek/rsocks"},"old_file":{"kind":"string","value":"rsocks/pool.py"},"new_file":{"kind":"string","value":"rsocks/pool.py"},"new_contents":{"kind":"string","value":"from __future__ import unicode_literals\n\nimport logging\nimport contextlib\n\nfrom .eventlib import GreenPool\nfrom .utils import debug\n\n\n__all__ = ['ServerPool']\n\nlogger = logging.getLogger(__name__)\nlogger.setLevel(logging.DEBUG if debug() else logging.INFO)\nlogger.addHandler(logging.StreamHandler())\n\n\nclass ServerPool(object):\n\n def __init__(self):\n self.pool = GreenPool()\n self.servers = {}\n\n @contextlib.contextmanager\n def new_server(self, name, server_class, *args, **kwargs):\n server = server_class(*args, **kwargs)\n yield server\n self.servers[name] = 
server\n\n def loop(self):\n for name, server in self.servers.items():\n logger.info('Prepared \"%s\"' % name)\n self.pool.spawn(server.loop)\n try:\n self.pool.waitall()\n except (SystemExit, KeyboardInterrupt):\n logger.info('Exit')\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2277,"cells":{"commit":{"kind":"string","value":"416d2b0ffd617c8c6e58360fefe554ad7dc3057b"},"subject":{"kind":"string","value":"add example for discovering existing connections"},"repos":{"kind":"string","value":"epage/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,PabloCastellano/telepathy-python,epage/telepathy-python,detrout/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,PabloCastellano/telepathy-python,detrout/telepathy-python,max-posedon/telepathy-python,max-posedon/telepathy-python"},"old_file":{"kind":"string","value":"examples/connections.py"},"new_file":{"kind":"string","value":"examples/connections.py"},"new_contents":{"kind":"string","value":"\n\"\"\"\nPrint out a list of existing Telepathy connections.\n\"\"\"\n\nimport dbus.glib\n\nimport telepathy\n\nprefix = 'org.freedesktop.Telepathy.Connection.'\n\nif __name__ == '__main__':\n for conn in telepathy.client.Connection.get_connections():\n conn_iface = conn[telepathy.CONN_INTERFACE]\n handle = conn_iface.GetSelfHandle()\n print conn_iface.InspectHandles(\n telepathy.CONNECTION_HANDLE_TYPE_CONTACT, [handle])[0]\n print ' Protocol:', conn_iface.GetProtocol()\n print ' Name:', conn.service_name[len(prefix):]\n print\n\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"lgpl-2.1"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2278,"cells":{"commit":{"kind":"string","value":"ff53f699ac371266791487f0b863531dd8f5236a"},"subject":{"kind":"string","value":"Add hug 'hello_world' using to be developed support for optional URLs"},"repos":{"kind":"string","value":"jean/hug,shaunstanislaus/hug,origingod/hug,philiptzou/hug,janusnic/hug,STANAPO/hug,gbn972/hug,giserh/hug,philiptzou/hug,STANAPO/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,giserh/hug,alisaifee/hug,alisaifee/hug,timothycrosley/hug,gbn972/hug,shaunstanislaus/hug,origingod/hug,timothycrosley/hug,yasoob/hug,yasoob/hug,jean/hug,janusnic/hug,MuhammadAlkarouri/hug,timothycrosley/hug"},"old_file":{"kind":"string","value":"examples/hello_world.py"},"new_file":{"kind":"string","value":"examples/hello_world.py"},"new_contents":{"kind":"string","value":"import hug\n\n\n@hug.get()\ndef hello_world():\n return \"Hello world\"\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2279,"cells":{"commit":{"kind":"string","value":"397ab61df61d5acac46cf60ede38fa928fdacd7c"},"subject":{"kind":"string","value":"Create solution.py"},"repos":{"kind":"string","value":"lilsweetcaligula/Algorithms,lilsweetcaligula/Algorithms,lilsweetcaligula/Algorithms"},"old_file":{"kind":"string","value":"data_structures/linked_list/problems/pos_num_to_linked_list/solution.py"},"new_file":{"kind":"string","value":"data_structures/linked_list/problems/pos_num_to_linked_list/solution.py"},"new_contents":{"kind":"string","value":"import LinkedList\n\n# Linked List Node inside the LinkedList module is declared as:\n#\n# class Node:\n# def __init__(self, val, nxt=None):\n# self.val = val\n# self.nxt = nxt\n#\n\n\ndef ConvertPositiveNumToLinkedList(val: int) -> 
LinkedList.Node:\n node = None\n\n while True:\n dig = val % 10\n val //= 10\n prev = LinkedList.Node(dig, node)\n node = prev\n \n if val == 0:\n break\n\n return node\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2280,"cells":{"commit":{"kind":"string","value":"724bc46c85e6ea75ac8d786f4d1706b74df8f330"},"subject":{"kind":"string","value":"Create dictid.py"},"repos":{"kind":"string","value":"diamontip/pract,diamontip/pract"},"old_file":{"kind":"string","value":"dictid.py"},"new_file":{"kind":"string","value":"dictid.py"},"new_contents":{"kind":"string","value":"a = (1,2)\nb = [1,2]\nc = {a: 1} # outcome: c= {(1,2): 1} \nd = {b: 1} # outcome: error\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2281,"cells":{"commit":{"kind":"string","value":"0fb7a5559f525ab1149ac41d4b399442f7649664"},"subject":{"kind":"string","value":"add script to show statistics (number of chunks, data volume)"},"repos":{"kind":"string","value":"HumanBrainProject/neuroglancer-scripts"},"old_file":{"kind":"string","value":"scale_stats.py"},"new_file":{"kind":"string","value":"scale_stats.py"},"new_contents":{"kind":"string","value":"#! /usr/bin/env python3\n#\n# Copyright (c) 2016, 2017, Forschungszentrum Juelich GmbH\n# Author: Yann Leprince \n#\n# This software is made available under the MIT licence, see LICENCE.txt.\n\n\nimport collections\nimport json\nimport math\nimport os\nimport os.path\nimport sys\n\nimport numpy as np\n\nSI_PREFIXES = [\n (1, \"\"),\n (1024, \"ki\"),\n (1024 * 1024, \"Mi\"),\n (1024 * 1024 * 1024, \"Gi\"),\n (1024 * 1024 * 1024 * 1024, \"Ti\"),\n (1024 * 1024 * 1024 * 1024 * 1024, \"Pi\"),\n (1024 * 1024 * 1024 * 1024 * 1024 * 1024, \"Ei\"),\n]\n\n\ndef readable(count):\n for factor, prefix in SI_PREFIXES:\n if count > 10 * factor:\n num_str = format(count / factor, \".0f\")\n else:\n num_str = format(count / factor, \".1f\")\n if len(num_str) <= 3:\n return num_str + \" \" + prefix\n # Fallback: use the last prefix\n factor, prefix = SI_PREFIXES[-1]\n return \"{:,.0f} {}\".format(count / factor, prefix)\n\n\ndef show_scales_info(info):\n total_size = 0\n total_chunks = 0\n total_directories = 0\n dtype = np.dtype(info[\"data_type\"]).newbyteorder(\"<\")\n num_channels = info[\"num_channels\"]\n for scale in info[\"scales\"]:\n scale_name = scale[\"key\"]\n size = scale[\"size\"] #np.array(scale[\"size\"], dtype=np.uintp)\n for chunk_size in scale[\"chunk_sizes\"]:\n #chunk_size = np.array(chunk_size, dtype=np.uintp)\n size_in_chunks = [(s - 1) // cs + 1 for s, cs in zip(size, chunk_size)]\n num_chunks = np.prod(size_in_chunks)\n num_directories = size_in_chunks[0] * (1 + size_in_chunks[1])\n size_bytes = np.prod(size) * dtype.itemsize * num_channels\n print(\"Scale {}, chunk size {}:\"\n \" {:,d} chunks, {:,d} directories, raw uncompressed size {}B\"\n .format(scale_name, chunk_size,\n num_chunks, num_directories, readable(size_bytes)))\n total_size += size_bytes\n total_chunks += num_chunks\n total_directories += num_directories\n print(\"---\")\n print(\"Total: {:,d} chunks, {:,d} directories, raw uncompressed size {}B\"\n .format(total_chunks, total_directories, readable(total_size)))\n\ndef show_scale_file_info(input_info_filename):\n \"\"\"Show information about a list of scales from an input JSON file\"\"\"\n with open(input_info_filename) as f:\n info = json.load(f)\n 
show_scales_info(info)\n\n\ndef parse_command_line(argv):\n \"\"\"Parse the script's command line.\"\"\"\n import argparse\n parser = argparse.ArgumentParser(\n description=\"\"\"\\\nShow information about a list of scales in Neuroglancer \"info\" JSON file format\n\"\"\")\n parser.add_argument(\"info_file\", nargs=\"?\", default=\"./info\",\n help=\"JSON file containing the information\")\n args = parser.parse_args(argv[1:])\n return args\n\n\ndef main(argv):\n \"\"\"The script's entry point.\"\"\"\n args = parse_command_line(argv)\n return show_scale_file_info(args.info_file) or 0\n\n\nif __name__ == \"__main__\":\n sys.exit(main(sys.argv))\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"mit"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2282,"cells":{"commit":{"kind":"string","value":"7d574c1f6d194df1f2b2009fb2e48fbaacaca873"},"subject":{"kind":"string","value":"Add migration for_insert_base"},"repos":{"kind":"string","value":"openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform"},"old_file":{"kind":"string","value":"oedb_datamodels/versions/6887c442bbee_insert_base.py"},"new_file":{"kind":"string","value":"oedb_datamodels/versions/6887c442bbee_insert_base.py"},"new_contents":{"kind":"string","value":"\"\"\"Add _insert_base\n\nRevision ID: 6887c442bbee\nRevises: 3886946416ba\nCreate Date: 2019-04-25 16:09:20.572057\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '6887c442bbee'\ndown_revision = '3886946416ba'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table('_insert_base',\n sa.Column('_id', sa.BigInteger(), autoincrement=True, nullable=False),\n sa.Column('_message', sa.Text(), nullable=True),\n sa.Column('_user', sa.String(length=50), nullable=True),\n sa.Column('_submitted', sa.DateTime(), server_default=sa.text('now()'), nullable=True),\n sa.Column('_autocheck', sa.Boolean(), server_default=sa.text('false'), nullable=True),\n sa.Column('_humancheck', sa.Boolean(), server_default=sa.text('false'), nullable=True),\n sa.Column('_type', sa.String(length=8), nullable=True),\n sa.Column('_applied', sa.Boolean(), server_default=sa.text('false'), nullable=True),\n sa.PrimaryKeyConstraint('_id'),\n schema='public'\n )\n\n\ndef downgrade():\n op.drop_table('_insert_base', schema='public')\n"},"old_contents":{"kind":"string","value":""},"license":{"kind":"string","value":"agpl-3.0"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2283,"cells":{"commit":{"kind":"string","value":"2ef707337adc3d0abc33ca638b2adb70a681bd12"},"subject":{"kind":"string","value":"update for new API"},"repos":{"kind":"string","value":"vighneshbirodkar/scikit-image,paalge/scikit-image,ajaybhat/scikit-image,ajaybhat/scikit-image,ofgulban/scikit-image,Hiyorimi/scikit-image,rjeli/scikit-image,rjeli/scikit-image,ofgulban/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,rjeli/scikit-image,Hiyorimi/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image"},"old_file":{"kind":"string","value":"doc/examples/filters/plot_denoise.py"},"new_file":{"kind":"string","value":"doc/examples/filters/plot_denoise.py"},"new_contents":{"kind":"string","value":"\"\"\"\n====================\nDenoising a picture\n====================\n\nIn this example, we denoise a noisy version of the picture of the astronaut\nEileen Collins using the total variation and bilateral denoising filter.\n\nThese algorithms typically produce 
\"posterized\" images with flat domains\nseparated by sharp edges. It is possible to change the degree of posterization\nby controlling the tradeoff between denoising and faithfulness to the original\nimage.\n\nTotal variation filter\n----------------------\n\nThe result of this filter is an image that has a minimal total variation norm,\nwhile being as close to the initial image as possible. The total variation is\nthe L1 norm of the gradient of the image.\n\nBilateral filter\n----------------\n\nA bilateral filter is an edge-preserving and noise reducing filter. It averages\npixels based on their spatial closeness and radiometric similarity.\n\n\"\"\"\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom skimage import data, img_as_float\nfrom skimage.restoration import denoise_tv_chambolle, denoise_bilateral\n\n\nastro = img_as_float(data.astronaut())\nastro = astro[220:300, 220:320]\n\nnoisy = astro + 0.6 * astro.std() * np.random.random(astro.shape)\nnoisy = np.clip(noisy, 0, 1)\n\nfig, ax = plt.subplots(nrows=2, ncols=3, figsize=(8, 5), sharex=True,\n sharey=True, subplot_kw={'adjustable': 'box-forced'})\n\nplt.gray()\n\nax[0, 0].imshow(noisy)\nax[0, 0].axis('off')\nax[0, 0].set_title('noisy')\nax[0, 1].imshow(denoise_tv_chambolle(noisy, weight=0.1, multichannel=True))\nax[0, 1].axis('off')\nax[0, 1].set_title('TV')\nax[0, 2].imshow(denoise_bilateral(noisy, sigma_color=0.05, sigma_spatial=15))\nax[0, 2].axis('off')\nax[0, 2].set_title('Bilateral')\n\nax[1, 0].imshow(denoise_tv_chambolle(noisy, weight=0.2, multichannel=True))\nax[1, 0].axis('off')\nax[1, 0].set_title('(more) TV')\nax[1, 1].imshow(denoise_bilateral(noisy, sigma_color=0.1, sigma_spatial=15))\nax[1, 1].axis('off')\nax[1, 1].set_title('(more) Bilateral')\nax[1, 2].imshow(astro)\nax[1, 2].axis('off')\nax[1, 2].set_title('original')\n\nfig.tight_layout()\n\nplt.show()\n"},"old_contents":{"kind":"string","value":"\"\"\"\n====================\nDenoising a picture\n====================\n\nIn this example, we denoise a noisy version of the picture of the astronaut\nEileen Collins using the total variation and bilateral denoising filter.\n\nThese algorithms typically produce \"posterized\" images with flat domains\nseparated by sharp edges. It is possible to change the degree of posterization\nby controlling the tradeoff between denoising and faithfulness to the original\nimage.\n\nTotal variation filter\n----------------------\n\nThe result of this filter is an image that has a minimal total variation norm,\nwhile being as close to the initial image as possible. The total variation is\nthe L1 norm of the gradient of the image.\n\nBilateral filter\n----------------\n\nA bilateral filter is an edge-preserving and noise reducing filter. 
It averages\npixels based on their spatial closeness and radiometric similarity.\n\n\"\"\"\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom skimage import data, img_as_float\nfrom skimage.restoration import denoise_tv_chambolle, denoise_bilateral\n\n\nastro = img_as_float(data.astronaut())\nastro = astro[220:300, 220:320]\n\nnoisy = astro + 0.6 * astro.std() * np.random.random(astro.shape)\nnoisy = np.clip(noisy, 0, 1)\n\nfig, ax = plt.subplots(nrows=2, ncols=3, figsize=(8, 5), sharex=True,\n sharey=True, subplot_kw={'adjustable': 'box-forced'})\n\nplt.gray()\n\nax[0, 0].imshow(noisy)\nax[0, 0].axis('off')\nax[0, 0].set_title('noisy')\nax[0, 1].imshow(denoise_tv_chambolle(noisy, weight=0.1, multichannel=True))\nax[0, 1].axis('off')\nax[0, 1].set_title('TV')\nax[0, 2].imshow(denoise_bilateral(noisy, sigma_range=0.05, sigma_spatial=15))\nax[0, 2].axis('off')\nax[0, 2].set_title('Bilateral')\n\nax[1, 0].imshow(denoise_tv_chambolle(noisy, weight=0.2, multichannel=True))\nax[1, 0].axis('off')\nax[1, 0].set_title('(more) TV')\nax[1, 1].imshow(denoise_bilateral(noisy, sigma_range=0.1, sigma_spatial=15))\nax[1, 1].axis('off')\nax[1, 1].set_title('(more) Bilateral')\nax[1, 2].imshow(astro)\nax[1, 2].axis('off')\nax[1, 2].set_title('original')\n\nfig.tight_layout()\n\nplt.show()\n"},"license":{"kind":"string","value":"bsd-3-clause"},"lang":{"kind":"string","value":"Python"}}},{"rowIdx":2284,"cells":{"commit":{"kind":"string","value":"9e6a016c5a59b25199426f6825b2c83571997e68"},"subject":{"kind":"string","value":"Refactor buildbot tests so that they can be used downstream."},"repos":{"kind":"string","value":"ondra-novak/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,jaruba/chromium.src,ltilve/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,hgl888/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,patrickm/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,patrickm/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.src,ChromiumWebApps/chromium,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,jaruba/chromium.src,mogoweb/chromium-crosswalk,patrickm/chromium.src,anirudhSK/chromium,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,anirudhSK/chromium,ltilve/chromium,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,Chilledheart/chromium,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,axinging/chromium-crosswalk,ChromiumWebApps/chromium,ltilve/chromium,Chilledheart/chromium,Jonekee/chromium.src,ax
[Embedded viewer page-state JSON (the remaining table rows, 2284-2299, in raw serialized form, truncated mid-row at the start; pagination metadata and session token omitted). The recoverable row summaries, as commit | subject | repos | file | license, all Python:]
(commit and subject truncated) | axinging/chromium-crosswalk and other Chromium/Crosswalk forks | build/android/buildbot/tests/bb_run_bot_test.py | bsd-3-clause
eb9f9d8bfa5ea278e1fb39c59ed660a223b1f6a9 | Add flask api app creation to init | EdwinKato/bucket-list | api/__init__.py | mit
c10eb3861daf48c13ec854bd210db5d5e1163b11 | Add LotGroupAutocomplete | 596acres/django-livinglots-lots | livinglots_lots/autocomplete_light_registry.py | agpl-3.0
2527683522394c823bc100c75f1ce4885949136e | add paths module for other modules to find paths from one place | aacanakin/glim | glim/paths.py | mit
24f21146b01ff75a244df40d1626c54883abeb1a | Add helper-lib for json object conversion and split dicts | UngaForskareStockholm/medlem2 | lib/helpers.py | bsd-3-clause
0f5c0168b257436882f837e5d521cce46a740ad6 | Add symbol translator to make utf-8 variables compilable | FInAT/FInAT | finat/greek_alphabet.py | mit
03951a227bfafb0b1017354bdbf3a1247322fc9b | Fix cycler tests | emmagordon/Axelrod and other Axelrod forks | axelrod/tests/unit/test_cycler.py | mit
ca2269c5ae568cd63253af7bc614a79d26f7f8ac | Add ns_drop_indexes command. | niwinz/needlestack | needlestack/management/commands/ns_drop_indexes.py | bsd-3-clause
b8a84e612d67f7948d6dec8c202ac8a73390f9dc | make sure all protein ids are unique in a genbank file | linsalrob/EdwardsLab | proteins/unique_protein_ids.py | mit
61fa404da3eeb3b695b12f398c27f641e1e681e2 | add codegen script for fname.pyf.src -> _fnamemodule.c | matthew-brett/scipy | tools/generate_f2pymod.py | bsd-3-clause
0f94251c7cc844042c9e3ce160d78e4d81d895ea | add log module | johnnymo87/simple-db-migrate, guilhermechapiewski/simple-db-migrate | src/log.py | apache-2.0
bc567eda01abcaf23717f5da5f494c1be46f47da | Create ValAnagram_001.py | Chasego/cod and related solution repos | leetcode/242-Valid-Anagram/ValAnagram_001.py | mit
682d6b3ca9c4a0dd49f9762ddd20ac746971e3eb | Create solution.py | lilsweetcaligula/Online-Judges | leetcode/easy/find_the_difference/py/solution.py | mit
9e128fdd5af0598a233416de5a1e8f2d3a74fdc0 | Enforce unique paths and names | jgillick/Spaces | spaces/migrations/0006_unique_space_document.py | mit
0256868a3b261e598689eebdf5ac5f939ea20a0d | add test cases for mni module | arokem/nipy and other nipy forks | lib/neuroimaging/reference/tests/test_mni.py | bsd-3-clause
d91adef072e2150edde62a49bea4eecb6a26a6ac | add sns_notify script | pyconjp/pyconjp-cron | sns_notify.py | mit
Table columns. Each row below lists commit, subject, repos, old_file, new_file, the file contents (labelled new_contents/old_contents only where both are present), license, lang:
commit: string, length 40
subject: string, length 4 to 1.73k
repos: string, length 5 to 127k
old_file: string, length 2 to 751
new_file: string, length 2 to 751
new_contents: string, length 1 to 8.98k
old_contents: string, length 0 to 6.59k (omitted from a row when empty)
license: string, 13 distinct values
lang: string, 23 distinct values
    bd0bdc543ba1e44ddc9d149fbaadd12ab051614d
    Add migrations
    unt-libraries/django-accession,unt-libraries/django-accession
    accession/migrations/0003_auto_20191101_1625.py
    accession/migrations/0003_auto_20191101_1625.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2019-11-01 16:25
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accession', '0002_auto_20191031_2139'),
    ]

    operations = [
        migrations.AlterField(
            model_name='object',
            name='object_era',
            field=models.CharField(blank=True, choices=[('Pre-1770', 'Pre-1770'), ('1770-1779', '1770-1779'), ('1780-1789', '1780-1789'), ('1790-1799', '1790-1799'), ('1800-1809', '1800-1809'), ('1810-1819', '1810-1819'), ('1820-1829', '1820-1829'), ('1830-1839', '1830-1839'), ('1840-1849', '1840-1849'), ('1850-1859', '1850-1859'), ('1860-1869', '1860-1869'), ('1870-1879', '1870-1879'), ('1880-1889', '1880-1889'), ('1890-1899', '1890-1899'), ('1900-1909', '1900-1909'), ('1910-1919', '1910-1919'), ('1920-1929', '1920-1929'), ('1930-1939', '1930-1939'), ('1940-1949', '1940-1949'), ('1950-1959', '1950-1959'), ('1960-1969', '1960-1969'), ('1970-1979', '1970-1979'), ('1980-1989', '1980-1989'), ('1990-1999', '1990-1999'), ('2000-2009', '2000-2009'), ('2010-2019', '2010-2019'), ('2020-2029', '2020-2029')], max_length=10),
        ),
    ]
    bsd-3-clause
    Python
    7b0ebe74cbaad610bb65f24cc2555d82e7d7a750
    read attachments path from settings, catch jpeg/png
    SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,gmimano/commcaretest,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq
    apps/photos/views.py
    apps/photos/views.py
new_contents:
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from rapidsms.webui.utils import render_to_response
from photos.models import Photo
import os
import settings

# default page - show all thumbnails by date
@login_required()
def recent(request, template_name="photos/list.html"):
    photos = Photo.objects.all()
    return render_to_response(request, template_name, {'photos' : photos})

# show a single photo + comments
@login_required()
def show(request, photo_id, template_name="photos/single.html"):
    p = Photo.objects.get(id=photo_id)
    return render_to_response(request, template_name, {'photo' : p})

@login_required()
def import_photos(request):
    path = settings.RAPIDSMS_APPS['receiver']['attachments_path'] # -> data/attachments

    def is_img(filename):
        return (filename.endswith('.jpg') or filename.endswith('.jpeg') or filename.endswith('.png'))

    def not_in_db_already(filename):
        # Note that there's a query for each file here - another way would be to load all existing files to a list in one operation and work with that
        # but, that might generate huge list when there are a lot of photos in the DB, and might cause data freshness issues in some edge cases
        # so, we just do n queries each time (where n is probably not too big) instead
        return (Photo.objects.filter(original_image="%s/%s" % (path, filename)).count() == 0)

    files = os.listdir(path)
    img_files = filter(is_img, files)
    new_img_files = filter(not_in_db_already, img_files)

    for f in new_img_files:
        p = Photo(name=f, original_image="%s/%s" % (path, f))
        p.save()
    return HttpResponseRedirect("/photos")

@login_required()
def populate(request):
    for i in (1,2,3):
        p = Photo(name="test image #%s" % i, original_image="apps/photos/tests/test%s.jpg" % i)
        p.save()
    return HttpResponseRedirect("/photos")
old_contents:
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from rapidsms.webui.utils import render_to_response
from photos.models import Photo
import os
import settings

# default page - show all thumbnails by date
@login_required()
def recent(request, template_name="photos/list.html"):
    photos = Photo.objects.all()
    return render_to_response(request, template_name, {'photos' : photos})

# show a single photo + comments
@login_required()
def show(request, photo_id, template_name="photos/single.html"):
    p = Photo.objects.get(id=photo_id)
    return render_to_response(request, template_name, {'photo' : p})

@login_required()
def import_photos(request):
    path = 'data/attachments' #settings.RAPIDSMS_APPS['receiver']['attachments_path']

    def is_img(filename):
        return filename.endswith('.jpg')

    def not_in_db_already(filename):
        # Note that there's a query for each file here - another way would be to load all existing files to a list in one operation and work with that
        # but, that might generate huge list when there are a lot of photos in the DB, and might cause data freshness issues in some edge cases
        # so, we just do n queries each time (where n is probably not too big) instead
        return (Photo.objects.filter(original_image="%s/%s" % (path, filename)).count() == 0)

    files = os.listdir(path)
    img_files = filter(is_img, files)
    new_img_files = filter(not_in_db_already, img_files)

    out = ''
    for f in new_img_files:
        out += "%s/%s <br/> " % (path, f)
        p = Photo(name=f, original_image="%s/%s" % (path, f))
        p.save()
    return HttpResponseRedirect("/photos")
    # return HttpResponse(out)

@login_required()
def populate(request):
    for i in (1,2,3):
        p = Photo(name="test image #%s" % i, original_image="apps/photos/tests/test%s.jpg" % i)
        p.save()
    return HttpResponseRedirect("/photos")
    bsd-3-clause
    Python
    fda4f436bbaea9215efa03648d2df8e413fb47dd
    add class loader tests
    wolcomm/rptk,wolcomm/rptk
    test/test_loader.py
    test/test_loader.py
# Copyright (c) 2018 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the Apache License version 2.0
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""rptk class loader test cases."""

from __future__ import print_function
from __future__ import unicode_literals

from helpers import default_format_classes, default_query_classes

import pytest

class_sets = (
    default_query_classes().items(),
    default_format_classes().items(),
    pytest.param([("foo", "rptk.foo.FooClass")], marks=pytest.mark.xfail),
    pytest.param(0, marks=pytest.mark.xfail)
)


class TestClassLoader(object):
    """Test cases for rptk class loader classes."""

    @pytest.mark.parametrize("class_set", class_sets)
    def test_class_loader(self, class_set):
        """Test rptk class loader."""
        from rptk.load import ClassLoader
        loader = ClassLoader(items=class_set)
        assert isinstance(loader.class_names, list)
        for name, path in class_set:
            assert name in loader.class_names
            assert name in loader.class_info
            assert loader.class_info[name]
            assert loader.get_class(name=name).__name__ in path
        assert isinstance(loader.classes, list)
        for cls in loader.classes:
            assert isinstance(cls, type)
    apache-2.0
    Python
    dd75e1c5afb05c5d46adae465947fb3f893cdf6b
    Create 7kyu_complete_the_pattern4.py
    Orange9000/Codewars,Orange9000/Codewars
    Solutions/7kyu/7kyu_complete_the_pattern4.py
    Solutions/7kyu/7kyu_complete_the_pattern4.py
def pattern(n):
    l = list(range(1, n + 1))
    return '\n'.join(''.join(map(str, l[i:])) for i in range(n))
    mit
    Python
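A quick check of the pattern() snippet above (not part of the original commit; the expected output is inferred directly from the code):

# Illustrative only: what pattern(4) evaluates to, line by line.
print(pattern(4))
# 1234
# 234
# 34
# 4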
    422b5573b72cc2014893aa15758b9d0bc61baf05
    refactor from core.py
    stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis
    Synopsis/Formatters/HTML/DeclarationStyle.py
    Synopsis/Formatters/HTML/DeclarationStyle.py
# $Id: DeclarationStyle.py,v 1.1 2003/11/15 19:55:06 stefan Exp $
#
# Copyright (C) 2000 Stephen Davies
# Copyright (C) 2000 Stefan Seefeld
# All rights reserved.
# Licensed to the public under the terms of the GNU LGPL (>= 2),
# see the file COPYING for details.
#

class Style:
    """This class just maintains a mapping from declaration to display style.
    The style is an enumeration, possible values being: SUMMARY (only display
    a summary for this declaration), DETAIL (summary and detailed info),
    INLINE (summary and detailed info, where detailed info is an inline
    version of the declaration even if it's a class, etc.)"""

    SUMMARY = 0
    DETAIL = 1
    INLINE = 2

    def __init__(self):
        self.__dict = {}

    def style_of(self, decl):
        """Returns the style of the given decl"""
        SUMMARY = self.SUMMARY
        DETAIL = self.DETAIL
        key = id(decl)
        if self.__dict.has_key(key):
            return self.__dict[key]
        if len(decl.comments()) == 0:
            # Set to summary, as this will mean no detailed section
            style = SUMMARY
        else:
            comment = decl.comments()[0]
            # Calculate the style. The default is detail
            if not comment.text():
                # No comment, don't show detail
                style = SUMMARY
            elif comment.summary() != comment.text():
                # There is more to the comment than the summary, show detail
                style = DETAIL
            else:
                # Summary == Comment, don't show detail
                style = SUMMARY
            # Always show tags
            if comment.tags():
                style = DETAIL
            # Always show enums
            if isinstance(decl, AST.Enum):
                style = DETAIL
            # Show functions if they have exceptions
            if isinstance(decl, AST.Function) and len(decl.exceptions()):
                style = DETAIL
            # Don't show detail for scopes (they have their own pages)
            if isinstance(decl, AST.Scope):
                style = SUMMARY
        self.__dict[key] = style
        return style

    __getitem__ = style_of
    lgpl-2.1
    Python
    b8cc84245ae7f3ceda0e0cd92b6b2eecb0426ee3
    add start of peg generator
    scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown
    src/mugen/parser/peg.py
    src/mugen/parser/peg.py
#!/usr/bin/env python

next_var = 0

def nextVar():
    global next_var;
    next_var += 1;
    return next_var

class Pattern:
    def __init__(self):
        pass

    def generate(self, result):
        pass

class PatternNot(Pattern):
    def __init__(self, next):
        Pattern.__init__(self)
        self.next = next

    def generate(self, result):
        my_result = "result_%d" % nextVar()
        data = """
Result %s = 0;
%s
%s = ! %s;
""" % (my_result, self.next.generate(my_result), result, my_result)
        return data

class PatternVerbatim(Pattern):
    def __init__(self, letters):
        Pattern.__init__(self)
        self.letters = letters

    def generate(self, result):
        data = """
%s = "%s";
""" % (result, self.letters)
        return data

class Rule:
    def __init__(self, name, patterns):
        self.name = name
        self.patterns = patterns

    def generate(self):
        result = "result_%d" % nextVar()
        data = """
static Result rule_%s(){
    Result %s = 0;
    %s
    return Result;
}
""" % (self.name, result, '\n'.join([pattern.generate(result) for pattern in self.patterns]))
        return data

class Peg:
    def __init__(self, start, rules):
        self.start = start
        self.rules = rules

    def generate(self):
        namespace = "Peg"
        data = """
namespace %s{
    %s
    Result main(){
        return rule_%s();
    }
}
""" % (namespace, '\n'.join([rule.generate() for rule in self.rules]), self.start)
        return data

def generate(peg):
    print peg.generate()

def test():
    rules = [
        Rule("s", [PatternNot(PatternVerbatim("hello"))]),
    ]
    peg = Peg("s", rules)
    generate(peg)

test()
    bsd-3-clause
    Python
    82f15b2dae1b23b75a019362e5925c4a3591fa92
    Create InputNeuronGroup_multiple_inputs_1.py
    ricardodeazambuja/BrianConnectUDP
    examples/InputNeuronGroup_multiple_inputs_1.py
    examples/InputNeuronGroup_multiple_inputs_1.py
'''
Example of a spike generator (only outputs spikes)

In this example spikes are generated and sent through UDP packages. At the end
of the simulation a raster plot of the spikes is created.
'''

from brian import *
import numpy

from brian_multiprocess_udp import BrianConnectUDP

number_of_neurons_total = 40
number_of_neurons_spiking = 30

def main_NeuronGroup(input_Neuron_Group, simulation_clock):
    print "main_NeuronGroup!" #DEBUG!

    simclock = simulation_clock

    delta_t=5
    random_list=numpy.random.randint(number_of_neurons_total,size=number_of_neurons_spiking)
    random_list.sort()
    spiketimes = [(i, delta_t*ms) for i in random_list]
    SpikesOut = SpikeGeneratorGroup(number_of_neurons_total, spiketimes, period=300*ms, clock=simclock) # the maximum clock of the input spikes is limited here (period)

    MSpkOut=SpikeMonitor(SpikesOut) # Spikes sent by UDP

    return ([SpikesOut],[],[MSpkOut])

def post_simulation_function(input_NG, simulation_NG, simulation_SYN, simulation_MN):
    """
    input_NG: the neuron group that receives the input spikes
    simulation_NG: the neuron groups list passed to the system by the user function (main_NeuronGroup)
    simulation_SYN: the synapses list passed to the system by the user function (main_NeuronGroup)
    simulation_MN: the monitors list passed to the system by the user function (main_NeuronGroup)

    This way it is possible to plot, save or do whatever you want with these objects after the end of the simulation!
    """
    figure()
    raster_plot(simulation_MN[0])
    title("Spikes Sent by UDP")
    show(block=True)

if __name__=="__main__":
    my_simulation = BrianConnectUDP(main_NeuronGroup, NumOfNeuronsOutput=number_of_neurons_total,
        post_simulation_function=post_simulation_function,
        output_addresses=[("127.0.0.1", 14141)], simclock_dt=5, TotalSimulationTime=10000, brian_address=0)
    cc0-1.0
    Python
    765897a05a7aae6a89bfd62d8493fb14aa16048a
    Create db_migrate.py
    ASpelling/mw-trading,ASpelling/mw-trading,ASpelling/mw-trading
    db_migrate.py
    db_migrate.py
#!venv/bin/python
import imp
from migrate.versioning import api
from app import db
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO

v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
migration = SQLALCHEMY_MIGRATE_REPO + ('/versions/%03d_migration.py' % (v+1))
tmp_module = imp.new_module('old_model')
old_model = api.create_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
exec(old_model, tmp_module.__dict__)
script = api.make_update_script_for_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, tmp_module.meta, db.metadata)
open(migration, "wt").write(script)
api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print('New migration saved as ' + migration)
print('Current database version: ' + str(v))
    apache-2.0
    Python
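The script above assumes a config module exposing the database URI and the sqlalchemy-migrate repository path. A minimal sketch of such a config (the two names are the only ones the script reads; the values are assumptions, not part of the commit):

# Hypothetical config.py; values are placeholders.
import os
basedir = os.path.abspath(os.path.dirname(__file__))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')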
    c085f9b5af73a50a86d592b3d8b02b1e8e444cde
    Create optsimulate.py
    OpenPTrack/open_ptrack,OpenPTrack/open_ptrack,chanbrown007/open_ptrack,chanbrown007/open_ptrack,chanbrown007/open_ptrack,OpenPTrack/open_ptrack
    docs/assets/optsimulate.py
    docs/assets/optsimulate.py
# OpenPTrack Sender Simulator
# Sept 13, 2015
# [email protected]

import socket, time, json, time, random

UDP_IP = "127.0.0.1"
UDP_PORT = 21234
PERIOD = .100 # how often to publish in time

# For the random walk
MAXSTEP_X = 10
MAXSTEP_Y = 10
WOBBLE_Z = 1
Z_NOMINAL = 40

# Increasing packet seq number
_SEQ = 0

# Current message format
# https://github.com/OpenPTrack/open_ptrack/wiki/Using%20The%20Data
#
#MESSAGE = '{"header":{"seq":336988,"stamp":{"sec":1441244414,"nsec":266356327},"frame_id":"world"},"tracks":[{"id":170,"x":0.740519,"y":-3.21577,"height":1.01898,"age":79.4518,"confidence":0.491777},{"id":172,"x":0.843167,"y":-3.29433,"height":1.10497,"age":29.471,"confidence":0.500193}]}'

def track( id, x, y, height, age, confidence ) :
    return {"id":id, "x":x, "y":y, "height":height, "age": age, "confidence":confidence}

def packet( tracks ) :
    global _SEQ
    _SEQ+=1
    now = float(time.time())
    sec = int(now)
    nsec = int((now-sec) * 1e9)
    header = { "seq":_SEQ, "stamp": {"sec":sec, "nsec":nsec}, "frame_id":"world" }
    return { "header":header, "tracks":tracks }

# Provide two random walkers
# More is exercise for reader ...
def walk(W):
    for w in W:
        w[0] += MAXSTEP_X * 2*(random.random() - 0.5)
        w[1] += MAXSTEP_Y * 2*(random.random() - 0.5)
        w[2] = Z_NOMINAL + WOBBLE_Z*2*(random.random()-0.5)

walkers = [ [random.randrange(200)-100, random.randrange(200)-100, Z_NOMINAL],
            [random.randrange(200)-100, random.randrange(200)-100, Z_NOMINAL] ]

print("^C to stop")
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP
try:
    while True:
        walk(walkers)
        MESSAGE = json.dumps( packet( [
            track(42, walkers[0][0], walkers[0][1], walkers[0][2], _SEQ+100+random.random(), random.random()),
            track(43, walkers[1][0], walkers[1][1], walkers[1][2], _SEQ+100+random.random(), random.random())] ) )
        # We throw some zeroes at the end to simulate OpenPTrack's current zero padding,
        # so parsers make sure to handle it. This padding should be removed soon.
        # https://github.com/OpenPTrack/open_ptrack/issues/52
        payload = bytes(MESSAGE.encode('utf-8')) + bytes(bytearray(100))
        sock.sendto(payload, (UDP_IP, UDP_PORT))
        print(payload)
        time.sleep(PERIOD)
except KeyboardInterrupt:
    pass # do cleanup here
    bsd-3-clause
    Python
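A minimal matching receiver, useful for testing the simulator above (a sketch, not part of the commit; it assumes the same host/port and strips the trailing zero padding before parsing):

# Hedged receiver sketch; assumptions: same UDP_IP/UDP_PORT as the sender,
# JSON payload followed by zero-byte padding.
import socket, json
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(("127.0.0.1", 21234))
while True:
    data, addr = sock.recvfrom(4096)
    msg = json.loads(data.rstrip(b'\x00').decode('utf-8'))
    print(msg["header"]["seq"], [t["id"] for t in msg["tracks"]])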
    7e30de04cad1070eb84c1de0c370e950b5e2c783
    Annotate zerver.views.webhooks.pingdom.
    mohsenSy/zulip,timabbott/zulip,ahmadassaf/zulip,JPJPJPOPOP/zulip,dhcrzf/zulip,dawran6/zulip,peguin40/zulip,ryanbackman/zulip,grave-w-grave/zulip,AZtheAsian/zulip,christi3k/zulip,ahmadassaf/zulip,verma-varsha/zulip,TigorC/zulip,Galexrt/zulip,arpith/zulip,Diptanshu8/zulip,rht/zulip,mohsenSy/zulip,andersk/zulip,dattatreya303/zulip,j831/zulip,zulip/zulip,christi3k/zulip,cosmicAsymmetry/zulip,showell/zulip,isht3/zulip,andersk/zulip,ryanbackman/zulip,Galexrt/zulip,sonali0901/zulip,reyha/zulip,umkay/zulip,paxapy/zulip,eeshangarg/zulip,peguin40/zulip,niftynei/zulip,showell/zulip,brainwane/zulip,samatdav/zulip,jainayush975/zulip,shubhamdhama/zulip,hackerkid/zulip,christi3k/zulip,souravbadami/zulip,rht/zulip,grave-w-grave/zulip,punchagan/zulip,souravbadami/zulip,shubhamdhama/zulip,vikas-parashar/zulip,JPJPJPOPOP/zulip,dawran6/zulip,Galexrt/zulip,jainayush975/zulip,vikas-parashar/zulip,ahmadassaf/zulip,punchagan/zulip,kou/zulip,krtkmj/zulip,dhcrzf/zulip,eeshangarg/zulip,KingxBanana/zulip,timabbott/zulip,Galexrt/zulip,samatdav/zulip,timabbott/zulip,dawran6/zulip,KingxBanana/zulip,amanharitsh123/zulip,zacps/zulip,tommyip/zulip,Juanvulcano/zulip,JPJPJPOPOP/zulip,dattatreya303/zulip,AZtheAsian/zulip,brockwhittaker/zulip,rht/zulip,punchagan/zulip,aakash-cr7/zulip,jainayush975/zulip,zacps/zulip,isht3/zulip,aakash-cr7/zulip,Jianchun1/zulip,joyhchen/zulip,j831/zulip,jrowan/zulip,SmartPeople/zulip,susansls/zulip,sup95/zulip,dhcrzf/zulip,dhcrzf/zulip,niftynei/zulip,reyha/zulip,hackerkid/zulip,kou/zulip,KingxBanana/zulip,aakash-cr7/zulip,umkay/zulip,dattatreya303/zulip,krtkmj/zulip,zulip/zulip,amyliu345/zulip,ahmadassaf/zulip,dhcrzf/zulip,jrowan/zulip,synicalsyntax/zulip,souravbadami/zulip,hackerkid/zulip,calvinleenyc/zulip,sonali0901/zulip,brainwane/zulip,JPJPJPOPOP/zulip,blaze225/zulip,isht3/zulip,aakash-cr7/zulip,eeshangarg/zulip,amanharitsh123/zulip,isht3/zulip,brockwhittaker/zulip,Diptanshu8/zulip,punchagan/zulip,sonali0901/zulip,grave-w-grave/zulip,AZtheAsian/zulip,AZtheAsian/zulip,sonali0901/zulip,AZtheAsian/zulip,cosmicAsymmetry/zulip,Diptanshu8/zulip,andersk/zulip,Diptanshu8/zulip,shubhamdhama/zulip,tommyip/zulip,vaidap/zulip,ahmadassaf/zulip,rht/zulip,PhilSk/zulip,susansls/zulip,dattatreya303/zulip,jackrzhang/zulip,eeshangarg/zulip,Jianchun1/zulip,ryanbackman/zulip,amyliu345/zulip,jackrzhang/zulip,jphilipsen05/zulip,brainwane/zulip,jackrzhang/zulip,shubhamdhama/zulip,Vallher/zulip,andersk/zulip,Vallher/zulip,shubhamdhama/zulip,synicalsyntax/zulip,arpith/zulip,aakash-cr7/zulip,samatdav/zulip,ryanbackman/zulip,joyhchen/zulip,Vallher/zulip,PhilSk/zulip,mohsenSy/zulip,krtkmj/zulip,kou/zulip,SmartPeople/zulip,hackerkid/zulip,joyhchen/zulip,jackrzhang/zulip,Juanvulcano/zulip,brainwane/zulip,rishig/zulip,showell/zulip,calvinleenyc/zulip,peguin40/zulip,sup95/zulip,souravbadami/zulip,krtkmj/zulip,paxapy/zulip,mahim97/zulip,Vallher/zulip,andersk/zulip,TigorC/zulip,tommyip/zulip,aakash-cr7/zulip,rishig/zulip,vaidap/zulip,niftynei/zulip,shubhamdhama/zulip,amyliu345/zulip,zacps/zulip,peguin40/zulip,dawran6/zulip,brainwane/zulip,Galexrt/zulip,Galexrt/zulip,joyhchen/zulip,reyha/zulip,mahim97/zulip,joyhchen/zulip,tommyip/zulip,umkay/zulip,arpith/zulip,niftynei/zulip,vikas-parashar/zulip,samatdav/zulip,sup95/zulip,mohsenSy/zulip,vaidap/zulip,ahmadassaf/zulip,jrowan/zulip,Juanvulcano/zulip,arpith/zulip,PhilSk/zulip,brainwane/zulip,zulip/zulip,krtkmj/zulip,synicalsyntax/zulip,arpith/zulip,zulip/zulip,dhcrzf/zulip,sonali0901/zulip,arpith/zulip,umkay/zulip,rht/zulip,Diptanshu8/zulip,susansls/zulip,brockwhittaker/zulip,
Jianchun1/zulip,eeshangarg/zulip,andersk/zulip,jphilipsen05/zulip,verma-varsha/zulip,verma-varsha/zulip,vaidap/zulip,synicalsyntax/zulip,zulip/zulip,SmartPeople/zulip,samatdav/zulip,j831/zulip,verma-varsha/zulip,paxapy/zulip,Vallher/zulip,SmartPeople/zulip,jrowan/zulip,vabs22/zulip,Diptanshu8/zulip,umkay/zulip,verma-varsha/zulip,krtkmj/zulip,rht/zulip,synicalsyntax/zulip,vabs22/zulip,rishig/zulip,sharmaeklavya2/zulip,synicalsyntax/zulip,jainayush975/zulip,Galexrt/zulip,paxapy/zulip,reyha/zulip,kou/zulip,vaidap/zulip,showell/zulip,jrowan/zulip,kou/zulip,shubhamdhama/zulip,cosmicAsymmetry/zulip,jphilipsen05/zulip,mahim97/zulip,mohsenSy/zulip,mahim97/zulip,sharmaeklavya2/zulip,grave-w-grave/zulip,timabbott/zulip,samatdav/zulip,calvinleenyc/zulip,umkay/zulip,blaze225/zulip,showell/zulip,Jianchun1/zulip,showell/zulip,jphilipsen05/zulip,sharmaeklavya2/zulip,amanharitsh123/zulip,andersk/zulip,PhilSk/zulip,j831/zulip,timabbott/zulip,TigorC/zulip,hackerkid/zulip,verma-varsha/zulip,TigorC/zulip,amanharitsh123/zulip,zulip/zulip,j831/zulip,reyha/zulip,rishig/zulip,amyliu345/zulip,mahim97/zulip,jackrzhang/zulip,mohsenSy/zulip,blaze225/zulip,umkay/zulip,brockwhittaker/zulip,amanharitsh123/zulip,SmartPeople/zulip,sharmaeklavya2/zulip,rishig/zulip,vabs22/zulip,hackerkid/zulip,jackrzhang/zulip,punchagan/zulip,calvinleenyc/zulip,eeshangarg/zulip,Jianchun1/zulip,cosmicAsymmetry/zulip,zacps/zulip,Jianchun1/zulip,synicalsyntax/zulip,tommyip/zulip,isht3/zulip,eeshangarg/zulip,kou/zulip,isht3/zulip,jphilipsen05/zulip,tommyip/zulip,calvinleenyc/zulip,rishig/zulip,kou/zulip,ryanbackman/zulip,timabbott/zulip,blaze225/zulip,souravbadami/zulip,JPJPJPOPOP/zulip,vabs22/zulip,amanharitsh123/zulip,punchagan/zulip,susansls/zulip,hackerkid/zulip,PhilSk/zulip,KingxBanana/zulip,tommyip/zulip,TigorC/zulip,zulip/zulip,christi3k/zulip,cosmicAsymmetry/zulip,brainwane/zulip,rht/zulip,jainayush975/zulip,dawran6/zulip,JPJPJPOPOP/zulip,zacps/zulip,niftynei/zulip,christi3k/zulip,dhcrzf/zulip,amyliu345/zulip,peguin40/zulip,showell/zulip,brockwhittaker/zulip,souravbadami/zulip,Juanvulcano/zulip,PhilSk/zulip,vaidap/zulip,vabs22/zulip,sup95/zulip,jrowan/zulip,susansls/zulip,zacps/zulip,dattatreya303/zulip,paxapy/zulip,joyhchen/zulip,timabbott/zulip,Juanvulcano/zulip,sup95/zulip,reyha/zulip,ryanbackman/zulip,Juanvulcano/zulip,jackrzhang/zulip,vabs22/zulip,KingxBanana/zulip,paxapy/zulip,dawran6/zulip,krtkmj/zulip,mahim97/zulip,j831/zulip,vikas-parashar/zulip,sharmaeklavya2/zulip,punchagan/zulip,ahmadassaf/zulip,grave-w-grave/zulip,grave-w-grave/zulip,Vallher/zulip,sonali0901/zulip,jainayush975/zulip,susansls/zulip,sharmaeklavya2/zulip,amyliu345/zulip,sup95/zulip,blaze225/zulip,rishig/zulip,Vallher/zulip,blaze225/zulip,SmartPeople/zulip,christi3k/zulip,KingxBanana/zulip,TigorC/zulip,dattatreya303/zulip,calvinleenyc/zulip,vikas-parashar/zulip,cosmicAsymmetry/zulip,jphilipsen05/zulip,peguin40/zulip,brockwhittaker/zulip,AZtheAsian/zulip,vikas-parashar/zulip,niftynei/zulip
    zerver/views/webhooks/pingdom.py
    zerver/views/webhooks/pingdom.py
new_contents:
# Webhooks for external integrations.
from __future__ import absolute_import
from typing import Any

from django.utils.translation import ugettext as _
from django.http import HttpRequest, HttpResponse

from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import Client, UserProfile

import ujson
import six


PINGDOM_SUBJECT_TEMPLATE = '{name} status.'
PINGDOM_MESSAGE_TEMPLATE = 'Service {service_url} changed its {type} status from {previous_state} to {current_state}.'
PINGDOM_MESSAGE_DESCRIPTION_TEMPLATE = 'Description: {description}.'

SUPPORTED_CHECK_TYPES = (
    'HTTP',
    'HTTP_CUSTOM',
    'HTTPS',
    'SMTP',
    'POP3',
    'IMAP',
    'PING',
    'DNS',
    'UDP',
    'PORT_TCP',
)

@api_key_only_webhook_view('Pingdom')
@has_request_variables
def api_pingdom_webhook(request, user_profile, client, payload=REQ(argument_type='body'), stream=REQ(default='pingdom')):
    # type: (HttpRequest, UserProfile, Client, Dict[str, Any], six.text_type) -> HttpResponse
    check_type = get_check_type(payload)

    if check_type in SUPPORTED_CHECK_TYPES:
        subject = get_subject_for_http_request(payload)
        body = get_body_for_http_request(payload)
    else:
        return json_error(_('Unsupported check_type: {check_type}').format(check_type=check_type))

    check_send_message(user_profile, client, 'stream', [stream], subject, body)
    return json_success()

def get_subject_for_http_request(payload):
    # type: (Dict[str, Any]) -> six.text_type
    return PINGDOM_SUBJECT_TEMPLATE.format(name=payload['check_name'])

def get_body_for_http_request(payload):
    # type: (Dict[str, Any]) -> six.text_type
    current_state = payload['current_state']
    previous_state = payload['previous_state']

    data = {
        'service_url': payload['check_params']['hostname'],
        'previous_state': previous_state,
        'current_state': current_state,
        'type': get_check_type(payload)
    }
    body = PINGDOM_MESSAGE_TEMPLATE.format(**data)

    if current_state == 'DOWN' and previous_state == 'UP':
        description = PINGDOM_MESSAGE_DESCRIPTION_TEMPLATE.format(description=payload['long_description'])
        body += '\n{description}'.format(description=description)
    return body

def get_check_type(payload):
    # type: (Dict[str, Any]) -> six.text_type
    return payload['check_type']
old_contents:
# Webhooks for external integrations.
from __future__ import absolute_import

from django.utils.translation import ugettext as _

from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view

import ujson


PINGDOM_SUBJECT_TEMPLATE = '{name} status.'
PINGDOM_MESSAGE_TEMPLATE = 'Service {service_url} changed its {type} status from {previous_state} to {current_state}.'
PINGDOM_MESSAGE_DESCRIPTION_TEMPLATE = 'Description: {description}.'

SUPPORTED_CHECK_TYPES = (
    'HTTP',
    'HTTP_CUSTOM',
    'HTTPS',
    'SMTP',
    'POP3',
    'IMAP',
    'PING',
    'DNS',
    'UDP',
    'PORT_TCP',
)

@api_key_only_webhook_view('Pingdom')
@has_request_variables
def api_pingdom_webhook(request, user_profile, client, payload=REQ(argument_type='body'), stream=REQ(default='pingdom')):
    check_type = get_check_type(payload)

    if check_type in SUPPORTED_CHECK_TYPES:
        subject = get_subject_for_http_request(payload)
        body = get_body_for_http_request(payload)
    else:
        return json_error(_('Unsupported check_type: {check_type}').format(check_type=check_type))

    check_send_message(user_profile, client, 'stream', [stream], subject, body)
    return json_success()

def get_subject_for_http_request(payload):
    return PINGDOM_SUBJECT_TEMPLATE.format(name=payload['check_name'])

def get_body_for_http_request(payload):
    current_state = payload['current_state']
    previous_state = payload['previous_state']

    data = {
        'service_url': payload['check_params']['hostname'],
        'previous_state': previous_state,
        'current_state': current_state,
        'type': get_check_type(payload)
    }
    body = PINGDOM_MESSAGE_TEMPLATE.format(**data)

    if current_state == 'DOWN' and previous_state == 'UP':
        description = PINGDOM_MESSAGE_DESCRIPTION_TEMPLATE.format(description=payload['long_description'])
        body += '\n{description}'.format(description=description)
    return body

def get_check_type(payload):
    return payload['check_type']
    apache-2.0
    Python
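To see what the handler produces, here is a worked call of get_body_for_http_request() with a minimal hand-built payload (illustrative only; it includes just the fields the code reads, and the values are invented):

# Minimal payload shaped after the fields accessed above; values are made up.
payload = {
    'check_name': 'site', 'check_type': 'HTTP',
    'check_params': {'hostname': 'example.com'},
    'previous_state': 'UP', 'current_state': 'DOWN',
    'long_description': 'HTTP 500',
}
print(get_body_for_http_request(payload))
# Service example.com changed its HTTP status from UP to DOWN.
# Description: HTTP 500.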
    d8c359b27d371f5bd66825202860a0a376a2466c
    add script to convert old plans to new ones
    jamesmarva/myria,jamesmarva/myria,uwescience/myria,uwescience/myria,bsalimi/myria,bsalimi/myria,bsalimi/myria,uwescience/myria,jamesmarva/myria
    jsonQueries/old_to_new_plan.py
    jsonQueries/old_to_new_plan.py
#!/usr/bin/env python

import json
import sys

def read_json(filename):
    with open(filename, 'r') as f:
        return json.load(f)

def uniquify_fragments(query_plan):
    fragment_inv = []
    for worker in sorted(query_plan.keys()):
        worker_plan = query_plan[worker]
        for fragment in worker_plan:
            flag = False
            for (i,(x,y)) in enumerate(fragment_inv):
                if y == fragment:
                    fragment_inv[i] = (x + [worker], y)
                    flag = True
                    break
            if flag:
                continue
            fragment_inv.append(([worker], fragment))
    return fragment_inv

def json_pretty(obj):
    return json.dumps(obj, sort_keys=True, indent=4, separators=(',', ': '))

if __name__ == "__main__":
    if len(sys.argv) != 2:
        print >> sys.stderr, "Usage: %s <old json file>" % sys.argv[0]
        sys.exit(1)

    myria_json_plan = read_json(sys.argv[1])
    fragments = []
    frags = uniquify_fragments(myria_json_plan['query_plan'])
    for (ws,ops) in frags:
        fragments.append({
            'workers' : ws,
            'operators' : ops
        })
    output = {
        'raw_datalog' : myria_json_plan['raw_datalog'],
        'logical_ra' : myria_json_plan['logical_ra'],
        'fragments' : fragments
    }
    print json_pretty(output)
    bsd-3-clause
    Python
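A worked example of uniquify_fragments(), derived from the code above rather than from the commit: two workers carrying an identical fragment collapse into a single entry listing both workers.

# Two workers, same fragment: the inverse map merges them into one entry.
plan = {"1": [["scan", "send"]], "2": [["scan", "send"]]}
print(uniquify_fragments(plan))
# [(['1', '2'], ['scan', 'send'])]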
    f71ce70330f7dea86820f1d9cdc390ea972aaeca
    add 2s-complement
    EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank
    algorithms/bit-manipulation/2s-complement.py
    algorithms/bit-manipulation/2s-complement.py
import sys

def ones(x):
    uCount = x - ((x >> 1) & 033333333333) - ((x >> 2) & 011111111111);
    return ((uCount + (uCount >> 3)) & 030707070707) % 63;

def count(x):
    if x >= 0:
        if x == 0:
            return 0
        if x % 2 == 0:
            return count(x - 1) + ones(x)
        return (x + 1) / 2 + 2 * count(x / 2)
    else:
        x += 1
        return 32 * (1 - x) - count(-x)

def solve(A, B):
    if A >= 0:
        if A == 0:
            return count(B)
        return count(B) - count(A - 1)
    else:
        if B >= 0:
            return count(A) + count(B)
        return count(A) - count(B + 1)

if __name__ == '__main__':
    T = int(sys.stdin.readline())
    for i in range(T):
        [A, B] = map(int, sys.stdin.readline().split());
        #print count(A), count(B)
        print solve(A, B)
    mit
    Python
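A brute-force cross-check of solve() (an addition for illustration, not in the commit; it assumes the problem's 32-bit two's-complement encoding and is only practical for small ranges):

# Brute force over 32-bit two's-complement bit counts.
def brute(A, B):
    return sum(bin(x & 0xFFFFFFFF).count('1') for x in range(A, B + 1))

# -2 contributes 31 ones, -1 contributes 32, 0 contributes 0.
assert solve(-2, 0) == brute(-2, 0) == 63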
    173565f7f2b9ffa548b355a0cbc8f972f1445a50
    Add test coverage for rdopkg.guess version2tag and tag2version
    redhat-openstack/rdopkg,redhat-openstack/rdopkg,openstack-packages/rdopkg,openstack-packages/rdopkg
    tests/test_guess.py
    tests/test_guess.py
from rdopkg import guess

from collections import namedtuple

import pytest

VersionTestCase = namedtuple('VersionTestCase', ('expected', 'input_data'))

data_table_good = [
    VersionTestCase(('1.2.3', None), '1.2.3'),
    VersionTestCase(('1.2.3', 'vX.Y.Z'), 'v1.2.3'),
    VersionTestCase(('1.2.3', 'VX.Y.Z'), 'V1.2.3'),
    VersionTestCase(('banana', None), 'banana'),
]

data_table_bad = [
    VersionTestCase((None, None), None),
    VersionTestCase((None, None), []),
    VersionTestCase((None, None), ()),
    VersionTestCase((None, None), ''),
    VersionTestCase((None, None), {}),
]

data_table_ugly = [
    VersionTestCase((None, None), ('foo', 'bar', 'bah')),
    VersionTestCase((None, None), ['foo', 'bar', 'bah']),
    VersionTestCase((None, None), {'foo': 'bar'}),
]


def test_table_data_good_tag2version():
    for entry in data_table_good:
        assert entry.expected == guess.tag2version(entry.input_data)


def test_table_data_bad_tag2version():
    for entry in data_table_bad:
        # Input Validation should probably return to us (None, None)
        # assert entry.expected == guess.tag2version(entry.input_data)
        assert (entry.input_data, None) == guess.tag2version(entry.input_data)


def test_table_data_ugly_tag2version():
    for entry in data_table_ugly:
        # TODO: probably should be a more specific exception
        with pytest.raises(Exception):
            guess.tag2version(entry.input_data)


def test_version2tag_simple():
    assert '1.2.3' == guess.version2tag('1.2.3')


def test_version2tag_type1():
    assert 'v1.2.3' == guess.version2tag('1.2.3', 'vX.Y.Z')


def test_version2tag_type2():
    assert 'V1.2.3' == guess.version2tag('1.2.3', 'VX.Y.Z')
    apache-2.0
    Python
    e50060ca76c667b77db433ca03ef640140831dc9
    Add migration for dagman_metrics
pegasus-isi/pegasus-metrics
    migrations/004_add_dagman_metrics.py
import migrations

conn = migrations.connect()
cur = conn.cursor()

cur.execute("""
create table dagman_metrics (
    id INTEGER UNSIGNED NOT NULL,
    ts DOUBLE,
    remote_addr VARCHAR(15),
    hostname VARCHAR(256),
    domain VARCHAR(256),
    version VARCHAR(10),
    wf_uuid VARCHAR(36),
    root_wf_uuid VARCHAR(36),
    start_time DOUBLE,
    end_time DOUBLE,
    duration FLOAT,
    exitcode SMALLINT,
    dagman_id VARCHAR(32),
    parent_dagman_id VARCHAR(32),
    jobs INTEGER,
    jobs_failed INTEGER,
    jobs_succeeded INTEGER,
    dag_jobs INTEGER,
    dag_jobs_failed INTEGER,
    dag_jobs_succeeded INTEGER,
    dag_status INTEGER,
    planner VARCHAR(1024),
    planner_version VARCHAR(10),
    rescue_dag_number INTEGER,
    total_job_time DOUBLE,
    total_jobs INTEGER,
    total_jobs_run INTEGER,
    PRIMARY KEY (id),
    FOREIGN KEY (id) REFERENCES raw_data(id) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
""")

conn.commit()
cur.close()
    apache-2.0
    Python
    dc314e50a573f3ecb2cf41d1e08df29ea991d3b6
    Add migrations versions
    SerryJohns/bucket-list
    migrations/versions/d71a3e9499ef_.py
    """empty message Revision ID: d71a3e9499ef Revises: Create Date: 2017-11-21 23:19:12.740735 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'd71a3e9499ef' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('user', sa.Column('id', sa.Integer(), nullable=False), sa.Column('username', sa.String(length=50), nullable=True), sa.Column('password_hash', sa.String(length=128), nullable=True), sa.Column('email', sa.String(length=120), nullable=True), sa.Column('surname', sa.String(length=100), nullable=False), sa.Column('first_name', sa.String(length=100), nullable=False), sa.Column('active', sa.Boolean(), nullable=True), sa.Column('date_created', sa.DateTime(), nullable=True), sa.Column('date_modified', sa.DateTime(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('email'), sa.UniqueConstraint('username') ) op.create_table('bucket_list', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=100), nullable=True), sa.Column('description', sa.Text(), nullable=True), sa.Column('interests', sa.String(length=120), nullable=True), sa.Column('date_created', sa.DateTime(), nullable=True), sa.Column('date_modified', sa.DateTime(), nullable=True), sa.Column('created_by', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['created_by'], ['user.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table('item', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=100), nullable=True), sa.Column('description', sa.Text(), nullable=True), sa.Column('status', sa.Text(), nullable=True), sa.Column('date_accomplished', sa.DateTime(), nullable=True), sa.Column('date_created', sa.DateTime(), nullable=True), sa.Column('date_modified', sa.DateTime(), nullable=True), sa.Column('bucketlists', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['bucketlists'], ['bucket_list.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_table('item') op.drop_table('bucket_list') op.drop_table('user') # ### end Alembic commands ###
    mit
    Python
    5bac311ac9da94edbd08b0b43c5214ba6b9fc1c8
    add scollable pages
    brendonparker/nuimo-py-web
    app2.py
from webkit import WebView
import pygtk
pygtk.require('2.0')
import gtk, threading, time
from nuimo import NuimoScanner, Nuimo, NuimoDelegate


class App:
    def __init__(self):
        window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        fixed = gtk.Fixed()
        views = [WebView(), WebView(), WebView()]
        width = gtk.gdk.screen_width()
        height = gtk.gdk.screen_height()
        for idx, view in enumerate(views):
            view.set_usize(width, height)
            fixed.put(views[idx], -width + (idx * width), 0)
        window.add(fixed)
        # self.loadUrls()
        window.fullscreen()
        window.show_all()
        views[0].open('http://google.com?q=page1')
        views[1].open('http://google.com?q=page2')
        views[2].open('http://google.com?q=page3')
        self.views = views
        self.fixed = fixed
        self.x = 0
        self.width = width

    def rotate(self, val):
        w = self.width
        x = self.x = (self.x - val) % (3 * w)
        for idx, view in enumerate(self.views):
            if idx == 0 and x > w:
                self.fixed.move(view, ((idx + 3) * w) - x, 0)
            else:
                self.fixed.move(view, (idx * w) - x, 0)

    def loadUrls(self):
        self.current = 0
        try:
            with open('urls.csv') as f:
                self.urls = f.readlines()
            # remove empties
            self.urls = filter(None, self.urls)
        except:
            print 'failed to read urls.csv'
            self.urls = ['http://google.com']

    def next(self):
        self.current = (self.current + 1) % len(self.urls)
        self.view.open(self.urls[self.current])

    def previous(self):
        self.current = self.current - 1
        if self.current < 0:
            self.current = len(self.urls) - 1
        self.view.open(self.urls[self.current])


class CustomNuimoDelegate(NuimoDelegate):
    def __init__(self, nuimo, app):
        NuimoDelegate.__init__(self, nuimo)
        self.app = app

    def handleRotation(self, value):
        NuimoDelegate.handleRotation(self, value)
        gtk.idle_add(app.rotate, value)


def showImagesOnNuimo(nuimo):
    nuimo.displayLedMatrix(
        " " +
        " *** " +
        " * * * " +
        " * * " +
        " *** * " +
        " * * " +
        " * * " +
        " * * " +
        " ", 2.0)
    time.sleep(2)
    nuimo.displayLedMatrix(
        " ** ** " +
        " * * * * " +
        " ***** " +
        " * * " +
        " * * * * " +
        " * * * " +
        " * * * * " +
        " * * " +
        " *** ", 20.0)


def main():
    try:
        gtk.main()
    except Exception, e:
        # '%s' % e (the original passed a tuple to print: "print '%s', e")
        print '%s' % e
    return 0


if __name__ == "__main__":
    app = App()

    def nuimo_process():
        def foundDevice(addr):
            print 'found device: ' + addr
            nuimo = Nuimo(addr)
            nuimo.set_delegate(CustomNuimoDelegate(nuimo, app))
            nuimo.connect()
            showImagesOnNuimo(nuimo)
            while True:
                nuimo.waitForNotifications()

        while True:
            try:
                NuimoScanner().start(foundDevice)
            except Exception, e:
                print 'failed to connect to nuimo: %s' % e
            time.sleep(5)

    thread = threading.Thread(target=nuimo_process)
    thread.daemon = True
    thread.start()
    main()
    mit
    Python
    4933e4ca107516a667ae3449337746bf7e002cc2
    Create bkvm.py
    rmx-tools/rmx-internals
    bkvm.py
#!/usr/bin/python
import commands, time


def prepareTarget():
    print "prepare backup Target"
    print "---------------------"
    cmd = "mount -t cifs //10.0.0.9/public/BK\ VM\ XEN -o username=xxx,password=yyy /bak/"
    output = commands.getoutput(cmd)
    cmd = "ls -lht --time-style=\"long-iso\" /bak/"
    output = commands.getoutput(cmd)
    print output
    print "..."


def releaseTarget():
    print "release backup Target"
    print "---------------------"
    cmd = "ls -lht --time-style=\"long-iso\" /bak/"
    output = commands.getoutput(cmd)
    print output
    cmd = "umount /bak/"
    output = commands.getoutput(cmd)
    print "..."


def get_backup_vms():
    result = []
    cmd = "xe vm-list is-control-domain=false is-a-snapshot=false power-state=running"
    output = commands.getoutput(cmd)
    for vm in output.split("\n\n\n"):
        lines = vm.splitlines()
        uuid = lines[0].split(":")[1][1:]
        name = lines[1].split(":")[1][1:]
        result += [(uuid, name)]
    return result


def backup_vm(uuid, filename, timestamp):
    cmd = "xe vm-snapshot uuid=" + uuid + " new-name-label=" + timestamp
    snapshot_uuid = commands.getoutput(cmd)
    cmd = "xe template-param-set is-a-template=false ha-always-run=false uuid="
    cmd = cmd + snapshot_uuid
    commands.getoutput(cmd)
    cmd = "rm " + filename + ".tmp"
    commands.getoutput(cmd)
    cmd = "xe vm-export vm=" + snapshot_uuid + " filename=" + filename + ".tmp"
    (status, output) = commands.getstatusoutput(cmd)
    if status == 0:
        cmd = "rm " + filename + " ; mv " + filename + ".tmp" + " " + filename
        commands.getoutput(cmd)
    else:
        print "Error"
        print output
    cmd = "xe vm-uninstall uuid=" + snapshot_uuid + " force=true"
    commands.getoutput(cmd)


prepareTarget()

print "Backup Running VMs"
print "------------------"
for (uuid, name) in get_backup_vms():
    timestamp = time.strftime("%Y%m%d-%H%M", time.gmtime())
    # filename = "\"/bak/" + timestamp + " " + name + ".xva\""
    filename = "\"/bak/" + name + ".xva\""
    print timestamp, uuid, name, " to ", filename
    backup_vm(uuid, filename, timestamp)
print "..."
releaseTarget()
    apache-2.0
    Python
    e050d9ce4fb4d63ec7857f581033258f87c805b0
    Create pyPdfMerger.py
johnhimics/automations
    pyPdfMerger.py
# -*- coding: utf-8 -*-
"""
TITLE: pyPdfMerger.py
AUTHOR: John Himics
EMAIL: [email protected]
TIMEZONE: EST
VERSION: 0

DESCRIPTION: Merges pdf files together

DEPENDANCIES: PyPDF2
"""

from PyPDF2 import PdfFileMerger

# Global Variables
merger = PdfFileMerger()

# Methods

# Program starts here
if __name__ == "__main__":
    # Raw strings keep the Windows paths intact: in a plain literal,
    # "\1050LF" would be read as the octal escape "\105" followed by "0LF".
    input1 = open(r"C:\PFile\@ActiveProjects\1050LF Yeild Issues\Emails\All emails 11-18-13 2.pdf", "rb")
    input2 = open(r"C:\PFile\@ActiveProjects\1050LF Yeild Issues\Emails\Wade 343005 [compatibility mode].pdf", "rb")
    input3 = open(r"C:\PFile\@ActiveProjects\1050LF Yeild Issues\Emails\1050LF Mill Mix MDR.pdf", "rb")

    # add the first 3 pages of input1 document to output
    # merger.append(fileobj=input1, pages=(0, 3))

    # insert the first page of input2 into the output beginning after the second page
    # merger.merge(position=2, fileobj=input2, pages=(0, 1))

    # append entire input3 document to the end of the output document
    merger.append(input1)
    merger.append(input2)
    merger.append(input3)

    # Write to an output PDF document
    output = open(r"C:\PFile\@ActiveProjects\1050LF Yeild Issues\Emails\document-output.pdf", "wb")
    merger.write(output)
    mit
    Python
    62e65ae978b703b6af0b594e958e79d467e83421
    add 63
ericdahl/project-euler
    python/p063.py
def g(power):
    count = 0
    i = 1
    min = 10 ** (power - 1)
    max = 10 ** power - 1
    while True:
        result = i ** power
        if result >= min:
            if result <= max:
                count += 1
            else:
                break
        i += 1
    return count


count = 0
for i in xrange(1, 1000):
    current = g(i)
    if current > 0:
        count += current
    else:
        break

print count
    bsd-3-clause
    Python
    600bf1bbce7db5f62d55537a33d4586fa2892d8a
    Create conf.py
jcketz/PIGAL
    conf.py
    #OK
    mit
    Python
    66c8c6f587c49f587901cf6a9cf7e122d110d668
    Add migration to encrypt secrets
jobscore/sync-engine
    migrations/versions/3bac7f8ccfdb_encrypt_secrets.py
    """encrypt_secrets Revision ID: 3bac7f8ccfdb Revises: 291237183b82 Create Date: 2019-01-14 17:35:58.872052 """ # revision identifiers, used by Alembic. revision = '3bac7f8ccfdb' down_revision = '291237183b82' from alembic import op, context import sqlalchemy as sa # def upgrade(): # op.add_column('secret', # sa.Column('secret', sa.String(length=512), nullable=True)) # import nacl.secret # import nacl.utils # from inbox.ignition import engine, engine_manager # from inbox.models.session import session_scope # from inbox.config import config # print engine_manager.engines # _engine = engine_manager.engines[0] # Base = sa.ext.declarative.declarative_base() # Base.metadata.reflect(_engine) # key = config.get_required('SECRET_ENCRYPTION_KEY') # class Secret(Base): # __table__ = Base.metadata.tables['secret'] # with session_scope(0, versioned=False) as db_session: # secrets = db_session.query(Secret).filter( # Secret.encryption_scheme == 0, # Secret._secret.isnot(None)).order_by(Secret.id).all() # for s in secrets: # unencrypted = s._secret # nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE) # s.secret = nacl.secret.SecretBox( # key=key, # encoder=nacl.encoding.HexEncoder # ).encrypt( # plaintext=unencrypted, # nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE) # ) # # Picked arbitrarily # # s.acl_id = 0 # # s.type = 0 # db_session.add(s) # db_session.commit() # op.drop_column('secret', '_secret') def upgrade(): from inbox.config import config import nacl.secret import nacl.utils from inbox.ignition import engine_manager from inbox.models.session import session_scope shard_id = int(context.get_x_argument(as_dictionary=True).get('shard_id')) engine = engine_manager.engines[shard_id] Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) class Secret(Base): __table__ = Base.metadata.tables['secret'] class GenericAccount(Base): __table__ = Base.metadata.tables['genericaccount'] with session_scope(shard_id, versioned=False) as db_session: secrets = db_session.query(Secret).filter( Secret._secret.isnot(None), Secret.encryption_scheme == 0).all() # Join on the genericaccount and optionally easaccount tables to # determine which secrets should have type 'password'. generic_query = db_session.query(Secret.id).join( GenericAccount, Secret.id == GenericAccount.password_id) password_secrets = [id_ for id_, in generic_query] if engine.has_table('easaccount'): class EASAccount(Base): __table__ = Base.metadata.tables['easaccount'] eas_query = db_session.query(Secret.id).join( EASAccount).filter(Secret.id == EASAccount.password_id) password_secrets.extend([id_ for id_, in eas_query]) for s in secrets: plain = s._secret.encode('utf-8') if isinstance(s._secret, unicode) \ else s._secret if config.get_required('ENCRYPT_SECRETS'): s._secret = nacl.secret.SecretBox( key=config.get_required('SECRET_ENCRYPTION_KEY'), encoder=nacl.encoding.HexEncoder ).encrypt( plaintext=plain, nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)) # 1 is EncryptionScheme.SECRETBOX_WITH_STATIC_KEY s.encryption_scheme = 1 else: s._secret = plain if s.id in password_secrets: s.type = 'password' else: s.type = 'token' db_session.add(s) db_session.commit() def downgrade(): pass
    agpl-3.0
    Python
    45cb6df45df84cb9ae85fc8aa15710bde6a15bad
    Add create image functional negative tests
rahulunair/nova,mahak/nova,vmturbo/nova,klmitch/nova,hanlind/nova,openstack/nova,mikalstill/nova,jianghuaw/nova,Juniper/nova,rajalokan/nova,gooddata/openstack-nova,phenoxim/nova
    nova/tests/functional/test_images.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from nova.tests.functional.api import client
from nova.tests.functional import test_servers


class ImagesTest(test_servers.ServersTestBase):

    def test_create_images_negative_invalid_state(self):
        # Create server
        server = self._build_minimal_create_server_request()
        created_server = self.api.post_server({"server": server})
        server_id = created_server['id']
        found_server = self._wait_for_state_change(created_server, 'BUILD')
        self.assertEqual('ACTIVE', found_server['status'])

        # Create image
        name = 'Snapshot 1'
        self.api.post_server_action(
            server_id, {'createImage': {'name': name}})
        self.assertEqual('ACTIVE', found_server['status'])
        # Confirm that the image was created
        images = self.api.get_images(detail=False)
        image_map = {image['name']: image for image in images}
        found_image = image_map.get(name)
        self.assertTrue(found_image)

        # Change server status from ACTIVE to SHELVED for negative test
        self.flags(shelved_offload_time=-1)
        self.api.post_server_action(server_id, {'shelve': {}})
        found_server = self._wait_for_state_change(found_server, 'ACTIVE')
        self.assertEqual('SHELVED', found_server['status'])

        # Create image in SHELVED (not ACTIVE, etc.)
        name = 'Snapshot 2'
        ex = self.assertRaises(client.OpenStackApiException,
                               self.api.post_server_action,
                               server_id,
                               {'createImage': {'name': name}})
        self.assertEqual(409, ex.response.status_code)
        self.assertEqual('SHELVED', found_server['status'])
        # Confirm that the image was not created
        images = self.api.get_images(detail=False)
        image_map = {image['name']: image for image in images}
        found_image = image_map.get(name)
        self.assertFalse(found_image)

        # Cleanup
        self._delete_server(server_id)
    apache-2.0
    Python
    18e2263a636e97519272a21562cbba4b978fcf49
    Create EmailForm
alchermd/headlines
    headlines/forms.py
from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, SubmitField
from wtforms.validators import DataRequired, Email


class EmailForm(FlaskForm):
    """ Form used to submit messages to the admin. """
    name = StringField('Name')
    reply_to = StringField('Email', validators=[Email(), DataRequired()])
    message = TextAreaField('Message', validators=[DataRequired()])
    submit = SubmitField('Submit')
    mit
    Python
    61b21d1ec14e0be683f8da2b92b3ca2aa9fdcf59
    add sample for api caller
inventree/InvenTree,SchrodingersGat/InvenTree
    InvenTree/plugin/samples/integration/api_caller.py
    """ Sample plugin for calling an external API """ from django.utils.translation import ugettext_lazy as _ from plugin import IntegrationPluginBase from plugin.mixins import APICallMixin, SettingsMixin class SampleApiCallerPlugin(APICallMixin, SettingsMixin, IntegrationPluginBase): """ A small api call sample """ PLUGIN_NAME = "Sample API Caller" SETTINGS = { 'API_TOKEN': { 'name': 'API Token', 'protected': True, }, 'API_URL': { 'name': 'External URL', 'description': 'Where is your API located?', 'default': 'https://reqres.in', }, } API_URL_SETTING = 'API_URL' API_TOKEN_SETTING = 'API_TOKEN' def get_external_url(self): """ returns data from the sample endpoint """ return self.api_call('api/users/2')
    mit
    Python
    1a68a1a461a66c4a4aaf3a19a607ab64475cb05c
    Create simpleExamples.py
    uchouinard/MechDSO
    simpleExamples.py
import DSM as dsmx
import random as rnd
import copy


def example1():
    myDSM = dsmx.DSM('example')

    ## adding components
    myDSM.addComponent(['c1'])
    myDSM.addComponent(['c2'])
    myDSM.addComponent(['c3'])
    # myDSM.display()
    print "--------"

    ## adding relations between existing components
    myDSM.addRelation(['c1'], ['c2'], [1])
    myDSM.addRelation(['c3'], ['c1'], [1])
    myDSM.addRelation(['c2'], ['c3'], [1])
    myDSM.display()
    print "--------"

    ## adding relations with non existing elements
    myDSM.addRelation(['c4'], ['c5'], [1.0])
    myDSM.display()

    # using pandas for better visualisation
    myDSM.dispPDFrame()


def example2():
    ### simple examples: un-directional dsm
    myDSMU = dsmx.DSM('example undirectional', 'simple', 'no')

    ## adding components
    myDSMU.addComponent(['c1'])
    myDSMU.addComponent(['c2'])
    myDSMU.addComponent(['c3'])
    # myDSMU.display()
    print "--------"

    ## adding relations between existing components
    myDSMU.addRelation(['c1'], ['c2'], [1])
    myDSMU.addRelation(['c3'], ['c1'], [1])
    myDSMU.addRelation(['c2'], ['c3'], [1])
    myDSMU.display()
    print "--------"

    ## adding relations with non existing elements
    myDSMU.addRelation(['c4'], ['c5'], [1.0])
    myDSMU.display()


def example3():
    ### simple examples for array inputs
    myDSM = dsmx.DSM('example array')

    # print 'creating a list of elements'
    myList = list(range(0, 10))
    # print myList

    ## adding components
    myDSM.addComponent(myList)

    # print 'creating two shuffled lists'
    rnd.shuffle(myList)
    myList1 = copy.copy(myList)
    rnd.shuffle(myList)
    myList2 = copy.copy(myList)
    # print myList1
    # print myList2
    # print "--------"
    # myDSM.display()
    print "--------"

    ## adding relations between existing components
    myDSM.addRelation(myList1, myList2, [1.0] * len(myList))
    myDSM.display()
    print "--------"

    ## adding relations with non existing elements
    # using pandas for better visualisation
    myDSM.dispPDFrame()


def example4():
    ## Example using Interactions
    ## Based on interactions of Pimmler and Eppinger (1994)
    ## http://web.mit.edu/eppinger/www/pdf/Pimmler_DTM1994.pdf
    ## required     =  2
    ## desired      =  1
    ## indifferent  =  0 (default value)
    ## undesired    = -1
    ## detrimental  = -2
    ##
    ## create a dict of format [ S E
    ##                           I M ]
    ###########################################################
    myDSM2 = dsmx.DSM(name='example 2', dsmType='interactions')

    # adding components
    myDSM2.addComponent(['c1'])
    myDSM2.addComponent(['c2'])
    myDSM2.addComponent(['c3'])
    # myDSM2.display()
    print "--------"

    ## adding relations between existing components
    # using complete interaction list
    myDSM2.addRelation(['c1'], ['c2'], [{'s': 1, 'e': 0, 'i': 0, 'm': -2}])
    myDSM2.addRelation(['c3'], ['c1'], [{'s': 0, 'e': 1, 'i': 1, 'm': 0}])

    # one interaction at a time
    myDSM2.addRelation(['c2'], ['c3'], [{'s': 2}])
    myDSM2.addRelation(['c2'], ['c3'], [{'e': -1}])
    myDSM2.addRelation(['c2'], ['c3'], [{'i': 0}])
    myDSM2.addRelation(['c2'], ['c3'], [{'m': -1}])

    # using lists of components and interactions, and new components
    myDSM2.addRelation(['c4', 'c6'], ['c5', 'c4'],
                       [{'s': 1, 'e': 1, 'i': 1, 'm': 1},
                        {'s': -1, 'e': 1, 'i': -1, 'm': -2}])
    myDSM2.display()
    print "--------"
    myDSM2.dispPDFrame()
    mit
    Python
    d5fcaf05d100d3fe709b34b8f6b839736773a130
    Create dict.py
    Myselfminer/nCrypt
    dict.py
import random

a = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n",
     "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"]


def create():
    dictionary = open("dictionary.py", "w")
    tablewenn = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l",
                 "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x",
                 "y", "z", " "]
    tablewennupper = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K",
                      "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V",
                      "W", "X", "Y", "Z"]
    tabledann = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l",
                 "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x",
                 "y", "z", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J",
                 "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V",
                 "W", "X", "Y", "Z", " "]
    dictionary.write("def ver(letter):\n")
    entkeys = []
    for i in tablewenn:
        returning = random.choice(tabledann)
        tabledann.remove(returning)
        # The written branches need real indentation so that the generated
        # dictionary.py module is valid Python.
        dictionary.write("    if letter == '" + i + "':\n        return '" + returning + "'\n")
        entkeys.append([returning, i])
    for i in tablewennupper:
        returning = random.choice(tabledann)
        tabledann.remove(returning)
        dictionary.write("    if letter == '" + i + "':\n        return '" + returning + "'\n")
        entkeys.append([returning, i])
    dictionary.write("    else:\n        return letter\n")
    dictionary.write("def ent(letter):\n")
    for i in entkeys:
        dictionary.write("    if letter == '" + i[0] + "':\n        return '" + i[1] + "'\n")
    dictionary.write("    else:\n        return letter")
    dictionary.close()  # flush the generated module to disk


def debug():
    pass
    apache-2.0
    Python
    2fba29b90156e844d7d61a15c9ad9c37e2b5dfe2
    load template
cathywu/flow
    examples/aimsun/load_template.py
    """ Load an already existing Aimsun template and run the simulation """ from flow.core.experiment import Experiment from flow.core.params import AimsunParams, EnvParams, NetParams from flow.core.params import VehicleParams from flow.envs import TestEnv from flow.scenarios.loop import Scenario from flow.controllers.rlcontroller import RLController sim_params = AimsunParams( sim_step=0.1, render=True, emission_path='data', subnetwork_name="Subnetwork 8028981") env_params = EnvParams() vehicles = VehicleParams() vehicles.add( veh_id="rl", acceleration_controller=(RLController, {}), num_vehicles=22) scenario = Scenario( name="test", vehicles=vehicles, net_params=NetParams(template="/Users/nathan/internship/I-210Pasadena/I-210subnetwork.ang") ) env = TestEnv(env_params, sim_params, scenario, simulator='aimsun') exp = Experiment(env) exp.run(1, 3000)
    mit
    Python
    c6f6278c1915ef90e8825f94cc33a4dea4124722
    Add http directory listing with content display
dgengtek/scripts
    network/http_server_cat.py
#!/bin/env python3
import http.server
import string
import click
import pathlib
import urllib.parse
import os


@click.command()
# type=int so the TCP port is an integer rather than the raw CLI string
@click.argument("port", required=False, type=int)
@click.option("-s", "--server", default="0.0.0.0")
def main(port, server):
    if not port:
        port = 8888
    http_server = http.server.HTTPServer((server, port), PostHandler)
    print('Starting server on {0}:{1}, use <Ctrl-C> to stop'.format(
        server, port))
    http_server.serve_forever()


class PostHandler(http.server.BaseHTTPRequestHandler):
    cwd = pathlib.Path(".")

    def do_GET(self):
        body_file_cat = string.Template("$content")
        body_dir_list = string.Template("""
<h1>Directory listing for $cwd</h1>
<ul>
$items
</ul>
""")
        page = string.Template("""<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Directory listing for $cwd</title>
</head>
<body>
$body
</body>
</html>
""")
        path = urllib.parse.urlparse(self.path)
        fs_path = pathlib.Path("{}{}".format(self.cwd, path.path))
        prefix_ref = "{}/".format(path.path)
        if fs_path.is_file():
            body = body_file_cat
            content = ""
            with fs_path.open() as f:
                content = "".join(f.readlines())
            content = "<pre>{}</pre>".format(content)
            body = body.substitute(content=content)
        else:
            body = body_dir_list
            items = list()
            item_template = string.Template(
                '<li><a href="$item_path">$item_name</a></li>')
            for p in fs_path.iterdir():
                item_path = urllib.parse.urljoin(prefix_ref, p.name)
                item_name = p.name
                if os.path.isdir(p):
                    item_name = "{}/".format(item_name)
                items.append(item_template.substitute(
                    item_path=item_path, item_name=item_name))
            body = body.substitute(cwd=fs_path, items="\n".join(items))
        page = page.substitute(cwd=fs_path, body=body)
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        self.wfile.write(page.encode("UTF-8"))


if __name__ == '__main__':
    main()
    mit
    Python
    8fa81263cfcc63f6bf22ed2ad50103f91bc43b21
    Create hira.py
    tomohiko/8946
    hira.py
#coding:utf-8
import hashlib

start = ord(u'あ')
end = ord(u'ん')

hira = []
print "Create hiragana"
for i in range(start, end + 1, 1):
    hira.append(unichr(i).encode('utf-8'))

num = len(hira)
for i4 in range(num):
    for i3 in range(num):
        for i2 in range(num):
            for i1 in range(num):
                msg = hira[i1] + hira[i2] + hira[i3] + hira[i4]
                print msg,
                print hashlib.md5(msg).hexdigest()
    apache-2.0
    Python
    92bc1ad22b6147f61ef4b51b16e115109bc04596
    add build.gyp
256481788jianghao/opengl_test
    build.gyp
{
  'targets': [
    {
      'target_name': 'start_first',
      'type': 'executable',
      'dependencies': [],
      'defines': [],
      'include_dirs': [],
      'sources': [
        'start_first/opengl_first.c',
      ],
      'libraries': [
        '-lGLU -lGL -lglut'
      ],
      'conditions': []
    }
  ],
}
    apache-2.0
    Python
    45a0b65106f665872f14780e93ab9f09e65bbce3
    add genRandomGraph.py
zhfkt/ComplexCi
    ComplexCiPython/genRandomGraph.py
import networkx
import sys

if len(sys.argv) < 2:
    print("python genRandomGraph.py [output folder]")
    input()
    sys.exit(0)

outputPath = sys.argv[1]

G = networkx.erdos_renyi_graph(100000, 3 / 100000)
networkx.write_edgelist(G, outputPath + "/genRandomGraph.csv",
                        data=False, delimiter=',')
    mit
    Python
    3b15fb1d43bad6d6cf2112538d1de8c1710d0272
    add test for within_page_range
ScorpionResponse/freelancefinder
    freelancefinder/freelancefinder/tests/test_within_page_range_templatetag.py
    """Test the within_page_range function.""" from ..templatetags.within_page_range import within_filter def test_in_range_above(): """One page above current should be displayed.""" test_page = 5 current_page = 4 result = within_filter(test_page, current_page) assert result def test_in_range_below(): """One page below current should be displayed.""" test_page = 3 current_page = 4 result = within_filter(test_page, current_page) assert result def test_out_of_range_above(): """20 pages above current should not be displayed.""" test_page = 74 current_page = 54 result = within_filter(test_page, current_page) assert not result def test_out_of_range_below(): """20 pages below current should not be displayed.""" test_page = 34 current_page = 54 result = within_filter(test_page, current_page) assert not result
    bsd-3-clause
    Python
    0c315f766b31c105c60b39746db977d6702955ca
    Remove unneeded model attributes
manhhomienbienthuy/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,python/pythondotorg
    successstories/views.py
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.utils.decorators import method_decorator
from django.views.generic import CreateView, DetailView, ListView

from honeypot.decorators import check_honeypot

from .forms import StoryForm
from .models import Story, StoryCategory


class ContextMixin:
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx['category_list'] = StoryCategory.objects.all()
        return ctx


class StoryCreate(ContextMixin, CreateView):
    model = Story
    form_class = StoryForm
    template_name = 'successstories/story_form.html'
    success_message = (
        'Your success story submission has been recorded. '
        'It will be reviewed by the PSF staff and published.'
    )

    @method_decorator(check_honeypot)
    def dispatch(self, *args, **kwargs):
        return super().dispatch(*args, **kwargs)

    def get_success_url(self):
        return reverse('success_story_create')

    def form_valid(self, form):
        messages.add_message(self.request, messages.SUCCESS, self.success_message)
        return super().form_valid(form)


class StoryDetail(ContextMixin, DetailView):
    template_name = 'successstories/story_detail.html'
    context_object_name = 'story'

    def get_queryset(self):
        if self.request.user.is_staff:
            return Story.objects.select_related()
        return Story.objects.select_related().published()


class StoryList(ListView):
    template_name = 'successstories/story_list.html'
    context_object_name = 'stories'

    def get_queryset(self):
        return Story.objects.select_related().published()


class StoryListCategory(ContextMixin, DetailView):
    model = StoryCategory
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.utils.decorators import method_decorator
from django.views.generic import CreateView, DetailView, ListView

from honeypot.decorators import check_honeypot

from .forms import StoryForm
from .models import Story, StoryCategory


class ContextMixin:
    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx['category_list'] = StoryCategory.objects.all()
        return ctx


class StoryCreate(ContextMixin, CreateView):
    model = Story
    form_class = StoryForm
    template_name = 'successstories/story_form.html'
    success_message = (
        'Your success story submission has been recorded. '
        'It will be reviewed by the PSF staff and published.'
    )

    @method_decorator(check_honeypot)
    def dispatch(self, *args, **kwargs):
        return super().dispatch(*args, **kwargs)

    def get_success_url(self):
        return reverse('success_story_create')

    def form_valid(self, form):
        messages.add_message(self.request, messages.SUCCESS, self.success_message)
        return super().form_valid(form)

    model = Story


class StoryDetail(ContextMixin, DetailView):
    template_name = 'successstories/story_detail.html'
    context_object_name = 'story'

    def get_queryset(self):
        if self.request.user.is_staff:
            return Story.objects.select_related()
        return Story.objects.select_related().published()


class StoryList(ListView):
    model = Story
    template_name = 'successstories/story_list.html'
    context_object_name = 'stories'

    def get_queryset(self):
        return Story.objects.select_related().published()


class StoryListCategory(ContextMixin, DetailView):
    model = StoryCategory
    apache-2.0
    Python
    9abb8108f62451fb993a398c8165a4605e40ec4a
    Add tests for JSONPResponseMiddleware
Code4SA/mapit,chris48s/mapit,Sinar/mapit,opencorato/mapit
    mapit/tests/test_middleware.py
from django.test import TestCase
from django.test.client import RequestFactory
from django.http import HttpResponse, HttpResponsePermanentRedirect

from ..middleware import JSONPMiddleware


class JSONPMiddlewareTest(TestCase):
    def setUp(self):
        self.middleware = JSONPMiddleware()
        self.factory = RequestFactory()

    def test_process_response_ignores_redirects(self):
        # HttpResponsePermanentRedirect is a 301; the original test name
        # called it a 302, and the middleware should skip any redirect.
        request = self.factory.get("/dummy_url", {"callback": "xyz"})
        response = HttpResponsePermanentRedirect("/new_url")
        middleware_response = self.middleware.process_response(request, response)
        self.assertEqual(middleware_response, response)

    def test_process_response_uses_callback(self):
        request = self.factory.get("/dummy_url", {"callback": "xyz"})
        response = HttpResponse(content="blah")
        middleware_response = self.middleware.process_response(request, response)
        self.assertEqual(middleware_response.content, u'xyz(blah)')

    def test_process_response_ignores_requests_without_callback(self):
        request = self.factory.get("/dummy_url")
        response = HttpResponse(content="blah")
        middleware_response = self.middleware.process_response(request, response)
        self.assertEqual(middleware_response, response)

    def test_process_response_callback_allowed_characters(self):
        request = self.factory.get("/dummy_url", {"callback": "xyz123_$."})
        response = HttpResponse(content="blah")
        middleware_response = self.middleware.process_response(request, response)
        self.assertEqual(middleware_response.content, u'xyz123_$.(blah)')

        # Try with a character not allowed in the callback
        request = self.factory.get("/dummy_url", {"callback": "xyz123_$.["})
        response = HttpResponse(content="blah")
        middleware_response = self.middleware.process_response(request, response)
        self.assertEqual(middleware_response, response)
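The JSONPMiddleware implementation itself is not included in this record; a minimal sketch that would satisfy the tests above might be the following. The allowed-character set is inferred from the last test, and checking for status 200 is just one way to skip redirects:

import re

from django.http import HttpResponse

# Word characters plus $ and . -- inferred from the callback-characters test.
CALLBACK_RE = re.compile(r'^[\w$.]+$')


class JSONPMiddleware(object):
    def process_response(self, request, response):
        callback = request.GET.get('callback', '')
        # Leave redirects and callback-less or oddly named requests untouched.
        if response.status_code != 200 or not CALLBACK_RE.match(callback):
            return response
        return HttpResponse(
            content=b''.join([callback.encode('utf-8'), b'(',
                              response.content, b')']),
            content_type='application/javascript')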
    agpl-3.0
    Python
    e20d3ff6147b857cb9a8efa32bfb4ee80610dd34
    Revert "dump"
    assassinen/python_training
    dump/fastMessageReaderOriginal.py
#!/usr/bin/python
import sys
import re


# ============================================================================
class MessageReader:
    # Leading \s* restored: the backslash appears to have been stripped in
    # transit (a literal "s*" would match runs of the letter s).
    messageRegexp = r"\s*(\w+)\[\d+\]=(.*?)(?=\s\w+\[\d+\]|$)"

    def __init__(self, fileName):
        self.fileName = fileName
        # self.file = open(fileName, encoding="utf8")
        self.file = open(fileName)
        self.carryover = ""

    def __del__(self):
        self.file.close()

    def getMessage(self):
        if self.carryover != "":
            line = self.carryover
            self.carryover = ""
        else:
            line = self.file.readline()

        while not line.startswith('ApplVerID'):
            if not line:
                return {}
            line = self.file.readline()

        message = dict(re.findall(self.messageRegexp, line))
        message['entries'] = []

        line = self.file.readline()
        noEntries = re.sub(".*?NoMDEntries\[268\]\s*=\s*(\d+)[^\d]*", r'\1', line)
        if noEntries == line:
            self.carryover = line
            return message

        for i in range(int(noEntries)):
            line = self.file.readline().split(':')[1].strip()
            entry = dict(re.findall(self.messageRegexp, line))
            message["entries"].append(entry)

        return message
# ============================================================================
    apache-2.0
    Python
    f917c7ccfbe22a50049e76957a05f35eaaa46b2a
    migrate child table
DemocracyClub/UK-Polling-Stations
    polling_stations/apps/addressbase/migrations/0010_remove_onsud_ctry_flag.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-02-15 14:12
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [("addressbase", "0009_onsud_ced")]

    operations = [migrations.RemoveField(model_name="onsud", name="ctry_flag")]
    bsd-3-clause
    Python
    1553cdda2edc16368ba2281616923e849f09bdee
    Create matching_{x,y}.py
JsWatt/Free-Parking
    hacker_rank/regex/repetitions/matching_{x,y}.py
    Regex_Pattern = r'^\d{1,2}[a-zA-Z]{3,}\W{0,3}$' # Do not delete 'r'.
    mit
    Python
    527a53ee1e43f59462b94b50ea997058836a7031
    Create voicersss-inmoovservice-test.py
MyRobotLab/pyrobotlab
    home/moz4r/Test/voicersss-inmoovservice-test.py
i01 = Runtime.createAndStart("i01", "InMoov")
i01.mouth = Runtime.createAndStart("i01.mouth", "voiceRSS")

python.subscribe(i01.mouth.getName(), "publishStartSpeaking")
python.subscribe(i01.mouth.getName(), "publishEndSpeaking")


def onEndSpeaking(text):
    print "end speak"


def onStartSpeaking(text):
    print "start speak"


i01.mouth.setKey("6b714718f09e48c9a7f260e385ca99a4")
i01.mouth.setVoice("fr-fr")
i01.mouth.speakBlocking(u"test accent utf8 : éléphant")
    apache-2.0
    Python
    75980fc2e2f63e210f1e58e9a1d56c09072aa04e
    add play_camera.py
physacco/cv-test
    python/video/play_camera.py
#!/usr/bin/env python3
# encoding: utf-8
# pylint: disable=no-member

"""Play a video with OpenCV."""

import sys
import cv2


def main():
    """The main function of this module."""
    cv2.namedWindow('video', cv2.WINDOW_AUTOSIZE)

    cap = cv2.VideoCapture(0)

    i = 0
    while cap.isOpened():
        ret, frame = cap.read()
        if not ret:  # done
            break

        i += 1
        if i == 1:
            # print() call fixed: the shebang targets python3, where the
            # bare print statement is a syntax error.
            print(frame.shape, frame.dtype, frame.size)

        cv2.imshow('video', frame)

        key = cv2.waitKey(30)
        if key & 0xFF == ord('q'):  # quit
            break

    cap.release()
    cv2.destroyAllWindows()


if __name__ == '__main__':
    main()
    unlicense
    Python
    6dfc5a3d7845633570b83aac06c47756292cf8ac
    Add tests for get_uid() method for common DB models.
dennybaa/st2,StackStorm/st2,pixelrebel/st2,Itxaka/st2,Plexxi/st2,nzlosh/st2,punalpatel/st2,emedvedev/st2,tonybaloney/st2,peak6/st2,armab/st2,alfasin/st2,lakshmi-kannan/st2
    st2common/tests/unit/test_db_model_uids.py
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest2

from st2common.models.db.pack import PackDB
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.db.action import ActionDB
from st2common.models.db.rule import RuleDB
from st2common.models.db.trigger import TriggerTypeDB
from st2common.models.db.trigger import TriggerDB

__all__ = [
    'DBModelUIDFieldTestCase'
]


class DBModelUIDFieldTestCase(unittest2.TestCase):
    def test_get_uid(self):
        pack_db = PackDB(ref='ma_pack')
        self.assertEqual(pack_db.get_uid(), 'pack:ma_pack')

        sensor_type_db = SensorTypeDB(name='sname', pack='spack')
        self.assertEqual(sensor_type_db.get_uid(), 'sensor_type:spack:sname')

        action_db = ActionDB(name='aname', pack='apack', runner_info={})
        self.assertEqual(action_db.get_uid(), 'action:apack:aname')

        rule_db = RuleDB(name='rname', pack='rpack')
        self.assertEqual(rule_db.get_uid(), 'rule:rpack:rname')

        trigger_type_db = TriggerTypeDB(name='ttname', pack='ttpack')
        self.assertEqual(trigger_type_db.get_uid(), 'trigger_type:ttpack:ttname')

        trigger_db = TriggerDB(name='tname', pack='tpack')
        self.assertTrue(trigger_db.get_uid().startswith('trigger:tpack:tname:'))
    apache-2.0
    Python
    5d64acfd475ca0bb0db2ef7c032fc4ee16df4f75
    remove highlight table
pajlada/tyggbot,pajlada/pajbot
    alembic/versions/186928676dbc_remove_highlights.py
    """remove_highlights Revision ID: 186928676dbc Revises: f163a00a02aa Create Date: 2019-06-01 15:14:13.999836 """ # revision identifiers, used by Alembic. revision = '186928676dbc' down_revision = 'f163a00a02aa' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_table('tb_stream_chunk_highlight') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('tb_stream_chunk_highlight', sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False), sa.Column('stream_chunk_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), sa.Column('created_at', mysql.DATETIME(), nullable=False), sa.Column('highlight_offset', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), sa.Column('description', mysql.VARCHAR(length=128), nullable=True), sa.Column('override_link', mysql.VARCHAR(length=256), nullable=True), sa.Column('thumbnail', mysql.TINYINT(display_width=1), autoincrement=False, nullable=True), sa.Column('created_by', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.Column('last_edited_by', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['stream_chunk_id'], ['tb_stream_chunk.id'], name='tb_stream_chunk_highlight_ibfk_1'), sa.PrimaryKeyConstraint('id'), mysql_default_charset='utf8mb4', mysql_engine='InnoDB' ) # ### end Alembic commands ###
    mit
    Python
    8658ad72c74306617e58ca82ff0f3fdba35bd353
    implement auto build database interface
free-free/pyblog
    app/tools/dbautocreat.py
#-*- coding:utf-8 -*-
import asyncio

import aiomysql

from tools.config import Config


class AutoCreate(object):  # 'obj' was a typo; inherit from the builtin object
    def __init__(self):
        pass

    def _create_db(self):
        pass

    def _create_field_type(self):
        pass

    def _create_field_primary_key(self):
        pass

    def _create_field_unique_key(self):
        pass

    def _create_auto_increment(self):
        pass

    def _create_default(self):
        pass

    def _create_table(self):
        pass

    def run(self):
        pass


@asyncio.coroutine
def auto_create():
    conn = yield from aiomysql.connect(db=Config.database.database,
                                       host=Config.database.host,
                                       password=Config.database.password,
                                       user=Config.database.user)
    cursor = yield from conn.cursor()
    yield from cursor.execute('show databases;')
    ret = yield from cursor.fetchall()
    print(ret)


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(asyncio.wait([auto_create()]))
    loop.close()
    mit
    Python
    36b8c44f8c2554109ab4ab09add9ac10fae20781
    add entities orm
clicheio/cliche,item4/cliche
    cliche/services/tvtropes/entities.py
from sqlalchemy import Column, DateTime, ForeignKey, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship

Base = declarative_base()

# __all__ was the bare string 'Entity'; it should be a sequence of names.
__all__ = ('Entity', 'Relation')


class Entity(Base):
    namespace = Column(String, primary_key=True)
    name = Column(String, primary_key=True)
    url = Column(String)
    last_crawled = Column(DateTime)
    type = Column(String)
    relations = relationship(
        'Relation',
        foreign_keys=[namespace, name],
        primaryjoin='and_(Entity.namespace == Relation.origin_namespace, '
                    'Entity.name == Relation.origin)',
        collection_class=set)

    def __init__(self, namespace, name, url, last_crawled, type):
        self.namespace = namespace
        self.name = name
        self.url = url
        self.last_crawled = last_crawled
        self.type = type

    def __repr__(self):
        return "<Entity('%s', '%s', '%s', '%s', '%s')" % (
            self.namespace, self.name, self.url,
            str(self.last_crawled), self.type
        )

    __tablename__ = 'entities'
    __repr_columns__ = namespace, name


class Relation(Base):
    origin_namespace = Column(String, ForeignKey(Entity.namespace),
                              primary_key=True)
    origin = Column(String, ForeignKey(Entity.name), primary_key=True)
    destination_namespace = Column(String, primary_key=True)
    destination = Column(String, primary_key=True)
    origin_entity = relationship('Entity',
                                 foreign_keys=[origin_namespace, origin])

    def __init__(self, origin_namespace, origin, destination_namespace,
                 destination):
        self.origin_namespace = origin_namespace
        self.origin = origin
        self.destination_namespace = destination_namespace
        self.destination = destination

    __tablename__ = 'relations'
    __repr_columns__ = (origin_namespace, origin, destination_namespace,
                        destination)
    mit
    Python
    ad664a7722da63d783a2b9d73077d91a8a012057
    Create hello.py
WebClub-NITK/Hacktoberfest-2k17
    Python/hello.py
    print("hello world!!!")
    mit
    Python
    dfed8f837b5fe07445b3914b33c1dab1b0b5741b
    add basic UAV object incl. very basic search algo
    DakotaNelson/freesearch
    uav.py
import random


class Uav:
    def __init__(self, x, y, worldMap):  # 'self' was missing from the signature
        self.x = x
        self.y = y
        self.worldMap = worldMap
        self.sensorStrength = None

    def setMap(self, newMap):
        self.worldMap = newMap

    def nextStep(self):
        """ where should we go next tick? """
        options = self.surroundingValues()
        # the original referenced an undefined name 'a' here
        m = max(options)
        maxIndexes = [i for i, j in enumerate(options) if j == m]
        return random.choice(maxIndexes)

    def surroundingValues(self):
        return [self.worldMap[self.x][self.y + 1],
                self.worldMap[self.x + 1][self.y],
                self.worldMap[self.x][self.y - 1],
                self.worldMap[self.x - 1][self.y]]
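A quick usage sketch of the fixed class (the 3x3 value map is made up): nextStep returns an index into the surroundingValues order, i.e. 0 for worldMap[x][y+1], 1 for worldMap[x+1][y], and so on:

# UAV sits at the centre cell (1, 1) of a hypothetical 3x3 map.
world = [[0.1, 0.2, 0.1],
         [0.3, 0.0, 0.9],
         [0.1, 0.2, 0.1]]
uav = Uav(1, 1, world)
print(uav.nextStep())  # 0: worldMap[1][2] == 0.9 is the highest neighbour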
    mit
    Python
    8d1946c9656ea6c29d4730a68cbf4610152cd98b
    make migrations
    sebastianlan/wedfairy-api
    poll/migrations/0002_vote_user_id.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('poll', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='vote',
            name='user_id',
            field=models.IntegerField(default=None),
            preserve_default=False,
        ),
    ]
    mit
    Python
    64109dddedb7441456ae8e255c6a4b20ccaa6a73
    Create ReinhardNorm.py
DigitalSlideArchive/HistomicsTK
    ReinhardNorm.py
import numpy


def ReinhardNorm(I, TargetMu, TargetSigma):
    '''
    Performs Reinhard color normalization to transform the color
    characteristics of an image to a desired standard. The standard is
    defined by the mean and standard deviations of the target image in LAB
    color space defined by Ruderman. The input image is converted to
    Ruderman's LAB space, the LAB channels are each centered and scaled to
    zero-mean unit variance, and then rescaled and shifted to match the
    target image statistics.
    *Inputs:
        I (rgbimage) - an RGB image of type unsigned char.
        TargetMu - a 3-element list containing the means of the target image
                   channels in LAB color space.
        TargetSigma - a 3-element list containing the standard deviations of
                      the target image channels in LAB color space.
    *Outputs:
        Normalized (rgbimage) - a normalized RGB image with corrected color
                                characteristics.
    *Related functions:
        RudermanLABFwd, RudermanLABInv
    *References:
        Erik Reinhard, Michael Ashikhmin, Bruce Gooch, and Peter Shirley.
        2001. Color Transfer between Images. IEEE Comput. Graph. Appl. 21, 5
        (September 2001), 34-41.
        Daniel Ruderman, Thomas Cronin, Chuan-Chin Chiao, Statistics of Cone
        Responses to Natural Images: Implications for Visual Coding,
        J. Optical Soc. of America, vol. 15, no. 8, 1998, pp. 2036-2045.
    '''

    # get input image dimensions
    m = I.shape[0]
    n = I.shape[1]

    # convert input image to LAB color space; the docstring names the
    # forward transform RudermanLABFwd, which the original call dropped
    LAB = RudermanLABFwd(I)

    # center and scale to zero-mean and unit variance; the division by the
    # pixel count (mean) and the square root (std dev) were missing, which
    # made these "statistics" raw sums and variances
    Mu = LAB.sum(axis=0).sum(axis=0) / (m * n)
    LAB[:, :, 0] = LAB[:, :, 0] - Mu[0]
    LAB[:, :, 1] = LAB[:, :, 1] - Mu[1]
    LAB[:, :, 2] = LAB[:, :, 2] - Mu[2]
    Sigma = ((LAB * LAB).sum(axis=0).sum(axis=0) / (m * n - 1)) ** 0.5
    LAB[:, :, 0] = LAB[:, :, 0] / Sigma[0]
    LAB[:, :, 1] = LAB[:, :, 1] / Sigma[1]
    LAB[:, :, 2] = LAB[:, :, 2] / Sigma[2]

    # rescale and recenter to match target statistics
    LAB[:, :, 0] = LAB[:, :, 0] * TargetSigma[0] + TargetMu[0]
    LAB[:, :, 1] = LAB[:, :, 1] * TargetSigma[1] + TargetMu[1]
    LAB[:, :, 2] = LAB[:, :, 2] * TargetSigma[2] + TargetMu[2]

    # convert back to RGB colorspace (RudermanLABFwd/RudermanLABInv are
    # assumed to be provided alongside this module, per the docstring)
    Normalized = RudermanLABInv(LAB)

    return Normalized
    apache-2.0
    Python
    ebabfa0e14bdfd061e248285b8f7b5473f5a676e
    Create convert_to_morse.py
    clhq/work-tools
    morse_code/convert_to_morse.py
from ConfigParser import SafeConfigParser
import string

target = 'target.txt'


def parse_ini():
    parser = SafeConfigParser()
    parser.read('conversion.ini')
    morselist = list(string.ascii_uppercase)
    number = 0
    for i in morselist:
        i = parser.get('CONVERSIONS', i)
        morselist[number] = i
        number += 1
    return morselist


def convert_target():
    with open(target, "r") as targetfile:
        targetstring = targetfile.read()
    for character in targetstring:
        # Look the character up in the A-Z table; anything else is skipped.
        # (The original loop ended in a syntactically broken "if any(...)".)
        if character.upper() in capital_alphabet:
            print morselist[capital_alphabet.index(character.upper())]


morselist = parse_ini()
# print morselist
capital_alphabet = list(string.ascii_uppercase)
lower_alphabet = list(string.ascii_lowercase)
# print capital_alphabet
# print lower_alphabet
convert_target()
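parse_ini expects a conversion.ini with a CONVERSIONS section holding one key per letter; a hypothetical two-entry excerpt (the values here are standard Morse, but any mapping the file provides will do) would be:

[CONVERSIONS]
A = .-
B = -...
; ...and so on through Z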
    mit
    Python
    fcb311ffd264821767f58c92e96101aa8086acf5
    rewrite DHKE.py as crypto.py
    aburgd/DHKE-py
    crypto.py
import random
import time

timestamp = int(time.time())
random.seed(timestamp)


def gen_prime():
    # The original gen_check reassigned a local and returned nothing, so the
    # caller never saw the regenerated prime; draw until we hit one instead.
    n = random.randint(0, timestamp)
    while not isprime(n):
        n = random.randint(0, timestamp)
    return n


def input_prime(n):
    # Re-prompt until the user supplies a prime, then hand it back as an int
    # (the original input_check had the same return-nothing bug).
    while not isprime(int(n)):
        n = input("Sorry, that number isn't prime. Please try another: ")
    return int(n)


def isprime(n):
    '''check if integer n is a prime'''
    # make sure n is a positive integer
    n = abs(int(n))
    # 0 and 1 are not primes
    if n < 2:
        return False
    # 2 is the only even prime number
    if n == 2:
        return True
    # all other even numbers are not primes
    if not n & 1:
        return False
    # range starts with 3 and only needs to go up the squareroot of n
    # for all odd numbers
    for x in range(3, int(n**0.5) + 1, 2):
        if n % x == 0:
            return False
    return True


def publicKey():
    resp = input("Do you have a shared base integer? (y/n): ")
    if resp.lower() == "y":
        b = input_prime(input("Please enter your shared base integer: "))
    else:
        b = gen_prime()
        print("Your shared base integer is: ", b)

    resp = input("Do you have a secret integer? (y/n): ")
    if resp.lower() == "y":
        alex = input_prime(input("Please enter your secret integer: "))
    else:
        alex = gen_prime()
        print("Your secret integer is: ", alex)

    resp = input("Do you have a shared modulus? (y/n): ")
    if resp.lower() == "y":
        mp = input_prime(input("Please enter your shared modulus: "))
    else:
        mp = gen_prime()
        print("Your shared modulus is: ", mp)

    pubKey = (int(b) ** int(alex)) % int(mp)
    return pubKey


def sharedSecret():
    pK = input("Please enter your public key: ")
    mp = input("Please enter your shared modulus: ")
    alex = input("Please enter your secret integer: ")
    sharedSec = (int(pK) ** int(alex)) % int(mp)
    return sharedSec


answer = input("Would you like to calculate a public key, or a shared secret? ")
if answer.lower() == "public key":
    public = publicKey()
    print("Your public key is: ", public)
elif answer.lower() == "shared secret":
    shared = sharedSecret()
    print("Your shared secret is: ", shared)
    mit
    Python
    81c793870387910cd0c4eda02b2b95588a02cc7f
    Add files via upload
    I-Spangler/Enigma-Machine
    cypher.py
#!/usr/bin/python
import argparse
import sys

ALPHABET_SIZE = 26

parser = argparse.ArgumentParser(description='Encrypts a message from a text file.')
parser.add_argument('walzenlage1', metavar='w1', type=int, action='store', help='')
parser.add_argument('walzenlage2', metavar='w2', type=int, action='store', help='')
parser.add_argument('walzenlage3', metavar='w3', type=int, action='store', help='')
parser.add_argument('ringstellung', metavar='rs', type=str, action='store', help='')
# parser.add_argument('--decrypt', nargs='?', const=decrypt, default=encrypt,
#                     help='decrypts the message')
parser.add_argument('file', metavar='filename', type=str,
                    help='name or path to the file wich contains your message')
args = parser.parse_args()

text = open(args.file, 'r')
msg = text.read()
lenmsg = len(msg)
w1 = args.walzenlage1
w2 = args.walzenlage2
w3 = args.walzenlage3
rs = args.ringstellung  # was stored as rs1 but read back as rs below


# inicia os rotores (set up the rotors)
class Rotor:
    config = {
        '1': [13, 17, 21, 16, 15, 24, 9, 25, 4, 18, 14, 8, 0, 20, 10, 19,
              11, 1, 12, 22, 3, 6, 23, 5, 7, 2],
        '2': [17, 8, 18, 2, 11, 1, 6, 19, 24, 10, 16, 14, 7, 4, 23, 13, 0,
              25, 20, 12, 22, 5, 9, 15, 21, 3],
        '3': [24, 16, 13, 0, 18, 12, 3, 25, 21, 8, 10, 15, 22, 2, 6, 7, 5,
              17, 14, 1, 9, 11, 20, 23, 4, 19],
        'Reflector': [14, 18, 1, 19, 25, 21, 5, 3, 24, 7, 8, 23, 4, 0, 9,
                      15, 6, 16, 12, 13, 10, 22, 20, 2, 17, 11],
    }

    def __init__(self, Id):
        self.len = ALPHABET_SIZE
        self.numbers = self.config[Id]

    def rotate(self):
        init = self.numbers[0]
        for index in range(0, self.len - 1):
            self.numbers[index] = self.numbers[index + 1]
        self.numbers[self.len - 1] = init

    def set(self, rs):
        # self.rotate was missing its call parentheses, looping forever
        while self.numbers[0] != rs:
            self.rotate()

    def do(self, previousOut):
        return self.numbers[previousOut]


# inicia a maquina baseada na configuração da chave
# (set up the machine from the key configuration)
class Enigma:
    counter = [0, 0, 0]

    def __init__(self, r1, r2, r3, ref):
        self.r1 = r1
        self.r2 = r2
        self.r3 = r3
        self.ref = ref

    def ringset(self, rs):
        # ord() rather than int(): the ring setting is given as letters,
        # and 0-based offsets keep indexes inside the 26-entry rotors
        self.r1.set(ord(rs[0]) - 97)
        self.r2.set(ord(rs[1]) - 97)
        self.r3.set(ord(rs[2]) - 97)

    def encrypt(self, message):
        EncryptedMessage = []
        for i in message:
            if not i.isalpha():  # skip characters with no rotor entry
                continue
            EncryptedMessage.append(self.newLetter(ord(i.lower()) - 97))
            self.rotateAll()
        return EncryptedMessage

    # def decrypt(self, message)

    def newLetter(self, num):
        # (the call site passed a stray second argument in the original)
        return self.r1.do(self.r2.do(self.r3.do(self.ref.do(
            self.r3.do(self.r2.do(self.r1.do(num)))))))

    def rotateAll(self):
        self.r1.rotate()
        self.counter[0] = self.counter[0] + 1
        if self.counter[0] == ALPHABET_SIZE:
            self.r2.rotate()
            self.counter[1] = self.counter[1] + 1
            self.counter[0] = 0
        if self.counter[1] == ALPHABET_SIZE:
            self.r3.rotate()
            self.counter[2] = self.counter[2] + 1
            self.counter[1] = 0


# Rotor ids are dict keys, so the integer arguments must become strings.
E = Enigma(Rotor(str(w1)), Rotor(str(w2)), Rotor(str(w3)), Rotor('Reflector'))
E.ringset(rs)
# The original printed the rotor object itself; printing the encrypted
# message matches the script's stated purpose.
print(E.encrypt(msg))
    mit
    Python
    47c8aa34eb9f4d2c4f702bc3957c87ef92cf7d28
    add simple learning switch app for OF1.3
openvapour/ryu,lsqtongxin/ryu,ysywh/ryu,elahejalalpour/ELRyu,takahashiminoru/ryu,alanquillin/ryu,habibiefaried/ryu,jkoelker/ryu,gareging/SDN_Framework,ynkjm/ryu,Tesi-Luca-Davide/ryu,OpenState-SDN/ryu,ntts-clo/mld-ryu,zangree/ryu,lzppp/mylearning,jazzmes/ryu,sivaramakrishnansr/ryu,TakeshiTseng/ryu,ttsubo/ryu,hisaharu/ryu,zyq001/ryu,yamt/ryu,muzixing/ryu,torufuru/OFPatchPanel,fkakuma/ryu,umkcdcrg01/ryu_openflow,lagopus/ryu-lagopus-ext,John-Lin/ryu,unifycore/ryu,fujita/ryu,Zouyiran/ryu,gopchandani/ryu,torufuru/oolhackathon,citrix-openstack-build/ryu,shinpeimuraoka/ryu,StephenKing/summerschool-2015-ryu,StephenKing/ryu,castroflavio/ryu,darjus-amzn/ryu,evanscottgray/ryu,Tejas-Subramanya/RYU_MEC,alyosha1879/ryu,ntts-clo/ryu,pichuang/ryu,mikhaelharswanto/ryu,iwaseyusuke/ryu,osrg/ryu,diogommartins/ryu,o3project/ryu-oe,haniehrajabi/ryu,jalilm/ryu,yamada-h/ryu,citrix-openstack/build-ryu
    ryu/app/simple_switch_13.py
    ryu/app/simple_switch_13.py
# Copyright (C) 2011 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import struct

from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet


class SimpleSwitch13(app_manager.RyuApp):
    OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]

    def __init__(self, *args, **kwargs):
        super(SimpleSwitch13, self).__init__(*args, **kwargs)
        self.mac_to_port = {}

    def add_flow(self, datapath, port, dst, actions):
        ofproto = datapath.ofproto

        match = datapath.ofproto_parser.OFPMatch(in_port=port, eth_dst=dst)
        inst = [datapath.ofproto_parser.OFPInstructionActions(
            ofproto.OFPIT_APPLY_ACTIONS, actions)]

        mod = datapath.ofproto_parser.OFPFlowMod(
            datapath=datapath, cookie=0, cookie_mask=0, table_id=0,
            command=ofproto.OFPFC_ADD, idle_timeout=0, hard_timeout=0,
            priority=0, buffer_id=ofproto.OFP_NO_BUFFER,
            out_port=ofproto.OFPP_ANY, out_group=ofproto.OFPG_ANY,
            flags=0, match=match, instructions=inst)
        datapath.send_msg(mod)

    @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
    def _packet_in_handler(self, ev):
        msg = ev.msg
        datapath = msg.datapath
        ofproto = datapath.ofproto
        in_port = msg.match['in_port']

        pkt = packet.Packet(msg.data)
        eth = pkt.get_protocols(ethernet.ethernet)[0]

        dst = eth.dst
        src = eth.src

        dpid = datapath.id
        self.mac_to_port.setdefault(dpid, {})

        self.logger.info("packet in %s %s %s %s", dpid, src, dst, in_port)

        # learn a mac address to avoid FLOOD next time.
        self.mac_to_port[dpid][src] = in_port

        if dst in self.mac_to_port[dpid]:
            out_port = self.mac_to_port[dpid][dst]
        else:
            out_port = ofproto.OFPP_FLOOD

        actions = [datapath.ofproto_parser.OFPActionOutput(out_port)]

        # install a flow to avoid packet_in next time
        if out_port != ofproto.OFPP_FLOOD:
            self.add_flow(datapath, in_port, dst, actions)

        out = datapath.ofproto_parser.OFPPacketOut(
            datapath=datapath, buffer_id=msg.buffer_id, in_port=in_port,
            actions=actions)
        datapath.send_msg(out)
    apache-2.0
    Python
    615e51ce1bf15c012a6c7cc2d026cb69bf0ce2b8
    Create MAIN.py
    bubblegumwar/Pythagoras-Calculator
    MAIN.py
    MAIN.py
def pythagoras(SIDE, LEN1, LEN2):
    from math import sqrt  # This function is needed for the calculation; it **SHOULD** be included with the default install.

    ANSWER = "Error Code 1"  # This should not logically happen if the user follows the usage.

    if type(LEN1) is str or type(LEN2) is str:  # Did the user pass a LEN as a string?
        ANSWER = "Error Code 2"  # (the original misspelled the variable as ANWSER here)
        return ANSWER  # Return an error to the user that didn't follow the usage.

    if type(SIDE) is int or type(SIDE) is float:  # Did the user pass SIDE as a number instead of 'a'/'b'/'c'?
        ANSWER = "Error Code 4"
        return ANSWER  # Return an error to the user that didn't follow the usage.

    # --SIDE C--
    if SIDE.lower() == "c":
        # SIDE C CALCULATION (hypotenuse)
        A_SIDE = LEN1
        B_SIDE = LEN2
        C_SIDE = sqrt(A_SIDE * A_SIDE + B_SIDE * B_SIDE)
        ANSWER = C_SIDE  # Set the answer to be returned.

    # --SIDE A--
    elif SIDE.lower() == 'a':
        if LEN1 < LEN2:  # The hypotenuse (LEN1) must be the longer side.
            print("The hypotenuse should be bigger")
            ANSWER = "Error Code 2"  # (the original assigned lowercase `anwser`, so "Error Code 1" was returned)
            return ANSWER
        # SIDE A CALCULATION
        B_SIDE = LEN2
        C_SIDE = LEN1
        A_SIDE = sqrt((C_SIDE * C_SIDE) - (B_SIDE * B_SIDE))  # (original assigned ASIDE, leaving A_SIDE undefined)
        ANSWER = A_SIDE  # Set the answer to be returned.

    # --SIDE B--
    elif SIDE.lower() == 'b':
        if LEN1 < LEN2:  # The hypotenuse (LEN1) must be the longer side.
            print("The hypotenuse should be bigger")
            ANSWER = "Error Code 2"
            return ANSWER
        # SIDE B CALCULATION
        A_SIDE = LEN2
        C_SIDE = LEN1
        B_SIDE = sqrt(C_SIDE * C_SIDE - A_SIDE * A_SIDE)
        ANSWER = B_SIDE  # Set the answer to be returned.

    return ANSWER  # Return the answer for the user to use.
    mit
    Python
    cbe0d5b37d4055ea78568838c3fd4cc953342b80
    remove stale data
    IQSS/geoconnect,IQSS/geoconnect,IQSS/geoconnect,IQSS/geoconnect
    geoconnect/apps/gis_tabular/utils_stale_data.py
    geoconnect/apps/gis_tabular/utils_stale_data.py
from datetime import datetime, timedelta

from apps.gis_tabular.models import TabularFileInfo  # for testing
from apps.gis_tabular.models import WorldMapTabularLayerInfo,\
    WorldMapLatLngInfo, WorldMapJoinLayerInfo
from apps.worldmap_connect.models import WorldMapLayerInfo

DEFAULT_STALE_AGE = 3 * 60 * 60  # 3 hours, in seconds


def remove_stale_map_data(stale_age_in_seconds=DEFAULT_STALE_AGE):
    """
    Remove old map data...
    """
    current_time = datetime.now()

    for wm_info in WorldMapLatLngInfo.objects.all():
        remove_if_stale(wm_info, stale_age_in_seconds, current_time)

    # The original looped over WorldMapLatLngInfo twice and never used the
    # imported WorldMapJoinLayerInfo; this loop now covers it.
    for wm_info in WorldMapJoinLayerInfo.objects.all():
        remove_if_stale(wm_info, stale_age_in_seconds, current_time)

    for wm_info in WorldMapLayerInfo.objects.all():
        remove_if_stale(wm_info, stale_age_in_seconds, current_time)


def remove_if_stale(info_object, stale_age_in_seconds, current_time=None):
    assert hasattr(info_object, 'modified'),\
        'The info_object must have a "modified" date'

    if not current_time:
        current_time = datetime.now()

    mod_time = info_object.modified
    if hasattr(mod_time, 'tzinfo'):
        mod_time = mod_time.replace(tzinfo=None)

    # Is this object beyond its time limit?
    time_diff = (current_time - mod_time).total_seconds()
    if time_diff > stale_age_in_seconds:
        # Yes! delete it
        print 'Removing: ', info_object
        info_object.delete()

"""
from apps.gis_tabular.utils_stale_data import *
remove_stale_map_data()
"""
    apache-2.0
    Python
    cb56e0151b37a79e2ba95815555cde0633e167e7
    add client subscribe testing
    beerfactory/hbmqtt
    samples/client_subscribe.py
    samples/client_subscribe.py
import logging

from hbmqtt.client._client import MQTTClient

import asyncio

logger = logging.getLogger(__name__)

C = MQTTClient()


@asyncio.coroutine
def test_coro():
    yield from C.connect(uri='mqtt://iot.eclipse.org:1883/', username=None, password=None)
    yield from C.subscribe([
        {'filter': '$SYS/broker/uptime', 'qos': 0x00},
    ])
    logger.info("Subscribed")
    yield from asyncio.sleep(60)
    yield from C.disconnect()


if __name__ == '__main__':
    formatter = "[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s"
    logging.basicConfig(level=logging.DEBUG, format=formatter)
    asyncio.get_event_loop().run_until_complete(test_coro())
    mit
    Python
    c422b5019c6e638bce40a7fecef6977aa5e63ce0
    add __init__.py
    weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016,weizhenwei/tech-docs-2016
    python/18-package/parent/__init__.py
    python/18-package/parent/__init__.py
#!/usr/bin/env python
#-*- coding=utf-8 -*-

if __name__ == "__main__":
    print "Package parent running as main program"
else:
    print "Package parent initializing"
    bsd-2-clause
    Python
    8d6a5c4092d4f092416fc39fc7faa8bb20e701c3
    Add a manage command to sync reservations from external hook .. hard coded first product only atm (cherry picked from commit 63a80b711e1be9a6047965b8d0061b676d8c50ed)
    jaywink/cartridge-reservable,jaywink/cartridge-reservable,jaywink/cartridge-reservable
    cartridge/shop/management/commands/syncreshooks.py
    cartridge/shop/management/commands/syncreshooks.py
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError

from mezzanine.conf import settings

from cartridge.shop.models import *


class Command(BaseCommand):
    help = 'Sync reservations from external hook'

    def handle(self, *args, **options):
        p = ReservableProduct.objects.all()[0]
        p.update_from_hook()
    bsd-2-clause
    Python
    afab4bcd795da4395920eab6107bc33e401ed86a
    Create PiWS.py
    llamafarmer/Pi_Weather_Station,llamafarmer/Pi_Weather_Station,llamafarmer/Pi_Weather_Station
    PiWS.py
    PiWS.py
import time
import datetime
import csv
from math import log

from flask import Flask, render_template
from sense_hat import SenseHat

app = Flask(__name__)


def weather():
    sense = SenseHat()
    sense.clear()

    celcius = round(sense.get_temperature(), 1)
    fahrenheit = round(1.8 * celcius + 32, 1)
    humidity = round(sense.get_humidity(), 1)
    pressure = round(sense.get_pressure(), 1)
    dewpoint = round(243.04 * (log(humidity / 100) + ((17.625 * celcius) / (243.04 + celcius))) /
                     (17.625 - log(humidity / 100) - (17.625 * celcius) / (243.04 + celcius)), 1)

    acceleration = sense.get_accelerometer_raw()
    x = round(acceleration['x'], 0)
    y = round(acceleration['y'], 0)
    z = round(acceleration['z'], 0)

    if x == -1:
        sense.set_rotation(90)
    elif y == 1:
        sense.set_rotation(0)
    elif y == -1:
        sense.set_rotation(180)
    else:
        sense.set_rotation(180)

    # Pick a background colour from the temperature band. Rewritten as an
    # elif chain: in the original, each band was an independent "if" and the
    # trailing "else" re-set the colour to green for any reading outside
    # 110-120, clobbering the earlier assignments.
    if 20 < fahrenheit < 80:
        bg_color = [0, 0, 155]      # Blue
    elif 81 < fahrenheit < 90:
        bg_color = [0, 155, 0]      # Green
    elif 91 < fahrenheit < 100:
        bg_color = [155, 155, 0]    # Yellow
    elif 101 < fahrenheit < 102:
        bg_color = [255, 127, 0]    # Orange
    elif 103 < fahrenheit < 104:
        bg_color = [155, 0, 0]      # Red
    elif 105 < fahrenheit < 109:
        bg_color = [255, 0, 0]      # Bright Red
    elif 110 < fahrenheit < 120:
        bg_color = [155, 155, 155]  # White
    else:
        bg_color = [0, 155, 0]      # Green

    result = (' Temp. F ' + str(fahrenheit) + ' Temp. C ' + str(celcius) +
              ' Hum. ' + str(humidity) + ' Press. ' + str(pressure) +
              ' DewPoint ' + str(dewpoint))
    print(result)

    result_list = [(datetime.datetime.now(), celcius, fahrenheit, humidity, pressure, dewpoint)]
    with open('weather_logs.csv', 'a', newline='') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerows(result_list)

    for x in range(5):
        sense.show_message(result, scroll_speed=0.10, back_colour=bg_color,
                           text_colour=[155, 155, 155])


@app.route('/')
def index():
    sense = SenseHat()
    sense.clear()

    celcius = round(sense.get_temperature(), 1)
    fahrenheit = round(1.8 * celcius + 32, 1)
    humidity = round(sense.get_humidity(), 1)
    pressure = round(sense.get_pressure(), 1)
    dewpoint = round(243.04 * (log(humidity / 100) + ((17.625 * celcius) / (243.04 + celcius))) /
                     (17.625 - log(humidity / 100) - (17.625 * celcius) / (243.04 + celcius)), 1)

    acceleration = sense.get_accelerometer_raw()
    x = round(acceleration['x'], 1)
    y = round(acceleration['y'], 1)
    z = round(acceleration['z'], 1)

    return render_template('weather.html', celcius=celcius, fahrenheit=fahrenheit,
                           humidity=humidity, pressure=pressure, dewpoint=dewpoint,
                           x=x, y=y, z=z)


while __name__ == '__main__':
    weather()
    #app.run(host='0.0.0.0')
    mit
    Python
    7e71b21f655ec35bd5ebd79aeb5dbec6945a77a7
    Add purdue harvester
    erinspace/scrapi,fabianvf/scrapi,erinspace/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,CenterForOpenScience/scrapi
    scrapi/harvesters/purdue.py
    scrapi/harvesters/purdue.py
'''
Harvester for the Purdue University Research Repository for the SHARE project

Example API call: http://purr.purdue.edu/oaipmh?verb=ListRecords&metadataPrefix=oai_dc
'''

from __future__ import unicode_literals

from scrapi.base import OAIHarvester


class PurdueHarvester(OAIHarvester):
    short_name = 'purdue'
    long_name = 'PURR - Purdue University Research Repository'
    url = 'http://purr.purdue.edu'

    base_url = 'http://purr.purdue.edu/oaipmh'
    property_list = ['date', 'relation', 'identifier', 'type', 'setSpec']
    timezone_granularity = True
    apache-2.0
    Python
    7ecfe7d20f8708a1dada5761cdc02905b0e370e5
    use correct separator
    owlabs/incubator-airflow,opensignal/airflow,hamedhsn/incubator-airflow,DinoCow/airflow,jiwang576/incubator-airflow,rishibarve/incubator-airflow,MetrodataTeam/incubator-airflow,andrewmchen/incubator-airflow,yoziru-desu/airflow,malmiron/incubator-airflow,nathanielvarona/airflow,r39132/airflow,Fokko/incubator-airflow,r39132/airflow,preete-dixit-ck/incubator-airflow,nathanielvarona/airflow,dud225/incubator-airflow,yati-sagade/incubator-airflow,sergiohgz/incubator-airflow,airbnb/airflow,wndhydrnt/airflow,mrkm4ntr/incubator-airflow,gilt/incubator-airflow,brandsoulmates/incubator-airflow,skudriashev/incubator-airflow,Acehaidrey/incubator-airflow,storpipfugl/airflow,jesusfcr/airflow,AllisonWang/incubator-airflow,zodiac/incubator-airflow,mtdewulf/incubator-airflow,lyft/incubator-airflow,jiwang576/incubator-airflow,modsy/incubator-airflow,kerzhner/airflow,zoyahav/incubator-airflow,dgies/incubator-airflow,cfei18/incubator-airflow,airbnb/airflow,cjqian/incubator-airflow,sekikn/incubator-airflow,yoziru-desu/airflow,jfantom/incubator-airflow,Acehaidrey/incubator-airflow,artwr/airflow,cademarkegard/airflow,aminghadersohi/airflow,moritzpein/airflow,Fokko/incubator-airflow,lxneng/incubator-airflow,malmiron/incubator-airflow,stverhae/incubator-airflow,griffinqiu/airflow,jfantom/incubator-airflow,janczak10/incubator-airflow,mattuuh7/incubator-airflow,alexvanboxel/airflow,btallman/incubator-airflow,DinoCow/airflow,nathanielvarona/airflow,sdiazb/airflow,r39132/airflow,opensignal/airflow,easytaxibr/airflow,sid88in/incubator-airflow,DEVELByte/incubator-airflow,apache/airflow,ty707/airflow,biln/airflow,andrewmchen/incubator-airflow,vineet-rh/incubator-airflow,forevernull/incubator-airflow,spektom/incubator-airflow,dhuang/incubator-airflow,mtagle/airflow,apache/incubator-airflow,yati-sagade/incubator-airflow,kerzhner/airflow,NielsZeilemaker/incubator-airflow,jhsenjaliya/incubator-airflow,spektom/incubator-airflow,dmitry-r/incubator-airflow,easytaxibr/airflow,malmiron/incubator-airflow,holygits/incubator-airflow,mtustin-handy/airflow,wolfier/incubator-airflow,MortalViews/incubator-airflow,cfei18/incubator-airflow,holygits/incubator-airflow,aminghadersohi/airflow,preete-dixit-ck/incubator-airflow,opensignal/airflow,adamhaney/airflow,N3da/incubator-airflow,jhsenjaliya/incubator-airflow,brandsoulmates/incubator-airflow,dhuang/incubator-airflow,Acehaidrey/incubator-airflow,vijaysbhat/incubator-airflow,Tagar/incubator-airflow,sid88in/incubator-airflow,jgao54/airflow,moritzpein/airflow,Chedi/airflow,OpringaoDoTurno/airflow,mrares/incubator-airflow,owlabs/incubator-airflow,mylons/incubator-airflow,caseyching/incubator-airflow,asnir/airflow,wxiang7/airflow,plypaul/airflow,preete-dixit-ck/incubator-airflow,RealImpactAnalytics/airflow,yiqingj/airflow,edgarRd/incubator-airflow,andyxhadji/incubator-airflow,hgrif/incubator-airflow,nathanielvarona/airflow,Tagar/incubator-airflow,wileeam/airflow,akosel/incubator-airflow,yk5/incubator-airflow,ronfung/incubator-airflow,edgarRd/incubator-airflow,zodiac/incubator-airflow,rishibarve/incubator-airflow,vijaysbhat/incubator-airflow,Twistbioscience/incubator-airflow,cjqian/incubator-airflow,subodhchhabra/airflow,d-lee/airflow,danielvdende/incubator-airflow,jwi078/incubator-airflow,mattuuh7/incubator-airflow,malmiron/incubator-airflow,yoziru-desu/airflow,ty707/airflow,apache/airflow,mrkm4ntr/incubator-airflow,Acehaidrey/incubator-airflow,N3da/incubator-airflow,DEVELByte/incubator-airflow,subodhchhabra/airflow,vineet-rh/incubator-airflow,CloverHealth/airflow,easytaxibr/airflow,adamhaney/
airflow,janczak10/incubator-airflow,storpipfugl/airflow,gtoonstra/airflow,cfei18/incubator-airflow,biln/airflow,andyxhadji/incubator-airflow,MortalViews/incubator-airflow,griffinqiu/airflow,mrkm4ntr/incubator-airflow,mistercrunch/airflow,aminghadersohi/airflow,wndhydrnt/airflow,fenglu-g/incubator-airflow,wileeam/airflow,AllisonWang/incubator-airflow,yati-sagade/incubator-airflow,mrares/incubator-airflow,Tagar/incubator-airflow,Chedi/airflow,mattuuh7/incubator-airflow,OpringaoDoTurno/airflow,akosel/incubator-airflow,d-lee/airflow,andyxhadji/incubator-airflow,edgarRd/incubator-airflow,mtdewulf/incubator-airflow,mrkm4ntr/incubator-airflow,cfei18/incubator-airflow,spektom/incubator-airflow,ledsusop/airflow,jgao54/airflow,brandsoulmates/incubator-airflow,sid88in/incubator-airflow,plypaul/airflow,DEVELByte/incubator-airflow,biln/airflow,ledsusop/airflow,skudriashev/incubator-airflow,owlabs/incubator-airflow,jhsenjaliya/incubator-airflow,mattuuh7/incubator-airflow,DEVELByte/incubator-airflow,wooga/airflow,KL-WLCR/incubator-airflow,danielvdende/incubator-airflow,jiwang576/incubator-airflow,mistercrunch/airflow,jiwang576/incubator-airflow,OpringaoDoTurno/airflow,adrpar/incubator-airflow,jfantom/incubator-airflow,janczak10/incubator-airflow,mylons/incubator-airflow,jlowin/airflow,bolkedebruin/airflow,zodiac/incubator-airflow,sid88in/incubator-airflow,lxneng/incubator-airflow,caseyching/incubator-airflow,kerzhner/airflow,zack3241/incubator-airflow,lyft/incubator-airflow,opensignal/airflow,Chedi/airflow,apache/incubator-airflow,wileeam/airflow,criccomini/airflow,stverhae/incubator-airflow,vijaysbhat/incubator-airflow,jwi078/incubator-airflow,mtdewulf/incubator-airflow,subodhchhabra/airflow,wileeam/airflow,bolkedebruin/airflow,hamedhsn/incubator-airflow,spektom/incubator-airflow,dhuang/incubator-airflow,sergiohgz/incubator-airflow,ProstoMaxim/incubator-airflow,juvoinc/airflow,KL-WLCR/incubator-airflow,hamedhsn/incubator-airflow,airbnb/airflow,fenglu-g/incubator-airflow,dgies/incubator-airflow,airbnb/airflow,fenglu-g/incubator-airflow,r39132/airflow,wooga/airflow,mistercrunch/airflow,Tagar/incubator-airflow,CloverHealth/airflow,janczak10/incubator-airflow,lxneng/incubator-airflow,alexvanboxel/airflow,MetrodataTeam/incubator-airflow,wolfier/incubator-airflow,caseyching/incubator-airflow,storpipfugl/airflow,btallman/incubator-airflow,NielsZeilemaker/incubator-airflow,wxiang7/airflow,NielsZeilemaker/incubator-airflow,hgrif/incubator-airflow,Fokko/incubator-airflow,nathanielvarona/airflow,jason-z-hang/airflow,neovintage/airflow,zoyahav/incubator-airflow,MetrodataTeam/incubator-airflow,mrares/incubator-airflow,skudriashev/incubator-airflow,dmitry-r/incubator-airflow,yoziru-desu/airflow,wolfier/incubator-airflow,RealImpactAnalytics/airflow,btallman/incubator-airflow,holygits/incubator-airflow,neovintage/airflow,mtagle/airflow,jhsenjaliya/incubator-airflow,zoyahav/incubator-airflow,gritlogic/incubator-airflow,RealImpactAnalytics/airflow,griffinqiu/airflow,cademarkegard/airflow,wndhydrnt/airflow,MortalViews/incubator-airflow,jbhsieh/incubator-airflow,hgrif/incubator-airflow,lxneng/incubator-airflow,mylons/incubator-airflow,biln/airflow,jfantom/incubator-airflow,apache/incubator-airflow,ty707/airflow,adrpar/incubator-airflow,MortalViews/incubator-airflow,juvoinc/airflow,sdiazb/airflow,yiqingj/airflow,holygits/incubator-airflow,cademarkegard/airflow,ProstoMaxim/incubator-airflow,wooga/airflow,edgarRd/incubator-airflow,adrpar/incubator-airflow,apache/airflow,andyxhadji/incubator-airflow,forevernull/incubator-airflo
w,dud225/incubator-airflow,dhuang/incubator-airflow,mtustin-handy/airflow,wxiang7/airflow,btallman/incubator-airflow,N3da/incubator-airflow,lyft/incubator-airflow,akosel/incubator-airflow,danielvdende/incubator-airflow,vijaysbhat/incubator-airflow,d-lee/airflow,nathanielvarona/airflow,plypaul/airflow,criccomini/airflow,saguziel/incubator-airflow,zack3241/incubator-airflow,andrewmchen/incubator-airflow,AllisonWang/incubator-airflow,dgies/incubator-airflow,KL-WLCR/incubator-airflow,lyft/incubator-airflow,mtagle/airflow,saguziel/incubator-airflow,dmitry-r/incubator-airflow,aminghadersohi/airflow,gritlogic/incubator-airflow,easytaxibr/airflow,adrpar/incubator-airflow,jason-z-hang/airflow,ronfung/incubator-airflow,zack3241/incubator-airflow,jlowin/airflow,CloverHealth/airflow,danielvdende/incubator-airflow,adamhaney/airflow,wolfier/incubator-airflow,neovintage/airflow,modsy/incubator-airflow,jason-z-hang/airflow,KL-WLCR/incubator-airflow,dgies/incubator-airflow,danielvdende/incubator-airflow,ronfung/incubator-airflow,vineet-rh/incubator-airflow,moritzpein/airflow,jgao54/airflow,Chedi/airflow,bolkedebruin/airflow,Twistbioscience/incubator-airflow,asnir/airflow,hamedhsn/incubator-airflow,RealImpactAnalytics/airflow,MetrodataTeam/incubator-airflow,adamhaney/airflow,Acehaidrey/incubator-airflow,cademarkegard/airflow,wooga/airflow,apache/incubator-airflow,cfei18/incubator-airflow,mistercrunch/airflow,wxiang7/airflow,artwr/airflow,akosel/incubator-airflow,dud225/incubator-airflow,N3da/incubator-airflow,jlowin/airflow,sekikn/incubator-airflow,neovintage/airflow,jbhsieh/incubator-airflow,rishibarve/incubator-airflow,Twistbioscience/incubator-airflow,sekikn/incubator-airflow,gtoonstra/airflow,jlowin/airflow,cjqian/incubator-airflow,yk5/incubator-airflow,sdiazb/airflow,yiqingj/airflow,Fokko/incubator-airflow,brandsoulmates/incubator-airflow,storpipfugl/airflow,Acehaidrey/incubator-airflow,NielsZeilemaker/incubator-airflow,apache/airflow,danielvdende/incubator-airflow,modsy/incubator-airflow,caseyching/incubator-airflow,yiqingj/airflow,gilt/incubator-airflow,saguziel/incubator-airflow,cjqian/incubator-airflow,gritlogic/incubator-airflow,gilt/incubator-airflow,owlabs/incubator-airflow,ProstoMaxim/incubator-airflow,criccomini/airflow,jwi078/incubator-airflow,gritlogic/incubator-airflow,OpringaoDoTurno/airflow,jesusfcr/airflow,sergiohgz/incubator-airflow,bolkedebruin/airflow,skudriashev/incubator-airflow,sdiazb/airflow,zoyahav/incubator-airflow,jason-z-hang/airflow,jgao54/airflow,mtustin-handy/airflow,mrares/incubator-airflow,stverhae/incubator-airflow,vineet-rh/incubator-airflow,plypaul/airflow,alexvanboxel/airflow,asnir/airflow,DinoCow/airflow,alexvanboxel/airflow,ronfung/incubator-airflow,andrewmchen/incubator-airflow,griffinqiu/airflow,Twistbioscience/incubator-airflow,AllisonWang/incubator-airflow,dmitry-r/incubator-airflow,sergiohgz/incubator-airflow,sekikn/incubator-airflow,forevernull/incubator-airflow,mtustin-handy/airflow,mtagle/airflow,gtoonstra/airflow,ledsusop/airflow,gtoonstra/airflow,wndhydrnt/airflow,hgrif/incubator-airflow,jwi078/incubator-airflow,jbhsieh/incubator-airflow,CloverHealth/airflow,asnir/airflow,juvoinc/airflow,mylons/incubator-airflow,jbhsieh/incubator-airflow,yk5/incubator-airflow,rishibarve/incubator-airflow,ty707/airflow,apache/airflow,zack3241/incubator-airflow,fenglu-g/incubator-airflow,cfei18/incubator-airflow,artwr/airflow,gilt/incubator-airflow,criccomini/airflow,dud225/incubator-airflow,juvoinc/airflow,zodiac/incubator-airflow,ledsusop/airflow,modsy/incubator-airflow,mtd
ewulf/incubator-airflow,jesusfcr/airflow,DinoCow/airflow,apache/airflow,yk5/incubator-airflow,bolkedebruin/airflow,moritzpein/airflow,kerzhner/airflow,saguziel/incubator-airflow,artwr/airflow,forevernull/incubator-airflow,d-lee/airflow,jesusfcr/airflow,subodhchhabra/airflow,ProstoMaxim/incubator-airflow,preete-dixit-ck/incubator-airflow,yati-sagade/incubator-airflow,stverhae/incubator-airflow
    scripts/ci/wheel_factory.py
    scripts/ci/wheel_factory.py
#!/usr/bin/env python
import requirements
import argparse
import glob
import os

parser = argparse.ArgumentParser()
parser.add_argument('file', help="requirements.txt", type=str)
parser.add_argument('wheeldir', help="wheeldir location", type=str)
args = parser.parse_args()

req_file = open(args.file, 'r')
for req in requirements.parse(req_file):
    print "Checking " + args.wheeldir + os.path.sep + req.name + "*.whl"
    if not glob.glob(args.wheeldir + os.path.sep + req.name + "*.whl"):
        os.system("pip wheel --wheel-dir=" + args.wheeldir + " " + req.name +
                  "".join(req.specs) + "".join(req.extras))
#!/usr/bin/env python
import requirements
import argparse
import glob
import os

parser = argparse.ArgumentParser()
parser.add_argument('file', help="requirements.txt", type=str)
parser.add_argument('wheeldir', help="wheeldir location", type=str)
args = parser.parse_args()

req_file = open(args.file, 'r')
for req in requirements.parse(req_file):
    print "Checking " + args.wheeldir + os.path.pathsep + req.name + "*.whl"
    if not glob.glob(args.wheeldir + os.path.pathsep + req.name + "*.whl"):
        os.system("pip wheel --wheel-dir=" + args.wheeldir + " " + req.name +
                  "".join(req.specs) + "".join(req.extras))
    apache-2.0
    Python
    027a199924ee256170a2e369733a57fcc7483c88
    Add missing numeter namespace in poller
    enovance/numeter,redhat-cip/numeter,enovance/numeter,enovance/numeter,redhat-cip/numeter,redhat-cip/numeter,redhat-cip/numeter,enovance/numeter
    poller/numeter/__init__.py
    poller/numeter/__init__.py
    __import__('pkg_resources').declare_namespace(__name__)
    agpl-3.0
    Python
    7420f49f8e1508fa2017c629d8d11a16a9e28c4a
    add abstract biobox class
    pbelmann/command-line-interface,bioboxes/command-line-interface,michaelbarton/command-line-interface,michaelbarton/command-line-interface,pbelmann/command-line-interface,bioboxes/command-line-interface
    biobox_cli/biobox.py
    biobox_cli/biobox.py
from abc import ABCMeta, abstractmethod

import biobox_cli.container as ctn
import biobox_cli.util.misc as util
import tempfile as tmp


class Biobox:
    __metaclass__ = ABCMeta

    @abstractmethod
    def prepare_volumes(opts):
        pass

    @abstractmethod
    def get_doc(self):
        pass

    @abstractmethod
    def after_run(self, host_dst_dir):
        pass

    def run(self, argv):
        opts = util.parse_docopt(self.get_doc(), argv, False)
        task = opts['--task']
        image = opts['<image>']
        output = opts['--output']
        host_dst_dir = tmp.mkdtemp()
        volumes = self.prepare_volumes(opts, host_dst_dir)
        ctn.exit_if_no_image_available(image)
        ctnr = ctn.create(image, task, volumes)
        ctn.run(ctnr)
        self.after_run(output, host_dst_dir)
        return ctnr

    def remove(self, container):
        """
        Removes a container

        Note this method is not tested due to limitations of circle ci
        """
        ctn.remove(container)
    mit
    Python
    4d1b006e5ba559715d55a88528cdfc0bed755182
    add import script for Weymouth
    chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
    polling_stations/apps/data_collection/management/commands/import_weymouth.py
    polling_stations/apps/data_collection/management/commands/import_weymouth.py
from data_collection.management.commands import BaseXpressDCCsvInconsistentPostcodesImporter


class Command(BaseXpressDCCsvInconsistentPostcodesImporter):
    council_id = 'E07000053'
    addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017WPBC.TSV'
    stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017WPBC.TSV'
    elections = ['parl.2017-06-08']
    csv_delimiter = '\t'
    bsd-3-clause
    Python
    2ce80e667de438fca20de7b4ab6847751b683e33
    Add digikey command.
    kivhift/qmk,kivhift/qmk
    src/commands/digikey.py
    src/commands/digikey.py
#
# Copyright (c) 2013 Joshua Hughes <[email protected]>
#
import urllib
import webbrowser

import qmk


class DigikeyCommand(qmk.Command):
    """Look up a part on Digi-Key.

    A new tab will be opened in the default web browser that contains the
    search results.
    """

    def __init__(self):
        self._name = 'digikey'
        self._help = self.__doc__
        self.__baseURL = 'http://www.digikey.com/product-search/en?KeyWords={}'

    @qmk.Command.actionRequiresArgument
    def action(self, arg):
        webbrowser.open_new_tab(self.__baseURL.format(urllib.quote_plus(
            ' '.join(arg.split()).encode('utf-8'))))


def commands():
    return [DigikeyCommand()]
    mit
    Python
    8b4bbd23bf37fb946b664f5932e4903f802c6e0d
    Add first pass at integration style tests
    wdv4758h/flake8,lericson/flake8
    flake8/tests/test_integration.py
    flake8/tests/test_integration.py
from __future__ import with_statement

import os
import unittest

try:
    from unittest import mock
except ImportError:
    import mock  # < PY33

from flake8 import engine


class IntegrationTestCase(unittest.TestCase):
    """Integration style tests to exercise different command line options."""

    def this_file(self):
        """Return the real path of this file."""
        this_file = os.path.realpath(__file__)
        if this_file.endswith("pyc"):
            this_file = this_file[:-1]
        return this_file

    def check_files(self, arglist=[], explicit_stdin=False, count=0):
        """Call check_files."""
        if explicit_stdin:
            target_file = "-"
        else:
            target_file = self.this_file()
        argv = ['flake8'] + arglist + [target_file]
        with mock.patch("sys.argv", argv):
            style_guide = engine.get_style_guide(parse_argv=True)
            report = style_guide.check_files()
        self.assertEqual(report.total_errors, count)
        return style_guide, report

    def test_no_args(self):
        # assert there are no reported errors
        self.check_files()

    def _job_tester(self, jobs):
        # mock stdout.flush so we can count the number of jobs created
        with mock.patch('sys.stdout.flush') as mocked:
            guide, report = self.check_files(arglist=['--jobs=%s' % jobs])
            self.assertEqual(guide.options.jobs, jobs)
            self.assertEqual(mocked.call_count, jobs)

    def test_jobs(self):
        self._job_tester(2)
        self._job_tester(10)

    def test_stdin(self):
        self.count = 0

        def fake_stdin():
            self.count += 1
            with open(self.this_file(), "r") as f:
                return f.read()

        with mock.patch("pep8.stdin_get_value", fake_stdin):
            guide, report = self.check_files(arglist=['--jobs=4'],
                                             explicit_stdin=True)
        self.assertEqual(self.count, 1)

    def test_stdin_fail(self):
        def fake_stdin():
            return "notathing\n"

        with mock.patch("pep8.stdin_get_value", fake_stdin):
            # only assert needed is in check_files
            guide, report = self.check_files(arglist=['--jobs=4'],
                                             explicit_stdin=True, count=1)
    mit
    Python
    0d2adfcce21dd2efb5d781babec3e6b03464b6d5
    Add basic tests
    gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin
    tests/app/main/test_request_header.py
    tests/app/main/test_request_header.py
from tests.conftest import set_config_values


def test_route_correct_secret_key(app_, client):
    with set_config_values(app_, {
        'ROUTE_SECRET_KEY_1': 'key_1',
        'ROUTE_SECRET_KEY_2': '',
        'DEBUG': False,
    }):
        response = client.get(
            path='/_status',
            headers=[
                ('X-Custom-forwarder', 'key_1'),
            ]
        )
        assert response.status_code == 200


def test_route_incorrect_secret_key(app_, client):
    with set_config_values(app_, {
        'ROUTE_SECRET_KEY_1': 'key_1',
        'ROUTE_SECRET_KEY_2': '',
        'DEBUG': False,
    }):
        response = client.get(
            path='/_status',
            headers=[
                ('X-Custom-forwarder', 'wrong_key'),
            ]
        )
        assert response.status_code == 403
    mit
    Python
    77af87198d1116b77df431d9139b30f76103dd64
    Add migration for latitute and longitude of event
    softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
    fellowms/migrations/0023_auto_20160617_1350.py
    fellowms/migrations/0023_auto_20160617_1350.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-17 13:50
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('fellowms', '0022_event_report_url'),
    ]

    operations = [
        migrations.AddField(
            model_name='event',
            name='lat',
            field=models.FloatField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='event',
            name='lon',
            field=models.FloatField(blank=True, null=True),
        ),
    ]
    bsd-3-clause
    Python
    fb07837db870a5fdea3a98aa1381793b1b20d2c0
    Create main.py
    jbaum517/jcb2tp-grocery
    main.py
    main.py
import webapp2
import jinja2
import os
import urllib

from google.appengine.api import users
from google.appengine.ext import ndb

JINJA_ENVIRONMENT = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
    extensions=['jinja2.ext.autoescape'],
    autoescape=True)


def user_key(id):
    return ndb.Key('GroceryList', id)


class GroceryItem(ndb.Model):
    name = ndb.StringProperty()
    cost = ndb.FloatProperty()
    quantity = ndb.IntegerProperty()
    total = ndb.FloatProperty()
    picture = ndb.BlobProperty()
    time = ndb.DateTimeProperty(auto_now_add=True)


class MainHandler(webapp2.RequestHandler):
    def get(self):
        user = users.get_current_user()
        items_query = GroceryItem.query(
            ancestor=user_key(users.get_current_user().user_id())).order(-GroceryItem.time)
        items = items_query.fetch(10)
        if user:
            url = users.create_logout_url(self.request.uri)
            url_linktext = 'Logout'
        else:
            url = users.create_login_url(self.request.uri)
            url_linktext = 'Login'
        template_values = {
            'user': users.get_current_user(),
            'items': items,
            'url': url,
            'url_linktext': url_linktext,
        }
        template = JINJA_ENVIRONMENT.get_template('index.html')
        self.response.write(template.render(template_values))


class GroceryList(webapp2.RequestHandler):
    def post(self):
        user = users.get_current_user()
        item = GroceryItem(parent=user_key(user.user_id()))
        item.name = self.request.get('name')
        # Cast the form fields: request.get() returns strings, while the
        # model declares cost as a float and quantity as an integer (the
        # original stored the raw strings and then multiplied them).
        item.cost = float(self.request.get('cost'))
        item.quantity = int(self.request.get('quantity'))
        item.picture = self.request.get('img')
        item.total = item.cost * item.quantity
        item.put()
        query_params = {'user': user_key(user.user_id())}
        self.redirect('/?' + urllib.urlencode(query_params))


app = webapp2.WSGIApplication([
    ('/', MainHandler),  # a comma was missing between the two routes
    ('/add', GroceryList)
], debug=True)
    unlicense
    Python
    b920f5aeecf7843fcc699db4a70a9a0f124fa198
    Add unit test for protonate.py
    jensengroup/propka
    tests/test_protonate.py
    tests/test_protonate.py
import propka.atom
import propka.protonate


def test_protonate_atom():
    # Note: the PDB record below is column-sensitive; its original spacing was
    # collapsed in this dump and cannot be recovered exactly.
    atom = propka.atom.Atom(
        "HETATM 4479 V VO4 A1578 -19.097 16.967 0.500 1.00 17.21 V "
    )
    assert not atom.is_protonated
    p = propka.protonate.Protonate()
    p.protonate_atom(atom)
    assert atom.is_protonated
    assert atom.number_of_protons_to_add == 6
    lgpl-2.1
    Python
    2bf763e39e91ef989c121bba420e4ae09ea0a569
    Add Diagonal Difference HackerRank Problem
    PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank,PlattsSEC/HackerRank
    algorithms/diagonal_difference/kevin.py
    algorithms/diagonal_difference/kevin.py
#!/usr/bin/env python


def get_matrix_row_from_input():
    return [int(index) for index in input().strip().split(' ')]


n = int(input().strip())

primary_diag_sum = 0
secondary_diag_sum = 0

for row_count in range(n):
    row = get_matrix_row_from_input()
    primary_diag_sum += row[row_count]
    secondary_diag_sum += row[-1 - row_count]

print(abs(primary_diag_sum - secondary_diag_sum))
    mit
    Python
    9098904ffcd47c4327594f8fc6ce8ce8694e5422
    Create getsubinterfaces.py
    infobloxopen/netmri-toolkit,infobloxopen/netmri-toolkit,infobloxopen/netmri-toolkit
    python/getsubinterfaces.py
    python/getsubinterfaces.py
#Device subinterface data retrieval script. Copyright Ingmar Van Glabbeek [email protected]
#Licensed under Apache-2.0
#This script will pull all devices of a given device group and then list the
#devices management ip as well as the available management ips.
#By default it saves the output to "deviceinterfacedump.json"
#Tested on NetMRI 7.3.1 and 7.3.2

#Modules required:
import getpass
import requests
import json
import urllib3
from requests.auth import HTTPBasicAuth
from http.client import responses
import time

#You can hardcode credentials here, it's not safe. Don't do it.
#hostname = "netmri.infoblox.com"
#username = "admin"
#password = "infoblox"
#urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


def main():
    cookie_host = wapi_connect()
    #print(cookie_host)
    devicelist = getdevices(cookie_host)
    filtered_data = devicedata(devicelist)
    #uncomment next line if you want to write to console
    #print(json.dumps(filtered_data, indent=4, sort_keys=True))
    filename = open("deviceinterfacedump.json", "w")
    filename.write(json.dumps(filtered_data, indent=4))
    filename.close()
    print("Data retrieved successfully")


def devicedata(devicelist):
    listload = json.loads(devicelist)
    data = []
    for e in listload['rows']:
        if not e["if_addrs"]:
            device = {"DeviceID": e["DeviceID"], "DeviceName": e["DeviceName"],
                      "DeviceType": e["DeviceType"], "DeviceIPDotted": e["DeviceIPDotted"],
                      "Other InterfaceIP": ["none"]}
            data.append(device)
        else:
            device = {"DeviceID": e['DeviceID'], "DeviceName": e["DeviceName"],
                      "DeviceType": e["DeviceType"], "DeviceIPDotted": e["DeviceIPDotted"],
                      "Other InterfaceIP": []}
            # Counter moved outside the loop and the device appended once,
            # after all its interfaces are collected.
            i = 1
            for f in e["if_addrs"]:
                interface = {"InterfaceIP": f["ifIPDotted"], "Interfacename": f["ifName"]}
                device["Other InterfaceIP"].insert(i, interface)
                i = i + 1
            data.append(device)
    dataftw = json.dumps(data)
    returndata = json.loads(dataftw)
    return returndata


def getdevices(cookie_host):
    if not cookie_host:
        print("No connection established.")
        return 0
    #get current time
    ts = time.time()
    hostname = cookie_host[1]
    #limits number of results
    limit = input("Limit to this number of devices: ")
    get_url = "https://" + hostname + "/api/3.3/device_groups/index"
    response = requests.get(get_url, cookies=cookie_host[0], verify=False)
    d = response.text
    dl = json.loads(d)
    print("List of DeviceGroups")
    for e in dl["device_groups"]:
        dglist = {"GroupName": e["GroupName"], "GroupID": e["GroupID"]}
        print(dglist)
    devicegroup = input("Based on the output specify the devicegroup ID by its ID: ")
    get_url = "https://" + hostname + "/api/3.3/discovery_statuses/static/current.extjs"
    querystring = {"_dc": ts, "filename": "recent_activity.csv", "filter": "null",
                   "limit": limit, "GroupID": devicegroup}
    response = requests.get(get_url, cookies=cookie_host[0], verify=False, params=querystring)
    t = response.text
    print("We are fetching a list of " + str(limit) + " devices for devicegroup " +
          str(devicegroup) + ".")
    return t


def wapi_connect():
    hostname = input("Enter the NetMRI hostname or IP: ")
    username = input("Enter your NetMRI username: ")
    password = getpass.getpass("Enter your Password: ")
    https_val = input("Disable SSL validations?(y/n) ")
    if https_val in ("y", "Y"):
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        print("SSL validation disabled")
    if https_val in ("n", "N"):
        print("SSL validation enabled")
    login_url = "https://" + hostname + "/api/3.3/device_groups/index"
    print("logging in to " + hostname)
    try:
        login_result = requests.get(
            login_url,
            auth=HTTPBasicAuth(username, password),
            timeout=5,
            verify=False)
    except requests.exceptions.ConnectTimeout as e:
        print("Connection time out after 5 seconds.")
        exit(1)
    except requests.exceptions.ConnectionError as e:
        print("No route to host " + hostname)
        exit(1)
    if has_error(login_result):
        exit(1)
    else:
        print("Login OK")
    return (login_result.cookies, hostname)


def has_error(_result):
    if _result.status_code == 200:
        return 0
    elif _result.status_code == 201:
        return 0
    try:
        err_text = _result.json()['text']
    except KeyError as e:
        err_text = "Response contains no error text"
    except json.decoder.JSONDecodeError as e:
        err_text = "No JSON Response"
    # print out the HTTP response code, description, and error text
    http_code = _result.status_code
    http_desc = responses[http_code]
    print("HTTP Code [%3d] %s. %s" % (http_code, http_desc, err_text))
    return 1


if __name__ == "__main__":
    main()
    mit
    Python
    54a8a77c75660eeae314c410685243e2b5bc59ca
    add sw infer wrapper
    DLTK/DLTK
    dltk/core/utils.py
    dltk/core/utils.py
import numpy as np

from dltk.core.io.sliding_window import SlidingWindow


def sliding_window_segmentation_inference(session, ops_list, sample_dict, batch_size=1):
    """
    Parameters
    ----------
    session
    ops_list
    sample_dict

    Returns
    -------
    """
    # TODO: asserts
    pl_shape = list(sample_dict.keys()[0].get_shape().as_list())
    pl_bshape = pl_shape[1:-1]
    inp_shape = list(sample_dict.values()[0].shape)
    inp_bshape = inp_shape[1:-1]

    out_dummies = [np.zeros([inp_shape[0], ] + inp_bshape + [op.get_shape().as_list()[-1]]
                            if len(op.get_shape().as_list()) == len(inp_shape) else [])
                   for op in ops_list]
    out_dummy_counter = [np.zeros_like(o) for o in out_dummies]

    op_shape = list(ops_list[0].get_shape().as_list())
    op_bshape = op_shape[1:-1]

    out_diff = np.array(pl_bshape) - np.array(op_bshape)
    padding = [[0, 0]] + [[diff // 2, diff - diff // 2] for diff in out_diff] + [[0, 0]]
    padded_dict = {k: np.pad(v, padding, mode='constant') for k, v in sample_dict.items()}
    f_bshape = padded_dict.values()[0].shape[1:-1]

    striding = list(np.array(op_bshape) // 2) if all(out_diff == 0) else op_bshape

    sw = SlidingWindow(f_bshape, pl_bshape, striding=striding)
    out_sw = SlidingWindow(inp_bshape, op_bshape, striding=striding)

    if batch_size > 1:
        slicers = []
        out_slicers = []

    done = False
    while True:
        try:
            slicer = next(sw)
            out_slicer = next(out_sw)
        except StopIteration:
            done = True

        if batch_size == 1:
            sw_dict = {k: v[slicer] for k, v in padded_dict.items()}
            op_parts = session.run(ops_list, feed_dict=sw_dict)
            for idx in range(len(op_parts)):
                out_dummies[idx][out_slicer] += op_parts[idx]
                out_dummy_counter[idx][out_slicer] += 1
        else:
            slicers.append(slicer)
            out_slicers.append(out_slicer)
            if len(slicers) == batch_size or done:
                slices_dict = {k: np.concatenate([v[slicer] for slicer in slicers], 0)
                               for k, v in padded_dict.items()}
                all_op_parts = session.run(ops_list, feed_dict=slices_dict)
                zipped_parts = zip(*[np.array_split(part, len(slicers)) for part in all_op_parts])
                for out_slicer, op_parts in zip(out_slicers, zipped_parts):
                    for idx in range(len(op_parts)):
                        out_dummies[idx][out_slicer] += op_parts[idx]
                        out_dummy_counter[idx][out_slicer] += 1
                slicers = []
                out_slicers = []

        if done:
            break

    return [o / c for o, c in zip(out_dummies, out_dummy_counter)]
    apache-2.0
    Python
    c262cc4cc18336257972105c1cd6c409da8ed5cd
    Create mcmc.py
    RonsenbergVI/trendpy,RonsenbergVI/trendpy
    mcmc.py
    mcmc.py
# MIT License

# Copyright (c) 2017 Rene Jean Corneille

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from numpy import zeros, reshape
from scipy.stats import rv_continuous

__all__ = ['Parameter', 'Parameters', 'MCMC']


class Parameter(object):

    def __init__(self, name, distribution, size, current_value=None):
        self.name = str(name)
        self.distribution = distribution
        self.size = size
        self.current_value = current_value

    @property
    def current_value(self):
        return self.__current_value

    @current_value.setter
    def current_value(self, current_value):
        self.__current_value = current_value

    def __str__(self):
        return """
        parameter name : %s
        parameter distribution : %s
        """ % (self.name, self.distribution.__str__())

    def __len__(self):
        return 1

    def is_multivariate(self):
        return self.size == (1, 1)


class Parameters(object):

    def __init__(self, list={}, hierarchy=[]):
        self.list = list
        self.hierarchy = hierarchy

    @property
    def parameters(self):
        return self.__list

    @parameters.setter
    def parameters(self, list):
        if not (list == {}):
            self.__list = list
        else:
            self.__list = {}

    @property
    def hierarchy(self):
        return self.__hierarchy

    @hierarchy.setter
    def hierarchy(self, hierarchy):
        self.__hierarchy = hierarchy

    def __len__(self):
        return len(self.list)

    def __str__(self):
        descr = '(parameters: ----------------------- \n'
        descr += ', \n'.join(['name: %s, distribution: %s, size: %s' %
                              (str(l.name), l.distribution.__str__(), l.size)
                              for l in self.list.values()])
        descr += '\n ----------------------- )'
        return descr

    def append(self, parameter):
        self.list[parameter.name] = parameter
        self.hierarchy.append(parameter.name)


class Distribution(rv_continuous):
    pass


class MCMC(object):

    def __init__(self, data, strategy):
        self.data = data
        self.strategy = strategy
        self.simulations = None

    def summary(self):
        smry = ""
        return smry

    def distribution_parameters(self, parameter_name, *args, **kwargs):
        return self.strategy.distribution_parameters(parameter_name, *args, **kwargs)  # returns a dictionary

    def generate(self, parameter_name):
        return self.strategy.generate(parameter_name)

    def output(self, burn, parameter_name):
        return self.strategy.output(self.simulations, burn, parameter_name)

    def define_parameters(self):
        return self.strategy.define_parameters()

    def initial_value(self, parameter_name):
        return self.strategy.initial_value(parameter_name)

    def run(self, number_simulations=100):
        self.simulations = {key: zeros((param.size[0], param.size[1], number_simulations))
                            for (key, param) in self.strategy.parameters.list.items()}
        for name in self.strategy.parameters.hierarchy:
            self.strategy.parameters.list[name].current_value = self.initial_value(name)
        for i in range(number_simulations):
            print("== step %i ==" % (int(i + 1),))
            restart_step = True
            while restart_step:
                for name in self.strategy.parameters.hierarchy:
                    print("== parameter %s ==" % name)
                    try:
                        self.strategy.parameters.list[name].current_value = self.generate(name)
                        self.simulations[name][:, :, i] = \
                            self.strategy.parameters.list[name].current_value.reshape(
                                self.strategy.parameters.list[name].size)
                        restart_step = False
                    except:
                        print("== restart step %i ==" % i)
                        restart_step = True
                        break


class ConvergenceAnalysis(object):
    pass  # the class body is missing in the original file; `pass` added so the module parses
    mit
    Python
    4b561d710e9ad72ad94ffb1ff3ae37db668899e4
    Add generate_examples script
    chunfengh/seq2seq,kontact-chan/seq2seq,shashankrajput/seq2seq,chunfengh/seq2seq,chunfengh/seq2seq,chunfengh/seq2seq,kontact-chan/seq2seq,google/seq2seq,shashankrajput/seq2seq,google/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,google/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,shashankrajput/seq2seq,kontact-chan/seq2seq,google/seq2seq,liyi193328/seq2seq,kontact-chan/seq2seq,shashankrajput/seq2seq
    seq2seq/scripts/generate_examples.py
    seq2seq/scripts/generate_examples.py
#! /usr/bin/env python
"""
Generates a TFRecords file given sequence-aligned source and target files.

Example Usage:

python ./generate_examples.py --source_file <SOURCE_FILE> \
  --target_file <TARGET_FILE> \
  --output_file <OUTPUT_FILE>
"""

import tensorflow as tf

tf.flags.DEFINE_string('source_file', None,
                       'File containing content in source language.')
tf.flags.DEFINE_string(
    'target_file', None,
    'File containing content in target language, parallel line by line to the'
    'source file.')
tf.flags.DEFINE_string('output_file', None,
                       'File to output tf.Example TFRecords.')

FLAGS = tf.flags.FLAGS


def build_example(pair_id, source, target):
  """Transforms pair of 'source' and 'target' strings into a tf.Example.

  Assumes that 'source' and 'target' are already tokenized.

  Args:
    pair_id: id of this pair of source and target strings.
    source: a pretokenized source string.
    target: a pretokenized target string.

  Returns:
    a tf.Example corresponding to the 'source' and 'target' inputs.
  """
  pair_id = str(pair_id)
  source_tokens = source.strip().split(' ')
  target_tokens = target.strip().split(' ')
  ex = tf.train.Example()
  ex.features.feature['pair_id'].bytes_list.value.append(pair_id.encode('utf-8'))
  ex.features.feature['source_len'].int64_list.value.append(len(source_tokens))
  ex.features.feature['target_len'].int64_list.value.append(len(target_tokens))
  source_tokens = [t.encode('utf-8') for t in source_tokens]
  target_tokens = [t.encode('utf-8') for t in target_tokens]
  ex.features.feature['source_tokens'].bytes_list.value.extend(source_tokens)
  ex.features.feature['target_tokens'].bytes_list.value.extend(target_tokens)
  return ex


def write_tfrecords(examples, output_file):
  """Writes a list of tf.Examples to 'output_file'.

  Args:
    examples: An iterator of tf.Example records
    outputfile: path to the output file
  """
  writer = tf.python_io.TFRecordWriter(output_file)
  print('Creating TFRecords file at {}...'.format(output_file))
  for row in examples:
    writer.write(row.SerializeToString())
  writer.close()
  print('Wrote to {}'.format(output_file))


def generate_examples(source_file, target_file):
  """Creates an iterator of tf.Example records given aligned source and target files.

  Args:
    source_file: path to file with newline-separated source strings
    target_file: path to file with newline-separated target strings

  Returns:
    An iterator of tf.Example objects.
  """
  with open(source_file) as source_records:
    with open(target_file) as target_records:
      for i, (source, target) in enumerate(zip(source_records, target_records)):
        if i % 10000 == 0:
          print('Processed {} records'.format(i))
        yield build_example(i, source, target)


def main(unused_argv):  #pylint: disable=unused-argument
  examples = generate_examples(FLAGS.source_file, FLAGS.target_file)
  write_tfrecords(examples, FLAGS.output_file)


if __name__ == '__main__':
  tf.app.run()
    apache-2.0
    Python
    edb28fffe19e2b0de3113b43aeb075119c9e5830
    Work in progress. Creating new data migration.
    EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi,EBI-Metagenomics/emgapi
    emgapi/migrations/0019_auto_20200110_1455.py
    emgapi/migrations/0019_auto_20200110_1455.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.24 on 2020-01-10 14:55
from __future__ import unicode_literals

from django.db import migrations


def create_download_description(apps, schema_editor):
    DownloadDescriptionLabel = apps.get_model("emgapi", "DownloadDescriptionLabel")
    downloads = (
        ("Phylum level taxonomies UNITE (TSV)", "Phylum level taxonomies UNITE"),
        ("Phylum level taxonomies ITSoneDB (TSV)", "Phylum level taxonomies ITSoneDB"),
        ("Taxonomic assignments UNITE (TSV)", "Taxonomic assignments UNITE"),
        ("Taxonomic assignments ITSoneDB (TSV)", "Taxonomic assignments ITSoneDB"),
    )
    _downloads = list()
    for d in downloads:
        _downloads.append(
            DownloadDescriptionLabel(
                description=d[0],
                description_label=d[1]
            )
        )
    DownloadDescriptionLabel.objects.bulk_create(_downloads)


def create_group_types(apps, schema_editor):
    DownloadGroupType = apps.get_model("emgapi", "DownloadGroupType")
    group_types = (
        "Taxonomic analysis ITS",
        "Taxonomic analysis ITSoneDB",
        "Taxonomic analysis UNITE",
        "Pathways and Systems",
        # TODO: Do we need sub groups for the function and pathways
    )
    _groups = list()
    for group_type in group_types:
        _groups.append(
            DownloadGroupType(group_type=group_type)
        )
    DownloadGroupType.objects.bulk_create(_groups)


class Migration(migrations.Migration):

    dependencies = [
        ('emgapi', '0018_auto_20191105_1052'),
    ]

    operations = [
        migrations.RunPython(create_download_description),
        migrations.RunPython(create_group_types)
    ]
    apache-2.0
    Python
    d41274ce2a54d37c35f23c8c78de196e57667b0a
    add google translate plugin
    fridim/cabot,fridim/cabot,fridim/cabot,fridim/cabot,fridim/cabot,fridim/cabot
    plugins_examples/translate.py
    plugins_examples/translate.py
#!/usr/bin/env python
import sys
import re

from googletrans import Translator

translator = Translator()

line = sys.stdin.readline()
while line:
    match = re.search('^:([^\s]+) PRIVMSG (#[^\s]+) :(.+)', line)
    if not match:
        line = sys.stdin.readline()
        continue

    who = match.group(1)
    chan = match.group(2)
    what = match.group(3).strip().strip('\r\n')

    def reply(text):
        print("PRIVMSG %s :%s" % (chan, text))
        sys.stdout.flush()

    if what[:10] == ':translate':
        m2 = re.search('^:translate (.*)', what)
        if not m2:
            line = sys.stdin.readline()
            continue
        try:
            reply(translator.translate(m2.group(1), dest='fr').text)
        except:
            reply('Oups!')
    elif what[:4] == ':tr ':
        m2 = re.search('^:tr (\w+) (\w+) (.+)', what)
        if not m2:
            line = sys.stdin.readline()
            continue
        try:
            reply(translator.translate(m2.group(3), src=m2.group(1), dest=m2.group(2)).text)
        except:
            reply('Oups!')

    line = sys.stdin.readline()
    mit
    Python
    b450734eea74f5f3536a44ed40c006c3da13656c
    Add diff.py
    jhogan/commonpy,jhogan/epiphany-py
    diff.py
    diff.py
# vim: set et ts=4 sw=4 fdm=marker
"""
MIT License

Copyright (c) 2016 Jesse Hogan

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from diff_match_patch import diff_match_patch
from entities import entity
from pdb import set_trace; B = set_trace

# TODO Write test
class diff(entity):
    def __init__(self, data1, data2):
        self._data1 = data1
        self._data2 = data2
        self._ps = None
        self._dmp = None

    @property
    def _diff_match_patch(self):
        if not self._dmp:
            self._dmp = diff_match_patch()
        return self._dmp

    @property
    def _patches(self):
        if self._ps == None:
            dmp = self._diff_match_patch
            diffs = dmp.diff_main(self._data1, self._data2)
            dmp.diff_cleanupSemantic(diffs)
            self._ps = dmp.patch_make(diffs)
        return self._ps

    def apply(self, data):
        # patch_apply is a method of diff_match_patch; the original called it
        # as a bare name, which would raise a NameError.
        return self._diff_match_patch.patch_apply(self._patches, data)[0]

    def __str__(self):
        dmp = self._diff_match_patch
        return dmp.patch_toText(self._patches)
    mit
    Python
    176af82121da5282842fd7e77809da9780ac57a5
    implement server pool.
    tonyseek/rsocks,tonyseek/rsocks
    rsocks/pool.py
    rsocks/pool.py
from __future__ import unicode_literals

import logging
import contextlib

from .eventlib import GreenPool
from .utils import debug

__all__ = ['ServerPool']

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG if debug() else logging.INFO)
logger.addHandler(logging.StreamHandler())


class ServerPool(object):

    def __init__(self):
        self.pool = GreenPool()
        self.servers = {}

    @contextlib.contextmanager
    def new_server(self, name, server_class, *args, **kwargs):
        server = server_class(*args, **kwargs)
        yield server
        self.servers[name] = server

    def loop(self):
        for name, server in self.servers.items():
            logger.info('Prepared "%s"' % name)
            self.pool.spawn(server.loop)
        try:
            self.pool.waitall()
        except (SystemExit, KeyboardInterrupt):
            logger.info('Exit')
    mit
    Python
    416d2b0ffd617c8c6e58360fefe554ad7dc3057b
    add example for discovering existing connections
    epage/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,PabloCastellano/telepathy-python,epage/telepathy-python,detrout/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,PabloCastellano/telepathy-python,detrout/telepathy-python,max-posedon/telepathy-python,max-posedon/telepathy-python
    examples/connections.py
    examples/connections.py
    """ Print out a list of existing Telepathy connections. """ import dbus.glib import telepathy prefix = 'org.freedesktop.Telepathy.Connection.' if __name__ == '__main__': for conn in telepathy.client.Connection.get_connections(): conn_iface = conn[telepathy.CONN_INTERFACE] handle = conn_iface.GetSelfHandle() print conn_iface.InspectHandles( telepathy.CONNECTION_HANDLE_TYPE_CONTACT, [handle])[0] print ' Protocol:', conn_iface.GetProtocol() print ' Name:', conn.service_name[len(prefix):] print
    lgpl-2.1
    Python
    ff53f699ac371266791487f0b863531dd8f5236a
    Add hug 'hello_world' using to be developed support for optional URLs
    jean/hug,shaunstanislaus/hug,origingod/hug,philiptzou/hug,janusnic/hug,STANAPO/hug,gbn972/hug,giserh/hug,philiptzou/hug,STANAPO/hug,MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,giserh/hug,alisaifee/hug,alisaifee/hug,timothycrosley/hug,gbn972/hug,shaunstanislaus/hug,origingod/hug,timothycrosley/hug,yasoob/hug,yasoob/hug,jean/hug,janusnic/hug,MuhammadAlkarouri/hug,timothycrosley/hug
    examples/hello_world.py
    examples/hello_world.py
import hug


@hug.get()
def hello_world():
    return "Hello world"
    mit
    Python
    397ab61df61d5acac46cf60ede38fa928fdacd7c
    Create solution.py
    lilsweetcaligula/Algorithms,lilsweetcaligula/Algorithms,lilsweetcaligula/Algorithms
    data_structures/linked_list/problems/pos_num_to_linked_list/solution.py
    data_structures/linked_list/problems/pos_num_to_linked_list/solution.py
import LinkedList

# Linked List Node inside the LinkedList module is declared as:
#
# class Node:
#     def __init__(self, val, nxt=None):
#         self.val = val
#         self.nxt = nxt
#

def ConvertPositiveNumToLinkedList(val: int) -> LinkedList.Node:
    node = None

    while True:
        dig = val % 10
        val //= 10

        prev = LinkedList.Node(dig, node)
        node = prev

        if val == 0:
            break

    return node
    mit
    Python
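A quick check of the conversion above, assuming the LinkedList module is importable. Because each more-significant digit is prepended to the front, traversal yields the digits in their written order:

# Hypothetical check: 123 should come back as the digits 1 -> 2 -> 3.
node = ConvertPositiveNumToLinkedList(123)
digits = []
while node is not None:
    digits.append(node.val)
    node = node.nxt
print(digits)  # [1, 2, 3]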
    724bc46c85e6ea75ac8d786f4d1706b74df8f330
    Create dictid.py
    diamontip/pract,diamontip/pract
    dictid.py
    dictid.py
a = (1, 2)
b = [1, 2]

c = {a: 1}  # outcome: c = {(1, 2): 1} -- a tuple is hashable, so it works as a key
d = {b: 1}  # outcome: TypeError: unhashable type: 'list' -- a mutable list cannot be a key
    mit
    Python
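The snippet above turns on hashability: any dict key must be hashable, which immutable tuples are and mutable lists are not. A short sketch of the same rule with other builtins (none of this is from the original file):

e = {frozenset({1, 2}): 1}   # fine: frozenset is immutable and hashable
try:
    f = {{1, 2}: 1}          # a set literal used as a key
except TypeError as exc:
    print(exc)               # unhashable type: 'set'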
    0fb7a5559f525ab1149ac41d4b399442f7649664
    add script to show statistics (number of chunks, data volume)
    HumanBrainProject/neuroglancer-scripts
    scale_stats.py
    scale_stats.py
#! /usr/bin/env python3
#
# Copyright (c) 2016, 2017, Forschungszentrum Juelich GmbH
# Author: Yann Leprince <[email protected]>
#
# This software is made available under the MIT licence, see LICENCE.txt.

import collections
import json
import math
import os
import os.path
import sys

import numpy as np

SI_PREFIXES = [
    (1, ""),
    (1024, "ki"),
    (1024 * 1024, "Mi"),
    (1024 * 1024 * 1024, "Gi"),
    (1024 * 1024 * 1024 * 1024, "Ti"),
    (1024 * 1024 * 1024 * 1024 * 1024, "Pi"),
    (1024 * 1024 * 1024 * 1024 * 1024 * 1024, "Ei"),
]


def readable(count):
    for factor, prefix in SI_PREFIXES:
        if count > 10 * factor:
            num_str = format(count / factor, ".0f")
        else:
            num_str = format(count / factor, ".1f")
        if len(num_str) <= 3:
            return num_str + " " + prefix
    # Fallback: use the last prefix
    factor, prefix = SI_PREFIXES[-1]
    return "{:,.0f} {}".format(count / factor, prefix)


def show_scales_info(info):
    total_size = 0
    total_chunks = 0
    total_directories = 0
    dtype = np.dtype(info["data_type"]).newbyteorder("<")
    num_channels = info["num_channels"]
    for scale in info["scales"]:
        scale_name = scale["key"]
        size = scale["size"]  # np.array(scale["size"], dtype=np.uintp)
        for chunk_size in scale["chunk_sizes"]:
            # chunk_size = np.array(chunk_size, dtype=np.uintp)
            size_in_chunks = [(s - 1) // cs + 1 for s, cs in
                              zip(size, chunk_size)]
            num_chunks = np.prod(size_in_chunks)
            num_directories = size_in_chunks[0] * (1 + size_in_chunks[1])
            size_bytes = np.prod(size) * dtype.itemsize * num_channels
            print("Scale {}, chunk size {}:"
                  " {:,d} chunks, {:,d} directories, raw uncompressed size {}B"
                  .format(scale_name, chunk_size, num_chunks, num_directories,
                          readable(size_bytes)))
            total_size += size_bytes
            total_chunks += num_chunks
            total_directories += num_directories
    print("---")
    print("Total: {:,d} chunks, {:,d} directories, raw uncompressed size {}B"
          .format(total_chunks, total_directories, readable(total_size)))


def show_scale_file_info(input_info_filename):
    """Show information about a list of scales from an input JSON file"""
    with open(input_info_filename) as f:
        info = json.load(f)
    show_scales_info(info)


def parse_command_line(argv):
    """Parse the script's command line."""
    import argparse
    parser = argparse.ArgumentParser(
        description="""\
Show information about a list of scales in Neuroglancer "info" JSON file format
""")
    parser.add_argument("info_file", nargs="?", default="./info",
                        help="JSON file containing the information")
    args = parser.parse_args(argv[1:])
    return args


def main(argv):
    """The script's entry point."""
    args = parse_command_line(argv)
    return show_scale_file_info(args.info_file) or 0


if __name__ == "__main__":
    sys.exit(main(sys.argv))
    mit
    Python
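The readable helper above picks the first prefix that keeps the number at three characters or fewer; despite the SI_PREFIXES name, the factors are powers of 1024, i.e. IEC binary prefixes. A few hedged spot checks, based only on reading the code:

print(readable(5))            # "5.0 "   (empty prefix, one decimal, trailing space)
print(readable(2048))         # "2.0 ki" (2048 / 1024)
print(readable(3 * 1024**3))  # "3.0 Gi"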
    7d574c1f6d194df1f2b2009fb2e48fbaacaca873
Add migration for _insert_base
    openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
    oedb_datamodels/versions/6887c442bbee_insert_base.py
    oedb_datamodels/versions/6887c442bbee_insert_base.py
    """Add _insert_base Revision ID: 6887c442bbee Revises: 3886946416ba Create Date: 2019-04-25 16:09:20.572057 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '6887c442bbee' down_revision = '3886946416ba' branch_labels = None depends_on = None def upgrade(): op.create_table('_insert_base', sa.Column('_id', sa.BigInteger(), autoincrement=True, nullable=False), sa.Column('_message', sa.Text(), nullable=True), sa.Column('_user', sa.String(length=50), nullable=True), sa.Column('_submitted', sa.DateTime(), server_default=sa.text('now()'), nullable=True), sa.Column('_autocheck', sa.Boolean(), server_default=sa.text('false'), nullable=True), sa.Column('_humancheck', sa.Boolean(), server_default=sa.text('false'), nullable=True), sa.Column('_type', sa.String(length=8), nullable=True), sa.Column('_applied', sa.Boolean(), server_default=sa.text('false'), nullable=True), sa.PrimaryKeyConstraint('_id'), schema='public' ) def downgrade(): op.drop_table('_insert_base', schema='public')
    agpl-3.0
    Python
    2ef707337adc3d0abc33ca638b2adb70a681bd12
    update for new API
    vighneshbirodkar/scikit-image,paalge/scikit-image,ajaybhat/scikit-image,ajaybhat/scikit-image,ofgulban/scikit-image,Hiyorimi/scikit-image,rjeli/scikit-image,rjeli/scikit-image,ofgulban/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,rjeli/scikit-image,Hiyorimi/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image
    doc/examples/filters/plot_denoise.py
    doc/examples/filters/plot_denoise.py
    """ ==================== Denoising a picture ==================== In this example, we denoise a noisy version of the picture of the astronaut Eileen Collins using the total variation and bilateral denoising filter. These algorithms typically produce "posterized" images with flat domains separated by sharp edges. It is possible to change the degree of posterization by controlling the tradeoff between denoising and faithfulness to the original image. Total variation filter ---------------------- The result of this filter is an image that has a minimal total variation norm, while being as close to the initial image as possible. The total variation is the L1 norm of the gradient of the image. Bilateral filter ---------------- A bilateral filter is an edge-preserving and noise reducing filter. It averages pixels based on their spatial closeness and radiometric similarity. """ import numpy as np import matplotlib.pyplot as plt from skimage import data, img_as_float from skimage.restoration import denoise_tv_chambolle, denoise_bilateral astro = img_as_float(data.astronaut()) astro = astro[220:300, 220:320] noisy = astro + 0.6 * astro.std() * np.random.random(astro.shape) noisy = np.clip(noisy, 0, 1) fig, ax = plt.subplots(nrows=2, ncols=3, figsize=(8, 5), sharex=True, sharey=True, subplot_kw={'adjustable': 'box-forced'}) plt.gray() ax[0, 0].imshow(noisy) ax[0, 0].axis('off') ax[0, 0].set_title('noisy') ax[0, 1].imshow(denoise_tv_chambolle(noisy, weight=0.1, multichannel=True)) ax[0, 1].axis('off') ax[0, 1].set_title('TV') ax[0, 2].imshow(denoise_bilateral(noisy, sigma_color=0.05, sigma_spatial=15)) ax[0, 2].axis('off') ax[0, 2].set_title('Bilateral') ax[1, 0].imshow(denoise_tv_chambolle(noisy, weight=0.2, multichannel=True)) ax[1, 0].axis('off') ax[1, 0].set_title('(more) TV') ax[1, 1].imshow(denoise_bilateral(noisy, sigma_color=0.1, sigma_spatial=15)) ax[1, 1].axis('off') ax[1, 1].set_title('(more) Bilateral') ax[1, 2].imshow(astro) ax[1, 2].axis('off') ax[1, 2].set_title('original') fig.tight_layout() plt.show()
    """ ==================== Denoising a picture ==================== In this example, we denoise a noisy version of the picture of the astronaut Eileen Collins using the total variation and bilateral denoising filter. These algorithms typically produce "posterized" images with flat domains separated by sharp edges. It is possible to change the degree of posterization by controlling the tradeoff between denoising and faithfulness to the original image. Total variation filter ---------------------- The result of this filter is an image that has a minimal total variation norm, while being as close to the initial image as possible. The total variation is the L1 norm of the gradient of the image. Bilateral filter ---------------- A bilateral filter is an edge-preserving and noise reducing filter. It averages pixels based on their spatial closeness and radiometric similarity. """ import numpy as np import matplotlib.pyplot as plt from skimage import data, img_as_float from skimage.restoration import denoise_tv_chambolle, denoise_bilateral astro = img_as_float(data.astronaut()) astro = astro[220:300, 220:320] noisy = astro + 0.6 * astro.std() * np.random.random(astro.shape) noisy = np.clip(noisy, 0, 1) fig, ax = plt.subplots(nrows=2, ncols=3, figsize=(8, 5), sharex=True, sharey=True, subplot_kw={'adjustable': 'box-forced'}) plt.gray() ax[0, 0].imshow(noisy) ax[0, 0].axis('off') ax[0, 0].set_title('noisy') ax[0, 1].imshow(denoise_tv_chambolle(noisy, weight=0.1, multichannel=True)) ax[0, 1].axis('off') ax[0, 1].set_title('TV') ax[0, 2].imshow(denoise_bilateral(noisy, sigma_range=0.05, sigma_spatial=15)) ax[0, 2].axis('off') ax[0, 2].set_title('Bilateral') ax[1, 0].imshow(denoise_tv_chambolle(noisy, weight=0.2, multichannel=True)) ax[1, 0].axis('off') ax[1, 0].set_title('(more) TV') ax[1, 1].imshow(denoise_bilateral(noisy, sigma_range=0.1, sigma_spatial=15)) ax[1, 1].axis('off') ax[1, 1].set_title('(more) Bilateral') ax[1, 2].imshow(astro) ax[1, 2].axis('off') ax[1, 2].set_title('original') fig.tight_layout() plt.show()
    bsd-3-clause
    Python
    9e6a016c5a59b25199426f6825b2c83571997e68
    Refactor buildbot tests so that they can be used downstream.
ondra-novak/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,jaruba/chromium.src,ltilve/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,hgl888/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,patrickm/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.src,ChromiumWebApps/chromium,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,jaruba/chromium.src,mogoweb/chromium-crosswalk,patrickm/chromium.src,anirudhSK/chromium,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,anirudhSK/chromium,ltilve/chromium,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,Chilledheart/chromium,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,axinging/chromium-crosswalk,ChromiumWebApps/chromium,ltilve/chromium,Chilledheart/chromium,Jonekee/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,patrickm/chromium.src,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,ChromiumWebApps/chromium,dushu1203/chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,patrickm/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,Chilledheart/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,jaruba/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,jaruba/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,jaruba/chromium.src,dednal/chromium.src,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,M4sse/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,littlstar/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,dednal/chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,littlstar/chromium.src,markYoungH/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,ltilve/chromium,dushu1203/chromium.src,Just-D/chromium-1,markYoungH/chromium.src,jaruba/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,Jonekee/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,Just-D/chromium-1,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,ChromiumWebApps/chromium,ChromiumWebApps/chromium,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,littlstar/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,anirudhSK/chromium
    build/android/buildbot/tests/bb_run_bot_test.py
    build/android/buildbot/tests/bb_run_bot_test.py
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import subprocess
import sys

BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot

def RunBotProcesses(bot_process_map):
  code = 0
  for bot, proc in bot_process_map:
    _, err = proc.communicate()
    code |= proc.returncode
    if proc.returncode != 0:
      print 'Error running the bot script with id="%s"' % bot, err

  return code


def main():
  procs = [
      (bot, subprocess.Popen(
          [os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
           '--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
      for bot in bb_run_bot.GetBotStepMap()]
  return RunBotProcesses(procs)


if __name__ == '__main__':
  sys.exit(main())
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import subprocess
import sys

BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot

def RunBotsWithTesting(bot_step_map):
  code = 0
  procs = [
      (bot, subprocess.Popen(
          [os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
           '--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
      for bot in bot_step_map]
  for bot, proc in procs:
    _, err = proc.communicate()
    code |= proc.returncode
    if proc.returncode != 0:
      print 'Error running bb_run_bot with id="%s"' % bot, err

  return code


def main():
  return RunBotsWithTesting(bb_run_bot.GetBotStepMap())


if __name__ == '__main__':
  sys.exit(main())
    bsd-3-clause
    Python
    eb9f9d8bfa5ea278e1fb39c59ed660a223b1f6a9
    Add flask api app creation to init
    EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list
    api/__init__.py
    api/__init__.py
from flask_sqlalchemy import SQLAlchemy
import connexion

from config import config

db = SQLAlchemy()


def create_app(config_name):
    app = connexion.FlaskApp(__name__, specification_dir='swagger/')
    app.add_api('swagger.yaml')
    application = app.app
    application.config.from_object(config[config_name])
    db.init_app(application)
    return application

# Imported at the bottom, after db and create_app are defined
from api.api import *
    mit
    Python
    c10eb3861daf48c13ec854bd210db5d5e1163b11
    Add LotGroupAutocomplete
    596acres/django-livinglots-lots,596acres/django-livinglots-lots
    livinglots_lots/autocomplete_light_registry.py
    livinglots_lots/autocomplete_light_registry.py
from autocomplete_light import AutocompleteModelBase, register

from livinglots import get_lotgroup_model


class LotGroupAutocomplete(AutocompleteModelBase):
    autocomplete_js_attributes = {'placeholder': 'lot group name',}
    search_fields = ('name',)

    def choices_for_request(self):
        choices = super(LotGroupAutocomplete, self).choices_for_request()
        if not self.request.user.is_staff:
            choices = choices.none()
        return choices

register(get_lotgroup_model(), LotGroupAutocomplete)
    agpl-3.0
    Python
    2527683522394c823bc100c75f1ce4885949136e
    add paths module for other modules to find paths from one place
    aacanakin/glim
    glim/paths.py
    glim/paths.py
import os

from termcolor import colored

PROJECT_PATH = os.getcwd()
APP_PATH = os.path.join(PROJECT_PATH, 'app')
EXT_PATH = os.path.join(PROJECT_PATH, 'ext')
GLIM_ROOT_PATH = os.path.dirname(os.path.dirname(__file__))
PROTO_PATH = os.path.join(os.path.dirname(__file__), 'prototype')

import sys
from pprint import pprint as p


def configure_sys_path():
    if GLIM_ROOT_PATH == PROJECT_PATH:
        print colored('Development mode is on, sys.path is being configured',
                      'yellow')
        sys.path.pop(0)
        sys.path.insert(0, GLIM_ROOT_PATH)
    else:
        sys.path.insert(0, PROJECT_PATH)


def controllers():
    return os.path.join(APP_PATH, 'controllers.py')


def config(env):
    return os.path.join(APP_PATH, 'config', '%s.py' % env)


def start():
    return os.path.join(APP_PATH, 'start.py')


def commands():
    return os.path.join(APP_PATH, 'commands.py')


def routes():
    return os.path.join(APP_PATH, 'routes.py')


def extensions(ext):
    return os.path.join(EXT_PATH, '%s' % ext, '%s.py' % ext)


def extension_commands(ext):
    return os.path.join(EXT_PATH, '%s' % ext, 'commands.py')
    mit
    Python
    24f21146b01ff75a244df40d1626c54883abeb1a
    Add helper-lib for json object conversion and split dicts
    UngaForskareStockholm/medlem2
    lib/helpers.py
    lib/helpers.py
#! /usr/bin/env python2.7

import datetime


def typecast_json(o):
    if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
        return o.isoformat()
    else:
        return o


def split_dict(src, keys):
    result = dict()
    for k in set(src.keys()) & set(keys):
        result[k] = src[k]
    return result
    bsd-3-clause
    Python
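A brief usage sketch for the two helpers above (the record data is invented): typecast_json is the kind of hook json.dumps accepts via its default= parameter, and split_dict keeps only the requested keys, silently skipping absent ones:

import json
import datetime

record = {"name": "Ada", "joined": datetime.date(2016, 5, 1), "age": 30}

# typecast_json as a json.dumps default hook turns dates into ISO strings
print(json.dumps(record, default=typecast_json))

# split_dict picks out a subset of keys, ignoring ones that are absent
print(split_dict(record, ["name", "joined", "missing"]))
# {'name': 'Ada', 'joined': datetime.date(2016, 5, 1)}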
    0f5c0168b257436882f837e5d521cce46a740ad6
    Add symbol translator to make utf-8 variables compilable
    FInAT/FInAT
    finat/greek_alphabet.py
    finat/greek_alphabet.py
    """Translation table from utf-8 to greek variable names, taken from: https://gist.github.com/piquadrat/765262#file-greek_alphabet-py """ def translate_symbol(symbol): """Translates utf-8 sub-strings into compilable variable names""" name = symbol.decode("utf-8") for k, v in greek_alphabet.iteritems(): name = name.replace(k, v) return name greek_alphabet = { u'\u0391': 'Alpha', u'\u0392': 'Beta', u'\u0393': 'Gamma', u'\u0394': 'Delta', u'\u0395': 'Epsilon', u'\u0396': 'Zeta', u'\u0397': 'Eta', u'\u0398': 'Theta', u'\u0399': 'Iota', u'\u039A': 'Kappa', u'\u039B': 'Lamda', u'\u039C': 'Mu', u'\u039D': 'Nu', u'\u039E': 'Xi', u'\u039F': 'Omicron', u'\u03A0': 'Pi', u'\u03A1': 'Rho', u'\u03A3': 'Sigma', u'\u03A4': 'Tau', u'\u03A5': 'Upsilon', u'\u03A6': 'Phi', u'\u03A7': 'Chi', u'\u03A8': 'Psi', u'\u03A9': 'Omega', u'\u03B1': 'alpha', u'\u03B2': 'beta', u'\u03B3': 'gamma', u'\u03B4': 'delta', u'\u03B5': 'epsilon', u'\u03B6': 'zeta', u'\u03B7': 'eta', u'\u03B8': 'theta', u'\u03B9': 'iota', u'\u03BA': 'kappa', u'\u03BB': 'lamda', u'\u03BC': 'mu', u'\u03BD': 'nu', u'\u03BE': 'xi', u'\u03BF': 'omicron', u'\u03C0': 'pi', u'\u03C1': 'rho', u'\u03C3': 'sigma', u'\u03C4': 'tau', u'\u03C5': 'upsilon', u'\u03C6': 'phi', u'\u03C7': 'chi', u'\u03C8': 'psi', u'\u03C9': 'omega', }
    mit
    Python
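A hedged example of the translator in use. The module is Python 2 (it calls decode on a byte string and uses iteritems), so the input is assumed to be a utf-8 encoded byte string; the identifier below is invented:

# -*- coding: utf-8 -*-
# Python 2 usage sketch: a utf-8 byte string containing a Greek letter
# becomes a plain-ASCII identifier. '\xce\xb1' is u'\u03b1' (alpha).
print translate_symbol('\xce\xb1_max')   # alpha_max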
    03951a227bfafb0b1017354bdbf3a1247322fc9b
    Fix cycler tests
    emmagordon/Axelrod,uglyfruitcake/Axelrod,risicle/Axelrod,kathryncrouch/Axelrod,uglyfruitcake/Axelrod,mojones/Axelrod,bootandy/Axelrod,risicle/Axelrod,mojones/Axelrod,kathryncrouch/Axelrod,emmagordon/Axelrod,bootandy/Axelrod
    axelrod/tests/unit/test_cycler.py
    axelrod/tests/unit/test_cycler.py
    """Test for the Cycler strategies.""" import itertools import axelrod from .test_player import TestPlayer, test_four_vector C, D = 'C', 'D' class TestAntiCycler(TestPlayer): name = "AntiCycler" player = axelrod.AntiCycler expected_classifier = { 'memory_depth': float('inf'), 'stochastic': False, 'inspects_source': False, 'manipulates_source': False, 'manipulates_state': False } def test_strategy(self): """Starts by cooperating""" responses = [C, D, C, C, D, C, C, C, D, C, C, C, C, D, C, C, C] self.responses_test([], [], responses) def test_cycler_factory(cycle): class TestCycler(TestPlayer): name = "Cycler %s" % cycle player = getattr(axelrod, 'Cycler%s' % cycle) expected_classifier = { 'memory_depth': len(cycle), 'stochastic': False, 'inspects_source': False, 'manipulates_source': False, 'manipulates_state': False } def test_strategy(self): """Starts by cooperating""" for i in range(20): responses = itertools.islice(itertools.cycle(cycle), i) self.responses_test([], [], responses) return TestCycler TestCyclerCCD = test_cycler_factory("CCD") TestCyclerCCCD = test_cycler_factory("CCCD") TestCyclerCCCCCD = test_cycler_factory("CCCCCD")
    """Test for the Cycler strategies.""" import itertools import axelrod from .test_player import TestPlayer, test_four_vector C, D = 'C', 'D' class TestAntiCycler(TestPlayer): name = "AntiCycler" player = axelrod.AntiCycler expected_classifier = { 'memory_depth': float('inf'), 'stochastic': False, 'inspects_source': False, 'manipulates_source': False, 'manipulates_state': False } def test_strategy(self): """Starts by cooperating""" responses = [C, D, C, C, D, C, C, C, D, C, C, C, C, D, C, C, C] self.responses_test([], [], responses) def test_cycler_factory(cycle): class TestCycler(TestPlayer): name = "Cycler %s" % cycle player = getattr(axelrod, 'Cycler%s' % cycle) expected_classifier = { 'memory_depth': 1, 'stochastic': False, 'inspects_source': False, 'manipulates_source': False, 'manipulates_state': False } def test_strategy(self): """Starts by cooperating""" for i in range(20): responses = itertools.islice(itertools.cycle(cycle), i) self.responses_test([], [], responses) return TestCycler TestCyclerCCD = test_cycler_factory("CCD") TestCyclerCCCD = test_cycler_factory("CCCD") TestCyclerCCCCCD = test_cycler_factory("CCCCCD")
    mit
    Python
    ca2269c5ae568cd63253af7bc614a79d26f7f8ac
    Add ns_drop_indexes command.
    niwinz/needlestack
    needlestack/management/commands/ns_drop_indexes.py
    needlestack/management/commands/ns_drop_indexes.py
# -*- coding: utf-8 -*-

from __future__ import print_function, absolute_import, unicode_literals

from optparse import make_option  # required by option_list below

from django.core.management.base import BaseCommand, CommandError

from needlestack import commands


class Command(BaseCommand):
    help = 'Sync all defined indexes with a current backend'
    option_list = BaseCommand.option_list + (
        make_option('--backend', action='store',
                    dest='backend', default='default'),)

    def handle(self, *args, **options):
        commands.drop_indexes(options["backend"], options["verbosity"])
    bsd-3-clause
    Python
    b8a84e612d67f7948d6dec8c202ac8a73390f9dc
    make sure all protein ids are unique in a genbank file
    linsalrob/EdwardsLab,linsalrob/EdwardsLab,linsalrob/EdwardsLab,linsalrob/EdwardsLab,linsalrob/EdwardsLab
    proteins/unique_protein_ids.py
    proteins/unique_protein_ids.py
    """ Test a genbank file and make sure all the protein_ids are unique """ import os import sys import argparse from Bio import SeqIO __author__ = 'Rob Edwards' __copyright__ = 'Copyright 2020, Rob Edwards' __credits__ = ['Rob Edwards'] __license__ = 'MIT' __maintainer__ = 'Rob Edwards' __email__ = '[email protected]' if __name__ == '__main__': parser = argparse.ArgumentParser(description=" ") parser.add_argument('-f', help='genbank file', required=True) args = parser.parse_args() pids = set() rc = 0 for seq in SeqIO.parse(args.f, "genbank"): rc+=1; print(f"record {rc}: {seq.id}") for feat in seq.features: if feat.type != "CDS": continue if 'protein_id' not in feat.qualifiers: thisid = " ".join(feat.qualifiers.get('locus_tag', [str(feat.location)])) print(f"No protein id in {thisid}") continue pid = "|".join(feat.qualifiers["protein_id"]) if pid in pids: print(f"{pid} is not unique") pids.add(pid)
    mit
    Python
    61fa404da3eeb3b695b12f398c27f641e1e681e2
    add codegen script for fname.pyf.src -> _fnamemodule.c
    matthew-brett/scipy,matthew-brett/scipy,matthew-brett/scipy,matthew-brett/scipy,matthew-brett/scipy
    tools/generate_f2pymod.py
    tools/generate_f2pymod.py
    """ Process f2py template files (`filename.pyf.src` -> `filename.pyf`) Usage: python generate_pyf.py filename.pyf.src -o filename.pyf """ import os import sys import subprocess import argparse from numpy.distutils.from_template import process_file def main(): parser = argparse.ArgumentParser() parser.add_argument("infile", type=str, help="Path to the input file") parser.add_argument("-o", "--outfile", type=str, help="Path to the output file") args = parser.parse_args() # Read .pyf.src file code = process_file(args.infile) # Write out the .pyf file outdir = os.path.split(args.outfile)[0] outdir_abs = os.path.join(os.getcwd(), outdir) fname_pyf = os.path.join(outdir, os.path.splitext(os.path.split(args.infile)[1])[0]) with open(fname_pyf, 'w') as f: f.write(code) # Now invoke f2py to generate the C API module file p = subprocess.Popen([sys.executable, '-m', 'numpy.f2py', fname_pyf, '--build-dir', outdir_abs], #'--quiet'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=os.getcwd()) out, err = p.communicate() if not (p.returncode == 0): raise RuntimeError(f"Writing {args.outfile} with f2py failed!\n" f"{out}\n" r"{err}") if __name__ == "__main__": main()
    bsd-3-clause
    Python
    0f94251c7cc844042c9e3ce160d78e4d81d895ea
    add log module
    johnnymo87/simple-db-migrate,guilhermechapiewski/simple-db-migrate
    src/log.py
    src/log.py
import logging
import os
from datetime import datetime


class LOG(object):

    logger = None

    def __init__(self, log_dir):
        if log_dir:
            if not os.path.exists(log_dir):
                os.makedirs(log_dir)
            self.logger = logging.getLogger('simple-db-migrate')
            now = datetime.now()
            filename = "%s/%s.log" % (os.path.abspath(log_dir),
                                      now.strftime("%Y%m%d%H%M%S"))
            hdlr = logging.FileHandler(filename)
            formatter = logging.Formatter('%(message)s')
            hdlr.setFormatter(formatter)
            self.logger.addHandler(hdlr)
            self.logger.setLevel(logging.DEBUG)

    def debug(self, msg):
        if self.logger:
            self.logger.debug(msg)

    def info(self, msg):
        if self.logger:
            self.logger.info(msg)

    def error(self, msg):
        if self.logger:
            self.logger.error(msg)

    def warn(self, msg):
        if self.logger:
            self.logger.warn(msg)
    apache-2.0
    Python
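A usage sketch for the LOG wrapper above; the ./migration-logs path is invented. Each instance opens one timestamped file, and a falsy log_dir makes every method a silent no-op, which is what the guards on each method are for:

# Hypothetical usage: one timestamped file per LOG instance.
log = LOG('./migration-logs')
log.info('starting migration')
log.error('something went wrong')

quiet = LOG(None)        # no directory -> every call is discarded
quiet.info('discarded')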
    bc567eda01abcaf23717f5da5f494c1be46f47da
    Create ValAnagram_001.py
    Chasego/cod,Chasego/codirit,Chasego/codi,Chasego/cod,Chasego/codi,cc13ny/algo,Chasego/codirit,Chasego/codirit,Chasego/codirit,cc13ny/Allin,cc13ny/algo,Chasego/codirit,cc13ny/Allin,Chasego/codi,cc13ny/Allin,Chasego/cod,cc13ny/algo,cc13ny/algo,cc13ny/Allin,Chasego/codi,Chasego/cod,cc13ny/Allin,cc13ny/algo,Chasego/codi,Chasego/cod
    leetcode/242-Valid-Anagram/ValAnagram_001.py
    leetcode/242-Valid-Anagram/ValAnagram_001.py
class Solution:
    # @param {string} s
    # @param {string} t
    # @return {boolean}
    def anaRepresentation(self, s):
        # Build a character -> count histogram for the string
        p = {}
        for c in s:
            if c in p:
                p[c] += 1
            else:
                p[c] = 1
        return p

    def isAnagram(self, s, t):
        if len(s) != len(t):
            return False

        p = self.anaRepresentation(s)
        q = self.anaRepresentation(t)

        for c in p:
            if c not in q or (c in q and p[c] != q[c]):
                return False
        return True
    mit
    Python
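The solution above hand-rolls a character histogram; a shorter equivalent (not from the original file) leans on collections.Counter, which compares equal exactly when the multisets of characters match:

import collections

# Equivalent anagram check using Counter; same O(len(s) + len(t)) behaviour.
def is_anagram(s, t):
    return collections.Counter(s) == collections.Counter(t)

print(is_anagram("listen", "silent"))  # True
print(is_anagram("rat", "car"))        # False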
    682d6b3ca9c4a0dd49f9762ddd20ac746971e3eb
    Create solution.py
    lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges
    leetcode/easy/find_the_difference/py/solution.py
    leetcode/easy/find_the_difference/py/solution.py
class Solution(object):
    def findTheDifference(self, s, t):
        """
        :type s: str
        :type t: str
        :rtype: str
        """
        import collections
        import itertools

        c1 = collections.Counter(s)
        c2 = collections.Counter(t)

        for char in set(itertools.chain(s, t)):
            if c1[char] != c2[char]:
                return char
        return None
    mit
    Python
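The Counter comparison works, but for this particular problem (t is s plus exactly one extra character) a common constant-space alternative, shown here only as a sketch, XORs all the character codes so that every matched pair cancels:

from functools import reduce
from operator import xor

# Alternative: XOR of all code points leaves only the single extra character.
# Assumes t is s plus exactly one added character.
def find_the_difference(s, t):
    return chr(reduce(xor, map(ord, s + t)))

print(find_the_difference("abcd", "abcde"))  # "e"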
    9e128fdd5af0598a233416de5a1e8f2d3a74fdc0
    Enforce unique paths and names
    jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces
    spaces/migrations/0006_unique_space_document.py
    spaces/migrations/0006_unique_space_document.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-15 02:12
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('spaces', '0005_document_space_doc'),
    ]

    operations = [
        migrations.AlterField(
            model_name='space',
            name='name',
            field=models.CharField(max_length=100, unique=True),
        ),
        migrations.AlterField(
            model_name='space',
            name='path',
            field=models.CharField(max_length=40, unique=True),
        ),
        migrations.AlterUniqueTogether(
            name='document',
            unique_together=set([('path', 'parent')]),
        ),
    ]
    mit
    Python
    0256868a3b261e598689eebdf5ac5f939ea20a0d
    add test cases for mni module
    arokem/nipy,nipy/nipy-labs,arokem/nipy,alexis-roche/niseg,alexis-roche/nireg,alexis-roche/niseg,arokem/nipy,alexis-roche/nipy,bthirion/nipy,alexis-roche/nipy,nipy/nipy-labs,nipy/nireg,alexis-roche/nipy,bthirion/nipy,alexis-roche/nipy,alexis-roche/register,bthirion/nipy,nipy/nireg,arokem/nipy,alexis-roche/register,alexis-roche/nireg,alexis-roche/register,bthirion/nipy
    lib/neuroimaging/reference/tests/test_mni.py
    lib/neuroimaging/reference/tests/test_mni.py
import unittest

import numpy as N

import neuroimaging.reference.mni as mni


class MNITest(unittest.TestCase):

    def testMNI(self):
        """ ensure all elements of the interface exist """
        m = mni.MNI
        g = mni.generic
        m_v = mni.MNI_voxel
        m_w = mni.MNI_world
        m_m = mni.MNI_mapping


if __name__ == '__main__':
    unittest.main()
    bsd-3-clause
    Python
    d91adef072e2150edde62a49bea4eecb6a26a6ac
    add sns_notify script
    pyconjp/pyconjp-cron
    sns_notify.py
    sns_notify.py
#!/usr/bin/env python

from datetime import datetime, date

from dateutil import parser

from google_sheets import get_service

SHEET_ID = "1lpa9p_dCyTckREf09-oA2C6ZAMACCrgD9W3HQSKeoSI"


def is_valid_period(start, end):
    """
    Return whether today falls between start and end.

    :param start: notification start date as a string, or an empty string
    :param end: notification end date as a string, or an empty string
    :return: True if today is within the notification period, False otherwise
    """
    # Convert the strings to date objects
    try:
        start = parser.parse(start).date()
    except ValueError:
        start = date(2000, 1, 1)  # fall back to a date in the past
    try:
        end = parser.parse(end).date()
    except ValueError:
        end = date(3000, 1, 1)  # fall back to a date in the future

    today = date.today()
    # Return whether today is within the range
    return start <= today <= end


def sns_notify(row, now):
    """
    Send one row of spreadsheet data to the SNS accounts.

    The row has the following format:

    1. Notification date (YYYY/MM/DD or a weekday)
    2. Notification time
    3. Message to send
    4. URL to send
    5. Notification start date
    6. Notification end date
    7. Twitter notification flag (notify if 1)
    8. Facebook notification flag (notify if 1)

    :param row: one row of spreadsheet data
    :param now: current time (datetime)
    """
    # Do nothing if the row has too few fields
    # (all eight columns are read below, so require at least eight)
    if len(row) < 8:
        return

    # Do nothing if today is outside the notification period
    if not is_valid_period(row[4], row[5]):
        return

    # Do nothing if this is not the date/time to notify at

    # Send the message
    if row[6] == '1':
        pass
    if row[7] == '1':
        pass


def main():
    """
    Read data from the PyCon JP Twitter/Facebook notification sheet and notify
    """
    now = datetime.now()
    service = get_service()
    # Read all data from the sheet
    result = service.spreadsheets().values().get(
        spreadsheetId=SHEET_ID, range='messages!A4:H').execute()
    for row in result.get('values', []):
        # Send the SNS notifications based on this row
        sns_notify(row, now)


if __name__ == '__main__':
    main()
    mit
    Python
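A few spot checks of is_valid_period from the script above (its docstrings are translated from the Japanese original). Empty strings fail parser.parse and fall back to far past/future sentinels, so an empty bound simply means "no bound":

print(is_valid_period('', ''))                      # True: no bounds at all
print(is_valid_period('2000/01/01', '2000/12/31'))  # False (unless run in 2000)
print(is_valid_period('2000/01/01', ''))            # True: only a lower bound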