{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); ')\n\nclass HelloPage(webapp.RequestHandler):\n def post(self):\n self.response.headers['Content-Type'] = 'text/plain'\n self.response.out.write('Hello, %s' % self.request.get('name'))\n\napplication = webapp.WSGIApplication([\n ('/', MainPage),\n ('/hello', HelloPage)\n], debug=True)\n\nif __name__ == '__main__':\n import wsgiref.simple_server\n server_host = ''\n server_port = 8080\n server = wsgiref.simple_server.make_server(\n server_host, server_port, application)\n print 'Serving on %s:%s' % (server_host, server_port)\n 
server.serve_forever()\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203567,"cells":{"repo_name":{"kind":"string","value":"spunkmars/ProFTPD-Admin"},"path":{"kind":"string","value":"src/proftpd/ftpadmin/templatetags/FtpTags.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"6554"},"content":{"kind":"string","value":"#coding=utf-8\r\nfrom django import template\r\nfrom proftpd.ftpadmin.lib.common import initlog\r\nfrom django.core.urlresolvers import reverse\r\n\r\nfrom django.shortcuts import render_to_response, get_object_or_404\r\n\r\nfrom proftpd.ftpadmin.models.ftpusers import Ftpuser\r\n\r\nfrom proftpd.ftpadmin.models.ftpgroups import Ftpgroup\r\n\r\n\r\n#logger2 = initlog()\r\nregister = template.Library()\r\n\r\ndef do_get_sort_by_url(parser, token):\r\n\r\n try: \r\n tag_name, current_sort_by, target_sort_by = token.split_contents() \r\n except: \r\n raise template.TemplateSyntaxError, \"%r tags error\" % token.split_contents[0]\r\n\r\n #另一种取得模板变量值方法 步骤1\r\n #current_sort_by = parser.compile_filter(current_sort_by)\r\n #target_sort_by = parser.compile_filter(target_sort_by)\r\n\r\n #logger2.info(\"hhh%shhh, vvv%svvv, ddd%sddd\" % (tag_name, current_sort_by, target_sort_by) )\r\n\r\n\r\n return FtpXferNode(current_sort_by, target_sort_by)\r\n\r\n\r\n\r\nclass FtpXferNode(template.Node):\r\n\r\n def __init__(self, current_sort_by, target_sort_by):\r\n #另一种取得模板变量值方法 步骤2\r\n #self.current_sort_by = current_sort_by\r\n #self.target_sort_by = target_sort_by\r\n self.current_sort_by = template.Variable(current_sort_by)\r\n self.target_sort_by = template.Variable(target_sort_by)\r\n\r\n def render(self, context):\r\n \r\n #另一种取得模板变量值方法 步骤3\r\n #sort_by = self.current_sort_by.resolve(context, True)\r\n #target_sort_by = self.target_sort_by.resolve(context, True)\r\n\r\n sort_by = self.current_sort_by.resolve(context)\r\n target_sort_by = self.target_sort_by.resolve(context)\r\n if (sort_by == target_sort_by):\r\n 
output_sort_by = '-' + target_sort_by\r\n else:\r\n output_sort_by = target_sort_by\r\n\r\n return output_sort_by\r\n\r\n\r\nregister.tag('get_sort_by_url', do_get_sort_by_url)\r\n\r\n#----------------------------------------------------------------------------\r\ndef do_get_user_url_by_username(parser, token):\r\n\r\n try: \r\n tag_name, do_action, user_name = token.split_contents() \r\n except: \r\n raise template.TemplateSyntaxError, \"%r tags error\" % token.split_contents[0]\r\n return FtpUserNode1(do_action, user_name)\r\n\r\n\r\nclass FtpUserNode1(template.Node):\r\n def __init__(self, do_action, user_name):\r\n self.user_name = template.Variable(user_name)\r\n self.do_action = template.Variable(do_action)\r\n\r\n\r\n def render(self, context):\r\n user_name = self.user_name.resolve(context)\r\n do_action = self.do_action.resolve(context)\r\n\r\n user_detail_url = ''\r\n url_type = 'ftpuser_user_detail'\r\n\r\n if do_action == 'edit' :\r\n url_type = 'ftpuser_edit_user'\r\n elif do_action == 'del' :\r\n url_type = 'ftpuser_del_user'\r\n elif do_action == 'detail' :\r\n url_type = 'ftpuser_user_detail'\r\n\r\n ftpuser = get_object_or_404(Ftpuser, username=user_name)\r\n if ftpuser :\r\n user_detail_url = reverse(url_type, args=[ftpuser.id])\r\n return user_detail_url\r\n\r\n\r\nregister.tag('get_user_url_by_username', do_get_user_url_by_username)\r\n\r\n\r\n#----------------------------------------------------------------------------\r\ndef do_get_user_group_url_by_username(parser, token):\r\n\r\n try: \r\n tag_name, user_name = token.split_contents() \r\n except: \r\n raise template.TemplateSyntaxError, \"%r tags error\" % token.split_contents[0]\r\n return FtpUserGroupNode1(user_name)\r\n\r\n\r\nclass FtpUserGroupNode1(template.Node):\r\n def __init__(self,user_name):\r\n self.user_name = template.Variable(user_name)\r\n\r\n\r\n def render(self, context):\r\n user_name = self.user_name.resolve(context)\r\n\r\n group_edit_url = ''\r\n ftpuser = 
get_object_or_404(Ftpuser, username=user_name)\r\n if ftpuser :\r\n ftpgroup = get_object_or_404(Ftpgroup, pk=ftpuser.id)\r\n if ftpgroup :\r\n group_edit_url = reverse('ftpgroup_edit_group', args=[ftpgroup.id])\r\n return group_edit_url\r\n\r\n\r\nregister.tag('get_user_group_url_by_username', do_get_user_group_url_by_username)\r\n\r\n\r\ndef do_get_user_group_url_by_groupname(parser, token):\r\n\r\n try: \r\n tag_name, do_action, group_name = token.split_contents() \r\n except: \r\n raise template.TemplateSyntaxError, \"%r tags error\" % token.split_contents[0]\r\n return FtpUserGroupNode2(do_action, group_name)\r\n\r\n\r\nclass FtpUserGroupNode2(template.Node):\r\n def __init__(self, do_action, group_name):\r\n self.group_name = template.Variable(group_name)\r\n self.do_action = template.Variable(do_action)\r\n\r\n def render(self, context):\r\n group_name = self.group_name.resolve(context)\r\n do_action = self.do_action.resolve(context)\r\n\r\n group_edit_url = ''\r\n url_type = 'ftpgroup_group_detail'\r\n\r\n if do_action == 'edit' :\r\n url_type = 'ftpgroup_edit_group'\r\n elif do_action == 'del' :\r\n url_type = 'ftpgroup_del_group'\r\n elif do_action == 'detail' :\r\n url_type = 'ftpgroup_group_detail'\r\n\r\n ftpgroup = get_object_or_404(Ftpgroup, groupname=group_name)\r\n if ftpgroup :\r\n group_edit_url = reverse(url_type, args=[ftpgroup.id])\r\n return group_edit_url\r\n\r\n\r\nregister.tag('get_user_group_url_by_groupname', do_get_user_group_url_by_groupname)\r\n\r\n\r\n#----------------------------------------------------------------------------\r\ndef do_get_group_member_html_context(parser, token):\r\n\r\n try: \r\n tag_name, mem_str = token.split_contents() \r\n except: \r\n raise template.TemplateSyntaxError, \"%r tags error\" % token.split_contents[0]\r\n return FtpUserGroupNode3(mem_str)\r\n\r\n\r\nclass FtpUserGroupNode3(template.Node):\r\n def __init__(self, mem_str):\r\n self.mem_str = template.Variable(mem_str)\r\n\r\n\r\n def render(self, 
context):\r\n mem_str = self.mem_str.resolve(context)\r\n mem_html_context = ''\r\n url_array = []\r\n mem_array = []\r\n if mem_str :\r\n mem_array = mem_str.split(',')\r\n list_url = reverse('ftpuser_list_user')\r\n for member in mem_array :\r\n search_url = list_url + '?q=' + member\r\n url_array.append('' + member + '')\r\n mem_html_context = ',&nbsp;'.join(url_array)\r\n\r\n return mem_html_context\r\n\r\n\r\nregister.tag('get_group_member_html_context', do_get_group_member_html_context)"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203568,"cells":{"repo_name":{"kind":"string","value":"mims2707/bite-project"},"path":{"kind":"string","value":"deps/gdata-python-client/samples/apps/marketplace_sample/atom/mock_service.py"},"copies":{"kind":"string","value":"277"},"size":{"kind":"string","value":"10350"},"content":{"kind":"string","value":"#!/usr/bin/python\n#\n# Copyright (C) 2008 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\n\"\"\"MockService provides CRUD ops. 
for mocking calls to AtomPub services.\n\n MockService: Exposes the publicly used methods of AtomService to provide\n a mock interface which can be used in unit tests.\n\"\"\"\n\nimport atom.service\nimport pickle\n\n\n__author__ = 'api.jscudder (Jeffrey Scudder)'\n\n\n# Recordings contains pairings of HTTP MockRequest objects with MockHttpResponse objects.\nrecordings = []\n# If set, the mock service HttpRequest are actually made through this object.\nreal_request_handler = None\n\ndef ConcealValueWithSha(source):\n import sha\n return sha.new(source[:-5]).hexdigest()\n\ndef DumpRecordings(conceal_func=ConcealValueWithSha):\n if conceal_func:\n for recording_pair in recordings:\n recording_pair[0].ConcealSecrets(conceal_func)\n return pickle.dumps(recordings)\n\ndef LoadRecordings(recordings_file_or_string):\n if isinstance(recordings_file_or_string, str):\n atom.mock_service.recordings = pickle.loads(recordings_file_or_string)\n elif hasattr(recordings_file_or_string, 'read'):\n atom.mock_service.recordings = pickle.loads(\n recordings_file_or_string.read())\n\ndef HttpRequest(service, operation, data, uri, extra_headers=None,\n url_params=None, escape_params=True, content_type='application/atom+xml'):\n \"\"\"Simulates an HTTP call to the server, makes an actual HTTP request if \n real_request_handler is set.\n\n This function operates in two different modes depending on if \n real_request_handler is set or not. If real_request_handler is not set,\n HttpRequest will look in this module's recordings list to find a response\n which matches the parameters in the function call. If real_request_handler\n is set, this function will call real_request_handler.HttpRequest, add the\n response to the recordings list, and respond with the actual response.\n\n Args:\n service: atom.AtomService object which contains some of the parameters\n needed to make the request. 
The following members are used to\n construct the HTTP call: server (str), additional_headers (dict),\n port (int), and ssl (bool).\n operation: str The HTTP operation to be performed. This is usually one of\n 'GET', 'POST', 'PUT', or 'DELETE'\n data: ElementTree, filestream, list of parts, or other object which can be\n converted to a string.\n Should be set to None when performing a GET or PUT.\n If data is a file-like object which can be read, this method will read\n a chunk of 100K bytes at a time and send them.\n If the data is a list of parts to be sent, each part will be evaluated\n and sent.\n uri: The beginning of the URL to which the request should be sent.\n Examples: '/', '/base/feeds/snippets',\n '/m8/feeds/contacts/default/base'\n extra_headers: dict of strings. HTTP headers which should be sent\n in the request. These headers are in addition to those stored in\n service.additional_headers.\n url_params: dict of strings. Key value pairs to be added to the URL as\n URL parameters. For example {'foo':'bar', 'test':'param'} will\n become ?foo=bar&test=param.\n escape_params: bool default True. If true, the keys and values in\n url_params will be URL escaped when the form is constructed\n (Special characters converted to %XX form.)\n content_type: str The MIME type for the data being sent. 
Defaults to\n 'application/atom+xml', this is only used if data is set.\n \"\"\"\n full_uri = atom.service.BuildUri(uri, url_params, escape_params)\n (server, port, ssl, uri) = atom.service.ProcessUrl(service, uri)\n current_request = MockRequest(operation, full_uri, host=server, ssl=ssl, \n data=data, extra_headers=extra_headers, url_params=url_params, \n escape_params=escape_params, content_type=content_type)\n # If the request handler is set, we should actually make the request using \n # the request handler and record the response to replay later.\n if real_request_handler:\n response = real_request_handler.HttpRequest(service, operation, data, uri,\n extra_headers=extra_headers, url_params=url_params, \n escape_params=escape_params, content_type=content_type)\n # TODO: need to copy the HTTP headers from the real response into the\n # recorded_response.\n recorded_response = MockHttpResponse(body=response.read(), \n status=response.status, reason=response.reason)\n # Insert a tuple which maps the request to the response object returned\n # when making an HTTP call using the real_request_handler.\n recordings.append((current_request, recorded_response))\n return recorded_response\n else:\n # Look through available recordings to see if one matches the current \n # request.\n for request_response_pair in recordings:\n if request_response_pair[0].IsMatch(current_request):\n return request_response_pair[1]\n return None\n\n\nclass MockRequest(object):\n \"\"\"Represents a request made to an AtomPub server.\n \n These objects are used to determine if a client request matches a recorded\n HTTP request to determine what the mock server's response will be. 
\n \"\"\"\n\n def __init__(self, operation, uri, host=None, ssl=False, port=None, \n data=None, extra_headers=None, url_params=None, escape_params=True,\n content_type='application/atom+xml'):\n \"\"\"Constructor for a MockRequest\n \n Args:\n operation: str One of 'GET', 'POST', 'PUT', or 'DELETE' this is the\n HTTP operation requested on the resource.\n uri: str The URL describing the resource to be modified or feed to be\n retrieved. This should include the protocol (http/https) and the host\n (aka domain). For example, these are some valud full_uris:\n 'http://example.com', 'https://www.google.com/accounts/ClientLogin'\n host: str (optional) The server name which will be placed at the \n beginning of the URL if the uri parameter does not begin with 'http'.\n Examples include 'example.com', 'www.google.com', 'www.blogger.com'.\n ssl: boolean (optional) If true, the request URL will begin with https \n instead of http.\n data: ElementTree, filestream, list of parts, or other object which can be\n converted to a string. (optional)\n Should be set to None when performing a GET or PUT.\n If data is a file-like object which can be read, the constructor \n will read the entire file into memory. If the data is a list of \n parts to be sent, each part will be evaluated and stored.\n extra_headers: dict (optional) HTTP headers included in the request.\n url_params: dict (optional) Key value pairs which should be added to \n the URL as URL parameters in the request. For example uri='/', \n url_parameters={'foo':'1','bar':'2'} could become '/?foo=1&bar=2'.\n escape_params: boolean (optional) Perform URL escaping on the keys and \n values specified in url_params. 
Defaults to True.\n content_type: str (optional) Provides the MIME type of the data being \n sent.\n \"\"\"\n self.operation = operation\n self.uri = _ConstructFullUrlBase(uri, host=host, ssl=ssl)\n self.data = data\n self.extra_headers = extra_headers\n self.url_params = url_params or {}\n self.escape_params = escape_params\n self.content_type = content_type\n\n def ConcealSecrets(self, conceal_func):\n \"\"\"Conceal secret data in this request.\"\"\"\n if self.extra_headers.has_key('Authorization'):\n self.extra_headers['Authorization'] = conceal_func(\n self.extra_headers['Authorization'])\n\n def IsMatch(self, other_request):\n \"\"\"Check to see if the other_request is equivalent to this request.\n \n Used to determine if a recording matches an incoming request so that a\n recorded response should be sent to the client.\n\n The matching is not exact, only the operation and URL are examined \n currently.\n\n Args:\n other_request: MockRequest The request which we want to check this\n (self) MockRequest against to see if they are equivalent.\n \"\"\"\n # More accurate matching logic will likely be required.\n return (self.operation == other_request.operation and self.uri == \n other_request.uri)\n\n\ndef _ConstructFullUrlBase(uri, host=None, ssl=False):\n \"\"\"Puts URL components into the form http(s)://full.host.strinf/uri/path\n \n Used to construct a roughly canonical URL so that URLs which begin with \n 'http://example.com/' can be compared to a uri of '/' when the host is \n set to 'example.com'\n\n If the uri contains 'http://host' already, the host and ssl parameters\n are ignored.\n\n Args:\n uri: str The path component of the URL, examples include '/'\n host: str (optional) The host name which should prepend the URL. 
Example:\n 'example.com'\n ssl: boolean (optional) If true, the returned URL will begin with https\n instead of http.\n\n Returns:\n String which has the form http(s)://example.com/uri/string/contents\n \"\"\"\n if uri.startswith('http'):\n return uri\n if ssl:\n return 'https://%s%s' % (host, uri)\n else:\n return 'http://%s%s' % (host, uri)\n\n\nclass MockHttpResponse(object):\n \"\"\"Returned from MockService crud methods as the server's response.\"\"\"\n\n def __init__(self, body=None, status=None, reason=None, headers=None):\n \"\"\"Construct a mock HTTPResponse and set members.\n\n Args:\n body: str (optional) The HTTP body of the server's response. \n status: int (optional) \n reason: str (optional)\n headers: dict (optional)\n \"\"\"\n self.body = body\n self.status = status\n self.reason = reason\n self.headers = headers or {}\n\n def read(self):\n return self.body\n\n def getheader(self, header_name):\n return self.headers[header_name]\n\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203569,"cells":{"repo_name":{"kind":"string","value":"RichardLitt/wyrd-django-dev"},"path":{"kind":"string","value":"tests/regressiontests/queryset_pickle/models.py"},"copies":{"kind":"string","value":"65"},"size":{"kind":"string","value":"1056"},"content":{"kind":"string","value":"from __future__ import absolute_import\n\nimport datetime\n\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\n\n\ndef standalone_number(self):\n return 1\n\nclass Numbers(object):\n @staticmethod\n def get_static_number(self):\n return 2\n\n @classmethod\n def get_class_number(self):\n return 3\n\n def get_member_number(self):\n return 4\n\nnn = Numbers()\n\nclass Group(models.Model):\n name = models.CharField(_('name'), max_length=100)\n\nclass Event(models.Model):\n group = models.ForeignKey(Group)\n\nclass Happening(models.Model):\n when = models.DateTimeField(blank=True, default=datetime.datetime.now)\n name = models.CharField(blank=True, 
max_length=100, default=lambda:\"test\")\n number1 = models.IntegerField(blank=True, default=standalone_number)\n number2 = models.IntegerField(blank=True, default=Numbers.get_static_number)\n number3 = models.IntegerField(blank=True, default=Numbers.get_class_number)\n number4 = models.IntegerField(blank=True, default=nn.get_member_number)\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203570,"cells":{"repo_name":{"kind":"string","value":"sklnet/openatv-enigma2"},"path":{"kind":"string","value":"lib/python/Components/ConditionalWidget.py"},"copies":{"kind":"string","value":"84"},"size":{"kind":"string","value":"1700"},"content":{"kind":"string","value":"from GUIComponent import GUIComponent\nfrom enigma import eTimer\n\nclass ConditionalWidget(GUIComponent):\n\tdef __init__(self, withTimer = True):\n\t\tGUIComponent.__init__(self)\n\n\t\tself.setConnect(None)\n\n\t\tif withTimer:\n\t\t\tself.conditionCheckTimer = eTimer()\n\t\t\tself.conditionCheckTimer.callback.append(self.update)\n\t\t\tself.conditionCheckTimer.start(1000)\n\n\tdef postWidgetCreate(self, instance):\n\t\tself.visible = 0\n\n\tdef setConnect(self, conditionalFunction):\n\t\tself.conditionalFunction = conditionalFunction\n\n\tdef activateCondition(self, condition):\n\t\tif condition:\n\t\t\tself.visible = 1\n\t\telse:\n\t\t\tself.visible = 0\n\n\tdef update(self):\n\t\tif self.conditionalFunction is not None:\n\t\t\ttry:\n\t\t\t\tself.activateCondition(self.conditionalFunction())\n\t\t\texcept:\n\t\t\t\tself.conditionalFunction = None\n\t\t\t\tself.activateCondition(False)\n\nclass BlinkingWidget(GUIComponent):\n\tdef __init__(self):\n\t\tGUIComponent.__init__(self)\n\t\tself.blinking = False\n\t\tself.setBlinkTime(500)\n\t\tself.timer = eTimer()\n\t\tself.timer.callback.append(self.blink)\n\n\tdef setBlinkTime(self, time):\n\t\tself.blinktime = time\n\n\tdef blink(self):\n\t\tif self.blinking:\n\t\t\tself.visible = not self.visible\n\n\tdef 
startBlinking(self):\n\t\tself.blinking = True\n\t\tself.timer.start(self.blinktime)\n\n\tdef stopBlinking(self):\n\t\tself.blinking = False\n\t\tif self.visible:\n\t\t\tself.hide()\n\t\tself.timer.stop()\n\nclass BlinkingWidgetConditional(BlinkingWidget, ConditionalWidget):\n\tdef __init__(self):\n\t\tBlinkingWidget.__init__(self)\n\t\tConditionalWidget.__init__(self)\n\n\tdef activateCondition(self, condition):\n\t\tif condition:\n\t\t\tif not self.blinking: # we are already blinking\n\t\t\t\tself.startBlinking()\n\t\telse:\n\t\t\tif self.blinking: # we are blinking\n\t\t\t\tself.stopBlinking()\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203571,"cells":{"repo_name":{"kind":"string","value":"myFengo2015/volatility"},"path":{"kind":"string","value":"volatility/plugins/mac/mac_yarascan.py"},"copies":{"kind":"string","value":"44"},"size":{"kind":"string","value":"3999"},"content":{"kind":"string","value":"# Volatility\n# Copyright (C) 2007-2013 Volatility Foundation\n#\n# This file is part of Volatility.\n#\n# Volatility is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n#\n# Volatility is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Volatility. 
If not, see .\n#\n\nimport volatility.plugins.malware.malfind as malfind\nimport volatility.plugins.mac.pstasks as pstasks\nimport volatility.plugins.mac.common as common\nimport volatility.utils as utils \nimport volatility.debug as debug\nimport volatility.obj as obj\n\ntry:\n import yara\n has_yara = True\nexcept ImportError:\n has_yara = False\n\nclass MapYaraScanner(malfind.BaseYaraScanner):\n \"\"\"A scanner over all memory regions of a process.\"\"\"\n\n def __init__(self, task = None, **kwargs):\n \"\"\"Scan the process address space through the VMAs.\n\n Args:\n task: The task_struct object for this task.\n \"\"\"\n self.task = task\n malfind.BaseYaraScanner.__init__(self, address_space = task.get_process_address_space(), **kwargs)\n\n def scan(self, offset = 0, maxlen = None):\n for map in self.task.get_proc_maps():\n for match in malfind.BaseYaraScanner.scan(self, map.links.start, map.links.end - map.links.start):\n yield match\n\nclass mac_yarascan(malfind.YaraScan):\n \"\"\"Scan memory for yara signatures\"\"\"\n\n @staticmethod\n def is_valid_profile(profile):\n return profile.metadata.get('os', 'Unknown').lower() == 'mac'\n\n def calculate(self):\n \n ## we need this module imported\n if not has_yara:\n debug.error(\"Please install Yara from code.google.com/p/yara-project\")\n \n ## leveraged from the windows yarascan plugin\n rules = self._compile_rules()\n \n ## set the linux plugin address spaces \n common.set_plugin_members(self)\n\n if self._config.KERNEL:\n ## http://fxr.watson.org/fxr/source/osfmk/mach/i386/vm_param.h?v=xnu-2050.18.24\n if self.addr_space.profile.metadata.get('memory_model', '32bit') == \"32bit\":\n if not common.is_64bit_capable(self.addr_space):\n kernel_start = 0\n else:\n kernel_start = 0xc0000000\n else:\n kernel_start = 0xffffff8000000000\n\n scanner = malfind.DiscontigYaraScanner(rules = rules, \n address_space = self.addr_space) \n \n for hit, address in scanner.scan(start_offset = kernel_start):\n yield (None, 
address, hit, \n scanner.address_space.zread(address, 64))\n else:\n # Scan each process memory block \n for task in pstasks.mac_tasks(self._config).calculate():\n scanner = MapYaraScanner(task = task, rules = rules)\n for hit, address in scanner.scan():\n yield (task, address, hit, \n scanner.address_space.zread(address, 64))\n \n def render_text(self, outfd, data):\n for task, address, hit, buf in data:\n if task:\n outfd.write(\"Task: {0} pid {1} rule {2} addr {3:#x}\\n\".format(\n task.p_comm, task.p_pid, hit.rule, address))\n else:\n outfd.write(\"[kernel] rule {0} addr {1:#x}\\n\".format(hit.rule, address))\n \n outfd.write(\"\".join([\"{0:#018x} {1:<48} {2}\\n\".format(\n address + o, h, ''.join(c)) for o, h, c in utils.Hexdump(buf)]))\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203572,"cells":{"repo_name":{"kind":"string","value":"mvtuong/Yelp-Challenge"},"path":{"kind":"string","value":"LIBSVM/tools/subset.py"},"copies":{"kind":"string","value":"124"},"size":{"kind":"string","value":"3202"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\nimport os, sys, math, random\nfrom collections import defaultdict\n\nif sys.version_info[0] >= 3:\n\txrange = range\n\ndef exit_with_help(argv):\n\tprint(\"\"\"\\\nUsage: {0} [options] dataset subset_size [output1] [output2]\n\nThis script randomly selects a subset of the dataset.\n\noptions:\n-s method : method of selection (default 0)\n 0 -- stratified selection (classification only)\n 1 -- random selection\n\noutput1 : the subset (optional)\noutput2 : rest of the data (optional)\nIf output1 is omitted, the subset will be printed on the screen.\"\"\".format(argv[0]))\n\texit(1)\n\ndef process_options(argv):\n\targc = len(argv)\n\tif argc < 3:\n\t\texit_with_help(argv)\n\n\t# default method is stratified selection\n\tmethod = 0 \n\tsubset_file = sys.stdout\n\trest_file = None\n\n\ti = 1\n\twhile i < argc:\n\t\tif argv[i][0] != \"-\":\n\t\t\tbreak\n\t\tif argv[i] == \"-s\":\n\t\t\ti = i + 
1\n\t\t\tmethod = int(argv[i])\n\t\t\tif method not in [0,1]:\n\t\t\t\tprint(\"Unknown selection method {0}\".format(method))\n\t\t\t\texit_with_help(argv)\n\t\ti = i + 1\n\n\tdataset = argv[i]\n\tsubset_size = int(argv[i+1])\n\tif i+2 < argc:\n\t\tsubset_file = open(argv[i+2],'w')\n\tif i+3 < argc:\n\t\trest_file = open(argv[i+3],'w')\n\n\treturn dataset, subset_size, method, subset_file, rest_file\n\ndef random_selection(dataset, subset_size):\n\tl = sum(1 for line in open(dataset,'r'))\n\treturn sorted(random.sample(xrange(l), subset_size))\n\ndef stratified_selection(dataset, subset_size):\n\tlabels = [line.split(None,1)[0] for line in open(dataset)]\n\tlabel_linenums = defaultdict(list)\n\tfor i, label in enumerate(labels):\n\t\tlabel_linenums[label] += [i]\n\n\tl = len(labels)\n\tremaining = subset_size\n\tret = []\n\n\t# classes with fewer data are sampled first; otherwise\n\t# some rare classes may not be selected\n\tfor label in sorted(label_linenums, key=lambda x: len(label_linenums[x])):\n\t\tlinenums = label_linenums[label]\n\t\tlabel_size = len(linenums) \n\t\t# at least one instance per class\n\t\ts = int(min(remaining, max(1, math.ceil(label_size*(float(subset_size)/l)))))\n\t\tif s == 0:\n\t\t\tsys.stderr.write('''\\\nError: failed to have at least one instance per class\n 1. You may have regression data.\n 2. 
Your classification data is unbalanced or too small.\nPlease use -s 1.\n''')\n\t\t\tsys.exit(-1)\n\t\tremaining -= s\n\t\tret += [linenums[i] for i in random.sample(xrange(label_size), s)]\n\treturn sorted(ret)\n\ndef main(argv=sys.argv):\n\tdataset, subset_size, method, subset_file, rest_file = process_options(argv)\n\t#uncomment the following line to fix the random seed \n\t#random.seed(0)\n\tselected_lines = []\n\n\tif method == 0:\n\t\tselected_lines = stratified_selection(dataset, subset_size)\n\telif method == 1:\n\t\tselected_lines = random_selection(dataset, subset_size)\n\n\t#select instances based on selected_lines\n\tdataset = open(dataset,'r')\n\tprev_selected_linenum = -1\n\tfor i in xrange(len(selected_lines)):\n\t\tfor cnt in xrange(selected_lines[i]-prev_selected_linenum-1):\n\t\t\tline = dataset.readline()\n\t\t\tif rest_file: \n\t\t\t\trest_file.write(line)\n\t\tsubset_file.write(dataset.readline())\n\t\tprev_selected_linenum = selected_lines[i]\n\tsubset_file.close()\n\n\tif rest_file:\n\t\tfor line in dataset: \n\t\t\trest_file.write(line)\n\t\trest_file.close()\n\tdataset.close()\n\nif __name__ == '__main__':\n\tmain(sys.argv)\n\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203573,"cells":{"repo_name":{"kind":"string","value":"Avinash-Raj/appengine-django-skeleton"},"path":{"kind":"string","value":"lib/django/templatetags/i18n.py"},"copies":{"kind":"string","value":"219"},"size":{"kind":"string","value":"19311"},"content":{"kind":"string","value":"from __future__ import unicode_literals\n\nimport sys\n\nfrom django.conf import settings\nfrom django.template import Library, Node, TemplateSyntaxError, Variable\nfrom django.template.base import TOKEN_TEXT, TOKEN_VAR, render_value_in_context\nfrom django.template.defaulttags import token_kwargs\nfrom django.utils import six, translation\nfrom django.utils.safestring import SafeData, mark_safe\n\nregister = Library()\n\n\nclass GetAvailableLanguagesNode(Node):\n def __init__(self, 
variable):\n self.variable = variable\n\n def render(self, context):\n context[self.variable] = [(k, translation.ugettext(v)) for k, v in settings.LANGUAGES]\n return ''\n\n\nclass GetLanguageInfoNode(Node):\n def __init__(self, lang_code, variable):\n self.lang_code = lang_code\n self.variable = variable\n\n def render(self, context):\n lang_code = self.lang_code.resolve(context)\n context[self.variable] = translation.get_language_info(lang_code)\n return ''\n\n\nclass GetLanguageInfoListNode(Node):\n def __init__(self, languages, variable):\n self.languages = languages\n self.variable = variable\n\n def get_language_info(self, language):\n # ``language`` is either a language code string or a sequence\n # with the language code as its first item\n if len(language[0]) > 1:\n return translation.get_language_info(language[0])\n else:\n return translation.get_language_info(str(language))\n\n def render(self, context):\n langs = self.languages.resolve(context)\n context[self.variable] = [self.get_language_info(lang) for lang in langs]\n return ''\n\n\nclass GetCurrentLanguageNode(Node):\n def __init__(self, variable):\n self.variable = variable\n\n def render(self, context):\n context[self.variable] = translation.get_language()\n return ''\n\n\nclass GetCurrentLanguageBidiNode(Node):\n def __init__(self, variable):\n self.variable = variable\n\n def render(self, context):\n context[self.variable] = translation.get_language_bidi()\n return ''\n\n\nclass TranslateNode(Node):\n def __init__(self, filter_expression, noop, asvar=None,\n message_context=None):\n self.noop = noop\n self.asvar = asvar\n self.message_context = message_context\n self.filter_expression = filter_expression\n if isinstance(self.filter_expression.var, six.string_types):\n self.filter_expression.var = Variable(\"'%s'\" %\n self.filter_expression.var)\n\n def render(self, context):\n self.filter_expression.var.translate = not self.noop\n if self.message_context:\n 
self.filter_expression.var.message_context = (\n self.message_context.resolve(context))\n output = self.filter_expression.resolve(context)\n value = render_value_in_context(output, context)\n # Restore percent signs. Percent signs in template text are doubled\n # so they are not interpreted as string format flags.\n is_safe = isinstance(value, SafeData)\n value = value.replace('%%', '%')\n value = mark_safe(value) if is_safe else value\n if self.asvar:\n context[self.asvar] = value\n return ''\n else:\n return value\n\n\nclass BlockTranslateNode(Node):\n\n def __init__(self, extra_context, singular, plural=None, countervar=None,\n counter=None, message_context=None, trimmed=False, asvar=None):\n self.extra_context = extra_context\n self.singular = singular\n self.plural = plural\n self.countervar = countervar\n self.counter = counter\n self.message_context = message_context\n self.trimmed = trimmed\n self.asvar = asvar\n\n def render_token_list(self, tokens):\n result = []\n vars = []\n for token in tokens:\n if token.token_type == TOKEN_TEXT:\n result.append(token.contents.replace('%', '%%'))\n elif token.token_type == TOKEN_VAR:\n result.append('%%(%s)s' % token.contents)\n vars.append(token.contents)\n msg = ''.join(result)\n if self.trimmed:\n msg = translation.trim_whitespace(msg)\n return msg, vars\n\n def render(self, context, nested=False):\n if self.message_context:\n message_context = self.message_context.resolve(context)\n else:\n message_context = None\n tmp_context = {}\n for var, val in self.extra_context.items():\n tmp_context[var] = val.resolve(context)\n # Update() works like a push(), so corresponding context.pop() is at\n # the end of function\n context.update(tmp_context)\n singular, vars = self.render_token_list(self.singular)\n if self.plural and self.countervar and self.counter:\n count = self.counter.resolve(context)\n context[self.countervar] = count\n plural, plural_vars = self.render_token_list(self.plural)\n if message_context:\n result 
= translation.npgettext(message_context, singular,\n plural, count)\n else:\n result = translation.ungettext(singular, plural, count)\n vars.extend(plural_vars)\n else:\n if message_context:\n result = translation.pgettext(message_context, singular)\n else:\n result = translation.ugettext(singular)\n default_value = context.template.engine.string_if_invalid\n\n def render_value(key):\n if key in context:\n val = context[key]\n else:\n val = default_value % key if '%s' in default_value else default_value\n return render_value_in_context(val, context)\n\n data = {v: render_value(v) for v in vars}\n context.pop()\n try:\n result = result % data\n except (KeyError, ValueError):\n if nested:\n # Either string is malformed, or it's a bug\n raise TemplateSyntaxError(\"'blocktrans' is unable to format \"\n \"string returned by gettext: %r using %r\" % (result, data))\n with translation.override(None):\n result = self.render(context, nested=True)\n if self.asvar:\n context[self.asvar] = result\n return ''\n else:\n return result\n\n\nclass LanguageNode(Node):\n def __init__(self, nodelist, language):\n self.nodelist = nodelist\n self.language = language\n\n def render(self, context):\n with translation.override(self.language.resolve(context)):\n output = self.nodelist.render(context)\n return output\n\n\n@register.tag(\"get_available_languages\")\ndef do_get_available_languages(parser, token):\n \"\"\"\n This will store a list of available languages\n in the context.\n\n Usage::\n\n {% get_available_languages as languages %}\n {% for language in languages %}\n ...\n {% endfor %}\n\n This will just pull the LANGUAGES setting from\n your setting file (or the default settings) and\n put it into the named variable.\n \"\"\"\n # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments\n args = token.contents.split()\n if len(args) != 3 or args[1] != 'as':\n raise TemplateSyntaxError(\"'get_available_languages' requires 'as variable' (got 
%r)\" % args)\n return GetAvailableLanguagesNode(args[2])\n\n\n@register.tag(\"get_language_info\")\ndef do_get_language_info(parser, token):\n \"\"\"\n This will store the language information dictionary for the given language\n code in a context variable.\n\n Usage::\n\n {% get_language_info for LANGUAGE_CODE as l %}\n {{ l.code }}\n {{ l.name }}\n {{ l.name_translated }}\n {{ l.name_local }}\n {{ l.bidi|yesno:\"bi-directional,uni-directional\" }}\n \"\"\"\n args = token.split_contents()\n if len(args) != 5 or args[1] != 'for' or args[3] != 'as':\n raise TemplateSyntaxError(\"'%s' requires 'for string as variable' (got %r)\" % (args[0], args[1:]))\n return GetLanguageInfoNode(parser.compile_filter(args[2]), args[4])\n\n\n@register.tag(\"get_language_info_list\")\ndef do_get_language_info_list(parser, token):\n \"\"\"\n This will store a list of language information dictionaries for the given\n language codes in a context variable. The language codes can be specified\n either as a list of strings or a settings.LANGUAGES style list (or any\n sequence of sequences whose first items are language codes).\n\n Usage::\n\n {% get_language_info_list for LANGUAGES as langs %}\n {% for l in langs %}\n {{ l.code }}\n {{ l.name }}\n {{ l.name_translated }}\n {{ l.name_local }}\n {{ l.bidi|yesno:\"bi-directional,uni-directional\" }}\n {% endfor %}\n \"\"\"\n args = token.split_contents()\n if len(args) != 5 or args[1] != 'for' or args[3] != 'as':\n raise TemplateSyntaxError(\"'%s' requires 'for sequence as variable' (got %r)\" % (args[0], args[1:]))\n return GetLanguageInfoListNode(parser.compile_filter(args[2]), args[4])\n\n\n@register.filter\ndef language_name(lang_code):\n return translation.get_language_info(lang_code)['name']\n\n\n@register.filter\ndef language_name_translated(lang_code):\n english_name = translation.get_language_info(lang_code)['name']\n return translation.ugettext(english_name)\n\n\n@register.filter\ndef language_name_local(lang_code):\n return 
translation.get_language_info(lang_code)['name_local']\n\n\n@register.filter\ndef language_bidi(lang_code):\n return translation.get_language_info(lang_code)['bidi']\n\n\n@register.tag(\"get_current_language\")\ndef do_get_current_language(parser, token):\n \"\"\"\n This will store the current language in the context.\n\n Usage::\n\n {% get_current_language as language %}\n\n This will fetch the currently active language and\n put it's value into the ``language`` context\n variable.\n \"\"\"\n # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments\n args = token.contents.split()\n if len(args) != 3 or args[1] != 'as':\n raise TemplateSyntaxError(\"'get_current_language' requires 'as variable' (got %r)\" % args)\n return GetCurrentLanguageNode(args[2])\n\n\n@register.tag(\"get_current_language_bidi\")\ndef do_get_current_language_bidi(parser, token):\n \"\"\"\n This will store the current language layout in the context.\n\n Usage::\n\n {% get_current_language_bidi as bidi %}\n\n This will fetch the currently active language's layout and\n put it's value into the ``bidi`` context variable.\n True indicates right-to-left layout, otherwise left-to-right\n \"\"\"\n # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments\n args = token.contents.split()\n if len(args) != 3 or args[1] != 'as':\n raise TemplateSyntaxError(\"'get_current_language_bidi' requires 'as variable' (got %r)\" % args)\n return GetCurrentLanguageBidiNode(args[2])\n\n\n@register.tag(\"trans\")\ndef do_translate(parser, token):\n \"\"\"\n This will mark a string for translation and will\n translate the string for the current language.\n\n Usage::\n\n {% trans \"this is a test\" %}\n\n This will mark the string for translation so it will\n be pulled out by mark-messages.py into the .po files\n and will run the string through the translation engine.\n\n There is a second form::\n\n {% trans \"this is a test\" noop %}\n\n 
This will only mark for translation, but will return\n the string unchanged. Use it when you need to store\n values into forms that should be translated later on.\n\n You can use variables instead of constant strings\n to translate stuff you marked somewhere else::\n\n {% trans variable %}\n\n This will just try to translate the contents of\n the variable ``variable``. Make sure that the string\n in there is something that is in the .po file.\n\n It is possible to store the translated string into a variable::\n\n {% trans \"this is a test\" as var %}\n {{ var }}\n\n Contextual translations are also supported::\n\n {% trans \"this is a test\" context \"greeting\" %}\n\n This is equivalent to calling pgettext instead of (u)gettext.\n \"\"\"\n bits = token.split_contents()\n if len(bits) < 2:\n raise TemplateSyntaxError(\"'%s' takes at least one argument\" % bits[0])\n message_string = parser.compile_filter(bits[1])\n remaining = bits[2:]\n\n noop = False\n asvar = None\n message_context = None\n seen = set()\n invalid_context = {'as', 'noop'}\n\n while remaining:\n option = remaining.pop(0)\n if option in seen:\n raise TemplateSyntaxError(\n \"The '%s' option was specified more than once.\" % option,\n )\n elif option == 'noop':\n noop = True\n elif option == 'context':\n try:\n value = remaining.pop(0)\n except IndexError:\n msg = \"No argument provided to the '%s' tag for the context option.\" % bits[0]\n six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])\n if value in invalid_context:\n raise TemplateSyntaxError(\n \"Invalid argument '%s' provided to the '%s' tag for the context option\" % (value, bits[0]),\n )\n message_context = parser.compile_filter(value)\n elif option == 'as':\n try:\n value = remaining.pop(0)\n except IndexError:\n msg = \"No argument provided to the '%s' tag for the as option.\" % bits[0]\n six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])\n asvar = value\n else:\n raise 
TemplateSyntaxError(\n \"Unknown argument for '%s' tag: '%s'. The only options \"\n \"available are 'noop', 'context' \\\"xxx\\\", and 'as VAR'.\" % (\n bits[0], option,\n )\n )\n seen.add(option)\n\n return TranslateNode(message_string, noop, asvar, message_context)\n\n\n@register.tag(\"blocktrans\")\ndef do_block_translate(parser, token):\n \"\"\"\n This will translate a block of text with parameters.\n\n Usage::\n\n {% blocktrans with bar=foo|filter boo=baz|filter %}\n This is {{ bar }} and {{ boo }}.\n {% endblocktrans %}\n\n Additionally, this supports pluralization::\n\n {% blocktrans count count=var|length %}\n There is {{ count }} object.\n {% plural %}\n There are {{ count }} objects.\n {% endblocktrans %}\n\n This is much like ngettext, only in template syntax.\n\n The \"var as value\" legacy format is still supported::\n\n {% blocktrans with foo|filter as bar and baz|filter as boo %}\n {% blocktrans count var|length as count %}\n\n The translated string can be stored in a variable using `asvar`::\n\n {% blocktrans with bar=foo|filter boo=baz|filter asvar var %}\n This is {{ bar }} and {{ boo }}.\n {% endblocktrans %}\n {{ var }}\n\n Contextual translations are supported::\n\n {% blocktrans with bar=foo|filter context \"greeting\" %}\n This is {{ bar }}.\n {% endblocktrans %}\n\n This is equivalent to calling pgettext/npgettext instead of\n (u)gettext/(u)ngettext.\n \"\"\"\n bits = token.split_contents()\n\n options = {}\n remaining_bits = bits[1:]\n asvar = None\n while remaining_bits:\n option = remaining_bits.pop(0)\n if option in options:\n raise TemplateSyntaxError('The %r option was specified more '\n 'than once.' % option)\n if option == 'with':\n value = token_kwargs(remaining_bits, parser, support_legacy=True)\n if not value:\n raise TemplateSyntaxError('\"with\" in %r tag needs at least '\n 'one keyword argument.' 
% bits[0])\n elif option == 'count':\n value = token_kwargs(remaining_bits, parser, support_legacy=True)\n if len(value) != 1:\n raise TemplateSyntaxError('\"count\" in %r tag expected exactly '\n 'one keyword argument.' % bits[0])\n elif option == \"context\":\n try:\n value = remaining_bits.pop(0)\n value = parser.compile_filter(value)\n except Exception:\n msg = (\n '\"context\" in %r tag expected '\n 'exactly one argument.') % bits[0]\n six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])\n elif option == \"trimmed\":\n value = True\n elif option == \"asvar\":\n try:\n value = remaining_bits.pop(0)\n except IndexError:\n msg = \"No argument provided to the '%s' tag for the asvar option.\" % bits[0]\n six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])\n asvar = value\n else:\n raise TemplateSyntaxError('Unknown argument for %r tag: %r.' %\n (bits[0], option))\n options[option] = value\n\n if 'count' in options:\n countervar, counter = list(options['count'].items())[0]\n else:\n countervar, counter = None, None\n if 'context' in options:\n message_context = options['context']\n else:\n message_context = None\n extra_context = options.get('with', {})\n\n trimmed = options.get(\"trimmed\", False)\n\n singular = []\n plural = []\n while parser.tokens:\n token = parser.next_token()\n if token.token_type in (TOKEN_VAR, TOKEN_TEXT):\n singular.append(token)\n else:\n break\n if countervar and counter:\n if token.contents.strip() != 'plural':\n raise TemplateSyntaxError(\"'blocktrans' doesn't allow other block tags inside it\")\n while parser.tokens:\n token = parser.next_token()\n if token.token_type in (TOKEN_VAR, TOKEN_TEXT):\n plural.append(token)\n else:\n break\n if token.contents.strip() != 'endblocktrans':\n raise TemplateSyntaxError(\"'blocktrans' doesn't allow other block tags (seen %r) inside it\" % token.contents)\n\n return BlockTranslateNode(extra_context, singular, plural, countervar,\n counter, 
message_context, trimmed=trimmed,\n asvar=asvar)\n\n\n@register.tag\ndef language(parser, token):\n \"\"\"\n This will enable the given language just for this block.\n\n Usage::\n\n {% language \"de\" %}\n This is {{ bar }} and {{ boo }}.\n {% endlanguage %}\n \"\"\"\n bits = token.split_contents()\n if len(bits) != 2:\n raise TemplateSyntaxError(\"'%s' takes one argument (language)\" % bits[0])\n language = parser.compile_filter(bits[1])\n nodelist = parser.parse(('endlanguage',))\n parser.delete_first_token()\n return LanguageNode(nodelist, language)\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203574,"cells":{"repo_name":{"kind":"string","value":"MrReN/django-oscar"},"path":{"kind":"string","value":"sites/demo/apps/order/migrations/0001_initial.py"},"copies":{"kind":"string","value":"16"},"size":{"kind":"string","value":"48092"},"content":{"kind":"string","value":"# encoding: utf-8\nimport datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import models\n\nclass Migration(SchemaMigration):\n depends_on = (\n ('catalogue', '0001_initial'),\n ('customer', '0001_initial'),\n ('partner', '0001_initial'),\n ('address', '0001_initial'),\n )\n\n def forwards(self, orm):\n \n # Adding model 'PaymentEventQuantity'\n db.create_table('order_paymenteventquantity', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('event', self.gf('django.db.models.fields.related.ForeignKey')(related_name='line_quantities', to=orm['order.PaymentEvent'])),\n ('line', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.Line'])),\n ('quantity', self.gf('django.db.models.fields.PositiveIntegerField')()),\n ))\n db.send_create_signal('order', ['PaymentEventQuantity'])\n\n # Adding model 'ShippingEventQuantity'\n db.create_table('order_shippingeventquantity', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('event', 
self.gf('django.db.models.fields.related.ForeignKey')(related_name='line_quantities', to=orm['order.ShippingEvent'])),\n ('line', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.Line'])),\n ('quantity', self.gf('django.db.models.fields.PositiveIntegerField')()),\n ))\n db.send_create_signal('order', ['ShippingEventQuantity'])\n\n # Adding model 'Order'\n db.create_table('order_order', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('number', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)),\n ('site', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['sites.Site'])),\n ('basket_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),\n ('user', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='orders', null=True, to=orm['auth.User'])),\n ('billing_address', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.BillingAddress'], null=True, blank=True)),\n ('total_incl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('total_excl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('shipping_incl_tax', self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=12, decimal_places=2)),\n ('shipping_excl_tax', self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=12, decimal_places=2)),\n ('shipping_address', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.ShippingAddress'], null=True, blank=True)),\n ('shipping_method', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),\n ('status', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True)),\n ('date_placed', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),\n ))\n db.send_create_signal('order', ['Order'])\n\n # Adding model 
'OrderNote'\n db.create_table('order_ordernote', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='notes', to=orm['order.Order'])),\n ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)),\n ('note_type', self.gf('django.db.models.fields.CharField')(max_length=128, null=True)),\n ('message', self.gf('django.db.models.fields.TextField')()),\n ('date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),\n ))\n db.send_create_signal('order', ['OrderNote'])\n\n # Adding model 'CommunicationEvent'\n db.create_table('order_communicationevent', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='communication_events', to=orm['order.Order'])),\n ('event_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['customer.CommunicationEventType'])),\n ('date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),\n ))\n db.send_create_signal('order', ['CommunicationEvent'])\n\n # Adding model 'ShippingAddress'\n db.create_table('order_shippingaddress', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('title', self.gf('django.db.models.fields.CharField')(max_length=64, null=True, blank=True)),\n ('first_name', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),\n ('last_name', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),\n ('line1', self.gf('django.db.models.fields.CharField')(max_length=255)),\n ('line2', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),\n ('line3', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),\n ('line4', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, 
blank=True)),\n ('state', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),\n ('postcode', self.gf('django.db.models.fields.CharField')(max_length=64)),\n ('country', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['address.Country'])),\n ('search_text', self.gf('django.db.models.fields.CharField')(max_length=1000)),\n ('phone_number', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),\n ('notes', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),\n ))\n db.send_create_signal('order', ['ShippingAddress'])\n\n # Adding model 'BillingAddress'\n db.create_table('order_billingaddress', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('title', self.gf('django.db.models.fields.CharField')(max_length=64, null=True, blank=True)),\n ('first_name', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),\n ('last_name', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),\n ('line1', self.gf('django.db.models.fields.CharField')(max_length=255)),\n ('line2', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),\n ('line3', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),\n ('line4', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),\n ('state', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),\n ('postcode', self.gf('django.db.models.fields.CharField')(max_length=64)),\n ('country', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['address.Country'])),\n ('search_text', self.gf('django.db.models.fields.CharField')(max_length=1000)),\n ))\n db.send_create_signal('order', ['BillingAddress'])\n\n # Adding model 'Line'\n db.create_table('order_line', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('order', 
self.gf('django.db.models.fields.related.ForeignKey')(related_name='lines', to=orm['order.Order'])),\n ('partner', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='order_lines', null=True, to=orm['partner.Partner'])),\n ('partner_name', self.gf('django.db.models.fields.CharField')(max_length=128)),\n ('partner_sku', self.gf('django.db.models.fields.CharField')(max_length=128)),\n ('title', self.gf('django.db.models.fields.CharField')(max_length=255)),\n ('product', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['catalogue.Product'], null=True, blank=True)),\n ('quantity', self.gf('django.db.models.fields.PositiveIntegerField')(default=1)),\n ('line_price_incl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('line_price_excl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('line_price_before_discounts_incl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('line_price_before_discounts_excl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('unit_cost_price', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=12, decimal_places=2, blank=True)),\n ('unit_price_incl_tax', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=12, decimal_places=2, blank=True)),\n ('unit_price_excl_tax', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=12, decimal_places=2, blank=True)),\n ('unit_retail_price', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=12, decimal_places=2, blank=True)),\n ('partner_line_reference', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),\n ('partner_line_notes', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),\n ('status', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),\n 
('est_dispatch_date', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),\n ))\n db.send_create_signal('order', ['Line'])\n\n # Adding model 'LinePrice'\n db.create_table('order_lineprice', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='line_prices', to=orm['order.Order'])),\n ('line', self.gf('django.db.models.fields.related.ForeignKey')(related_name='prices', to=orm['order.Line'])),\n ('quantity', self.gf('django.db.models.fields.PositiveIntegerField')(default=1)),\n ('price_incl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('price_excl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('shipping_incl_tax', self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=12, decimal_places=2)),\n ('shipping_excl_tax', self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=12, decimal_places=2)),\n ))\n db.send_create_signal('order', ['LinePrice'])\n\n # Adding model 'LineAttribute'\n db.create_table('order_lineattribute', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('line', self.gf('django.db.models.fields.related.ForeignKey')(related_name='attributes', to=orm['order.Line'])),\n ('option', self.gf('django.db.models.fields.related.ForeignKey')(related_name='line_attributes', null=True, to=orm['catalogue.Option'])),\n ('type', self.gf('django.db.models.fields.CharField')(max_length=128)),\n ('value', self.gf('django.db.models.fields.CharField')(max_length=255)),\n ))\n db.send_create_signal('order', ['LineAttribute'])\n\n # Adding model 'ShippingEvent'\n db.create_table('order_shippingevent', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='shipping_events', to=orm['order.Order'])),\n 
('event_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.ShippingEventType'])),\n ('notes', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),\n ('date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),\n ))\n db.send_create_signal('order', ['ShippingEvent'])\n\n # Adding model 'ShippingEventType'\n db.create_table('order_shippingeventtype', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),\n ('code', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=128, db_index=True)),\n ('is_required', self.gf('django.db.models.fields.BooleanField')(default=True)),\n ('sequence_number', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),\n ))\n db.send_create_signal('order', ['ShippingEventType'])\n\n # Adding model 'PaymentEvent'\n db.create_table('order_paymentevent', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='payment_events', to=orm['order.Order'])),\n ('amount', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),\n ('event_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.PaymentEventType'])),\n ('date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),\n ))\n db.send_create_signal('order', ['PaymentEvent'])\n\n # Adding model 'PaymentEventType'\n db.create_table('order_paymenteventtype', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128)),\n ('code', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=128, db_index=True)),\n ('sequence_number', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),\n ))\n 
db.send_create_signal('order', ['PaymentEventType'])\n\n # Adding model 'OrderDiscount'\n db.create_table('order_orderdiscount', (\n ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),\n ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='discounts', to=orm['order.Order'])),\n ('offer_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),\n ('voucher_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),\n ('voucher_code', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, db_index=True)),\n ('amount', self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=12, decimal_places=2)),\n ))\n db.send_create_signal('order', ['OrderDiscount'])\n\n\n def backwards(self, orm):\n \n # Deleting model 'PaymentEventQuantity'\n db.delete_table('order_paymenteventquantity')\n\n # Deleting model 'ShippingEventQuantity'\n db.delete_table('order_shippingeventquantity')\n\n # Deleting model 'Order'\n db.delete_table('order_order')\n\n # Deleting model 'OrderNote'\n db.delete_table('order_ordernote')\n\n # Deleting model 'CommunicationEvent'\n db.delete_table('order_communicationevent')\n\n # Deleting model 'ShippingAddress'\n db.delete_table('order_shippingaddress')\n\n # Deleting model 'BillingAddress'\n db.delete_table('order_billingaddress')\n\n # Deleting model 'Line'\n db.delete_table('order_line')\n\n # Deleting model 'LinePrice'\n db.delete_table('order_lineprice')\n\n # Deleting model 'LineAttribute'\n db.delete_table('order_lineattribute')\n\n # Deleting model 'ShippingEvent'\n db.delete_table('order_shippingevent')\n\n # Deleting model 'ShippingEventType'\n db.delete_table('order_shippingeventtype')\n\n # Deleting model 'PaymentEvent'\n db.delete_table('order_paymentevent')\n\n # Deleting model 'PaymentEventType'\n db.delete_table('order_paymenteventtype')\n\n # Deleting model 'OrderDiscount'\n 
db.delete_table('order_orderdiscount')\n\n\n models = {\n 'address.country': {\n 'Meta': {'ordering': \"('-is_highlighted', 'name')\", 'object_name': 'Country'},\n 'is_highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),\n 'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),\n 'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),\n 'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),\n 'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})\n },\n 'auth.group': {\n 'Meta': {'object_name': 'Group'},\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),\n 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['auth.Permission']\", 'symmetrical': 'False', 'blank': 'True'})\n },\n 'auth.permission': {\n 'Meta': {'ordering': \"('content_type__app_label', 'content_type__model', 'codename')\", 'unique_together': \"(('content_type', 'codename'),)\", 'object_name': 'Permission'},\n 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['contenttypes.ContentType']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})\n },\n 'auth.user': {\n 'Meta': {'object_name': 'User'},\n 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),\n 'email': 
('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),\n 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),\n 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['auth.Group']\", 'symmetrical': 'False', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),\n 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),\n 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),\n 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['auth.Permission']\", 'symmetrical': 'False', 'blank': 'True'}),\n 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})\n },\n 'catalogue.attributeentity': {\n 'Meta': {'object_name': 'AttributeEntity'},\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),\n 'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'entities'\", 'to': \"orm['catalogue.AttributeEntityType']\"})\n },\n 'catalogue.attributeentitytype': {\n 'Meta': {'object_name': 'AttributeEntityType'},\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 
'max_length': '255', 'blank': 'True'})\n },\n 'catalogue.attributeoption': {\n 'Meta': {'object_name': 'AttributeOption'},\n 'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'options'\", 'to': \"orm['catalogue.AttributeOptionGroup']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})\n },\n 'catalogue.attributeoptiongroup': {\n 'Meta': {'object_name': 'AttributeOptionGroup'},\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})\n },\n 'catalogue.category': {\n 'Meta': {'ordering': \"['name']\", 'object_name': 'Category'},\n 'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),\n 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),\n 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'db_index': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),\n 'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),\n 'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),\n 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '1024', 'db_index': 'True'})\n },\n 'catalogue.option': {\n 'Meta': {'object_name': 'Option'},\n 'code': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'type': ('django.db.models.fields.CharField', [], {'default': \"'Required'\", 
'max_length': '128'})\n },\n 'catalogue.product': {\n 'Meta': {'ordering': \"['-date_created']\", 'object_name': 'Product'},\n 'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['catalogue.ProductAttribute']\", 'through': \"orm['catalogue.ProductAttributeValue']\", 'symmetrical': 'False'}),\n 'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['catalogue.Category']\", 'through': \"orm['catalogue.ProductCategory']\", 'symmetrical': 'False'}),\n 'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),\n 'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),\n 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': \"'variants'\", 'null': 'True', 'to': \"orm['catalogue.Product']\"}),\n 'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['catalogue.ProductClass']\", 'null': 'True'}),\n 'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['catalogue.Option']\", 'symmetrical': 'False', 'blank': 'True'}),\n 'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['catalogue.Product']\", 'symmetrical': 'False', 'through': \"orm['catalogue.ProductRecommendation']\", 'blank': 'True'}),\n 'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': \"'relations'\", 'blank': 'True', 'to': \"orm['catalogue.Product']\"}),\n 'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),\n 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),\n 'status': 
('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),\n 'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})\n },\n 'catalogue.productattribute': {\n 'Meta': {'ordering': \"['code']\", 'object_name': 'ProductAttribute'},\n 'code': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}),\n 'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['catalogue.AttributeEntityType']\", 'null': 'True', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['catalogue.AttributeOptionGroup']\", 'null': 'True', 'blank': 'True'}),\n 'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': \"'attributes'\", 'null': 'True', 'to': \"orm['catalogue.ProductClass']\"}),\n 'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'type': ('django.db.models.fields.CharField', [], {'default': \"'text'\", 'max_length': '20'})\n },\n 'catalogue.productattributevalue': {\n 'Meta': {'object_name': 'ProductAttributeValue'},\n 'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['catalogue.ProductAttribute']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'attribute_values'\", 'to': \"orm['catalogue.Product']\"}),\n 'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),\n 
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['catalogue.AttributeEntity']\", 'null': 'True', 'blank': 'True'}),\n 'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),\n 'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),\n 'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['catalogue.AttributeOption']\", 'null': 'True', 'blank': 'True'}),\n 'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),\n 'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})\n },\n 'catalogue.productcategory': {\n 'Meta': {'ordering': \"['-is_canonical']\", 'object_name': 'ProductCategory'},\n 'category': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['catalogue.Category']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),\n 'product': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['catalogue.Product']\"})\n },\n 'catalogue.productclass': {\n 'Meta': {'ordering': \"['name']\", 'object_name': 'ProductClass'},\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['catalogue.Option']\", 'symmetrical': 'False', 'blank': 'True'}),\n 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'})\n },\n 'catalogue.productrecommendation': {\n 'Meta': {'object_name': 'ProductRecommendation'},\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': 
\"'primary_recommendations'\", 'to': \"orm['catalogue.Product']\"}),\n 'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),\n 'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['catalogue.Product']\"})\n },\n 'contenttypes.contenttype': {\n 'Meta': {'ordering': \"('name',)\", 'unique_together': \"(('app_label', 'model'),)\", 'object_name': 'ContentType', 'db_table': \"'django_content_type'\"},\n 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})\n },\n 'customer.communicationeventtype': {\n 'Meta': {'object_name': 'CommunicationEventType'},\n 'category': ('django.db.models.fields.CharField', [], {'default': \"'Order related'\", 'max_length': '255'}),\n 'code': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}),\n 'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),\n 'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),\n 'email_body_html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),\n 'email_body_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),\n 'email_subject_template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'sms_template': ('django.db.models.fields.CharField', [], {'max_length': '170', 'blank': 'True'})\n },\n 'order.billingaddress': {\n 'Meta': {'object_name': 'BillingAddress'},\n 'country': 
('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['address.Country']\"}),\n 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),\n 'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64'}),\n 'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),\n 'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})\n },\n 'order.communicationevent': {\n 'Meta': {'object_name': 'CommunicationEvent'},\n 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),\n 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['customer.CommunicationEventType']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'communication_events'\", 'to': \"orm['order.Order']\"})\n },\n 'order.line': {\n 'Meta': {'object_name': 'Line'},\n 'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'line_price_before_discounts_excl_tax': 
('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'lines'\", 'to': \"orm['order.Order']\"}),\n 'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': \"'order_lines'\", 'null': 'True', 'to': \"orm['partner.Partner']\"}),\n 'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),\n 'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),\n 'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'product': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['catalogue.Product']\", 'null': 'True', 'blank': 'True'}),\n 'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),\n 'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),\n 'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),\n 'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),\n 
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'})\n },\n 'order.lineattribute': {\n 'Meta': {'object_name': 'LineAttribute'},\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'attributes'\", 'to': \"orm['order.Line']\"}),\n 'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'line_attributes'\", 'null': 'True', 'to': \"orm['catalogue.Option']\"}),\n 'type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})\n },\n 'order.lineprice': {\n 'Meta': {'object_name': 'LinePrice'},\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'prices'\", 'to': \"orm['order.Line']\"}),\n 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'line_prices'\", 'to': \"orm['order.Order']\"}),\n 'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),\n 'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),\n 'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'})\n },\n 'order.order': {\n 'Meta': {'ordering': \"['-date_placed']\", 'object_name': 'Order'},\n 'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),\n 'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': 
\"orm['order.BillingAddress']\", 'null': 'True', 'blank': 'True'}),\n 'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),\n 'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['order.ShippingAddress']\", 'null': 'True', 'blank': 'True'}),\n 'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),\n 'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),\n 'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),\n 'site': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['sites.Site']\"}),\n 'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),\n 'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': \"'orders'\", 'null': 'True', 'to': \"orm['auth.User']\"})\n },\n 'order.orderdiscount': {\n 'Meta': {'object_name': 'OrderDiscount'},\n 'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'offer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),\n 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'discounts'\", 'to': \"orm['order.Order']\"}),\n 'voucher_code': 
('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),\n 'voucher_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})\n },\n 'order.ordernote': {\n 'Meta': {'object_name': 'OrderNote'},\n 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'message': ('django.db.models.fields.TextField', [], {}),\n 'note_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),\n 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'notes'\", 'to': \"orm['order.Order']\"}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['auth.User']\", 'null': 'True'})\n },\n 'order.paymentevent': {\n 'Meta': {'object_name': 'PaymentEvent'},\n 'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),\n 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),\n 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['order.PaymentEventType']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['order.Line']\", 'through': \"orm['order.PaymentEventQuantity']\", 'symmetrical': 'False'}),\n 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'payment_events'\", 'to': \"orm['order.Order']\"})\n },\n 'order.paymenteventquantity': {\n 'Meta': {'object_name': 'PaymentEventQuantity'},\n 'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'line_quantities'\", 'to': \"orm['order.PaymentEvent']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'line': ('django.db.models.fields.related.ForeignKey', [], {'to': 
\"orm['order.Line']\"}),\n 'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})\n },\n 'order.paymenteventtype': {\n 'Meta': {'ordering': \"('sequence_number',)\", 'object_name': 'PaymentEventType'},\n 'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),\n 'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})\n },\n 'order.shippingaddress': {\n 'Meta': {'object_name': 'ShippingAddress'},\n 'country': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['address.Country']\"}),\n 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),\n 'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),\n 'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),\n 'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64'}),\n 'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),\n 'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),\n 'title': ('django.db.models.fields.CharField', [], 
{'max_length': '64', 'null': 'True', 'blank': 'True'})\n },\n 'order.shippingevent': {\n 'Meta': {'ordering': \"['-date']\", 'object_name': 'ShippingEvent'},\n 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),\n 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['order.ShippingEventType']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['order.Line']\", 'through': \"orm['order.ShippingEventQuantity']\", 'symmetrical': 'False'}),\n 'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),\n 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'shipping_events'\", 'to': \"orm['order.Order']\"})\n },\n 'order.shippingeventquantity': {\n 'Meta': {'object_name': 'ShippingEventQuantity'},\n 'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'line_quantities'\", 'to': \"orm['order.ShippingEvent']\"}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'line': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['order.Line']\"}),\n 'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})\n },\n 'order.shippingeventtype': {\n 'Meta': {'ordering': \"('sequence_number',)\", 'object_name': 'ShippingEventType'},\n 'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'is_required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),\n 'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})\n },\n 'partner.partner': {\n 'Meta': {'object_name': 'Partner'},\n 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),\n 'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': \"'partners'\", 'null': 'True', 'symmetrical': 'False', 'to': \"orm['auth.User']\"})\n },\n 'sites.site': {\n 'Meta': {'ordering': \"('domain',)\", 'object_name': 'Site', 'db_table': \"'django_site'\"},\n 'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})\n }\n }\n\n complete_apps = ['order']\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203575,"cells":{"repo_name":{"kind":"string","value":"bood/htc-magic-kernel"},"path":{"kind":"string","value":"tools/perf/scripts/python/syscall-counts.py"},"copies":{"kind":"string","value":"944"},"size":{"kind":"string","value":"1429"},"content":{"kind":"string","value":"# system call counts\n# (c) 2010, Tom Zanussi \n# Licensed under the terms of the GNU GPL License version 2\n#\n# Displays system-wide system call totals, broken down by syscall.\n# If a [comm] arg is specified, only syscalls called by [comm] are displayed.\n\nimport os\nimport sys\n\nsys.path.append(os.environ['PERF_EXEC_PATH'] + \\\n\t'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')\n\nfrom perf_trace_context import *\nfrom Core import *\n\nusage = \"perf trace -s syscall-counts.py [comm]\\n\";\n\nfor_comm = None\n\nif len(sys.argv) > 2:\n\tsys.exit(usage)\n\nif len(sys.argv) > 1:\n\tfor_comm = sys.argv[1]\n\nsyscalls = autodict()\n\ndef trace_begin():\n\tpass\n\ndef trace_end():\n\tprint_syscall_totals()\n\ndef raw_syscalls__sys_enter(event_name, context, common_cpu,\n\tcommon_secs, common_nsecs, common_pid, common_comm,\n\tid, args):\n\tif for_comm is not None:\n\t\tif common_comm != 
for_comm:\n\t\t\treturn\n\ttry:\n\t\tsyscalls[id] += 1\n\texcept TypeError:\n\t\tsyscalls[id] = 1\n\ndef print_syscall_totals():\n if for_comm is not None:\n\t print \"\\nsyscall events for %s:\\n\\n\" % (for_comm),\n else:\n\t print \"\\nsyscall events:\\n\\n\",\n\n print \"%-40s %10s\\n\" % (\"event\", \"count\"),\n print \"%-40s %10s\\n\" % (\"----------------------------------------\", \\\n \"-----------\"),\n\n for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \\\n\t\t\t\t reverse = True):\n\t print \"%-40d %10d\\n\" % (id, val),\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203576,"cells":{"repo_name":{"kind":"string","value":"wangdkchina/vnpy"},"path":{"kind":"string","value":"vn.strategy/strategyEngine.py"},"copies":{"kind":"string","value":"46"},"size":{"kind":"string","value":"23417"},"content":{"kind":"string","value":"# encoding: UTF-8\n\nfrom datetime import datetime\n\nfrom pymongo import Connection\nfrom pymongo.errors import *\n\nfrom eventEngine import *\n\n\n# 常量定义\nOFFSET_OPEN = '0' # 开仓\nOFFSET_CLOSE = '1' # 平仓\n\nDIRECTION_BUY = '0' # 买入\nDIRECTION_SELL = '1' # 卖出\n\nPRICETYPE_LIMIT = '2' # 限价\n\n\n\n########################################################################\nclass Tick:\n \"\"\"Tick数据对象\"\"\"\n\n #----------------------------------------------------------------------\n def __init__(self, symbol):\n \"\"\"Constructor\"\"\"\n self.symbol = symbol # 合约代码\n \n self.openPrice = 0 # OHLC\n self.highPrice = 0\n self.lowPrice = 0\n self.lastPrice = 0\n \n self.volume = 0 # 成交量\n self.openInterest = 0 # 持仓量\n \n self.upperLimit = 0 # 涨停价\n self.lowerLimit = 0 # 跌停价\n \n self.time = '' # 更新时间和毫秒\n self.ms= 0\n \n self.bidPrice1 = 0 # 深度行情\n self.bidPrice2 = 0\n self.bidPrice3 = 0\n self.bidPrice4 = 0\n self.bidPrice5 = 0\n \n self.askPrice1 = 0\n self.askPrice2 = 0\n self.askPrice3 = 0\n self.askPrice4 = 0\n self.askPrice5 = 0\n \n self.bidVolume1 = 0\n self.bidVolume2 = 0\n self.bidVolume3 = 0\n 
self.bidVolume4 = 0\n self.bidVolume5 = 0\n \n self.askVolume1 = 0\n self.askVolume2 = 0\n self.askVolume3 = 0\n self.askVolume4 = 0\n self.askVolume5 = 0 \n\n\n########################################################################\nclass Trade:\n \"\"\"成交数据对象\"\"\"\n\n #----------------------------------------------------------------------\n def __init__(self, symbol):\n \"\"\"Constructor\"\"\"\n self.symbol = symbol # 合约代码\n \n self.orderRef = '' # 报单号\n self.tradeID = '' # 成交编号\n \n self.direction = None # 方向\n self.offset = None # 开平\n self.price = 0 # 成交价\n self.volume = 0 # 成交量\n \n\n########################################################################\nclass Order:\n \"\"\"报单数据对象\"\"\"\n\n #----------------------------------------------------------------------\n def __init__(self, symbol):\n \"\"\"Constructor\"\"\"\n self.symbol = symbol # 合约代码\n \n self.orderRef = '' # 报单编号\n \n self.direction = None # 方向\n self.offset = None # 开平\n self.price = 0 # 委托价\n self.volumeOriginal = 0 # 报单量\n self.volumeTraded = 0 # 已成交数量\n \n self.insertTime = '' # 报单时间\n self.cancelTime = '' # 撤单时间\n \n self.frontID = 0 # 前置机编号\n self.sessionID = 0 # 会话编号\n \n self.status = '' # 报单状态代码\n\n\n########################################################################\nclass StopOrder:\n \"\"\"\n 停止单对象\n 用于实现价格突破某一水平后自动追入\n 即通常的条件单和止损单\n \"\"\"\n\n #----------------------------------------------------------------------\n def __init__(self, symbol, direction, offset, price, volume, strategy):\n \"\"\"Constructor\"\"\"\n self.symbol = symbol\n self.direction = direction\n self.offset = offset\n self.price = price\n self.volume = volume\n self.strategy = strategy\n\n\n########################################################################\nclass StrategyEngine(object):\n \"\"\"策略引擎\"\"\"\n\n #----------------------------------------------------------------------\n def __init__(self, eventEngine, mainEngine):\n \"\"\"Constructor\"\"\"\n self.__eventEngine = eventEngine\n 
self.mainEngine = mainEngine\n \n # 获取代表今日的datetime\n t = datetime.today()\n self.today = t.replace(hour=0, minute=0, second=0, microsecond=0)\n \n # 保存所有报单数据的字典\n self.__dictOrder = {}\n \n # 保存策略对象的字典\n # key为策略名称\n # value为策略对象\n self.dictStrategy = {}\n \n # 保存合约代码和策略对象映射关系的字典\n # key为合约代码\n # value为交易该合约的策略列表\n self.__dictSymbolStrategy = {}\n \n # 保存报单编号和策略对象映射关系的字典\n # key为报单编号\n # value为策略对象\n self.__dictOrderRefStrategy = {}\n \n # 保存合约代码和相关停止单的字典\n # key为合约代码\n # value为该合约相关的停止单列表\n self.__dictStopOrder = {}\n \n # MongoDB数据库相关\n self.__mongoConnected = False\n self.__mongoConnection = None\n self.__mongoTickDB = None\n \n # 调用函数\n self.__connectMongo()\n self.__registerEvent()\n \n #----------------------------------------------------------------------\n def createStrategy(self, strategyName, strategySymbol, strategyClass, strategySetting):\n \"\"\"创建策略\"\"\"\n strategy = strategyClass(strategyName, strategySymbol, self)\n self.dictStrategy[strategyName] = strategy\n strategy.loadSetting(strategySetting)\n \n # 订阅合约行情,注意这里因为是CTP,所以ExchangeID可以忽略\n self.mainEngine.subscribe(strategySymbol, None)\n \n # 注册策略监听\n self.registerStrategy(strategySymbol, strategy)\n \n #----------------------------------------------------------------------\n def __connectMongo(self):\n \"\"\"连接MongoDB数据库\"\"\"\n try:\n self.__mongoConnection = Connection()\n self.__mongoConnected = True\n self.__mongoTickDB = self.__mongoConnection['TickDB']\n self.writeLog(u'策略引擎连接MongoDB成功')\n except ConnectionFailure:\n self.writeLog(u'策略引擎连接MongoDB失败')\n\n #----------------------------------------------------------------------\n def __recordTick(self, data):\n \"\"\"将Tick数据插入到MongoDB中\"\"\"\n if self.__mongoConnected:\n symbol = data['InstrumentID']\n data['date'] = self.today\n self.__mongoTickDB[symbol].insert(data)\n \n #----------------------------------------------------------------------\n def loadTick(self, symbol, dt):\n \"\"\"从MongoDB中读取Tick数据\"\"\"\n if self.__mongoConnected:\n 
collection = self.__mongoTickDB[symbol]\n cx = collection.find({'date':{'$gte':dt}})\n return cx\n else:\n return None \n\n #----------------------------------------------------------------------\n def __updateMarketData(self, event):\n \"\"\"行情更新\"\"\"\n data = event.dict_['data']\n symbol = data['InstrumentID']\n \n # 检查是否存在交易该合约的策略\n if symbol in self.__dictSymbolStrategy:\n # 创建TICK数据对象并更新数据\n tick = Tick(symbol)\n \n tick.openPrice = data['OpenPrice']\n tick.highPrice = data['HighestPrice']\n tick.lowPrice = data['LowestPrice']\n tick.lastPrice = data['LastPrice']\n \n tick.volume = data['Volume']\n tick.openInterest = data['OpenInterest']\n \n tick.upperLimit = data['UpperLimitPrice']\n tick.lowerLimit = data['LowerLimitPrice']\n \n tick.time = data['UpdateTime']\n tick.ms = data['UpdateMillisec']\n \n tick.bidPrice1 = data['BidPrice1']\n tick.bidPrice2 = data['BidPrice2']\n tick.bidPrice3 = data['BidPrice3']\n tick.bidPrice4 = data['BidPrice4']\n tick.bidPrice5 = data['BidPrice5']\n \n tick.askPrice1 = data['AskPrice1']\n tick.askPrice2 = data['AskPrice2']\n tick.askPrice3 = data['AskPrice3']\n tick.askPrice4 = data['AskPrice4']\n tick.askPrice5 = data['AskPrice5'] \n \n tick.bidVolume1 = data['BidVolume1']\n tick.bidVolume2 = data['BidVolume2']\n tick.bidVolume3 = data['BidVolume3']\n tick.bidVolume4 = data['BidVolume4']\n tick.bidVolume5 = data['BidVolume5']\n \n tick.askVolume1 = data['AskVolume1']\n tick.askVolume2 = data['AskVolume2']\n tick.askVolume3 = data['AskVolume3']\n tick.askVolume4 = data['AskVolume4']\n tick.askVolume5 = data['AskVolume5'] \n \n # 首先检查停止单是否需要发出\n self.__processStopOrder(tick)\n \n # 将该TICK数据推送给每个策略\n for strategy in self.__dictSymbolStrategy[symbol]:\n strategy.onTick(tick) \n \n # 将数据插入MongoDB数据库,实盘建议另开程序记录TICK数据\n self.__recordTick(data)\n \n #----------------------------------------------------------------------\n def __processStopOrder(self, tick):\n \"\"\"处理停止单\"\"\"\n symbol = tick.symbol\n lastPrice = tick.lastPrice\n 
upperLimit = tick.upperLimit\n lowerLimit = tick.lowerLimit\n \n # 如果当前有该合约上的止损单\n if symbol in self.__dictStopOrder:\n # 获取止损单列表\n listSO = self.__dictStopOrder[symbol] # SO:stop order\n \n # 准备一个空的已发止损单列表\n listSent = []\n \n for so in listSO:\n # 如果是买入停止单,且最新成交价大于停止触发价\n if so.direction == DIRECTION_BUY and lastPrice >= so.price:\n # 以当日涨停价发出限价单买入\n ref = self.sendOrder(symbol, DIRECTION_BUY, so.offset, \n upperLimit, so.volume, strategy) \n \n # 触发策略的止损单发出更新\n so.strategy.onStopOrder(ref)\n \n # 将该止损单对象保存到已发送列表中\n listSent.append(so)\n \n # 如果是卖出停止单,且最新成交价小于停止触发价\n elif so.direction == DIRECTION_SELL and lastPrice <= so.price:\n ref = self.sendOrder(symbol, DIRECTION_SELL, so.offset,\n lowerLimit, so.volume, strategy)\n \n so.strategy.onStopOrder(ref)\n \n listSent.append(so)\n \n # 从停止单列表中移除已经发单的停止单对象\n if listSent:\n for so in listSent:\n listSO.remove(so)\n \n # 检查停止单列表是否为空,若为空,则从停止单字典中移除该合约代码\n if not listSO:\n del self.__dictStopOrder[symbol]\n \n #----------------------------------------------------------------------\n def __updateOrder(self, event):\n \"\"\"报单更新\"\"\"\n data = event.dict_['data']\n orderRef = data['OrderRef']\n \n # 检查是否存在监听该报单的策略\n if orderRef in self.__dictOrderRefStrategy:\n \n # 创建Order数据对象\n order = Order(data['InstrumentID'])\n \n order.orderRef = data['OrderRef']\n order.direction = data['Direction']\n order.offset = data['CombOffsetFlag']\n \n order.price = data['LimitPrice']\n order.volumeOriginal = data['VolumeTotalOriginal']\n order.volumeTraded = data['VolumeTraded']\n order.insertTime = data['InsertTime']\n order.cancelTime = data['CancelTime']\n order.frontID = data['FrontID']\n order.sessionID = data['SessionID']\n \n order.status = data['OrderStatus']\n \n # 推送给策略\n strategy = self.__dictOrderRefStrategy[orderRef]\n strategy.onOrder(order)\n \n # 记录该Order的数据\n self.__dictOrder[orderRef] = data\n \n #----------------------------------------------------------------------\n def __updateTrade(self, event):\n 
\"\"\"成交更新\"\"\"\n print 'updateTrade'\n data = event.dict_['data']\n orderRef = data['OrderRef']\n print 'trade:', orderRef\n \n if orderRef in self.__dictOrderRefStrategy:\n \n # 创建Trade数据对象\n trade = Trade(data['InstrumentID'])\n \n trade.orderRef = orderRef\n trade.tradeID = data['TradeID']\n trade.direction = data['Direction']\n trade.offset = data['OffsetFlag']\n \n trade.price = data['Price']\n trade.volume = data['Volume']\n \n # 推送给策略\n strategy = self.__dictOrderRefStrategy[orderRef]\n strategy.onTrade(trade) \n \n #----------------------------------------------------------------------\n def sendOrder(self, symbol, direction, offset, price, volume, strategy):\n \"\"\"\n 发单(仅允许限价单)\n symbol:合约代码\n direction:方向,DIRECTION_BUY/DIRECTION_SELL\n offset:开平,OFFSET_OPEN/OFFSET_CLOSE\n price:下单价格\n volume:下单手数\n strategy:策略对象 \n \"\"\"\n contract = self.mainEngine.selectInstrument(symbol)\n \n if contract:\n ref = self.mainEngine.sendOrder(symbol,\n contract['ExchangeID'],\n price,\n PRICETYPE_LIMIT,\n volume,\n direction,\n offset)\n \n self.__dictOrderRefStrategy[ref] = strategy\n print 'ref:', ref\n print 'strategy:', strategy.name\n \n return ref\n\n #----------------------------------------------------------------------\n def cancelOrder(self, orderRef):\n \"\"\"\n 撤单\n \"\"\"\n order = self.__dictOrder[orderRef]\n symbol = order['InstrumentID']\n contract = self.mainEngine.selectInstrument(symbol)\n \n if contract:\n self.mainEngine.cancelOrder(symbol,\n contract['ExchangeID'],\n orderRef,\n order['FrontID'],\n order['SessionID'])\n \n #----------------------------------------------------------------------\n def __registerEvent(self):\n \"\"\"注册事件监听\"\"\"\n self.__eventEngine.register(EVENT_MARKETDATA, self.__updateMarketData)\n self.__eventEngine.register(EVENT_ORDER, self.__updateOrder)\n self.__eventEngine.register(EVENT_TRADE ,self.__updateTrade)\n \n #----------------------------------------------------------------------\n def writeLog(self, log):\n 
\"\"\"写日志\"\"\"\n event = Event(type_=EVENT_LOG)\n event.dict_['log'] = log\n self.__eventEngine.put(event)\n \n #----------------------------------------------------------------------\n def registerStrategy(self, symbol, strategy):\n \"\"\"注册策略对合约TICK数据的监听\"\"\"\n # 尝试获取监听该合约代码的策略的列表,若无则创建\n try:\n listStrategy = self.__dictSymbolStrategy[symbol]\n except KeyError:\n listStrategy = []\n self.__dictSymbolStrategy[symbol] = listStrategy\n \n # 防止重复注册\n if strategy not in listStrategy:\n listStrategy.append(strategy)\n\n #----------------------------------------------------------------------\n def placeStopOrder(self, symbol, direction, offset, price, volume, strategy):\n \"\"\"\n 下停止单(运行于本地引擎中)\n 注意这里的price是停止单的触发价\n \"\"\"\n # 创建止损单对象\n so = StopOrder(symbol, direction, offset, price, volume, strategy)\n \n # 获取该合约相关的止损单列表\n try:\n listSO = self.__dictStopOrder[symbol]\n except KeyError:\n listSO = []\n self.__dictStopOrder[symbol] = listSO\n \n # 将该止损单插入列表中\n listSO.append(so)\n \n return so\n \n #----------------------------------------------------------------------\n def cancelStopOrder(self, so):\n \"\"\"撤销停止单\"\"\"\n symbol = so.symbol\n \n try:\n listSO = self.__dictStopOrder[symbol]\n\n if so in listSO:\n listSO.remove(so)\n \n if not listSO:\n del self.__dictStopOrder[symbol]\n except KeyError:\n pass\n \n #----------------------------------------------------------------------\n def startAll(self):\n \"\"\"启动所有策略\"\"\"\n for strategy in self.dictStrategy.values():\n strategy.start()\n \n #----------------------------------------------------------------------\n def stopAll(self):\n \"\"\"停止所有策略\"\"\"\n for strategy in self.dictStrategy.values():\n strategy.stop()\n\n\n########################################################################\nclass StrategyTemplate(object):\n \"\"\"策略模板\"\"\"\n\n #----------------------------------------------------------------------\n def __init__(self, name, symbol, engine):\n \"\"\"Constructor\"\"\"\n self.name = name # 
策略名称(注意唯一性)\n self.symbol = symbol # 策略交易的合约\n self.engine = engine # 策略引擎对象\n \n self.trading = False # 策略是否启动交易\n \n #----------------------------------------------------------------------\n def onTick(self, tick):\n \"\"\"行情更新\"\"\"\n raise NotImplementedError\n \n #----------------------------------------------------------------------\n def onTrade(self, trade):\n \"\"\"交易更新\"\"\"\n raise NotImplementedError\n \n #----------------------------------------------------------------------\n def onOrder(self, order):\n \"\"\"报单更新\"\"\"\n raise NotImplementedError\n \n #----------------------------------------------------------------------\n def onStopOrder(self, orderRef):\n \"\"\"停止单更新\"\"\"\n raise NotImplementedError\n \n #----------------------------------------------------------------------\n def onBar(self, o, h, l, c, volume, time):\n \"\"\"K线数据更新\"\"\"\n raise NotImplementedError\n \n #----------------------------------------------------------------------\n def start(self):\n \"\"\"\n 启动交易\n 这里是最简单的改变self.trading\n 有需要可以重新实现更复杂的操作\n \"\"\"\n self.trading = True\n self.engine.writeLog(self.name + u'开始运行')\n \n #----------------------------------------------------------------------\n def stop(self):\n \"\"\"\n 停止交易\n 同上\n \"\"\"\n self.trading = False\n self.engine.writeLog(self.name + u'停止运行')\n \n #----------------------------------------------------------------------\n def loadSetting(self, setting):\n \"\"\"\n 载入设置\n setting通常是一个包含了参数设置的字典\n \"\"\"\n raise NotImplementedError\n \n #----------------------------------------------------------------------\n def buy(self, price, volume, stopOrder=False):\n \"\"\"买入开仓\"\"\"\n if self.trading:\n if stopOrder:\n so = self.engine.placeStopOrder(self.symbol, DIRECTION_BUY, \n OFFSET_OPEN, price, volume, self)\n return so\n else:\n ref = self.engine.sendOrder(self.symbol, DIRECTION_BUY,\n OFFSET_OPEN, price, volume, self)\n return ref\n else:\n return None\n \n 
#----------------------------------------------------------------------\n def cover(self, price, volume, StopOrder=False):\n \"\"\"买入平仓\"\"\"\n if self.trading:\n if stopOrder:\n so = self.engine.placeStopOrder(self.symbol, DIRECTION_BUY,\n OFFSET_CLOSE, price, volume, self)\n return so\n else:\n ref = self.engine.sendOrder(self.symbol, DIRECTION_BUY,\n OFFSET_CLOSE, price, volume, self)\n return ref\n else:\n return None\n \n #----------------------------------------------------------------------\n def sell(self, price, volume, stopOrder=False):\n \"\"\"卖出平仓\"\"\"\n if self.trading:\n if stopOrder:\n so = self.engine.placeStopOrder(self.symbol, DIRECTION_SELL,\n OFFSET_CLOSE, price, volume, self)\n return so\n else:\n ref = self.engine.sendOrder(self.symbol, DIRECTION_SELL,\n OFFSET_CLOSE, price, volume, self)\n return ref\n else:\n return None\n \n #----------------------------------------------------------------------\n def short(self, price, volume, stopOrder=False):\n \"\"\"卖出开仓\"\"\"\n if self.trading:\n if stopOrder:\n so = self.engine.placeStopOrder(self.symbol, DIRECTION_SELL,\n OFFSET_OPEN, price, volume, self)\n return so\n else:\n ref = self.engine.sendOrder(self.symbol, DIRECTION_SELL, \n OFFSET_OPEN, price, volume, self)\n return ref \n else:\n return None\n \n #----------------------------------------------------------------------\n def cancelOrder(self, orderRef):\n \"\"\"撤单\"\"\"\n self.engine.cancelOrder(orderRef)\n \n #----------------------------------------------------------------------\n def cancelStopOrder(self, so):\n \"\"\"撤销停止单\"\"\"\n self.engine.cancelStopOrder(so)\n \n 
"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203577,"cells":{"repo_name":{"kind":"string","value":"Kazade/NeHe-Website"},"path":{"kind":"string","value":"google_appengine/lib/django-1.3/tests/regressiontests/middleware_exceptions/tests.py"},"copies":{"kind":"string","value":"48"},"size":{"kind":"string","value":"39506"},"content":{"kind":"string","value":"import sys\n\nfrom django.conf import settings\nfrom django.core.signals import got_request_exception\nfrom django.http import HttpResponse\nfrom django.template.response import TemplateResponse\nfrom django.template import Template\nfrom django.test import TestCase\n\nclass TestException(Exception):\n pass\n\n# A middleware base class that tracks which methods have been called\n\nclass TestMiddleware(object):\n def __init__(self):\n self.process_request_called = False\n self.process_view_called = False\n self.process_response_called = False\n self.process_template_response_called = False\n self.process_exception_called = False\n\n def process_request(self, request):\n self.process_request_called = True\n\n def process_view(self, request, view_func, view_args, view_kwargs):\n self.process_view_called = True\n\n def process_template_response(self, request, response):\n self.process_template_response_called = True\n return response\n\n def process_response(self, request, response):\n self.process_response_called = True\n return response\n\n def process_exception(self, request, exception):\n self.process_exception_called = True\n\n# Middleware examples that do the right thing\n\nclass RequestMiddleware(TestMiddleware):\n def process_request(self, request):\n super(RequestMiddleware, self).process_request(request)\n return HttpResponse('Request Middleware')\n\nclass ViewMiddleware(TestMiddleware):\n def process_view(self, request, view_func, view_args, view_kwargs):\n super(ViewMiddleware, self).process_view(request, view_func, view_args, view_kwargs)\n return HttpResponse('View Middleware')\n\nclass 
ResponseMiddleware(TestMiddleware):\n def process_response(self, request, response):\n super(ResponseMiddleware, self).process_response(request, response)\n return HttpResponse('Response Middleware')\n\nclass TemplateResponseMiddleware(TestMiddleware):\n def process_template_response(self, request, response):\n super(TemplateResponseMiddleware, self).process_template_response(request, response)\n return TemplateResponse(request, Template('Template Response Middleware'))\n\nclass ExceptionMiddleware(TestMiddleware):\n def process_exception(self, request, exception):\n super(ExceptionMiddleware, self).process_exception(request, exception)\n return HttpResponse('Exception Middleware')\n\n\n# Sample middlewares that raise exceptions\n\nclass BadRequestMiddleware(TestMiddleware):\n def process_request(self, request):\n super(BadRequestMiddleware, self).process_request(request)\n raise TestException('Test Request Exception')\n\nclass BadViewMiddleware(TestMiddleware):\n def process_view(self, request, view_func, view_args, view_kwargs):\n super(BadViewMiddleware, self).process_view(request, view_func, view_args, view_kwargs)\n raise TestException('Test View Exception')\n\nclass BadTemplateResponseMiddleware(TestMiddleware):\n def process_template_response(self, request, response):\n super(BadTemplateResponseMiddleware, self).process_template_response(request, response)\n raise TestException('Test Template Response Exception')\n\nclass BadResponseMiddleware(TestMiddleware):\n def process_response(self, request, response):\n super(BadResponseMiddleware, self).process_response(request, response)\n raise TestException('Test Response Exception')\n\nclass BadExceptionMiddleware(TestMiddleware):\n def process_exception(self, request, exception):\n super(BadExceptionMiddleware, self).process_exception(request, exception)\n raise TestException('Test Exception Exception')\n\n\nclass BaseMiddlewareExceptionTest(TestCase):\n def setUp(self):\n self.exceptions = []\n 
got_request_exception.connect(self._on_request_exception)\n self.client.handler.load_middleware()\n\n def tearDown(self):\n got_request_exception.disconnect(self._on_request_exception)\n self.exceptions = []\n\n def _on_request_exception(self, sender, request, **kwargs):\n self.exceptions.append(sys.exc_info())\n\n def _add_middleware(self, middleware):\n self.client.handler._request_middleware.insert(0, middleware.process_request)\n self.client.handler._view_middleware.insert(0, middleware.process_view)\n self.client.handler._template_response_middleware.append(middleware.process_template_response)\n self.client.handler._response_middleware.append(middleware.process_response)\n self.client.handler._exception_middleware.append(middleware.process_exception)\n\n def assert_exceptions_handled(self, url, errors, extra_error=None):\n try:\n response = self.client.get(url)\n except TestException, e:\n # Test client intentionally re-raises any exceptions being raised\n # during request handling. 
Hence actual testing that exception was\n # properly handled is done by relying on got_request_exception\n # signal being sent.\n pass\n except Exception, e:\n if type(extra_error) != type(e):\n self.fail(\"Unexpected exception: %s\" % e)\n self.assertEqual(len(self.exceptions), len(errors))\n for i, error in enumerate(errors):\n exception, value, tb = self.exceptions[i]\n self.assertEqual(value.args, (error, ))\n\n def assert_middleware_usage(self, middleware, request, view, template_response, response, exception):\n self.assertEqual(middleware.process_request_called, request)\n self.assertEqual(middleware.process_view_called, view)\n self.assertEqual(middleware.process_template_response_called, template_response)\n self.assertEqual(middleware.process_response_called, response)\n self.assertEqual(middleware.process_exception_called, exception)\n\n\nclass MiddlewareTests(BaseMiddlewareExceptionTest):\n\n def test_process_request_middleware(self):\n pre_middleware = TestMiddleware()\n middleware = RequestMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/view/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, False, False, True, False)\n self.assert_middleware_usage(middleware, True, False, False, True, False)\n self.assert_middleware_usage(post_middleware, False, False, False, True, False)\n\n def test_process_view_middleware(self):\n pre_middleware = TestMiddleware()\n middleware = ViewMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/view/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, 
True, True, False, True, False)\n self.assert_middleware_usage(middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, False, False, True, False)\n\n def test_process_response_middleware(self):\n pre_middleware = TestMiddleware()\n middleware = ResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/view/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, True, False, True, False)\n\n def test_process_template_response_middleware(self):\n pre_middleware = TestMiddleware()\n middleware = TemplateResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/template_response/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, True, True, False)\n self.assert_middleware_usage(middleware, True, True, True, True, False)\n self.assert_middleware_usage(post_middleware, True, True, True, True, False)\n\n def test_process_exception_middleware(self):\n pre_middleware = TestMiddleware()\n middleware = ExceptionMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/view/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n 
self.assert_middleware_usage(middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, True, False, True, False)\n\n def test_process_request_middleware_not_found(self):\n pre_middleware = TestMiddleware()\n middleware = RequestMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, False, False, True, False)\n self.assert_middleware_usage(middleware, True, False, False, True, False)\n self.assert_middleware_usage(post_middleware, False, False, False, True, False)\n\n def test_process_view_middleware_not_found(self):\n pre_middleware = TestMiddleware()\n middleware = ViewMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, False, False, True, False)\n\n def test_process_template_response_middleware_not_found(self):\n pre_middleware = TestMiddleware()\n middleware = TemplateResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, True)\n 
self.assert_middleware_usage(middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_response_middleware_not_found(self):\n pre_middleware = TestMiddleware()\n middleware = ResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, True)\n self.assert_middleware_usage(middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_exception_middleware_not_found(self):\n pre_middleware = TestMiddleware()\n middleware = ExceptionMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_request_middleware_exception(self):\n pre_middleware = TestMiddleware()\n middleware = RequestMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/error/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, False, False, True, False)\n self.assert_middleware_usage(middleware, True, 
False, False, True, False)\n self.assert_middleware_usage(post_middleware, False, False, False, True, False)\n\n def test_process_view_middleware_exception(self):\n pre_middleware = TestMiddleware()\n middleware = ViewMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/error/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, False, False, True, False)\n\n def test_process_response_middleware_exception(self):\n pre_middleware = TestMiddleware()\n middleware = ResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/error/', ['Error in view'], Exception())\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, True)\n self.assert_middleware_usage(middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_exception_middleware_exception(self):\n pre_middleware = TestMiddleware()\n middleware = ExceptionMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/error/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(middleware, True, True, False, True, True)\n 
self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_request_middleware_null_view(self):\n pre_middleware = TestMiddleware()\n middleware = RequestMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/null_view/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, False, False, True, False)\n self.assert_middleware_usage(middleware, True, False, False, True, False)\n self.assert_middleware_usage(post_middleware, False, False, False, True, False)\n\n def test_process_view_middleware_null_view(self):\n pre_middleware = TestMiddleware()\n middleware = ViewMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/null_view/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, False, False, True, False)\n\n def test_process_response_middleware_null_view(self):\n pre_middleware = TestMiddleware()\n middleware = ResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/null_view/', [\n \"The view regressiontests.middleware_exceptions.views.null_view didn't return an HttpResponse object.\",\n ],\n ValueError())\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, 
False)\n self.assert_middleware_usage(middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, True, False, True, False)\n\n def test_process_exception_middleware_null_view(self):\n pre_middleware = TestMiddleware()\n middleware = ExceptionMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/null_view/', [\n \"The view regressiontests.middleware_exceptions.views.null_view didn't return an HttpResponse object.\"\n ],\n ValueError())\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, True, False, True, False)\n\n def test_process_request_middleware_permission_denied(self):\n pre_middleware = TestMiddleware()\n middleware = RequestMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, False, False, True, False)\n self.assert_middleware_usage(middleware, True, False, False, True, False)\n self.assert_middleware_usage(post_middleware, False, False, False, True, False)\n\n def test_process_view_middleware_permission_denied(self):\n pre_middleware = TestMiddleware()\n middleware = ViewMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])\n\n # Check that the 
right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, False, False, True, False)\n\n def test_process_response_middleware_permission_denied(self):\n pre_middleware = TestMiddleware()\n middleware = ResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, True)\n self.assert_middleware_usage(middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_exception_middleware_permission_denied(self):\n pre_middleware = TestMiddleware()\n middleware = ExceptionMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_template_response_error(self):\n middleware = TestMiddleware()\n self._add_middleware(middleware)\n self.assert_exceptions_handled('/middleware_exceptions/template_response_error/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(middleware, True, True, True, True, False)\n\n\nclass 
BadMiddlewareTests(BaseMiddlewareExceptionTest):\n\n def test_process_request_bad_middleware(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadRequestMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test Request Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, False, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, False, False, True, False)\n self.assert_middleware_usage(post_middleware, False, False, False, True, False)\n\n def test_process_view_bad_middleware(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadViewMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test View Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, False, False, True, False)\n\n def test_process_template_response_bad_middleware(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadTemplateResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/template_response/', ['Test Template Response Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n 
self.assert_middleware_usage(bad_middleware, True, True, True, True, False)\n self.assert_middleware_usage(post_middleware, True, True, True, True, False)\n\n def test_process_response_bad_middleware(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test Response Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, False, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, True, False, True, False)\n\n def test_process_exception_bad_middleware(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadExceptionMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/view/', [])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, True, False, True, False)\n\n def test_process_request_bad_middleware_not_found(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadRequestMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Request Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, 
True, False, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, False, False, True, False)\n self.assert_middleware_usage(post_middleware, False, False, False, True, False)\n\n def test_process_view_bad_middleware_not_found(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadViewMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test View Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, False, False, True, False)\n\n def test_process_response_bad_middleware_not_found(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Response Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, False, True)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_exception_bad_middleware_not_found(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadExceptionMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Exception Exception'])\n\n # Check that the 
right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_request_bad_middleware_exception(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadRequestMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test Request Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, False, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, False, False, True, False)\n self.assert_middleware_usage(post_middleware, False, False, False, True, False)\n\n def test_process_view_bad_middleware_exception(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadViewMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test View Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, False, False, True, False)\n\n def test_process_response_bad_middleware_exception(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n 
self.assert_exceptions_handled('/middleware_exceptions/error/', ['Error in view', 'Test Response Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, False, True)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_exception_bad_middleware_exception(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadExceptionMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test Exception Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_request_bad_middleware_null_view(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadRequestMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/null_view/', ['Test Request Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, False, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, False, False, True, False)\n self.assert_middleware_usage(post_middleware, False, False, False, True, False)\n\n def test_process_view_bad_middleware_null_view(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadViewMiddleware()\n post_middleware = TestMiddleware()\n 
self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/null_view/', ['Test View Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, False, False, True, False)\n\n def test_process_response_bad_middleware_null_view(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/null_view/', [\n \"The view regressiontests.middleware_exceptions.views.null_view didn't return an HttpResponse object.\",\n 'Test Response Exception'\n ])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, False, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, True, False, True, False)\n\n def test_process_exception_bad_middleware_null_view(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadExceptionMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/null_view/', [\n \"The view regressiontests.middleware_exceptions.views.null_view didn't return an HttpResponse object.\"\n ],\n ValueError())\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n 
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, True, False, True, False)\n\n def test_process_request_bad_middleware_permission_denied(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadRequestMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Request Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, False, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, False, False, True, False)\n self.assert_middleware_usage(post_middleware, False, False, False, True, False)\n\n def test_process_view_bad_middleware_permission_denied(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadViewMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test View Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, False)\n self.assert_middleware_usage(post_middleware, True, False, False, True, False)\n\n def test_process_response_bad_middleware_permission_denied(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadResponseMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Response Exception'])\n\n # Check 
that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, False, True)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n def test_process_exception_bad_middleware_permission_denied(self):\n pre_middleware = TestMiddleware()\n bad_middleware = BadExceptionMiddleware()\n post_middleware = TestMiddleware()\n self._add_middleware(post_middleware)\n self._add_middleware(bad_middleware)\n self._add_middleware(pre_middleware)\n self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Exception Exception'])\n\n # Check that the right middleware methods have been invoked\n self.assert_middleware_usage(pre_middleware, True, True, False, True, False)\n self.assert_middleware_usage(bad_middleware, True, True, False, True, True)\n self.assert_middleware_usage(post_middleware, True, True, False, True, True)\n\n\n_missing = object()\nclass RootUrlconfTests(TestCase):\n def test_missing_root_urlconf(self):\n try:\n original_ROOT_URLCONF = settings.ROOT_URLCONF\n del settings.ROOT_URLCONF\n except AttributeError:\n original_ROOT_URLCONF = _missing\n self.assertRaises(AttributeError,\n self.client.get, \"/middleware_exceptions/view/\"\n )\n\n if original_ROOT_URLCONF is not _missing:\n settings.ROOT_URLCONF = original_ROOT_URLCONF\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203578,"cells":{"repo_name":{"kind":"string","value":"nhaney90/developer-support"},"path":{"kind":"string","value":"python/general-python/export-users-csv/accountHelper.py"},"copies":{"kind":"string","value":"9"},"size":{"kind":"string","value":"5538"},"content":{"kind":"string","value":"#-------------------------------------------------------------------------------\n# Name: Account Helper with user and role dictionaries\n# Purpose: Queries critical information from ArcGIS Online organization 
which\n# can be used in other scripts\n#\n# Author: Kelly Gerrow kgerrow@esri.com\n#\n# Created: 09/11/2014\n# Copyright: (c) Kelly 2014\n# Licence: \n#-------------------------------------------------------------------------------\n\nimport requests\nimport json, time, datetime\nimport string, smtplib, os\n\nclass agolAdmin(object):\n #Initializes script reporting on needed values\n def __init__(self, username, password):\n self.username = username\n self.password = password\n self.__token, self.__ssl= self.__getToken(username, password)\n if self.__ssl == False:\n self.__pref='http://'\n else:\n self.__pref='https://'\n self.__urlKey, self.__id, self.__Name, self.__FullName, self.__Email, self.__maxUsers = self.__GetAccount()\n self.__portalUrl = self.__pref+self.__urlKey\n self.__userDict = self.__userDictMethod()\n self.__roleDict = self.__roleDictMethod()\n\n\n #assigns Variables to names\n @property\n def token(self):\n return self.__token\n\n @property\n def portalUrl(self):\n return self.__portalUrl\n\n @property\n def orgID(self):\n return self.__id\n\n @property\n def orgName(self):\n return self.__Name\n\n @property\n def fullName(self):\n return self.__FullName\n\n @property\n def adminEmail(self):\n return self.__Email\n\n @property\n def maxUser(self):\n return self.__maxUsers\n\n @property\n def userDict(self):\n return self.__userDict\n @property\n def roleDict(self):\n return self.__roleDict\n\n#----------------------------------------------------Account Information -----------------------------------------------\n #generates token\n def __getToken(self,adminUser, pw):\n data = {'username': adminUser,\n 'password': pw,\n 'referer' : 'https://www.arcgis.com',\n 'expiration': '432000',\n 'f': 'json'}\n url = 'https://arcgis.com/sharing/rest/generateToken'\n jres = requests.post(url, data=data, verify=False).json()\n return jres['token'],jres['ssl']\n\n #generates account information\n def __GetAccount(self):\n URL= 
self.__pref+'www.arcgis.com/sharing/rest/portals/self?f=json&token=' + self.token\n response = requests.get(URL, verify=False)\n jres = json.loads(response.text)\n return jres['urlKey'], jres['id'], jres['name'], jres['user']['fullName'], jres['user']['email'], jres['subscriptionInfo']['maxUsers']\n\n\n #creates dictionary of role names and corresponding IDs\n def __roleDictMethod(self):\n roleVal = {'administrator':'org_admin', 'publisher':'org_publisher', 'user': 'org_user'}\n start = 1\n number = 50\n while start != -1:\n roleUrl= self.__pref+'www.arcgis.com/sharing/rest/portals/self/roles?f=json&start='+str(start)+'&num='+str(number)+'&token=' + self.token\n response = requests.get(roleUrl, verify = False)\n jres = json.loads(response.text)\n for item in jres['roles']:\n roleVal[str(item['name'])] = str(item['id'])\n start =jres['nextStart']\n return roleVal\n\n #creates a dictionary of Usernames and related information\n def __userDictMethod(self):\n\n start = 1\n number = 200\n #retreive information of all users in organization\n userDict = []\n while start != -1:\n listURL ='{}.maps.arcgis.com/sharing/rest/portals/self/users'.format(self.portalUrl)\n request = listURL +\"?start=\"+str(start)+\"&num=\"+str(number)+\"&f=json&token=\"+self.token\n response = requests.get(request, verify = False)\n jres = json.loads(response.text)\n for row in jres['users']:\n userDict.append(row)\n start =jres['nextStart']\n return userDict\n\n #updates username properties depending on the input\n\n def updateUser(self,userName,myEsri=None,fullName = None,description=None, access=None,tags=None,email=None, password=None):\n userURL ='https://{}.maps.arcgis.com/sharing/rest/community/users/{}/update'.format(self.__urlKey, userName)\n data = {'f':'json','token':self.token}\n if access:\n data['access'] = access\n if fullName :\n data['fullName']= fullName\n if description:\n data['description'] = description\n if myEsri:\n data['usertype'] = myEsri\n if tags:\n data['tags']= 
tags\n if email:\n data['email'] = email\n if password:\n data['password'] = password\n print data\n response = requests.post(userURL, data=data, verify=False).json()\n\n #Assign a name or ID for a user role\n def roleAssign(self,roleInput):\n\n for key,val in self.roleDict.iteritems():\n if key.lower() == roleInput.lower():\n return val\n if val.lower() == roleInput.lower():\n return key\n\n def myEsriAssign(self, myEsriInput):\n myEsriVal={'my esri': 'both', 'arcgis online':'arcgisonly'}\n for key,val in myEsriVal.iteritems():\n if key.lower() == myEsriInput.lower():\n return val\n if val.lower() == myEsriInput.lower():\n return key\n\n\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203579,"cells":{"repo_name":{"kind":"string","value":"ryfeus/lambda-packs"},"path":{"kind":"string","value":"Keras_tensorflow_nightly/source2.7/tensorflow/contrib/distributions/python/ops/test_util.py"},"copies":{"kind":"string","value":"44"},"size":{"kind":"string","value":"16499"},"content":{"kind":"string","value":"# Copyright 2017 The TensorFlow Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Utilities for testing distributions and/or bijectors.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport numpy as np\n\nfrom tensorflow.python.framework import ops\nfrom tensorflow.python.ops import array_ops\nfrom tensorflow.python.ops import histogram_ops\nfrom tensorflow.python.ops import linalg_ops\nfrom tensorflow.python.ops import math_ops\nfrom tensorflow.python.ops import variables as variables_ops\n\n\n__all__ = [\n \"DiscreteScalarDistributionTestHelpers\",\n \"VectorDistributionTestHelpers\",\n]\n\n\nclass DiscreteScalarDistributionTestHelpers(object):\n \"\"\"DiscreteScalarDistributionTestHelpers.\"\"\"\n\n def run_test_sample_consistent_log_prob(\n self, sess_run_fn, dist,\n num_samples=int(1e5), num_threshold=int(1e3), seed=42,\n batch_size=None,\n rtol=1e-2, atol=0.):\n \"\"\"Tests that sample/log_prob are consistent with each other.\n\n \"Consistency\" means that `sample` and `log_prob` correspond to the same\n distribution.\n\n Note: this test only verifies a necessary condition for consistency--it does\n does not verify sufficiency hence does not prove `sample`, `log_prob` truly\n are consistent.\n\n Args:\n sess_run_fn: Python `callable` taking `list`-like of `Tensor`s and\n returning a list of results after 
running one \"step\" of TensorFlow\n computation, typically set to `sess.run`.\n dist: Distribution instance or object which implements `sample`,\n `log_prob`, `event_shape_tensor` and `batch_shape_tensor`.\n num_samples: Python `int` scalar indicating the number of Monte-Carlo\n samples to draw from `dist`.\n num_threshold: Python `int` scalar indicating the number of samples a\n bucket must contain before being compared to the probability.\n Default value: 1e3; must be at least 1.\n Warning, set too high will cause test to falsely pass but setting too\n low will cause the test to falsely fail.\n seed: Python `int` indicating the seed to use when sampling from `dist`.\n In general it is not recommended to use `None` during a test as this\n increases the likelihood of spurious test failure.\n batch_size: Hint for unpacking result of samples. Default: `None` means\n batch_size is inferred.\n rtol: Python `float`-type indicating the admissible relative error between\n analytical and sample statistics.\n atol: Python `float`-type indicating the admissible absolute error between\n analytical and sample statistics.\n\n Raises:\n ValueError: if `num_threshold < 1`.\n \"\"\"\n if num_threshold < 1:\n raise ValueError(\"num_threshold({}) must be at least 1.\".format(\n num_threshold))\n # Histogram only supports vectors so we call it once per batch coordinate.\n y = dist.sample(num_samples, seed=seed)\n y = array_ops.reshape(y, shape=[num_samples, -1])\n if batch_size is None:\n batch_size = math_ops.reduce_prod(dist.batch_shape_tensor())\n batch_dims = array_ops.shape(dist.batch_shape_tensor())[0]\n edges_expanded_shape = 1 + array_ops.pad([-2], paddings=[[0, batch_dims]])\n for b, x in enumerate(array_ops.unstack(y, num=batch_size, axis=1)):\n counts, edges = self.histogram(x)\n edges = array_ops.reshape(edges, edges_expanded_shape)\n probs = math_ops.exp(dist.log_prob(edges))\n probs = array_ops.reshape(probs, shape=[-1, batch_size])[:, b]\n\n [counts_, probs_] = 
sess_run_fn([counts, probs])\n valid = counts_ > num_threshold\n probs_ = probs_[valid]\n counts_ = counts_[valid]\n self.assertAllClose(probs_, counts_ / num_samples,\n rtol=rtol, atol=atol)\n\n def run_test_sample_consistent_mean_variance(\n self, sess_run_fn, dist,\n num_samples=int(1e5), seed=24,\n rtol=1e-2, atol=0.):\n \"\"\"Tests that sample/mean/variance are consistent with each other.\n\n \"Consistency\" means that `sample`, `mean`, `variance`, etc all correspond\n to the same distribution.\n\n Args:\n sess_run_fn: Python `callable` taking `list`-like of `Tensor`s and\n returning a list of results after running one \"step\" of TensorFlow\n computation, typically set to `sess.run`.\n dist: Distribution instance or object which implements `sample`,\n `log_prob`, `event_shape_tensor` and `batch_shape_tensor`.\n num_samples: Python `int` scalar indicating the number of Monte-Carlo\n samples to draw from `dist`.\n seed: Python `int` indicating the seed to use when sampling from `dist`.\n In general it is not recommended to use `None` during a test as this\n increases the likelihood of spurious test failure.\n rtol: Python `float`-type indicating the admissible relative error between\n analytical and sample statistics.\n atol: Python `float`-type indicating the admissible absolute error between\n analytical and sample statistics.\n \"\"\"\n x = math_ops.to_float(dist.sample(num_samples, seed=seed))\n sample_mean = math_ops.reduce_mean(x, axis=0)\n sample_variance = math_ops.reduce_mean(\n math_ops.square(x - sample_mean), axis=0)\n sample_stddev = math_ops.sqrt(sample_variance)\n\n [\n sample_mean_,\n sample_variance_,\n sample_stddev_,\n mean_,\n variance_,\n stddev_\n ] = sess_run_fn([\n sample_mean,\n sample_variance,\n sample_stddev,\n dist.mean(),\n dist.variance(),\n dist.stddev(),\n ])\n\n self.assertAllClose(mean_, sample_mean_, rtol=rtol, atol=atol)\n self.assertAllClose(variance_, sample_variance_, rtol=rtol, atol=atol)\n self.assertAllClose(stddev_, 
sample_stddev_, rtol=rtol, atol=atol)\n\n def histogram(self, x, value_range=None, nbins=None, name=None):\n \"\"\"Return histogram of values.\n\n Given the tensor `values`, this operation returns a rank 1 histogram\n counting the number of entries in `values` that fell into every bin. The\n bins are equal width and determined by the arguments `value_range` and\n `nbins`.\n\n Args:\n x: 1D numeric `Tensor` of items to count.\n value_range: Shape [2] `Tensor`. `new_values <= value_range[0]` will be\n mapped to `hist[0]`, `values >= value_range[1]` will be mapped to\n `hist[-1]`. Must be same dtype as `x`.\n nbins: Scalar `int32 Tensor`. Number of histogram bins.\n name: Python `str` name prefixed to Ops created by this class.\n\n Returns:\n counts: 1D `Tensor` of counts, i.e.,\n `counts[i] = sum{ edges[i-1] <= values[j] < edges[i] : j }`.\n edges: 1D `Tensor` characterizing intervals used for counting.\n \"\"\"\n with ops.name_scope(name, \"histogram\", [x]):\n x = ops.convert_to_tensor(x, name=\"x\")\n if value_range is None:\n value_range = [math_ops.reduce_min(x), 1 + math_ops.reduce_max(x)]\n value_range = ops.convert_to_tensor(value_range, name=\"value_range\")\n lo = value_range[0]\n hi = value_range[1]\n if nbins is None:\n nbins = math_ops.to_int32(hi - lo)\n delta = (hi - lo) / math_ops.cast(\n nbins, dtype=value_range.dtype.base_dtype)\n edges = math_ops.range(\n start=lo, limit=hi, delta=delta, dtype=x.dtype.base_dtype)\n counts = histogram_ops.histogram_fixed_width(\n x, value_range=value_range, nbins=nbins)\n return counts, edges\n\n\nclass VectorDistributionTestHelpers(object):\n \"\"\"VectorDistributionTestHelpers helps test vector-event distributions.\"\"\"\n\n def run_test_sample_consistent_log_prob(\n self,\n sess_run_fn,\n dist,\n num_samples=int(1e5),\n radius=1.,\n center=0.,\n seed=42,\n rtol=1e-2,\n atol=0.):\n \"\"\"Tests that sample/log_prob are mutually consistent.\n\n \"Consistency\" means that `sample` and `log_prob` correspond to the 
same\n distribution.\n\n The idea of this test is to compute the Monte-Carlo estimate of the volume\n enclosed by a hypersphere, i.e., the volume of an `n`-ball. While we could\n choose an arbitrary function to integrate, the hypersphere's volume is nice\n because it is intuitive, has an easy analytical expression, and works for\n `dimensions > 1`.\n\n Technical Details:\n\n Observe that:\n\n ```none\n int_{R**d} dx [x in Ball(radius=r, center=c)]\n = E_{p(X)}[ [X in Ball(r, c)] / p(X) ]\n = lim_{m->infty} m**-1 sum_j^m [x[j] in Ball(r, c)] / p(x[j]),\n where x[j] ~iid p(X)\n ```\n\n Thus, for fixed `m`, the above is approximately true when `sample` and\n `log_prob` are mutually consistent.\n\n Furthermore, the above calculation has the analytical result:\n `pi**(d/2) r**d / Gamma(1 + d/2)`.\n\n Note: this test only verifies a necessary condition for consistency--it does\n does not verify sufficiency hence does not prove `sample`, `log_prob` truly\n are consistent. For this reason we recommend testing several different\n hyperspheres (assuming the hypersphere is supported by the distribution).\n Furthermore, we gain additional trust in this test when also tested `sample`\n against the first, second moments\n (`run_test_sample_consistent_mean_covariance`); it is probably unlikely that\n a \"best-effort\" implementation of `log_prob` would incorrectly pass both\n tests and for different hyperspheres.\n\n For a discussion on the analytical result (second-line) see:\n https://en.wikipedia.org/wiki/Volume_of_an_n-ball.\n\n For a discussion of importance sampling (fourth-line) see:\n https://en.wikipedia.org/wiki/Importance_sampling.\n\n Args:\n sess_run_fn: Python `callable` taking `list`-like of `Tensor`s and\n returning a list of results after running one \"step\" of TensorFlow\n computation, typically set to `sess.run`.\n dist: Distribution instance or object which implements `sample`,\n `log_prob`, `event_shape_tensor` and `batch_shape_tensor`. 
The\n distribution must have non-zero probability of sampling every point\n enclosed by the hypersphere.\n num_samples: Python `int` scalar indicating the number of Monte-Carlo\n samples to draw from `dist`.\n radius: Python `float`-type indicating the radius of the `n`-ball which\n we're computing the volume.\n center: Python floating-type vector (or scalar) indicating the center of\n the `n`-ball which we're computing the volume. When scalar, the value is\n broadcast to all event dims.\n seed: Python `int` indicating the seed to use when sampling from `dist`.\n In general it is not recommended to use `None` during a test as this\n increases the likelihood of spurious test failure.\n rtol: Python `float`-type indicating the admissible relative error between\n actual- and approximate-volumes.\n atol: Python `float`-type indicating the admissible absolute error between\n actual- and approximate-volumes. In general this should be zero since\n a typical radius implies a non-zero volume.\n \"\"\"\n\n def actual_hypersphere_volume(dims, radius):\n # https://en.wikipedia.org/wiki/Volume_of_an_n-ball\n # Using tf.lgamma because we'd have to otherwise use SciPy which is not\n # a required dependency of core.\n radius = np.asarray(radius)\n dims = math_ops.cast(dims, dtype=radius.dtype)\n return math_ops.exp(\n (dims / 2.) * np.log(np.pi)\n - math_ops.lgamma(1. 
+ dims / 2.)\n + dims * math_ops.log(radius))\n\n def is_in_ball(x, radius, center):\n return math_ops.cast(linalg_ops.norm(x - center, axis=-1) <= radius,\n dtype=x.dtype)\n\n def monte_carlo_hypersphere_volume(dist, num_samples, radius, center):\n # https://en.wikipedia.org/wiki/Importance_sampling\n x = dist.sample(num_samples, seed=seed)\n x = array_ops.identity(x) # Invalidate bijector cacheing.\n return math_ops.reduce_mean(\n math_ops.exp(-dist.log_prob(x)) * is_in_ball(x, radius, center),\n axis=0)\n\n # Build graph.\n with ops.name_scope(\n \"run_test_sample_consistent_log_prob\",\n values=[num_samples, radius, center] + dist._graph_parents): # pylint: disable=protected-access\n batch_shape = dist.batch_shape_tensor()\n actual_volume = actual_hypersphere_volume(\n dims=dist.event_shape_tensor()[0],\n radius=radius)\n sample_volume = monte_carlo_hypersphere_volume(\n dist,\n num_samples=num_samples,\n radius=radius,\n center=center)\n init_op = variables_ops.global_variables_initializer()\n\n # Execute graph.\n sess_run_fn(init_op)\n [batch_shape_, actual_volume_, sample_volume_] = sess_run_fn([\n batch_shape, actual_volume, sample_volume])\n\n # Check results.\n self.assertAllClose(np.tile(actual_volume_, reps=batch_shape_),\n sample_volume_,\n rtol=rtol, atol=atol)\n\n def run_test_sample_consistent_mean_covariance(\n self,\n sess_run_fn,\n dist,\n num_samples=int(1e5),\n seed=24,\n rtol=1e-2,\n atol=0.1,\n cov_rtol=None,\n cov_atol=None):\n \"\"\"Tests that sample/mean/covariance are consistent with each other.\n\n \"Consistency\" means that `sample`, `mean`, `covariance`, etc all correspond\n to the same distribution.\n\n Args:\n sess_run_fn: Python `callable` taking `list`-like of `Tensor`s and\n returning a list of results after running one \"step\" of TensorFlow\n computation, typically set to `sess.run`.\n dist: Distribution instance or object which implements `sample`,\n `log_prob`, `event_shape_tensor` and `batch_shape_tensor`.\n num_samples: 
Python `int` scalar indicating the number of Monte-Carlo\n samples to draw from `dist`.\n seed: Python `int` indicating the seed to use when sampling from `dist`.\n In general it is not recommended to use `None` during a test as this\n increases the likelihood of spurious test failure.\n rtol: Python `float`-type indicating the admissible relative error between\n analytical and sample statistics.\n atol: Python `float`-type indicating the admissible absolute error between\n analytical and sample statistics.\n cov_rtol: Python `float`-type indicating the admissible relative error\n between analytical and sample covariance. Default: rtol.\n cov_atol: Python `float`-type indicating the admissible absolute error\n between analytical and sample covariance. Default: atol.\n \"\"\"\n\n x = dist.sample(num_samples, seed=seed)\n sample_mean = math_ops.reduce_mean(x, axis=0)\n sample_covariance = math_ops.reduce_mean(\n _vec_outer_square(x - sample_mean), axis=0)\n sample_variance = array_ops.matrix_diag_part(sample_covariance)\n sample_stddev = math_ops.sqrt(sample_variance)\n\n [\n sample_mean_,\n sample_covariance_,\n sample_variance_,\n sample_stddev_,\n mean_,\n covariance_,\n variance_,\n stddev_\n ] = sess_run_fn([\n sample_mean,\n sample_covariance,\n sample_variance,\n sample_stddev,\n dist.mean(),\n dist.covariance(),\n dist.variance(),\n dist.stddev(),\n ])\n\n self.assertAllClose(mean_, sample_mean_, rtol=rtol, atol=atol)\n self.assertAllClose(covariance_, sample_covariance_,\n rtol=cov_rtol or rtol,\n atol=cov_atol or atol)\n self.assertAllClose(variance_, sample_variance_, rtol=rtol, atol=atol)\n self.assertAllClose(stddev_, sample_stddev_, rtol=rtol, atol=atol)\n\n\ndef _vec_outer_square(x, name=None):\n \"\"\"Computes the outer-product of a vector, i.e., x.T x.\"\"\"\n with ops.name_scope(name, \"vec_osquare\", [x]):\n return x[..., :, array_ops.newaxis] * x[..., array_ops.newaxis, 
:]\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203580,"cells":{"repo_name":{"kind":"string","value":"frank10704/DF_GCS_W"},"path":{"kind":"string","value":"MissionPlanner-master/packages/IronPython.StdLib.2.7.5-beta1/content/Lib/pstats.py"},"copies":{"kind":"string","value":"50"},"size":{"kind":"string","value":"27927"},"content":{"kind":"string","value":"\"\"\"Class for printing reports on profiled python code.\"\"\"\r\n\r\n# Class for printing reports on profiled python code. rev 1.0 4/1/94\r\n#\r\n# Based on prior profile module by Sjoerd Mullender...\r\n# which was hacked somewhat by: Guido van Rossum\r\n#\r\n# see profile.py for more info.\r\n\r\n# Copyright 1994, by InfoSeek Corporation, all rights reserved.\r\n# Written by James Roskind\r\n#\r\n# Permission to use, copy, modify, and distribute this Python software\r\n# and its associated documentation for any purpose (subject to the\r\n# restriction in the following sentence) without fee is hereby granted,\r\n# provided that the above copyright notice appears in all copies, and\r\n# that both that copyright notice and this permission notice appear in\r\n# supporting documentation, and that the name of InfoSeek not be used in\r\n# advertising or publicity pertaining to distribution of the software\r\n# without specific, written prior permission. This permission is\r\n# explicitly restricted to the copying and modification of the software\r\n# to remain in Python, compiled Python, or other languages (such as C)\r\n# wherein the modified or derived code is exclusively imported into a\r\n# Python module.\r\n#\r\n# INFOSEEK CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS\r\n# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND\r\n# FITNESS. 
IN NO EVENT SHALL INFOSEEK CORPORATION BE LIABLE FOR ANY\r\n# SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER\r\n# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF\r\n# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN\r\n# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\r\n\r\n\r\nimport sys\r\nimport os\r\nimport time\r\nimport marshal\r\nimport re\r\nfrom functools import cmp_to_key\r\n\r\n__all__ = [\"Stats\"]\r\n\r\nclass Stats:\r\n \"\"\"This class is used for creating reports from data generated by the\r\n Profile class. It is a \"friend\" of that class, and imports data either\r\n by direct access to members of Profile class, or by reading in a dictionary\r\n that was emitted (via marshal) from the Profile class.\r\n\r\n The big change from the previous Profiler (in terms of raw functionality)\r\n is that an \"add()\" method has been provided to combine Stats from\r\n several distinct profile runs. Both the constructor and the add()\r\n method now take arbitrarily many file names as arguments.\r\n\r\n All the print methods now take an argument that indicates how many lines\r\n to print. If the arg is a floating point number between 0 and 1.0, then\r\n it is taken as a decimal percentage of the available lines to be printed\r\n (e.g., .1 means print 10% of all available lines). If it is an integer,\r\n it is taken to mean the number of lines of data that you wish to have\r\n printed.\r\n\r\n The sort_stats() method now processes some additional options (i.e., in\r\n addition to the old -1, 0, 1, or 2). It takes an arbitrary number of\r\n quoted strings to select the sort order. For example sort_stats('time',\r\n 'name') sorts on the major key of 'internal function time', and on the\r\n minor key of 'the name of the function'. 
Look at the two tables in\r\n sort_stats() and get_sort_arg_defs(self) for more examples.\r\n\r\n All methods return self, so you can string together commands like:\r\n Stats('foo', 'goo').strip_dirs().sort_stats('calls').\\\r\n print_stats(5).print_callers(5)\r\n \"\"\"\r\n\r\n def __init__(self, *args, **kwds):\r\n # I can't figure out how to explictly specify a stream keyword arg\r\n # with *args:\r\n # def __init__(self, *args, stream=sys.stdout): ...\r\n # so I use **kwds and sqauwk if something unexpected is passed in.\r\n self.stream = sys.stdout\r\n if \"stream\" in kwds:\r\n self.stream = kwds[\"stream\"]\r\n del kwds[\"stream\"]\r\n if kwds:\r\n keys = kwds.keys()\r\n keys.sort()\r\n extras = \", \".join([\"%s=%s\" % (k, kwds[k]) for k in keys])\r\n raise ValueError, \"unrecognized keyword args: %s\" % extras\r\n if not len(args):\r\n arg = None\r\n else:\r\n arg = args[0]\r\n args = args[1:]\r\n self.init(arg)\r\n self.add(*args)\r\n\r\n def init(self, arg):\r\n self.all_callees = None # calc only if needed\r\n self.files = []\r\n self.fcn_list = None\r\n self.total_tt = 0\r\n self.total_calls = 0\r\n self.prim_calls = 0\r\n self.max_name_len = 0\r\n self.top_level = {}\r\n self.stats = {}\r\n self.sort_arg_dict = {}\r\n self.load_stats(arg)\r\n trouble = 1\r\n try:\r\n self.get_top_level_stats()\r\n trouble = 0\r\n finally:\r\n if trouble:\r\n print >> self.stream, \"Invalid timing data\",\r\n if self.files: print >> self.stream, self.files[-1],\r\n print >> self.stream\r\n\r\n def load_stats(self, arg):\r\n if not arg: self.stats = {}\r\n elif isinstance(arg, basestring):\r\n f = open(arg, 'rb')\r\n self.stats = marshal.load(f)\r\n f.close()\r\n try:\r\n file_stats = os.stat(arg)\r\n arg = time.ctime(file_stats.st_mtime) + \" \" + arg\r\n except: # in case this is not unix\r\n pass\r\n self.files = [ arg ]\r\n elif hasattr(arg, 'create_stats'):\r\n arg.create_stats()\r\n self.stats = arg.stats\r\n arg.stats = {}\r\n if not self.stats:\r\n raise 
TypeError, \"Cannot create or construct a %r object from '%r''\" % (\r\n self.__class__, arg)\r\n return\r\n\r\n def get_top_level_stats(self):\r\n for func, (cc, nc, tt, ct, callers) in self.stats.items():\r\n self.total_calls += nc\r\n self.prim_calls += cc\r\n self.total_tt += tt\r\n if (\"jprofile\", 0, \"profiler\") in callers:\r\n self.top_level[func] = None\r\n if len(func_std_string(func)) > self.max_name_len:\r\n self.max_name_len = len(func_std_string(func))\r\n\r\n def add(self, *arg_list):\r\n if not arg_list: return self\r\n if len(arg_list) > 1: self.add(*arg_list[1:])\r\n other = arg_list[0]\r\n if type(self) != type(other) or self.__class__ != other.__class__:\r\n other = Stats(other)\r\n self.files += other.files\r\n self.total_calls += other.total_calls\r\n self.prim_calls += other.prim_calls\r\n self.total_tt += other.total_tt\r\n for func in other.top_level:\r\n self.top_level[func] = None\r\n\r\n if self.max_name_len < other.max_name_len:\r\n self.max_name_len = other.max_name_len\r\n\r\n self.fcn_list = None\r\n\r\n for func, stat in other.stats.iteritems():\r\n if func in self.stats:\r\n old_func_stat = self.stats[func]\r\n else:\r\n old_func_stat = (0, 0, 0, 0, {},)\r\n self.stats[func] = add_func_stats(old_func_stat, stat)\r\n return self\r\n\r\n def dump_stats(self, filename):\r\n \"\"\"Write the profile data to a file we know how to load back.\"\"\"\r\n f = file(filename, 'wb')\r\n try:\r\n marshal.dump(self.stats, f)\r\n finally:\r\n f.close()\r\n\r\n # list the tuple indices and directions for sorting,\r\n # along with some printable description\r\n sort_arg_dict_default = {\r\n \"calls\" : (((1,-1), ), \"call count\"),\r\n \"cumulative\": (((3,-1), ), \"cumulative time\"),\r\n \"file\" : (((4, 1), ), \"file name\"),\r\n \"line\" : (((5, 1), ), \"line number\"),\r\n \"module\" : (((4, 1), ), \"file name\"),\r\n \"name\" : (((6, 1), ), \"function name\"),\r\n \"nfl\" : (((6, 1),(4, 1),(5, 1),), \"name/file/line\"),\r\n \"pcalls\" : 
(((0,-1), ), \"call count\"),\r\n \"stdname\" : (((7, 1), ), \"standard name\"),\r\n \"time\" : (((2,-1), ), \"internal time\"),\r\n }\r\n\r\n def get_sort_arg_defs(self):\r\n \"\"\"Expand all abbreviations that are unique.\"\"\"\r\n if not self.sort_arg_dict:\r\n self.sort_arg_dict = dict = {}\r\n bad_list = {}\r\n for word, tup in self.sort_arg_dict_default.iteritems():\r\n fragment = word\r\n while fragment:\r\n if not fragment:\r\n break\r\n if fragment in dict:\r\n bad_list[fragment] = 0\r\n break\r\n dict[fragment] = tup\r\n fragment = fragment[:-1]\r\n for word in bad_list:\r\n del dict[word]\r\n return self.sort_arg_dict\r\n\r\n def sort_stats(self, *field):\r\n if not field:\r\n self.fcn_list = 0\r\n return self\r\n if len(field) == 1 and isinstance(field[0], (int, long)):\r\n # Be compatible with old profiler\r\n field = [ {-1: \"stdname\",\r\n 0: \"calls\",\r\n 1: \"time\",\r\n 2: \"cumulative\"}[field[0]] ]\r\n\r\n sort_arg_defs = self.get_sort_arg_defs()\r\n sort_tuple = ()\r\n self.sort_type = \"\"\r\n connector = \"\"\r\n for word in field:\r\n sort_tuple = sort_tuple + sort_arg_defs[word][0]\r\n self.sort_type += connector + sort_arg_defs[word][1]\r\n connector = \", \"\r\n\r\n stats_list = []\r\n for func, (cc, nc, tt, ct, callers) in self.stats.iteritems():\r\n stats_list.append((cc, nc, tt, ct) + func +\r\n (func_std_string(func), func))\r\n\r\n stats_list.sort(key=cmp_to_key(TupleComp(sort_tuple).compare))\r\n\r\n self.fcn_list = fcn_list = []\r\n for tuple in stats_list:\r\n fcn_list.append(tuple[-1])\r\n return self\r\n\r\n def reverse_order(self):\r\n if self.fcn_list:\r\n self.fcn_list.reverse()\r\n return self\r\n\r\n def strip_dirs(self):\r\n oldstats = self.stats\r\n self.stats = newstats = {}\r\n max_name_len = 0\r\n for func, (cc, nc, tt, ct, callers) in oldstats.iteritems():\r\n newfunc = func_strip_path(func)\r\n if len(func_std_string(newfunc)) > max_name_len:\r\n max_name_len = len(func_std_string(newfunc))\r\n newcallers = {}\r\n 
for func2, caller in callers.iteritems():\r\n newcallers[func_strip_path(func2)] = caller\r\n\r\n if newfunc in newstats:\r\n newstats[newfunc] = add_func_stats(\r\n newstats[newfunc],\r\n (cc, nc, tt, ct, newcallers))\r\n else:\r\n newstats[newfunc] = (cc, nc, tt, ct, newcallers)\r\n old_top = self.top_level\r\n self.top_level = new_top = {}\r\n for func in old_top:\r\n new_top[func_strip_path(func)] = None\r\n\r\n self.max_name_len = max_name_len\r\n\r\n self.fcn_list = None\r\n self.all_callees = None\r\n return self\r\n\r\n def calc_callees(self):\r\n if self.all_callees: return\r\n self.all_callees = all_callees = {}\r\n for func, (cc, nc, tt, ct, callers) in self.stats.iteritems():\r\n if not func in all_callees:\r\n all_callees[func] = {}\r\n for func2, caller in callers.iteritems():\r\n if not func2 in all_callees:\r\n all_callees[func2] = {}\r\n all_callees[func2][func] = caller\r\n return\r\n\r\n #******************************************************************\r\n # The following functions support actual printing of reports\r\n #******************************************************************\r\n\r\n # Optional \"amount\" is either a line count, or a percentage of lines.\r\n\r\n def eval_print_amount(self, sel, list, msg):\r\n new_list = list\r\n if isinstance(sel, basestring):\r\n try:\r\n rex = re.compile(sel)\r\n except re.error:\r\n msg += \" \\n\" % sel\r\n return new_list, msg\r\n new_list = []\r\n for func in list:\r\n if rex.search(func_std_string(func)):\r\n new_list.append(func)\r\n else:\r\n count = len(list)\r\n if isinstance(sel, float) and 0.0 <= sel < 1.0:\r\n count = int(count * sel + .5)\r\n new_list = list[:count]\r\n elif isinstance(sel, (int, long)) and 0 <= sel < count:\r\n count = sel\r\n new_list = list[:count]\r\n if len(list) != len(new_list):\r\n msg += \" List reduced from %r to %r due to restriction <%r>\\n\" % (\r\n len(list), len(new_list), sel)\r\n\r\n return new_list, msg\r\n\r\n def get_print_list(self, sel_list):\r\n 
width = self.max_name_len\r\n if self.fcn_list:\r\n stat_list = self.fcn_list[:]\r\n msg = \" Ordered by: \" + self.sort_type + '\\n'\r\n else:\r\n stat_list = self.stats.keys()\r\n msg = \" Random listing order was used\\n\"\r\n\r\n for selection in sel_list:\r\n stat_list, msg = self.eval_print_amount(selection, stat_list, msg)\r\n\r\n count = len(stat_list)\r\n\r\n if not stat_list:\r\n return 0, stat_list\r\n print >> self.stream, msg\r\n if count < len(self.stats):\r\n width = 0\r\n for func in stat_list:\r\n if len(func_std_string(func)) > width:\r\n width = len(func_std_string(func))\r\n return width+2, stat_list\r\n\r\n def print_stats(self, *amount):\r\n for filename in self.files:\r\n print >> self.stream, filename\r\n if self.files: print >> self.stream\r\n indent = ' ' * 8\r\n for func in self.top_level:\r\n print >> self.stream, indent, func_get_function_name(func)\r\n\r\n print >> self.stream, indent, self.total_calls, \"function calls\",\r\n if self.total_calls != self.prim_calls:\r\n print >> self.stream, \"(%d primitive calls)\" % self.prim_calls,\r\n print >> self.stream, \"in %.3f seconds\" % self.total_tt\r\n print >> self.stream\r\n width, list = self.get_print_list(amount)\r\n if list:\r\n self.print_title()\r\n for func in list:\r\n self.print_line(func)\r\n print >> self.stream\r\n print >> self.stream\r\n return self\r\n\r\n def print_callees(self, *amount):\r\n width, list = self.get_print_list(amount)\r\n if list:\r\n self.calc_callees()\r\n\r\n self.print_call_heading(width, \"called...\")\r\n for func in list:\r\n if func in self.all_callees:\r\n self.print_call_line(width, func, self.all_callees[func])\r\n else:\r\n self.print_call_line(width, func, {})\r\n print >> self.stream\r\n print >> self.stream\r\n return self\r\n\r\n def print_callers(self, *amount):\r\n width, list = self.get_print_list(amount)\r\n if list:\r\n self.print_call_heading(width, \"was called by...\")\r\n for func in list:\r\n cc, nc, tt, ct, callers = 
self.stats[func]\r\n self.print_call_line(width, func, callers, \"<-\")\r\n print >> self.stream\r\n print >> self.stream\r\n return self\r\n\r\n def print_call_heading(self, name_size, column_title):\r\n print >> self.stream, \"Function \".ljust(name_size) + column_title\r\n # print sub-header only if we have new-style callers\r\n subheader = False\r\n for cc, nc, tt, ct, callers in self.stats.itervalues():\r\n if callers:\r\n value = callers.itervalues().next()\r\n subheader = isinstance(value, tuple)\r\n break\r\n if subheader:\r\n print >> self.stream, \" \"*name_size + \" ncalls tottime cumtime\"\r\n\r\n def print_call_line(self, name_size, source, call_dict, arrow=\"->\"):\r\n print >> self.stream, func_std_string(source).ljust(name_size) + arrow,\r\n if not call_dict:\r\n print >> self.stream\r\n return\r\n clist = call_dict.keys()\r\n clist.sort()\r\n indent = \"\"\r\n for func in clist:\r\n name = func_std_string(func)\r\n value = call_dict[func]\r\n if isinstance(value, tuple):\r\n nc, cc, tt, ct = value\r\n if nc != cc:\r\n substats = '%d/%d' % (nc, cc)\r\n else:\r\n substats = '%d' % (nc,)\r\n substats = '%s %s %s %s' % (substats.rjust(7+2*len(indent)),\r\n f8(tt), f8(ct), name)\r\n left_width = name_size + 1\r\n else:\r\n substats = '%s(%r) %s' % (name, value, f8(self.stats[func][3]))\r\n left_width = name_size + 3\r\n print >> self.stream, indent*left_width + substats\r\n indent = \" \"\r\n\r\n def print_title(self):\r\n print >> self.stream, ' ncalls tottime percall cumtime percall',\r\n print >> self.stream, 'filename:lineno(function)'\r\n\r\n def print_line(self, func): # hack : should print percentages\r\n cc, nc, tt, ct, callers = self.stats[func]\r\n c = str(nc)\r\n if nc != cc:\r\n c = c + '/' + str(cc)\r\n print >> self.stream, c.rjust(9),\r\n print >> self.stream, f8(tt),\r\n if nc == 0:\r\n print >> self.stream, ' '*8,\r\n else:\r\n print >> self.stream, f8(float(tt)/nc),\r\n print >> self.stream, f8(ct),\r\n if cc == 0:\r\n print >> 
self.stream, ' '*8,\r\n else:\r\n print >> self.stream, f8(float(ct)/cc),\r\n print >> self.stream, func_std_string(func)\r\n\r\nclass TupleComp:\r\n \"\"\"This class provides a generic function for comparing any two tuples.\r\n Each instance records a list of tuple-indices (from most significant\r\n to least significant), and sort direction (ascending or decending) for\r\n each tuple-index. The compare functions can then be used as the function\r\n argument to the system sort() function when a list of tuples need to be\r\n sorted in the instances order.\"\"\"\r\n\r\n def __init__(self, comp_select_list):\r\n self.comp_select_list = comp_select_list\r\n\r\n def compare (self, left, right):\r\n for index, direction in self.comp_select_list:\r\n l = left[index]\r\n r = right[index]\r\n if l < r:\r\n return -direction\r\n if l > r:\r\n return direction\r\n return 0\r\n\r\n#**************************************************************************\r\n# func_name is a triple (file:string, line:int, name:string)\r\n\r\ndef func_strip_path(func_name):\r\n filename, line, name = func_name\r\n return os.path.basename(filename), line, name\r\n\r\ndef func_get_function_name(func):\r\n return func[2]\r\n\r\ndef func_std_string(func_name): # match what old profile produced\r\n if func_name[:2] == ('~', 0):\r\n # special case for built-in functions\r\n name = func_name[2]\r\n if name.startswith('<') and name.endswith('>'):\r\n return '{%s}' % name[1:-1]\r\n else:\r\n return name\r\n else:\r\n return \"%s:%d(%s)\" % func_name\r\n\r\n#**************************************************************************\r\n# The following functions combine statists for pairs functions.\r\n# The bulk of the processing involves correctly handling \"call\" lists,\r\n# such as callers and callees.\r\n#**************************************************************************\r\n\r\ndef add_func_stats(target, source):\r\n \"\"\"Add together all the stats for two profile entries.\"\"\"\r\n cc, nc, 
tt, ct, callers = source\r\n t_cc, t_nc, t_tt, t_ct, t_callers = target\r\n return (cc+t_cc, nc+t_nc, tt+t_tt, ct+t_ct,\r\n add_callers(t_callers, callers))\r\n\r\ndef add_callers(target, source):\r\n \"\"\"Combine two caller lists in a single list.\"\"\"\r\n new_callers = {}\r\n for func, caller in target.iteritems():\r\n new_callers[func] = caller\r\n for func, caller in source.iteritems():\r\n if func in new_callers:\r\n if isinstance(caller, tuple):\r\n # format used by cProfile\r\n new_callers[func] = tuple([i[0] + i[1] for i in\r\n zip(caller, new_callers[func])])\r\n else:\r\n # format used by profile\r\n new_callers[func] += caller\r\n else:\r\n new_callers[func] = caller\r\n return new_callers\r\n\r\ndef count_calls(callers):\r\n \"\"\"Sum the caller statistics to get total number of calls received.\"\"\"\r\n nc = 0\r\n for calls in callers.itervalues():\r\n nc += calls\r\n return nc\r\n\r\n#**************************************************************************\r\n# The following functions support printing of reports\r\n#**************************************************************************\r\n\r\ndef f8(x):\r\n return \"%8.3f\" % x\r\n\r\n#**************************************************************************\r\n# Statistics browser added by ESR, April 2001\r\n#**************************************************************************\r\n\r\nif __name__ == '__main__':\r\n import cmd\r\n try:\r\n import readline\r\n except ImportError:\r\n pass\r\n\r\n class ProfileBrowser(cmd.Cmd):\r\n def __init__(self, profile=None):\r\n cmd.Cmd.__init__(self)\r\n self.prompt = \"% \"\r\n self.stats = None\r\n self.stream = sys.stdout\r\n if profile is not None:\r\n self.do_read(profile)\r\n\r\n def generic(self, fn, line):\r\n args = line.split()\r\n processed = []\r\n for term in args:\r\n try:\r\n processed.append(int(term))\r\n continue\r\n except ValueError:\r\n pass\r\n try:\r\n frac = float(term)\r\n if frac > 1 or frac < 0:\r\n print >> self.stream, 
\"Fraction argument must be in [0, 1]\"\r\n continue\r\n processed.append(frac)\r\n continue\r\n except ValueError:\r\n pass\r\n processed.append(term)\r\n if self.stats:\r\n getattr(self.stats, fn)(*processed)\r\n else:\r\n print >> self.stream, \"No statistics object is loaded.\"\r\n return 0\r\n def generic_help(self):\r\n print >> self.stream, \"Arguments may be:\"\r\n print >> self.stream, \"* An integer maximum number of entries to print.\"\r\n print >> self.stream, \"* A decimal fractional number between 0 and 1, controlling\"\r\n print >> self.stream, \" what fraction of selected entries to print.\"\r\n print >> self.stream, \"* A regular expression; only entries with function names\"\r\n print >> self.stream, \" that match it are printed.\"\r\n\r\n def do_add(self, line):\r\n if self.stats:\r\n self.stats.add(line)\r\n else:\r\n print >> self.stream, \"No statistics object is loaded.\"\r\n return 0\r\n def help_add(self):\r\n print >> self.stream, \"Add profile info from given file to current statistics object.\"\r\n\r\n def do_callees(self, line):\r\n return self.generic('print_callees', line)\r\n def help_callees(self):\r\n print >> self.stream, \"Print callees statistics from the current stat object.\"\r\n self.generic_help()\r\n\r\n def do_callers(self, line):\r\n return self.generic('print_callers', line)\r\n def help_callers(self):\r\n print >> self.stream, \"Print callers statistics from the current stat object.\"\r\n self.generic_help()\r\n\r\n def do_EOF(self, line):\r\n print >> self.stream, \"\"\r\n return 1\r\n def help_EOF(self):\r\n print >> self.stream, \"Leave the profile brower.\"\r\n\r\n def do_quit(self, line):\r\n return 1\r\n def help_quit(self):\r\n print >> self.stream, \"Leave the profile brower.\"\r\n\r\n def do_read(self, line):\r\n if line:\r\n try:\r\n self.stats = Stats(line)\r\n except IOError, args:\r\n print >> self.stream, args[1]\r\n return\r\n except Exception as err:\r\n print >> self.stream, err.__class__.__name__ + 
':', err\r\n return\r\n self.prompt = line + \"% \"\r\n elif len(self.prompt) > 2:\r\n line = self.prompt[:-2]\r\n self.do_read(line)\r\n else:\r\n print >> self.stream, \"No statistics object is current -- cannot reload.\"\r\n return 0\r\n def help_read(self):\r\n print >> self.stream, \"Read in profile data from a specified file.\"\r\n print >> self.stream, \"Without argument, reload the current file.\"\r\n\r\n def do_reverse(self, line):\r\n if self.stats:\r\n self.stats.reverse_order()\r\n else:\r\n print >> self.stream, \"No statistics object is loaded.\"\r\n return 0\r\n def help_reverse(self):\r\n print >> self.stream, \"Reverse the sort order of the profiling report.\"\r\n\r\n def do_sort(self, line):\r\n if not self.stats:\r\n print >> self.stream, \"No statistics object is loaded.\"\r\n return\r\n abbrevs = self.stats.get_sort_arg_defs()\r\n if line and all((x in abbrevs) for x in line.split()):\r\n self.stats.sort_stats(*line.split())\r\n else:\r\n print >> self.stream, \"Valid sort keys (unique prefixes are accepted):\"\r\n for (key, value) in Stats.sort_arg_dict_default.iteritems():\r\n print >> self.stream, \"%s -- %s\" % (key, value[1])\r\n return 0\r\n def help_sort(self):\r\n print >> self.stream, \"Sort profile data according to specified keys.\"\r\n print >> self.stream, \"(Typing `sort' without arguments lists valid keys.)\"\r\n def complete_sort(self, text, *args):\r\n return [a for a in Stats.sort_arg_dict_default if a.startswith(text)]\r\n\r\n def do_stats(self, line):\r\n return self.generic('print_stats', line)\r\n def help_stats(self):\r\n print >> self.stream, \"Print statistics from the current stat object.\"\r\n self.generic_help()\r\n\r\n def do_strip(self, line):\r\n if self.stats:\r\n self.stats.strip_dirs()\r\n else:\r\n print >> self.stream, \"No statistics object is loaded.\"\r\n def help_strip(self):\r\n print >> self.stream, \"Strip leading path information from filenames in the report.\"\r\n\r\n def help_help(self):\r\n print 
>> self.stream, \"Show help for a given command.\"\r\n\r\n def postcmd(self, stop, line):\r\n if stop:\r\n return stop\r\n return None\r\n\r\n import sys\r\n if len(sys.argv) > 1:\r\n initprofile = sys.argv[1]\r\n else:\r\n initprofile = None\r\n try:\r\n browser = ProfileBrowser(initprofile)\r\n print >> browser.stream, \"Welcome to the profile statistics browser.\"\r\n browser.cmdloop()\r\n print >> browser.stream, \"Goodbye.\"\r\n except KeyboardInterrupt:\r\n pass\r\n\r\n# That's all, folks.\r\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203581,"cells":{"repo_name":{"kind":"string","value":"kunesj/io3d"},"path":{"kind":"string","value":"tests/sample_data_test.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"1948"},"content":{"kind":"string","value":"#! /usr/bin/python\n# -*- coding: utf-8 -*-\n\n# import funkcí z jiného adresáře\nimport os\nimport os.path\n\nfrom nose.plugins.attrib import attr\npath_to_script = os.path.dirname(os.path.abspath(__file__))\nimport unittest\n\nimport shutil\nimport numpy as np\n\nimport logging\nlogger = logging.getLogger(__name__)\n\n\n# from imtools import qmisc\n# from imtools import misc\n\n\nimport io3d.datasets as sd\n#\n\nclass SampleDataTest(unittest.TestCase):\n interactivetTest = False\n # interactivetTest = True\n def sample_data_test(self):\n sd.download(\"head\", \"delete_head\")\n self.assertTrue(os.path.exists(\"./delete_head/matlab/examples/sample_data/DICOM/digest_article/brain_001.dcm\"))\n shutil.rmtree(\"delete_head\")\n\n\n # import imtools.vesseltree_export as vt\n # yaml_input = os.path.join(path_to_script, \"vt_biodur.yaml\")\n # yaml_output = os.path.join(path_to_script, \"delme_esofspy.txt\")\n # vt.vt2esofspy(yaml_input, yaml_output)\n\n @attr(\"slow\")\n def sample_data_get_all_test(self):\n keys = sd.data_urls.keys()\n sd.download(keys, \"delete_all\")\n 
self.assertTrue(os.path.exists(\"./delete_all/matlab/examples/sample_data/DICOM/digest_article/brain_001.dcm\"))\n shutil.rmtree(\"delete_all\")\n\n def sample_data_batch_test(self):\n tmp_sample_data_path = \"delete_sample_data\"\n if os.path.exists(tmp_sample_data_path):\n shutil.rmtree(tmp_sample_data_path)\n\n sd.download([\"head\", \"exp_small\"], tmp_sample_data_path)\n self.assertTrue(os.path.exists(\"./delete_sample_data/exp_small/seeds/org-liver-orig003-seeds.pklz\"))\n self.assertTrue(os.path.exists(\"./delete_sample_data/matlab/examples/sample_data/DICOM/digest_article/brain_001.dcm\"))\n shutil.rmtree(tmp_sample_data_path)\n\n def generate_liver_test(self):\n liver = sd.generate_abdominal()\n\nif __name__ == \"__main__\":\n # logging.basicConfig()\n unittest.main()\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203582,"cells":{"repo_name":{"kind":"string","value":"audihsu-qci/ONL"},"path":{"kind":"string","value":"components/all/platform-config/powerpc-quanta-lb9a-r0/src/python/onlpc.py"},"copies":{"kind":"string","value":"9"},"size":{"kind":"string","value":"2801"},"content":{"kind":"string","value":"#!/usr/bin/python\n############################################################\n# \n# \n# Copyright 2013, 2014 Big Switch Networks, Inc. \n# \n# Licensed under the Eclipse Public License, Version 1.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n# \n# http://www.eclipse.org/legal/epl-v10.html\n# \n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,\n# either express or implied. 
See the License for the specific\n# language governing permissions and limitations under the\n# License.\n# \n# \n############################################################\n############################################################\n#\n# Platform Driver for the Quanta LB9A\n#\n############################################################\nimport os\nimport struct\nimport time\nimport subprocess\nfrom onl.platform.base import *\nfrom onl.vendor.quanta import *\n\nclass OpenNetworkPlatformImplementation(OpenNetworkPlatformQuanta):\n\n def _eeprom_file(self):\n return \"/sys/devices/e0000000.soc8541/e0003000.i2c/i2c-0/0-0054/eeprom\"\n\n def model(self):\n return \"LB9A\"\n\n def platform(self):\n return \"powerpc-quanta-lb9a-r0\"\n\n def _plat_info_dict(self):\n return {\n platinfo.LAG_COMPONENT_MAX : 8,\n platinfo.PORT_COUNT : 52\n }\n\n def _plat_oid_table(self):\n return {\n oids.TEMP_SENSORS : {\n 'ctemp1' : '.1.3.6.1.4.1.2021.13.16.2.1.3.1',\n 'ctemp2' : '.1.3.6.1.4.1.2021.13.16.2.1.3.5',\n 'ctemp3' : '.1.3.6.1.4.1.2021.13.16.2.1.3.9',\n 'ctemp4' : '.1.3.6.1.4.1.2021.13.16.2.1.3.13',\n 'ctemp5' : '.1.3.6.1.4.1.2021.13.16.2.1.3.17',\n 'pwr-temp1' : '.1.3.6.1.4.1.2021.13.16.2.1.3.41',\n 'pwr-temp2' : '.1.3.6.1.4.1.2021.13.16.2.1.3.44',\n 'pwr-temp3' : '.1.3.6.1.4.1.2021.13.16.2.1.3.46',\n },\n oids.CHASSIS_FAN_SENSORS : {\n 'cfan1' : '.1.3.6.1.4.1.2021.13.16.3.1.3.1',\n 'cfan2' : '.1.3.6.1.4.1.2021.13.16.3.1.3.5',\n 'cfan3' : '.1.3.6.1.4.1.2021.13.16.3.1.3.9',\n 'cfan4' : '.1.3.6.1.4.1.2021.13.16.3.1.3.13',\n },\n oids.POWER_FAN_SENSORS : {\n 'pwr-fan' : '.1.3.6.1.4.1.2021.13.16.3.1.3.33',\n },\n oids.POWER_SENSORS : {\n 'power' : '.1.3.6.1.4.1.2021.13.16.5.1.3.8'\n },\n }\n\n def sys_init(self):\n pass\n\n\nif __name__ == \"__main__\":\n print 
OpenNetworkPlatformImplementation()\n\n"},"license":{"kind":"string","value":"epl-1.0"}}},{"rowIdx":203583,"cells":{"repo_name":{"kind":"string","value":"josejpalacios/knowledge-management"},"path":{"kind":"string","value":"Server/controllers/file_controller.py"},"copies":{"kind":"string","value":"2"},"size":{"kind":"string","value":"3328"},"content":{"kind":"string","value":"from Server import verboseFunc\nfrom . import db, SOCKET_EOF\nimport os\n\nimport os\nimport pickle\nfrom . import db, SUCCESS, FAILURE\n\n@verboseFunc\ndef upload_file(connection, upload_info):\n \"\"\"\n upload a user's file to the database\n\n :param connection: client connection\n :type socket.socket:\n :param upload_info: file info (i.e. name and tags)\n :type upload_info: dict\n \"\"\"\n\n filename = upload_info['fname']\n tags = [tag.strip() for tag in upload_info['tags'].split(',')]\n group_id = int(upload_info['gid'])\n notes = upload_info['notes']\n mod_time = float(upload_info['mod_time'])\n if group_id != 0:\n owner = db.get_username(group_id)\n else:\n owner = 'DUMMY_SHARED_USER'\n db.upload(filename, tags, owner, group_id, notes, mod_time)\n\n if db.__contains__(filename,owner):\n connection.send(FAILURE + \"ERROR: file already exists\".encode())\n else:\n connection.send(SUCCESS)\n prefix = 'FILE_REPO'\n repo_name = db.repo_name(group_id)\n if not repo_name:\n print('REPO NAME ERROR')\n return\n file = open(os.path.normpath(\n os.path.join(\n os.getcwd(),\n prefix,\n repo_name,\n filename)), 'wb')\n print(\"\\tOpened file: \" + filename)\n # date_time_info = connection.recv(1024)\n #\n # print(date_time_info)\n #\n # print(\"File receieved. 
Sending response.\")\n #\n # connection.send(\"SUCCESS\".encode())\n #\n # months = [\"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"Jun\", \"Jul\", \"Aug\", \"Sep\", \"Oct\", \"Nov\", \"Dec\"]\n # file_metadata = date_time_info.decode().split(\"|\")\n #\n # month = months[int(file_metadata[1]) - 1]\n # print(file_metadata[0] + \" \" + month)\n\n while True:\n line = connection.recv(1024)\n print(line)\n if line == SOCKET_EOF:\n break\n else:\n file.write(line)\n\n file.close()\n print(\"Closed File\")\n\n\n@verboseFunc\ndef retrieve_file(connection, filename):\n\n print(\"Inside RetrieveHandler\")\n print(filename.decode())\n file = open(filename, 'rb')\n\n for line in file:\n connection.send(line)\n\n print(\"\\tOpened file: \" + filename.decode())\n\n file.close()\n print(\"Leaving RetrieveHandler\")\n\n\n@verboseFunc\ndef retrieve_repo(connection, query):\n if 'group_ids' not in query:\n connection.send(FAILURE)\n return\n group_ids = query['group_ids']\n group_ids = group_ids.split(',')\n connection.send(SUCCESS)\n result = []\n for group_id in group_ids:\n result.extend(db.retrieve_repo(int(group_id)))\n pickled_repo = pickle.dumps(result)\n connection.send(pickled_repo)\n connection.send(SOCKET_EOF)\n\n\n@verboseFunc\ndef retrieve_personal_repo(connection, uname):\n repo_id = db.get_personal_repo_id(uname)\n retrieve_repo(connection, {'group_id': repo_id})\n\n\n@verboseFunc\ndef delete_file(connection, query):\n print(\"Inside DeleteHandler\")\n if 'filename' not in query or 'group_id' not in query:\n connection.send(connection.send(FAILURE + \" ERROR: missing parameters\".encode()))\n filename = query['filename']\n group_id = query['group_id']\n if db.delete(filename, group_id):\n connection.send(SUCCESS)\n else:\n connection.send(FAILURE + \" ERROR: deletion failed\".encode())\n print(\"Leaving 
DeleteHandler\")\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203584,"cells":{"repo_name":{"kind":"string","value":"Kamik423/uni_plan"},"path":{"kind":"string","value":"plan/plan/lib/python3.4/site-packages/pip/_vendor/__init__.py"},"copies":{"kind":"string","value":"329"},"size":{"kind":"string","value":"4670"},"content":{"kind":"string","value":"\"\"\"\npip._vendor is for vendoring dependencies of pip to prevent needing pip to\ndepend on something external.\n\nFiles inside of pip._vendor should be considered immutable and should only be\nupdated to versions from upstream.\n\"\"\"\nfrom __future__ import absolute_import\n\nimport glob\nimport os.path\nimport sys\n\n# Downstream redistributors which have debundled our dependencies should also\n# patch this value to be true. This will trigger the additional patching\n# to cause things like \"six\" to be available as pip.\nDEBUNDLED = False\n\n# By default, look in this directory for a bunch of .whl files which we will\n# add to the beginning of sys.path before attempting to import anything. This\n# is done to support downstream re-distributors like Debian and Fedora who\n# wish to create their own Wheels for our dependencies to aid in debundling.\nWHEEL_DIR = os.path.abspath(os.path.dirname(__file__))\n\n\n# Define a small helper function to alias our vendored modules to the real ones\n# if the vendored ones do not exist. This idea of this was taken from\n# https://github.com/kennethreitz/requests/pull/2567.\ndef vendored(modulename):\n vendored_name = \"{0}.{1}\".format(__name__, modulename)\n\n try:\n __import__(vendored_name, globals(), locals(), level=0)\n except ImportError:\n try:\n __import__(modulename, globals(), locals(), level=0)\n except ImportError:\n # We can just silently allow import failures to pass here. If we\n # got to this point it means that ``import pip._vendor.whatever``\n # failed and so did ``import whatever``. 
Since we're importing this\n # upfront in an attempt to alias imports, not erroring here will\n # just mean we get a regular import error whenever pip *actually*\n # tries to import one of these modules to use it, which actually\n # gives us a better error message than we would have otherwise\n # gotten.\n pass\n else:\n sys.modules[vendored_name] = sys.modules[modulename]\n base, head = vendored_name.rsplit(\".\", 1)\n setattr(sys.modules[base], head, sys.modules[modulename])\n\n\n# If we're operating in a debundled setup, then we want to go ahead and trigger\n# the aliasing of our vendored libraries as well as looking for wheels to add\n# to our sys.path. This will cause all of this code to be a no-op typically\n# however downstream redistributors can enable it in a consistent way across\n# all platforms.\nif DEBUNDLED:\n # Actually look inside of WHEEL_DIR to find .whl files and add them to the\n # front of our sys.path.\n sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, \"*.whl\")) + sys.path\n\n # Actually alias all of our vendored dependencies.\n vendored(\"cachecontrol\")\n vendored(\"colorama\")\n vendored(\"distlib\")\n vendored(\"distro\")\n vendored(\"html5lib\")\n vendored(\"lockfile\")\n vendored(\"six\")\n vendored(\"six.moves\")\n vendored(\"six.moves.urllib\")\n vendored(\"packaging\")\n vendored(\"packaging.version\")\n vendored(\"packaging.specifiers\")\n vendored(\"pkg_resources\")\n vendored(\"progress\")\n vendored(\"retrying\")\n vendored(\"requests\")\n vendored(\"requests.packages\")\n vendored(\"requests.packages.urllib3\")\n vendored(\"requests.packages.urllib3._collections\")\n vendored(\"requests.packages.urllib3.connection\")\n vendored(\"requests.packages.urllib3.connectionpool\")\n vendored(\"requests.packages.urllib3.contrib\")\n vendored(\"requests.packages.urllib3.contrib.ntlmpool\")\n vendored(\"requests.packages.urllib3.contrib.pyopenssl\")\n vendored(\"requests.packages.urllib3.exceptions\")\n 
vendored(\"requests.packages.urllib3.fields\")\n vendored(\"requests.packages.urllib3.filepost\")\n vendored(\"requests.packages.urllib3.packages\")\n vendored(\"requests.packages.urllib3.packages.ordered_dict\")\n vendored(\"requests.packages.urllib3.packages.six\")\n vendored(\"requests.packages.urllib3.packages.ssl_match_hostname\")\n vendored(\"requests.packages.urllib3.packages.ssl_match_hostname.\"\n \"_implementation\")\n vendored(\"requests.packages.urllib3.poolmanager\")\n vendored(\"requests.packages.urllib3.request\")\n vendored(\"requests.packages.urllib3.response\")\n vendored(\"requests.packages.urllib3.util\")\n vendored(\"requests.packages.urllib3.util.connection\")\n vendored(\"requests.packages.urllib3.util.request\")\n vendored(\"requests.packages.urllib3.util.response\")\n vendored(\"requests.packages.urllib3.util.retry\")\n vendored(\"requests.packages.urllib3.util.ssl_\")\n vendored(\"requests.packages.urllib3.util.timeout\")\n vendored(\"requests.packages.urllib3.util.url\")\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203585,"cells":{"repo_name":{"kind":"string","value":"bx5974/sikuli"},"path":{"kind":"string","value":"sikuli-script/src/main/python/sikuli/Screen.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"2876"},"content":{"kind":"string","value":"# Copyright 2010-2011, Sikuli.org\n# Released under the MIT License.\nfrom org.sikuli.script import Screen as JScreen\nimport inspect\nimport __main__\nimport __builtin__\nimport sys\n\nfrom Region import *\nfrom java.awt import Rectangle\n\nDEBUG=False\n\nclass Screen(Region):\n def __init__(self, id=None):\n if id != None:\n r = JScreen.getBounds(id)\n else:\n r = JScreen().getBounds()\n (x, y, w, h) = (int(r.getX()), int(r.getY()), \\\n int(r.getWidth()), int(r.getHeight()))\n Region.__init__(self, x, y, w, h)\n\n @classmethod\n def getNumberScreens(cls):\n return JScreen.getNumberScreens()\n\n def getBounds(self):\n return 
self.getScreen().getBounds()\n\n def selectRegion(self, msg=None):\n if msg:\n r = self.getScreen().selectRegion(msg)\n else:\n r = self.getScreen().selectRegion()\n if r:\n return Region(r)\n else:\n return None\n\n def showRegion(self, region):\n self.getScreen().showRegion(region)\n\n\n ##\n # Enters the screen-capture mode asking the user to capture a region of \n # the screen if no arguments are given.\n # If any arguments are specified, capture() automatically captures the given\n # region of the screen.\n # @param *args The args can be 4 integers: x, y, w, and h, a Match object or a {@link #Region} object.\n # @return The path to the captured image.\n #\n def capture(self, *args):\n scr = self.getScreen()\n if len(args) == 0:\n simg = scr.userCapture()\n if simg: \n return simg.getFilename()\n else:\n return None\n elif len(args) == 1:\n if __builtin__.type(args[0]) is types.StringType or __builtin__.type(args[0]) is types.UnicodeType:\n simg = scr.userCapture(args[0])\n if simg:\n return simg.getFilename()\n else:\n return None\n else:\n return scr.capture(args[0]).getFilename()\n elif len(args) == 4:\n return scr.capture(args[0], args[1], args[2], args[3]).getFilename()\n else:\n return None\n\n\n def toString(self):\n return self.getScreen().toString()\n\n def _exposeAllMethods(self, mod):\n exclude_list = [ 'class', 'classDictInit', 'clone', 'equals', 'finalize', \n 'getClass', 'hashCode', 'notify', 'notifyAll', \n 'toGlobalCoord', 'toString',\n 'capture', 'selectRegion']\n dict = sys.modules[mod].__dict__\n for name in dir(self):\n if inspect.ismethod(getattr(self,name)) \\\n and name[0] != '_' and name[:7] != 'super__' and \\\n not name in exclude_list:\n if DEBUG: print \"expose \" + name\n dict[name] = eval(\"self.\"+name)\n #__main__.__dict__[name] = 
eval(\"self.\"+name)\n\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203586,"cells":{"repo_name":{"kind":"string","value":"harterj/moose"},"path":{"kind":"string","value":"python/chigger/tests/utils/test_animate.py"},"copies":{"kind":"string","value":"8"},"size":{"kind":"string","value":"1087"},"content":{"kind":"string","value":"#!/usr/bin/env python3\n#pylint: disable=missing-docstring\n#* This file is part of the MOOSE framework\n#* https://www.mooseframework.org\n#*\n#* All rights reserved, see COPYRIGHT for full restrictions\n#* https://github.com/idaholab/moose/blob/master/COPYRIGHT\n#*\n#* Licensed under LGPL 2.1, please see LICENSE for details\n#* https://www.gnu.org/licenses/lgpl-2.1.html\n\nimport unittest\nimport chigger\nimport mock\nimport subprocess\nimport os.path\n\nclass Test(unittest.TestCase):\n @mock.patch('subprocess.call')\n def testAnimate(self, subproc):\n chigger.utils.animate(os.path.join('..', 'adapt', 'gold', 'adapt_*.png'), 'out.gif')\n subproc.assert_called_with(['convert', '-delay', '20', os.path.join('..', 'adapt', 'gold', 'adapt_0.png'),\n '-delay', '20', os.path.join('..', 'adapt', 'gold', 'adapt_4.png'),\n '-delay', '500', os.path.join('..', 'adapt', 'gold', 'adapt_9.png'), '-loop', '0', 'out.gif'])\n\nif __name__ == '__main__':\n unittest.main(module=__name__, verbosity=2)\n"},"license":{"kind":"string","value":"lgpl-2.1"}}},{"rowIdx":203587,"cells":{"repo_name":{"kind":"string","value":"asmuelle/heekscnc"},"path":{"kind":"string","value":"pycnc/wxOutputWindow.py"},"copies":{"kind":"string","value":"25"},"size":{"kind":"string","value":"1840"},"content":{"kind":"string","value":"import wx\nimport HeeksCNC\n\nclass OutputTextCtrl(wx.TextCtrl):\n def __init__(self, parent):\n wx.TextCtrl.__init__(self, parent, style = wx.TE_MULTILINE + wx.TE_DONTWRAP + wx.TE_RICH + wx.TE_RICH2)\n self.painting = False\n #self.Bind(wx.EVT_MOUSE_EVENTS, self.OnMouse)\n #self.Bind(wx.EVT_PAINT, self.OnPaint)\n \n def OnMouse(self, 
event):\n if event.LeftUp():\n pos = self.GetInsertionPoint()\n HeeksCNC.program.nccode.HighlightBlock(pos)\n HeeksCNC.cad.repaint()\n event.Skip()\n \n def OnPaint(self, event):\n dc = wx.PaintDC(self)\n \n if self.painting == False:\n self.painting = True\n size = self.GetClientSize()\n scrollpos = self.GetScrollPos(wx.VERTICAL)\n result0, col0, row0 = self.HitTest(wx.Point(0, 0))\n result1, col1, row1 = self.HitTest(wx.Point(size.x, size.y))\n \n pos0 = self.XYToPosition(0, row0)\n pos1 = self.XYToPosition(1, row1)\n \n HeeksCNC.program.nccode.FormatBlocks(self, pos0, pos1)\n \n self.SetScrollPos(wx.VERTICAL, scrollpos)\n \n self.painting = False\n \n event.Skip()\n \nclass OutputWindow(wx.ScrolledWindow):\n def __init__(self, parent):\n wx.ScrolledWindow.__init__(self, parent, name = 'Output', style = wx.HSCROLL + wx.VSCROLL + wx.NO_FULL_REPAINT_ON_RESIZE)\n self.textCtrl = OutputTextCtrl(self)\n self.textCtrl.SetMaxLength(0)\n self.Bind(wx.EVT_SIZE, self.OnSize)\n self.Resize()\n \n def Resize(self):\n self.textCtrl.SetSize(self.GetClientSize())\n \n def Clear(self):\n self.textCtrl.Clear()\n \n def OnSize(self, event):\n self.Resize()\n event.Skip()\n \n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203588,"cells":{"repo_name":{"kind":"string","value":"sjlehtin/django"},"path":{"kind":"string","value":"django/core/files/locks.py"},"copies":{"kind":"string","value":"725"},"size":{"kind":"string","value":"3516"},"content":{"kind":"string","value":"\"\"\"\nPortable file locking utilities.\n\nBased partially on an example by Jonathan Feignberg in the Python\nCookbook [1] (licensed under the Python Software License) and a ctypes port by\nAnatoly Techtonik for Roundup [2] (license [3]).\n\n[1] http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65203\n[2] http://sourceforge.net/p/roundup/code/ci/default/tree/roundup/backends/portalocker.py\n[3] http://sourceforge.net/p/roundup/code/ci/default/tree/COPYING.txt\n\nExample Usage::\n\n >>> from 
django.core.files import locks\n >>> with open('./file', 'wb') as f:\n ... locks.lock(f, locks.LOCK_EX)\n ... f.write('Django')\n\"\"\"\nimport os\n\n__all__ = ('LOCK_EX', 'LOCK_SH', 'LOCK_NB', 'lock', 'unlock')\n\n\ndef _fd(f):\n \"\"\"Get a filedescriptor from something which could be a file or an fd.\"\"\"\n return f.fileno() if hasattr(f, 'fileno') else f\n\n\nif os.name == 'nt':\n import msvcrt\n from ctypes import (sizeof, c_ulong, c_void_p, c_int64,\n Structure, Union, POINTER, windll, byref)\n from ctypes.wintypes import BOOL, DWORD, HANDLE\n\n LOCK_SH = 0 # the default\n LOCK_NB = 0x1 # LOCKFILE_FAIL_IMMEDIATELY\n LOCK_EX = 0x2 # LOCKFILE_EXCLUSIVE_LOCK\n\n # --- Adapted from the pyserial project ---\n # detect size of ULONG_PTR\n if sizeof(c_ulong) != sizeof(c_void_p):\n ULONG_PTR = c_int64\n else:\n ULONG_PTR = c_ulong\n PVOID = c_void_p\n\n # --- Union inside Structure by stackoverflow:3480240 ---\n class _OFFSET(Structure):\n _fields_ = [\n ('Offset', DWORD),\n ('OffsetHigh', DWORD)]\n\n class _OFFSET_UNION(Union):\n _anonymous_ = ['_offset']\n _fields_ = [\n ('_offset', _OFFSET),\n ('Pointer', PVOID)]\n\n class OVERLAPPED(Structure):\n _anonymous_ = ['_offset_union']\n _fields_ = [\n ('Internal', ULONG_PTR),\n ('InternalHigh', ULONG_PTR),\n ('_offset_union', _OFFSET_UNION),\n ('hEvent', HANDLE)]\n\n LPOVERLAPPED = POINTER(OVERLAPPED)\n\n # --- Define function prototypes for extra safety ---\n LockFileEx = windll.kernel32.LockFileEx\n LockFileEx.restype = BOOL\n LockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, DWORD, LPOVERLAPPED]\n UnlockFileEx = windll.kernel32.UnlockFileEx\n UnlockFileEx.restype = BOOL\n UnlockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, LPOVERLAPPED]\n\n def lock(f, flags):\n hfile = msvcrt.get_osfhandle(_fd(f))\n overlapped = OVERLAPPED()\n ret = LockFileEx(hfile, flags, 0, 0, 0xFFFF0000, byref(overlapped))\n return bool(ret)\n\n def unlock(f):\n hfile = msvcrt.get_osfhandle(_fd(f))\n overlapped = OVERLAPPED()\n ret = 
UnlockFileEx(hfile, 0, 0, 0xFFFF0000, byref(overlapped))\n return bool(ret)\nelse:\n try:\n import fcntl\n LOCK_SH = fcntl.LOCK_SH # shared lock\n LOCK_NB = fcntl.LOCK_NB # non-blocking\n LOCK_EX = fcntl.LOCK_EX\n except (ImportError, AttributeError):\n # File locking is not supported.\n LOCK_EX = LOCK_SH = LOCK_NB = 0\n\n # Dummy functions that don't do anything.\n def lock(f, flags):\n # File is not locked\n return False\n\n def unlock(f):\n # File is unlocked\n return True\n else:\n def lock(f, flags):\n ret = fcntl.flock(_fd(f), flags)\n return (ret == 0)\n\n def unlock(f):\n ret = fcntl.flock(_fd(f), fcntl.LOCK_UN)\n return (ret == 0)\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203589,"cells":{"repo_name":{"kind":"string","value":"hkawasaki/kawasaki-aio8-2"},"path":{"kind":"string","value":"cms/djangoapps/contentstore/views/tests/test_access.py"},"copies":{"kind":"string","value":"16"},"size":{"kind":"string","value":"2238"},"content":{"kind":"string","value":"\"\"\"\nTests access.py\n\"\"\"\nfrom django.test import TestCase\nfrom django.contrib.auth.models import User\nfrom xmodule.modulestore import Location\nfrom xmodule.modulestore.locator import CourseLocator\n\nfrom student.roles import CourseInstructorRole, CourseStaffRole\nfrom student.tests.factories import AdminFactory\nfrom student.auth import add_users\nfrom contentstore.views.access import get_user_role\n\n\nclass RolesTest(TestCase):\n \"\"\"\n Tests for user roles.\n \"\"\"\n def setUp(self):\n \"\"\" Test case setup \"\"\"\n self.global_admin = AdminFactory()\n self.instructor = User.objects.create_user('testinstructor', 'testinstructor+courses@edx.org', 'foo')\n self.staff = User.objects.create_user('teststaff', 'teststaff+courses@edx.org', 'foo')\n self.location = Location('i4x', 'mitX', '101', 'course', 'test')\n self.locator = CourseLocator(url='edx://mitX.101.test')\n\n def test_get_user_role_instructor(self):\n \"\"\"\n Verifies if user is instructor.\n \"\"\"\n 
add_users(self.global_admin, CourseInstructorRole(self.location), self.instructor)\n self.assertEqual(\n 'instructor',\n get_user_role(self.instructor, self.location, self.location.course_id)\n )\n\n def test_get_user_role_instructor_locator(self):\n \"\"\"\n Verifies if user is instructor, using a CourseLocator.\n \"\"\"\n add_users(self.global_admin, CourseInstructorRole(self.locator), self.instructor)\n self.assertEqual(\n 'instructor',\n get_user_role(self.instructor, self.locator)\n )\n\n def test_get_user_role_staff(self):\n \"\"\"\n Verifies if user is staff.\n \"\"\"\n add_users(self.global_admin, CourseStaffRole(self.location), self.staff)\n self.assertEqual(\n 'staff',\n get_user_role(self.staff, self.location, self.location.course_id)\n )\n\n def test_get_user_role_staff_locator(self):\n \"\"\"\n Verifies if user is staff, using a CourseLocator.\n \"\"\"\n add_users(self.global_admin, CourseStaffRole(self.locator), self.staff)\n self.assertEqual(\n 'staff',\n get_user_role(self.staff, self.locator)\n )\n"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203590,"cells":{"repo_name":{"kind":"string","value":"nvoron23/scikit-learn"},"path":{"kind":"string","value":"sklearn/dummy.py"},"copies":{"kind":"string","value":"208"},"size":{"kind":"string","value":"17370"},"content":{"kind":"string","value":"# Author: Mathieu Blondel \n# Arnaud Joly \n# Maheshakya Wijewardena \n# License: BSD 3 clause\nfrom __future__ import division\n\nimport warnings\nimport numpy as np\nimport scipy.sparse as sp\n\nfrom .base import BaseEstimator, ClassifierMixin, RegressorMixin\nfrom .utils import check_random_state\nfrom .utils.validation import check_array\nfrom .utils.validation import check_consistent_length\nfrom .utils.random import random_choice_csc\nfrom .utils.stats import _weighted_percentile\nfrom .utils.multiclass import class_distribution\n\n\nclass DummyClassifier(BaseEstimator, ClassifierMixin):\n \"\"\"\n DummyClassifier is a classifier that makes 
predictions using simple rules.\n\n This classifier is useful as a simple baseline to compare with other\n (real) classifiers. Do not use it for real problems.\n\n Read more in the :ref:`User Guide `.\n\n Parameters\n ----------\n strategy : str\n Strategy to use to generate predictions.\n\n * \"stratified\": generates predictions by respecting the training\n set's class distribution.\n * \"most_frequent\": always predicts the most frequent label in the\n training set.\n * \"prior\": always predicts the class that maximizes the class prior\n (like \"most_frequent\") and ``predict_proba`` returns the class prior.\n * \"uniform\": generates predictions uniformly at random.\n * \"constant\": always predicts a constant label that is provided by\n the user. This is useful for metrics that evaluate a non-majority\n class\n\n random_state : int seed, RandomState instance, or None (default)\n The seed of the pseudo random number generator to use.\n\n constant : int or str or array of shape = [n_outputs]\n The explicit constant as predicted by the \"constant\" strategy. 
This\n parameter is useful only for the \"constant\" strategy.\n\n Attributes\n ----------\n classes_ : array or list of array of shape = [n_classes]\n Class labels for each output.\n\n n_classes_ : array or list of array of shape = [n_classes]\n Number of label for each output.\n\n class_prior_ : array or list of array of shape = [n_classes]\n Probability of each class for each output.\n\n n_outputs_ : int,\n Number of outputs.\n\n outputs_2d_ : bool,\n True if the output at fit is 2d, else false.\n\n sparse_output_ : bool,\n True if the array returned from predict is to be in sparse CSC format.\n Is automatically set to True if the input y is passed in sparse format.\n\n \"\"\"\n\n def __init__(self, strategy=\"stratified\", random_state=None,\n constant=None):\n self.strategy = strategy\n self.random_state = random_state\n self.constant = constant\n\n def fit(self, X, y, sample_weight=None):\n \"\"\"Fit the random classifier.\n\n Parameters\n ----------\n X : {array-like, sparse matrix}, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n\n y : array-like, shape = [n_samples] or [n_samples, n_outputs]\n Target values.\n\n sample_weight : array-like of shape = [n_samples], optional\n Sample weights.\n\n Returns\n -------\n self : object\n Returns self.\n \"\"\"\n if self.strategy not in (\"most_frequent\", \"stratified\", \"uniform\",\n \"constant\", \"prior\"):\n raise ValueError(\"Unknown strategy type.\")\n\n if self.strategy == \"uniform\" and sp.issparse(y):\n y = y.toarray()\n warnings.warn('A local copy of the target data has been converted '\n 'to a numpy array. 
Predicting on sparse target data '\n 'with the uniform strategy would not save memory '\n 'and would be slower.',\n UserWarning)\n\n self.sparse_output_ = sp.issparse(y)\n\n if not self.sparse_output_:\n y = np.atleast_1d(y)\n\n self.output_2d_ = y.ndim == 2\n if y.ndim == 1:\n y = np.reshape(y, (-1, 1))\n\n self.n_outputs_ = y.shape[1]\n\n if self.strategy == \"constant\":\n if self.constant is None:\n raise ValueError(\"Constant target value has to be specified \"\n \"when the constant strategy is used.\")\n else:\n constant = np.reshape(np.atleast_1d(self.constant), (-1, 1))\n if constant.shape[0] != self.n_outputs_:\n raise ValueError(\"Constant target value should have \"\n \"shape (%d, 1).\" % self.n_outputs_)\n\n (self.classes_,\n self.n_classes_,\n self.class_prior_) = class_distribution(y, sample_weight)\n\n if (self.strategy == \"constant\" and\n any(constant[k] not in self.classes_[k]\n for k in range(self.n_outputs_))):\n # Checking in case of constant strategy if the constant\n # provided by the user is in y.\n raise ValueError(\"The constant target value must be \"\n \"present in training data\")\n\n if self.n_outputs_ == 1 and not self.output_2d_:\n self.n_classes_ = self.n_classes_[0]\n self.classes_ = self.classes_[0]\n self.class_prior_ = self.class_prior_[0]\n\n return self\n\n def predict(self, X):\n \"\"\"Perform classification on test vectors X.\n\n Parameters\n ----------\n X : {array-like, sparse matrix}, shape = [n_samples, n_features]\n Input vectors, where n_samples is the number of samples\n and n_features is the number of features.\n\n Returns\n -------\n y : array, shape = [n_samples] or [n_samples, n_outputs]\n Predicted target values for X.\n \"\"\"\n if not hasattr(self, \"classes_\"):\n raise ValueError(\"DummyClassifier not fitted.\")\n\n X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])\n # numpy random_state expects Python int and not long as size argument\n # under Windows\n n_samples = int(X.shape[0])\n rs = 
check_random_state(self.random_state)\n\n n_classes_ = self.n_classes_\n classes_ = self.classes_\n class_prior_ = self.class_prior_\n constant = self.constant\n if self.n_outputs_ == 1:\n # Get same type even for self.n_outputs_ == 1\n n_classes_ = [n_classes_]\n classes_ = [classes_]\n class_prior_ = [class_prior_]\n constant = [constant]\n # Compute probability only once\n if self.strategy == \"stratified\":\n proba = self.predict_proba(X)\n if self.n_outputs_ == 1:\n proba = [proba]\n\n if self.sparse_output_:\n class_prob = None\n if self.strategy in (\"most_frequent\", \"prior\"):\n classes_ = [np.array([cp.argmax()]) for cp in class_prior_]\n\n elif self.strategy == \"stratified\":\n class_prob = class_prior_\n\n elif self.strategy == \"uniform\":\n raise ValueError(\"Sparse target prediction is not \"\n \"supported with the uniform strategy\")\n\n elif self.strategy == \"constant\":\n classes_ = [np.array([c]) for c in constant]\n\n y = random_choice_csc(n_samples, classes_, class_prob,\n self.random_state)\n else:\n if self.strategy in (\"most_frequent\", \"prior\"):\n y = np.tile([classes_[k][class_prior_[k].argmax()] for\n k in range(self.n_outputs_)], [n_samples, 1])\n\n elif self.strategy == \"stratified\":\n y = np.vstack(classes_[k][proba[k].argmax(axis=1)] for\n k in range(self.n_outputs_)).T\n\n elif self.strategy == \"uniform\":\n ret = [classes_[k][rs.randint(n_classes_[k], size=n_samples)]\n for k in range(self.n_outputs_)]\n y = np.vstack(ret).T\n\n elif self.strategy == \"constant\":\n y = np.tile(self.constant, (n_samples, 1))\n\n if self.n_outputs_ == 1 and not self.output_2d_:\n y = np.ravel(y)\n\n return y\n\n def predict_proba(self, X):\n \"\"\"\n Return probability estimates for the test vectors X.\n\n Parameters\n ----------\n X : {array-like, sparse matrix}, shape = [n_samples, n_features]\n Input vectors, where n_samples is the number of samples\n and n_features is the number of features.\n\n Returns\n -------\n P : array-like or list 
of array-lke of shape = [n_samples, n_classes]\n Returns the probability of the sample for each class in\n the model, where classes are ordered arithmetically, for each\n output.\n \"\"\"\n if not hasattr(self, \"classes_\"):\n raise ValueError(\"DummyClassifier not fitted.\")\n\n X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])\n # numpy random_state expects Python int and not long as size argument\n # under Windows\n n_samples = int(X.shape[0])\n rs = check_random_state(self.random_state)\n\n n_classes_ = self.n_classes_\n classes_ = self.classes_\n class_prior_ = self.class_prior_\n constant = self.constant\n if self.n_outputs_ == 1 and not self.output_2d_:\n # Get same type even for self.n_outputs_ == 1\n n_classes_ = [n_classes_]\n classes_ = [classes_]\n class_prior_ = [class_prior_]\n constant = [constant]\n\n P = []\n for k in range(self.n_outputs_):\n if self.strategy == \"most_frequent\":\n ind = np.ones(n_samples, dtype=int) * class_prior_[k].argmax()\n out = np.zeros((n_samples, n_classes_[k]), dtype=np.float64)\n out[:, ind] = 1.0\n elif self.strategy == \"prior\":\n out = np.ones((n_samples, 1)) * class_prior_[k]\n\n elif self.strategy == \"stratified\":\n out = rs.multinomial(1, class_prior_[k], size=n_samples)\n\n elif self.strategy == \"uniform\":\n out = np.ones((n_samples, n_classes_[k]), dtype=np.float64)\n out /= n_classes_[k]\n\n elif self.strategy == \"constant\":\n ind = np.where(classes_[k] == constant[k])\n out = np.zeros((n_samples, n_classes_[k]), dtype=np.float64)\n out[:, ind] = 1.0\n\n P.append(out)\n\n if self.n_outputs_ == 1 and not self.output_2d_:\n P = P[0]\n\n return P\n\n def predict_log_proba(self, X):\n \"\"\"\n Return log probability estimates for the test vectors X.\n\n Parameters\n ----------\n X : {array-like, sparse matrix}, shape = [n_samples, n_features]\n Input vectors, where n_samples is the number of samples\n and n_features is the number of features.\n\n Returns\n -------\n P : array-like or list of 
array-like of shape = [n_samples, n_classes]\n Returns the log probability of the sample for each class in\n the model, where classes are ordered arithmetically for each\n output.\n \"\"\"\n proba = self.predict_proba(X)\n if self.n_outputs_ == 1:\n return np.log(proba)\n else:\n return [np.log(p) for p in proba]\n\n\nclass DummyRegressor(BaseEstimator, RegressorMixin):\n \"\"\"\n DummyRegressor is a regressor that makes predictions using\n simple rules.\n\n This regressor is useful as a simple baseline to compare with other\n (real) regressors. Do not use it for real problems.\n\n Read more in the :ref:`User Guide `.\n\n Parameters\n ----------\n strategy : str\n Strategy to use to generate predictions.\n\n * \"mean\": always predicts the mean of the training set\n * \"median\": always predicts the median of the training set\n * \"quantile\": always predicts a specified quantile of the training set,\n provided with the quantile parameter.\n * \"constant\": always predicts a constant value that is provided by\n the user.\n\n constant : int or float or array of shape = [n_outputs]\n The explicit constant as predicted by the \"constant\" strategy. This\n parameter is useful only for the \"constant\" strategy.\n\n quantile : float in [0.0, 1.0]\n The quantile to predict using the \"quantile\" strategy. 
A quantile of\n 0.5 corresponds to the median, while 0.0 to the minimum and 1.0 to the\n maximum.\n\n Attributes\n ----------\n constant_ : float or array of shape [n_outputs]\n Mean or median or quantile of the training targets or constant value\n given by the user.\n\n n_outputs_ : int,\n Number of outputs.\n\n outputs_2d_ : bool,\n True if the output at fit is 2d, else false.\n \"\"\"\n\n def __init__(self, strategy=\"mean\", constant=None, quantile=None):\n self.strategy = strategy\n self.constant = constant\n self.quantile = quantile\n\n def fit(self, X, y, sample_weight=None):\n \"\"\"Fit the random regressor.\n\n Parameters\n ----------\n X : {array-like, sparse matrix}, shape = [n_samples, n_features]\n Training vectors, where n_samples is the number of samples\n and n_features is the number of features.\n\n y : array-like, shape = [n_samples] or [n_samples, n_outputs]\n Target values.\n\n sample_weight : array-like of shape = [n_samples], optional\n Sample weights.\n\n Returns\n -------\n self : object\n Returns self.\n \"\"\"\n\n if self.strategy not in (\"mean\", \"median\", \"quantile\", \"constant\"):\n raise ValueError(\"Unknown strategy type: %s, expected \"\n \"'mean', 'median', 'quantile' or 'constant'\"\n % self.strategy)\n\n y = check_array(y, ensure_2d=False)\n if len(y) == 0:\n raise ValueError(\"y must not be empty.\")\n\n self.output_2d_ = y.ndim == 2\n if y.ndim == 1:\n y = np.reshape(y, (-1, 1))\n self.n_outputs_ = y.shape[1]\n\n check_consistent_length(X, y, sample_weight)\n\n if self.strategy == \"mean\":\n self.constant_ = np.average(y, axis=0, weights=sample_weight)\n\n elif self.strategy == \"median\":\n if sample_weight is None:\n self.constant_ = np.median(y, axis=0)\n else:\n self.constant_ = [_weighted_percentile(y[:, k], sample_weight,\n percentile=50.)\n for k in range(self.n_outputs_)]\n\n elif self.strategy == \"quantile\":\n if self.quantile is None or not np.isscalar(self.quantile):\n raise ValueError(\"Quantile must be a 
scalar in the range \"\n \"[0.0, 1.0], but got %s.\" % self.quantile)\n\n percentile = self.quantile * 100.0\n if sample_weight is None:\n self.constant_ = np.percentile(y, axis=0, q=percentile)\n else:\n self.constant_ = [_weighted_percentile(y[:, k], sample_weight,\n percentile=percentile)\n for k in range(self.n_outputs_)]\n\n elif self.strategy == \"constant\":\n if self.constant is None:\n raise TypeError(\"Constant target value has to be specified \"\n \"when the constant strategy is used.\")\n\n self.constant = check_array(self.constant,\n accept_sparse=['csr', 'csc', 'coo'],\n ensure_2d=False, ensure_min_samples=0)\n\n if self.output_2d_ and self.constant.shape[0] != y.shape[1]:\n raise ValueError(\n \"Constant target value should have \"\n \"shape (%d, 1).\" % y.shape[1])\n\n self.constant_ = self.constant\n\n self.constant_ = np.reshape(self.constant_, (1, -1))\n return self\n\n def predict(self, X):\n \"\"\"\n Perform classification on test vectors X.\n\n Parameters\n ----------\n X : {array-like, sparse matrix}, shape = [n_samples, n_features]\n Input vectors, where n_samples is the number of samples\n and n_features is the number of features.\n\n Returns\n -------\n y : array, shape = [n_samples] or [n_samples, n_outputs]\n Predicted target values for X.\n \"\"\"\n if not hasattr(self, \"constant_\"):\n raise ValueError(\"DummyRegressor not fitted.\")\n\n X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])\n n_samples = X.shape[0]\n\n y = np.ones((n_samples, 1)) * self.constant_\n\n if self.n_outputs_ == 1 and not self.output_2d_:\n y = np.ravel(y)\n\n return 
y\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203591,"cells":{"repo_name":{"kind":"string","value":"danlurie/C-PAC"},"path":{"kind":"string","value":"CPAC/qc/blue.py"},"copies":{"kind":"string","value":"5"},"size":{"kind":"string","value":"2048"},"content":{"kind":"string","value":"#007ffe\n#007ffd\n#007efc\n#007efb\n#007dfa\n#007df9\n#007cf8\n#007cf7\n#007bf6\n#007bf5\n#007af4\n#007af3\n#0079f2\n#0079f1\n#0078f0\n#0078ef\n#0077ee\n#0077ed\n#0076ec\n#0076eb\n#0075ea\n#0075e9\n#0074e8\n#0074e7\n#0073e6\n#0073e5\n#0072e4\n#0072e3\n#0071e2\n#0071e1\n#0070e0\n#0070df\n#006fde\n#006fdd\n#006edc\n#006edb\n#006dda\n#006dd9\n#006cd8\n#006cd7\n#006bd6\n#006bd5\n#006ad4\n#006ad3\n#0069d2\n#0069d1\n#0068d0\n#0068cf\n#0067ce\n#0067cd\n#0066cc\n#0066cb\n#0065ca\n#0065c9\n#0064c8\n#0064c7\n#0063c6\n#0063c5\n#0062c4\n#0062c3\n#0061c2\n#0061c1\n#0060c0\n#0060bf\n#005fbf\n#005fbe\n#005ebd\n#005ebc\n#005dbb\n#005dba\n#005cb9\n#005cb8\n#005bb7\n#005bb6\n#005ab5\n#005ab4\n#0059b3\n#0059b2\n#0058b1\n#0058b0\n#0057af\n#0057ae\n#0056ad\n#0056ac\n#0055ab\n#0055aa\n#0054a9\n#0054a8\n#0053a7\n#0053a6\n#0052a5\n#0052a4\n#0051a3\n#0051a2\n#0050a1\n#0050a0\n#004f9f\n#004f9e\n#004e9d\n#004e9c\n#004d9b\n#004d9a\n#004c99\n#004c98\n#004b97\n#004b96\n#004a95\n#004a94\n#004993\n#004992\n#004891\n#004890\n#00478f\n#00478e\n#00468d\n#00468c\n#00458b\n#00458a\n#004489\n#004488\n#004387\n#004386\n#004285\n#004284\n#004183\n#004182\n#004081\n#004080\n#003f7f\n#003f7e\n#003e7d\n#003e7c\n#003d7b\n#003d7a\n#003c79\n#003c78\n#003b77\n#003b76\n#003a75\n#003a74\n#003973\n#003972\n#003871\n#003870\n#00376f\n#00376e\n#00366d\n#00366c\n#00356b\n#00356a\n#003469\n#003468\n#003367\n#003366\n#003265\n#003264\n#003163\n#003162\n#003061\n#003060\n#002f5f\n#002f5e\n#002e5d\n#002e5c\n#002d5b\n#002d5a\n#002c59\n#002c58\n#002b57\n#002b56\n#002a55\n#002a54\n#002953\n#002952\n#002851\n#002850\n#00274f\n#00274e\n#00264d\n#00264c\n#00254b\n#00254a\n#002449\n#002448\n#002347\n#002346\n#002245\n#002244\n#0
02143\n#002142\n#002041\n#002040\n#001f40\n#001f3f\n#001e3e\n#001e3d\n#001d3c\n#001d3b\n#001c3a\n#001c39\n#001b38\n#001b37\n#001a36\n#001a35\n#001934\n#001933\n#001832\n#001831\n#001730\n#00172f\n#00162e\n#00162d\n#00152c\n#00152b\n#00142a\n#001429\n#001328\n#001327\n#001226\n#001225\n#001124\n#001123\n#001022\n#001021\n#000f20\n#000f1f\n#000e1e\n#000e1d\n#000d1c\n#000d1b\n#000c1a\n#000c19\n#000b18\n#000b17\n#000a16\n#000a15\n#000914\n#000913\n#000812\n#000811\n#000710\n#00070f\n#00060e\n#00060d\n#00050c\n#00050b\n#00040a\n#000409\n#000308\n#000307\n#000206\n#000205\n#000104\n#000103\n#000002\n#000001\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203592,"cells":{"repo_name":{"kind":"string","value":"BigFatNoob-NCSU/x9115george2"},"path":{"kind":"string","value":"hw/code/6/models/kursawe.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"1196"},"content":{"kind":"string","value":"from __future__ import print_function, division\n__author__ = 'panzer'\nfrom model import *\nfrom math import sqrt, sin\n\nclass Kursawe(Model):\n def __init__(self):\n Model.__init__(self)\n self.__name__ = Kursawe.__name__\n self.decisions=[]\n self.decisions.append(Decision(\"x1\", -5, 5))\n self.decisions.append(Decision(\"x2\", -5, 5))\n self.decisions.append(Decision(\"x3\", -5, 5))\n self.objectives = []\n self.objectives.append(Objective(\"f1\", -20, -4.86, to_minimize=True))\n self.objectives.append(Objective(\"f2\", -9.63, 22.90, to_minimize=True))\n\n def evaluate(self, one):\n return [Kursawe.f1(one), Kursawe.f2(one)]\n\n @staticmethod\n def f1(ds):\n total = 0\n for i in range(len(ds)-1):\n e = -0.2 * sqrt(ds[i]**2 + ds[i+1]**2)\n total+= -10*exp(e)\n return total\n\n @staticmethod\n def f2(ds):\n total = 0\n for i in range(len(ds)):\n total+= abs(ds[i])**0.8 + 5*sin(ds[i]**3)\n return total\n\n @staticmethod\n def get_extreme_objectives():\n o = Kursawe()\n f1s = []\n f2s = []\n for one in o.all_inputs():\n 
f1s.append(Kursawe.f1(one))\n f2s.append(Kursawe.f2(one))\n print(min(f1s), max(f1s))\n print(min(f2s), max(f2s))\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203593,"cells":{"repo_name":{"kind":"string","value":"chrwu/PyGithub"},"path":{"kind":"string","value":"github/PullRequestPart.py"},"copies":{"kind":"string","value":"74"},"size":{"kind":"string","value":"3771"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\n# ########################## Copyrights and license ############################\n# #\n# Copyright 2012 Vincent Jacques #\n# Copyright 2012 Zearin #\n# Copyright 2013 AKFish #\n# Copyright 2013 Vincent Jacques #\n# #\n# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #\n# #\n# PyGithub is free software: you can redistribute it and/or modify it under #\n# the terms of the GNU Lesser General Public License as published by the Free #\n# Software Foundation, either version 3 of the License, or (at your option) #\n# any later version. #\n# #\n# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #\n# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #\n# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #\n# details. #\n# #\n# You should have received a copy of the GNU Lesser General Public License #\n# along with PyGithub. If not, see . 
#\n# #\n# ##############################################################################\n\nimport github.GithubObject\n\nimport github.Repository\nimport github.NamedUser\n\n\nclass PullRequestPart(github.GithubObject.NonCompletableGithubObject):\n \"\"\"\n This class represents PullRequestParts as returned for example by http://developer.github.com/v3/todo\n \"\"\"\n\n @property\n def label(self):\n \"\"\"\n :type: string\n \"\"\"\n return self._label.value\n\n @property\n def ref(self):\n \"\"\"\n :type: string\n \"\"\"\n return self._ref.value\n\n @property\n def repo(self):\n \"\"\"\n :type: :class:`github.Repository.Repository`\n \"\"\"\n return self._repo.value\n\n @property\n def sha(self):\n \"\"\"\n :type: string\n \"\"\"\n return self._sha.value\n\n @property\n def user(self):\n \"\"\"\n :type: :class:`github.NamedUser.NamedUser`\n \"\"\"\n return self._user.value\n\n def _initAttributes(self):\n self._label = github.GithubObject.NotSet\n self._ref = github.GithubObject.NotSet\n self._repo = github.GithubObject.NotSet\n self._sha = github.GithubObject.NotSet\n self._user = github.GithubObject.NotSet\n\n def _useAttributes(self, attributes):\n if \"label\" in attributes: # pragma no branch\n self._label = self._makeStringAttribute(attributes[\"label\"])\n if \"ref\" in attributes: # pragma no branch\n self._ref = self._makeStringAttribute(attributes[\"ref\"])\n if \"repo\" in attributes: # pragma no branch\n self._repo = self._makeClassAttribute(github.Repository.Repository, attributes[\"repo\"])\n if \"sha\" in attributes: # pragma no branch\n self._sha = self._makeStringAttribute(attributes[\"sha\"])\n if \"user\" in attributes: # pragma no branch\n self._user = self._makeClassAttribute(github.NamedUser.NamedUser, 
attributes[\"user\"])\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203594,"cells":{"repo_name":{"kind":"string","value":"cxxgtxy/tensorflow"},"path":{"kind":"string","value":"tensorflow/contrib/slim/python/slim/model_analyzer.py"},"copies":{"kind":"string","value":"166"},"size":{"kind":"string","value":"3509"},"content":{"kind":"string","value":"# Copyright 2016 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Tools for analyzing the operations and variables in a TensorFlow graph.\n\nTo analyze the operations in a graph:\n\n images, labels = LoadData(...)\n predictions = MyModel(images)\n\n slim.model_analyzer.analyze_ops(tf.get_default_graph(), print_info=True)\n\nTo analyze the model variables in a graph:\n\n variables = tf.model_variables()\n slim.model_analyzer.analyze_vars(variables, print_info=False)\n\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\n\ndef tensor_description(var):\n \"\"\"Returns a compact and informative string about a tensor.\n\n Args:\n var: A tensor variable.\n\n Returns:\n a string with type and size, e.g.: (float32 1x8x8x1024).\n \"\"\"\n description = '(' + str(var.dtype.name) + ' '\n sizes = var.get_shape()\n for i, size in enumerate(sizes):\n description += str(size)\n if i < len(sizes) - 1:\n description += 'x'\n 
description += ')'\n return description\n\n\ndef analyze_ops(graph, print_info=False):\n \"\"\"Compute the estimated size of the ops.outputs in the graph.\n\n Args:\n graph: the graph containing the operations.\n print_info: Optional, if true print ops and their outputs.\n\n Returns:\n total size of the ops.outputs\n \"\"\"\n if print_info:\n print('---------')\n print('Operations: name -> (type shapes) [size]')\n print('---------')\n total_size = 0\n for op in graph.get_operations():\n op_size = 0\n shapes = []\n for output in op.outputs:\n # if output.num_elements() is None or [] assume size 0.\n output_size = output.get_shape().num_elements() or 0\n if output.get_shape():\n shapes.append(tensor_description(output))\n op_size += output_size\n if print_info:\n print(op.name, '\\t->', ', '.join(shapes), '[' + str(op_size) + ']')\n total_size += op_size\n return total_size\n\n\ndef analyze_vars(variables, print_info=False):\n \"\"\"Prints the names and shapes of the variables.\n\n Args:\n variables: list of variables, for example tf.global_variables().\n print_info: Optional, if true print variables and their shape.\n\n Returns:\n (total size of the variables, total bytes of the variables)\n \"\"\"\n if print_info:\n print('---------')\n print('Variables: name (type shape) [size]')\n print('---------')\n total_size = 0\n total_bytes = 0\n for var in variables:\n # if var.num_elements() is None or [] assume size 0.\n var_size = var.get_shape().num_elements() or 0\n var_bytes = var_size * var.dtype.size\n total_size += var_size\n total_bytes += var_bytes\n if print_info:\n print(var.name, tensor_description(var), '[%d, bytes: %d]' %\n (var_size, var_bytes))\n if print_info:\n print('Total size of variables: %d' % total_size)\n print('Total bytes of variables: %d' % total_bytes)\n return total_size, 
total_bytes\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203595,"cells":{"repo_name":{"kind":"string","value":"MartinHjelmare/home-assistant"},"path":{"kind":"string","value":"homeassistant/components/mqtt/camera.py"},"copies":{"kind":"string","value":"7"},"size":{"kind":"string","value":"4190"},"content":{"kind":"string","value":"\"\"\"Camera that loads a picture from an MQTT topic.\"\"\"\n\nimport asyncio\nimport logging\n\nimport voluptuous as vol\n\nfrom homeassistant.components import camera, mqtt\nfrom homeassistant.components.camera import PLATFORM_SCHEMA, Camera\nfrom homeassistant.const import CONF_NAME\nfrom homeassistant.core import callback\nfrom homeassistant.helpers import config_validation as cv\nfrom homeassistant.helpers.dispatcher import async_dispatcher_connect\nfrom homeassistant.helpers.typing import ConfigType, HomeAssistantType\n\nfrom . import (\n ATTR_DISCOVERY_HASH, CONF_UNIQUE_ID, MqttDiscoveryUpdate, subscription)\nfrom .discovery import MQTT_DISCOVERY_NEW, clear_discovery_hash\n\n_LOGGER = logging.getLogger(__name__)\n\nCONF_TOPIC = 'topic'\nDEFAULT_NAME = 'MQTT Camera'\n\nPLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({\n vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,\n vol.Required(CONF_TOPIC): mqtt.valid_subscribe_topic,\n vol.Optional(CONF_UNIQUE_ID): cv.string,\n})\n\n\nasync def async_setup_platform(hass: HomeAssistantType, config: ConfigType,\n async_add_entities, discovery_info=None):\n \"\"\"Set up MQTT camera through configuration.yaml.\"\"\"\n await _async_setup_entity(config, async_add_entities)\n\n\nasync def async_setup_entry(hass, config_entry, async_add_entities):\n \"\"\"Set up MQTT camera dynamically through MQTT discovery.\"\"\"\n async def async_discover(discovery_payload):\n \"\"\"Discover and add a MQTT camera.\"\"\"\n try:\n discovery_hash = discovery_payload.pop(ATTR_DISCOVERY_HASH)\n config = PLATFORM_SCHEMA(discovery_payload)\n await _async_setup_entity(config, async_add_entities,\n 
discovery_hash)\n except Exception:\n if discovery_hash:\n clear_discovery_hash(hass, discovery_hash)\n raise\n\n async_dispatcher_connect(\n hass, MQTT_DISCOVERY_NEW.format(camera.DOMAIN, 'mqtt'),\n async_discover)\n\n\nasync def _async_setup_entity(config, async_add_entities, discovery_hash=None):\n \"\"\"Set up the MQTT Camera.\"\"\"\n async_add_entities([MqttCamera(config, discovery_hash)])\n\n\nclass MqttCamera(MqttDiscoveryUpdate, Camera):\n \"\"\"representation of a MQTT camera.\"\"\"\n\n def __init__(self, config, discovery_hash):\n \"\"\"Initialize the MQTT Camera.\"\"\"\n self._config = config\n self._unique_id = config.get(CONF_UNIQUE_ID)\n self._sub_state = None\n\n self._qos = 0\n self._last_image = None\n\n Camera.__init__(self)\n MqttDiscoveryUpdate.__init__(self, discovery_hash,\n self.discovery_update)\n\n async def async_added_to_hass(self):\n \"\"\"Subscribe MQTT events.\"\"\"\n await super().async_added_to_hass()\n await self._subscribe_topics()\n\n async def discovery_update(self, discovery_payload):\n \"\"\"Handle updated discovery message.\"\"\"\n config = PLATFORM_SCHEMA(discovery_payload)\n self._config = config\n await self._subscribe_topics()\n self.async_write_ha_state()\n\n async def _subscribe_topics(self):\n \"\"\"(Re)Subscribe to topics.\"\"\"\n @callback\n def message_received(msg):\n \"\"\"Handle new MQTT messages.\"\"\"\n self._last_image = msg.payload\n\n self._sub_state = await subscription.async_subscribe_topics(\n self.hass, self._sub_state,\n {'state_topic': {'topic': self._config[CONF_TOPIC],\n 'msg_callback': message_received,\n 'qos': self._qos,\n 'encoding': None}})\n\n async def async_will_remove_from_hass(self):\n \"\"\"Unsubscribe when removed.\"\"\"\n self._sub_state = await subscription.async_unsubscribe_topics(\n self.hass, self._sub_state)\n\n @asyncio.coroutine\n def async_camera_image(self):\n \"\"\"Return image response.\"\"\"\n return self._last_image\n\n @property\n def name(self):\n \"\"\"Return the name of 
this camera.\"\"\"\n return self._config[CONF_NAME]\n\n @property\n def unique_id(self):\n \"\"\"Return a unique ID.\"\"\"\n return self._unique_id\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203596,"cells":{"repo_name":{"kind":"string","value":"gsehub/edx-platform"},"path":{"kind":"string","value":"common/djangoapps/third_party_auth/api/tests/test_views.py"},"copies":{"kind":"string","value":"9"},"size":{"kind":"string","value":"12482"},"content":{"kind":"string","value":"# pylint: disable=no-member\n\"\"\"\nTests for the Third Party Auth REST API\n\"\"\"\nimport unittest\n\nimport ddt\nfrom django.urls import reverse\nfrom django.http import QueryDict\nfrom mock import patch\nfrom provider.constants import CONFIDENTIAL\nfrom provider.oauth2.models import Client, AccessToken\nfrom openedx.core.lib.api.permissions import ApiKeyHeaderPermission\nfrom rest_framework.test import APITestCase\nfrom django.conf import settings\nfrom django.test.utils import override_settings\nfrom social_django.models import UserSocialAuth\n\nfrom student.tests.factories import UserFactory\nfrom third_party_auth.api.permissions import ThirdPartyAuthProviderApiPermission\nfrom third_party_auth.models import ProviderApiPermissions\nfrom third_party_auth.tests.testutil import ThirdPartyAuthTestMixin\n\n\nVALID_API_KEY = \"i am a key\"\nIDP_SLUG_TESTSHIB = 'testshib'\nPROVIDER_ID_TESTSHIB = 'saml-' + IDP_SLUG_TESTSHIB\n\nALICE_USERNAME = \"alice\"\nCARL_USERNAME = \"carl\"\nSTAFF_USERNAME = \"staff\"\nADMIN_USERNAME = \"admin\"\n# These users will be created and linked to third party accounts:\nLINKED_USERS = (ALICE_USERNAME, STAFF_USERNAME, ADMIN_USERNAME)\nPASSWORD = \"edx\"\n\n\ndef get_mapping_data_by_usernames(usernames):\n \"\"\" Generate mapping data used in response \"\"\"\n return [{'username': username, 'remote_id': 'remote_' + username} for username in usernames]\n\n\nclass TpaAPITestCase(ThirdPartyAuthTestMixin, APITestCase):\n \"\"\" Base test class 
\"\"\"\n\n def setUp(self):\n \"\"\" Create users for use in the tests \"\"\"\n super(TpaAPITestCase, self).setUp()\n\n google = self.configure_google_provider(enabled=True)\n self.configure_facebook_provider(enabled=True)\n self.configure_linkedin_provider(enabled=False)\n self.enable_saml()\n testshib = self.configure_saml_provider(\n name='TestShib',\n enabled=True,\n slug=IDP_SLUG_TESTSHIB\n )\n\n # Create several users and link each user to Google and TestShib\n for username in LINKED_USERS:\n make_superuser = (username == ADMIN_USERNAME)\n make_staff = (username == STAFF_USERNAME) or make_superuser\n user = UserFactory.create(\n username=username,\n password=PASSWORD,\n is_staff=make_staff,\n is_superuser=make_superuser\n )\n UserSocialAuth.objects.create(\n user=user,\n provider=google.backend_name,\n uid='{}@gmail.com'.format(username),\n )\n UserSocialAuth.objects.create(\n user=user,\n provider=testshib.backend_name,\n uid='{}:remote_{}'.format(testshib.slug, username),\n )\n # Create another user not linked to any providers:\n UserFactory.create(username=CARL_USERNAME, password=PASSWORD)\n\n\n@override_settings(EDX_API_KEY=VALID_API_KEY)\n@ddt.ddt\n@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')\nclass UserViewAPITests(TpaAPITestCase):\n \"\"\"\n Test the Third Party Auth User REST API\n \"\"\"\n\n def expected_active(self, username):\n \"\"\" The JSON active providers list response expected for the given user \"\"\"\n if username not in LINKED_USERS:\n return []\n return [\n {\n \"provider_id\": \"oa2-google-oauth2\",\n \"name\": \"Google\",\n \"remote_id\": \"{}@gmail.com\".format(username),\n },\n {\n \"provider_id\": PROVIDER_ID_TESTSHIB,\n \"name\": \"TestShib\",\n # The \"testshib:\" prefix is stored in the UserSocialAuth.uid field but should\n # not be present in the 'remote_id', since that's an implementation detail:\n \"remote_id\": 'remote_' + username,\n },\n ]\n\n @ddt.data(\n # Any user can query their own 
list of providers\n (ALICE_USERNAME, ALICE_USERNAME, 200),\n (CARL_USERNAME, CARL_USERNAME, 200),\n # A regular user cannot query another user nor deduce the existence of users based on the status code\n (ALICE_USERNAME, STAFF_USERNAME, 403),\n (ALICE_USERNAME, \"nonexistent_user\", 403),\n # Even Staff cannot query other users\n (STAFF_USERNAME, ALICE_USERNAME, 403),\n # But admins can\n (ADMIN_USERNAME, ALICE_USERNAME, 200),\n (ADMIN_USERNAME, CARL_USERNAME, 200),\n (ADMIN_USERNAME, \"invalid_username\", 404),\n )\n @ddt.unpack\n def test_list_connected_providers(self, request_user, target_user, expect_result):\n self.client.login(username=request_user, password=PASSWORD)\n url = reverse('third_party_auth_users_api', kwargs={'username': target_user})\n\n response = self.client.get(url)\n self.assertEqual(response.status_code, expect_result)\n if expect_result == 200:\n self.assertIn(\"active\", response.data)\n self.assertItemsEqual(response.data[\"active\"], self.expected_active(target_user))\n\n @ddt.data(\n # A server with a valid API key can query any user's list of providers\n (VALID_API_KEY, ALICE_USERNAME, 200),\n (VALID_API_KEY, \"invalid_username\", 404),\n (\"i am an invalid key\", ALICE_USERNAME, 403),\n (None, ALICE_USERNAME, 403),\n )\n @ddt.unpack\n def test_list_connected_providers__withapi_key(self, api_key, target_user, expect_result):\n url = reverse('third_party_auth_users_api', kwargs={'username': target_user})\n response = self.client.get(url, HTTP_X_EDX_API_KEY=api_key)\n self.assertEqual(response.status_code, expect_result)\n if expect_result == 200:\n self.assertIn(\"active\", response.data)\n self.assertItemsEqual(response.data[\"active\"], self.expected_active(target_user))\n\n\n@override_settings(EDX_API_KEY=VALID_API_KEY)\n@ddt.ddt\n@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')\nclass UserMappingViewAPITests(TpaAPITestCase):\n \"\"\"\n Test the Third Party Auth User Mapping REST API\n \"\"\"\n 
@ddt.data(\n (VALID_API_KEY, PROVIDER_ID_TESTSHIB, 200, get_mapping_data_by_usernames(LINKED_USERS)),\n (\"i am an invalid key\", PROVIDER_ID_TESTSHIB, 403, None),\n (None, PROVIDER_ID_TESTSHIB, 403, None),\n (VALID_API_KEY, 'non-existing-id', 404, []),\n )\n @ddt.unpack\n def test_list_all_user_mappings_withapi_key(self, api_key, provider_id, expect_code, expect_data):\n url = reverse('third_party_auth_user_mapping_api', kwargs={'provider_id': provider_id})\n response = self.client.get(url, HTTP_X_EDX_API_KEY=api_key)\n self._verify_response(response, expect_code, expect_data)\n\n @ddt.data(\n (PROVIDER_ID_TESTSHIB, 'valid-token', 200, get_mapping_data_by_usernames(LINKED_USERS)),\n ('non-existing-id', 'valid-token', 404, []),\n (PROVIDER_ID_TESTSHIB, 'invalid-token', 401, []),\n )\n @ddt.unpack\n def test_list_all_user_mappings_oauth2(self, provider_id, access_token, expect_code, expect_data):\n url = reverse('third_party_auth_user_mapping_api', kwargs={'provider_id': provider_id})\n # create oauth2 auth data\n user = UserFactory.create(username='api_user')\n client = Client.objects.create(name='oauth2_client', client_type=CONFIDENTIAL)\n token = AccessToken.objects.create(user=user, client=client)\n ProviderApiPermissions.objects.create(client=client, provider_id=provider_id)\n\n if access_token == 'valid-token':\n access_token = token.token\n\n response = self.client.get(url, HTTP_AUTHORIZATION='Bearer {}'.format(access_token))\n self._verify_response(response, expect_code, expect_data)\n\n @ddt.data(\n ({'username': [ALICE_USERNAME, STAFF_USERNAME]}, 200,\n get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),\n ({'remote_id': ['remote_' + ALICE_USERNAME, 'remote_' + STAFF_USERNAME, 'remote_' + CARL_USERNAME]}, 200,\n get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),\n ({'username': [ALICE_USERNAME, CARL_USERNAME, STAFF_USERNAME]}, 200,\n get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),\n ({'username': 
[ALICE_USERNAME], 'remote_id': ['remote_' + STAFF_USERNAME]}, 200,\n get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),\n )\n @ddt.unpack\n def test_user_mappings_with_query_params_comma_separated(self, query_params, expect_code, expect_data):\n \"\"\" test queries like username=user1,user2,... \"\"\"\n base_url = reverse(\n 'third_party_auth_user_mapping_api', kwargs={'provider_id': PROVIDER_ID_TESTSHIB}\n )\n params = []\n for attr in ['username', 'remote_id']:\n if attr in query_params:\n params.append('{}={}'.format(attr, ','.join(query_params[attr])))\n url = \"{}?{}\".format(base_url, '&'.join(params))\n response = self.client.get(url, HTTP_X_EDX_API_KEY=VALID_API_KEY)\n self._verify_response(response, expect_code, expect_data)\n\n @ddt.data(\n ({'username': [ALICE_USERNAME, STAFF_USERNAME]}, 200,\n get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),\n ({'remote_id': ['remote_' + ALICE_USERNAME, 'remote_' + STAFF_USERNAME, 'remote_' + CARL_USERNAME]}, 200,\n get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),\n ({'username': [ALICE_USERNAME, CARL_USERNAME, STAFF_USERNAME]}, 200,\n get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),\n ({'username': [ALICE_USERNAME], 'remote_id': ['remote_' + STAFF_USERNAME]}, 200,\n get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),\n )\n @ddt.unpack\n def test_user_mappings_with_query_params_multi_value_key(self, query_params, expect_code, expect_data):\n \"\"\" test queries like username=user1&username=user2&... 
\"\"\"\n base_url = reverse(\n 'third_party_auth_user_mapping_api', kwargs={'provider_id': PROVIDER_ID_TESTSHIB}\n )\n params = QueryDict('', mutable=True)\n for attr in ['username', 'remote_id']:\n if attr in query_params:\n params.setlist(attr, query_params[attr])\n url = \"{}?{}\".format(base_url, params.urlencode())\n response = self.client.get(url, HTTP_X_EDX_API_KEY=VALID_API_KEY)\n self._verify_response(response, expect_code, expect_data)\n\n def test_user_mappings_only_return_requested_idp_mapping_by_provider_id(self):\n testshib2 = self.configure_saml_provider(name='TestShib2', enabled=True, slug='testshib2')\n username = 'testshib2user'\n user = UserFactory.create(\n username=username,\n password=PASSWORD,\n is_staff=False,\n is_superuser=False\n )\n UserSocialAuth.objects.create(\n user=user,\n provider=testshib2.backend_name,\n uid='{}:{}'.format(testshib2.slug, username),\n )\n\n url = reverse('third_party_auth_user_mapping_api', kwargs={'provider_id': PROVIDER_ID_TESTSHIB})\n response = self.client.get(url, HTTP_X_EDX_API_KEY=VALID_API_KEY)\n self.assertEqual(response.status_code, 200)\n self._verify_response(response, 200, get_mapping_data_by_usernames(LINKED_USERS))\n\n @ddt.data(\n (True, True, 200),\n (False, True, 200),\n (True, False, 200),\n (False, False, 403)\n )\n @ddt.unpack\n def test_user_mapping_permission_logic(self, api_key_permission, token_permission, expect):\n url = reverse('third_party_auth_user_mapping_api', kwargs={'provider_id': PROVIDER_ID_TESTSHIB})\n with patch.object(ApiKeyHeaderPermission, 'has_permission', return_value=api_key_permission):\n with patch.object(ThirdPartyAuthProviderApiPermission, 'has_permission', return_value=token_permission):\n response = self.client.get(url)\n self.assertEqual(response.status_code, expect)\n\n def _verify_response(self, response, expect_code, expect_result):\n \"\"\" verify the items in data_list exists in response and data_results matches results in response \"\"\"\n 
self.assertEqual(response.status_code, expect_code)\n if expect_code == 200:\n for item in ['results', 'count', 'num_pages']:\n self.assertIn(item, response.data)\n self.assertItemsEqual(response.data['results'], expect_result)\n"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203597,"cells":{"repo_name":{"kind":"string","value":"JCBarahona/edX"},"path":{"kind":"string","value":"common/djangoapps/third_party_auth/tests/specs/test_twitter.py"},"copies":{"kind":"string","value":"86"},"size":{"kind":"string","value":"1311"},"content":{"kind":"string","value":"\"\"\"\nSeparate integration test for Twitter which is an OAuth1 provider.\n\"\"\"\n\nfrom mock import patch\nfrom third_party_auth.tests.specs import base\n\n\nclass TwitterIntegrationTest(base.Oauth2IntegrationTest):\n \"\"\"Integration tests for Twitter backend.\"\"\"\n\n def setUp(self):\n super(TwitterIntegrationTest, self).setUp()\n self.provider = self.configure_twitter_provider(\n enabled=True,\n key='twitter_oauth1_key',\n secret='twitter_oauth1_secret',\n )\n\n # To test an OAuth1 provider, we need to patch an additional method:\n patcher = patch(\n 'social.backends.twitter.TwitterOAuth.unauthorized_token',\n create=True,\n return_value=\"unauth_token\"\n )\n patcher.start()\n self.addCleanup(patcher.stop)\n\n TOKEN_RESPONSE_DATA = {\n 'access_token': 'access_token_value',\n 'token_type': 'bearer',\n }\n USER_RESPONSE_DATA = {\n 'id': 10101010,\n 'name': 'Bob Loblaw',\n 'description': 'A Twitter User',\n 'screen_name': 'bobloblaw',\n 'location': 'Twitterverse',\n 'followers_count': 77,\n 'verified': False,\n }\n\n def get_username(self):\n response_data = self.get_response_data()\n return 
response_data.get('screen_name')\n"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203598,"cells":{"repo_name":{"kind":"string","value":"ztemt/N939Sc_5.1_kenel"},"path":{"kind":"string","value":"tools/perf/scripts/python/futex-contention.py"},"copies":{"kind":"string","value":"11261"},"size":{"kind":"string","value":"1486"},"content":{"kind":"string","value":"# futex contention\n# (c) 2010, Arnaldo Carvalho de Melo \n# Licensed under the terms of the GNU GPL License version 2\n#\n# Translation of:\n#\n# http://sourceware.org/systemtap/wiki/WSFutexContention\n#\n# to perf python scripting.\n#\n# Measures futex contention\n\nimport os, sys\nsys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')\nfrom Util import *\n\nprocess_names = {}\nthread_thislock = {}\nthread_blocktime = {}\n\nlock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time\nprocess_names = {} # long-lived pid-to-execname mapping\n\ndef syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,\n\t\t\t nr, uaddr, op, val, utime, uaddr2, val3):\n\tcmd = op & FUTEX_CMD_MASK\n\tif cmd != FUTEX_WAIT:\n\t\treturn # we don't care about originators of WAKE events\n\n\tprocess_names[tid] = comm\n\tthread_thislock[tid] = uaddr\n\tthread_blocktime[tid] = nsecs(s, ns)\n\ndef syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,\n\t\t\t nr, ret):\n\tif thread_blocktime.has_key(tid):\n\t\telapsed = nsecs(s, ns) - thread_blocktime[tid]\n\t\tadd_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)\n\t\tdel thread_blocktime[tid]\n\t\tdel thread_thislock[tid]\n\ndef trace_begin():\n\tprint \"Press control+C to stop and show the summary\"\n\ndef trace_end():\n\tfor (tid, lock) in lock_waits:\n\t\tmin, max, avg, count = lock_waits[tid, lock]\n\t\tprint \"%s[%d] lock %x contended %d times, %d avg ns\" % \\\n\t\t (process_names[tid], tid, lock, count, 
avg)\n\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203599,"cells":{"repo_name":{"kind":"string","value":"Grumpy-Mike/Mikes-Pi-Bakery"},"path":{"kind":"string","value":"Physical Sequencer/videoSequencer.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"5316"},"content":{"kind":"string","value":"#!/usr/bin/env python\n# Video sequencer with track bars\n# and multi pixel sampling\nimport time, pygame, pickle\nimport os, sys, math, copy\nimport cv2\n\npygame.init() # initialise pygame\npygame.mixer.quit()\npygame.mixer.init(frequency=22050, size=-16, channels=4, buffer=512)\n\npygame.event.set_allowed(None)\nsamples =[\"clap\",\"closedhat\",\"cowbell\",\"crash\",\"hitom\",\"lotom\"]\ncolours = [\"Red\",\"Orange\",\"Yellow\",\"Green\",\"Blue\",\"Magenta\"]\nmaxCol = 6\nseqSound = [ pygame.mixer.Sound(\"sounds/\"+samples[sound]+\".wav\")\n for sound in range(0,maxCol)]\n\ncv2.namedWindow(\"Video Sequencer\", cv2.CV_WINDOW_AUTOSIZE)\ncamera_index = 0\ncapture = cv2.VideoCapture(camera_index)\nif not capture.isOpened():\n capture.open(camera_index) \nhList = [ 0.0 for h in range (0,64)]\ncList = [ 0.0 for c in range (0,64)]\nnextNote = time.time()\ncromaThresh = 0.23 # threshold for a colour\ndef track_mouse(event,x,y,flags,param):\n pass\ncv2.setMouseCallback(\"Video Sequencer\",track_mouse)\n\ndef main():\n global capture,samplePoint\n f = open('grid16x4.txt','r') # change to use another points file\n samplePoint = pickle.load(f)\n f.close()\n print\"type Esc to quit - d to save colours to a CSV file and quit\"\n for c in range(0,maxCol):\n print colours[c],\"for\",samples[c] \n cv2.createTrackbar('BPM',\"Video Sequencer\",120,200,nothing)\n switch = '0 : Stop \\n1 : Run'\n cv2.createTrackbar(switch,\"Video Sequencer\",0,1,nothing)\n startTime = time.time() + 3\n while startTime > time.time(): # let web cam's AGC settle\n ret, frame = capture.read()\n cv2.imshow(\"Video Sequencer\", frame)\n c = cv2.waitKey(1)\n\n while 
True:\n ret = True\n for i in range (0,5) : #read off 5 frames\n ret, frame = capture.read()\n points(frame)\n cv2.imshow(\"Video Sequencer\", frame)\n getKey()\n if cv2.getTrackbarPos(switch,\"Video Sequencer\") == 1 :\n soundOut(frame)\n \ndef getKey():\n k = cv2.waitKey(1)& 0xFF\n if k == 27:\n terminate(False)\n if k == ord('d'):\n terminate(True) \n\ndef soundOut(frame):\n global nextNote\n bpm = 60.0 / float(cv2.getTrackbarPos('BPM',\"Video Sequencer\"))\n if bpm > 2.0:\n bpm = 2.0\n for i in range(0,16):\n temp = cv2.copyMakeBorder(frame,0,0,0,0,1)\n cv2.line(temp,samplePoint[i],samplePoint[i+16],(0,255,0),3)\n cv2.line(temp,samplePoint[i+16],samplePoint[i+32],(0,255,0),3)\n cv2.line(temp,samplePoint[i+32],samplePoint[i+48],(0,255,0),3)\n while nextNote > time.time() :\n pass\n nextNote = time.time() + bpm\n cv2.imshow(\"Video Sequencer\", temp)\n getKey()\n for j in range(0,4):\n index = i + (j*16)\n if cList[index] > cromaThresh :\n seqSound[getColour(hList[index])].play()\n \ndef nothing(x): #dummy call back function for track bars\n pass\n\ndef output(): # CSV format\n print\"Saving colours to file - colours.csv\"\n with open('colours.csv','w') as f:\n f.write(\"Hole, Hue, Croma, Colour \\n\")\n for c in range(0,16):\n f.write(\"\\n\") \n for r in range(0,4):\n i=(r*16) + c\n entry = str(i)+ \", \"+\"%.2f\" % hList[i]\n f.write(entry+\", \")\n entry = \"%.2f\" % cList[i]\n f.write(entry+\", \")\n if cList[i] > cromaThresh :\n f.write(describeColour(hList[i])+\"\\n\")\n else :\n f.write(\"neutral \\n\")\n f.close()\n \ndef getColour(h):\n colour = -1\n if h < 1 or h > 340:\n colour = 0\n if h>1 and h< 30:\n colour = 1\n elif h>30 and h< 90 :\n colour = 2\n elif h > 90 and h < 190 :\n colour = 3\n elif h> 190 and h< 300 :\n colour = 4\n elif h> 300 and h < 340 :\n colour = 5\n return colour \n \ndef describeColour(h):\n colourNumber = getColour(h)\n if colourNumber == -1:\n colour = str(h)+\" is unknown\"\n else:\n colour = colours[colourNumber]\n return 
colour\n\ndef points(frame): # outline sample area and get the colours \n for point in range(0,64):\n surround(samplePoint[point][0],samplePoint[point][1] ,(0,0,0),frame,point)\n \ndef surround(x, y, col, frame, place):\n getCol(x,y, frame, place) \n frame[y, x-2] = col\n frame[y+2,x-2] = col\n frame[y-2,x-2] = col\n frame[y+2,x,] = col\n frame[y-2,x] = col \n frame[y,x+2] = col\n frame[y+2,x+2] = col\n frame[y-2,x+2] = col\n \ndef getCol(x,y, frame,place):\n global hList,cList\n bt = rt = gt = 0\n m = 255.0 * 9.0\n for ox in range(-1,2):\n for oy in range(-1,2):\n blue, green, red = frame[y+oy,x+ox]\n bt += blue\n gt += green\n rt += red \n r = float(rt) / m # normalise colours\n g = float(gt) / m\n b = float(bt) / m\n alp = 0.5*(2*r - g - b)\n bet = 0.866*(g - b)\n hList[place] = math.degrees(math.atan2(bet,alp))\n if hList[place] <0 :\n hList[place] = 360 + hList[place]\n cList[place] = math.sqrt(alp * alp + bet * bet)\n \ndef terminate(debug): # close down the program\n if debug :\n output() # colours to a csv file\n print (\"Closing down please wait\")\n pygame.quit() # close pygame\n capture.release()\n cv2.destroyAllWindows()\n cv2.waitKey(1)\n os._exit(1)\n \nif __name__ == '__main__': \n main()\n \n"},"license":{"kind":"string","value":"gpl-2.0"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":2035,"numItemsPerPage":100,"numTotalItems":203850,"offset":203500,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1Nzc5MTg4MSwic3ViIjoiL2RhdGFzZXRzL3Rob213b2xmL2dpdGh1Yi1kYXRhc2V0IiwiZXhwIjoxNzU3Nzk1NDgxLCJpc3MiOiJodHRwczovL2h1Z2dpbmdmYWNlLmNvIn0.iawAUpKqyJ1XNeUggmj8ez437HXSFKQLfzp0pnRFkFCB7j57U4kGXlStYBYFA3UL8YK3ntgoo2fGhdr3l75PCQ","displayUrls":true},"discussionsStats":{"closed":0,"open":1,"total":1},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
repo_name
stringlengths
5
100
path
stringlengths
4
294
copies
stringclasses
990 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
mitsuhiko/django
django/db/models/expressions.py
103
4946
import datetime from django.utils import tree class ExpressionNode(tree.Node): """ Base class for all query expressions. """ # Arithmetic connectors ADD = '+' SUB = '-' MUL = '*' DIV = '/' MOD = '%%' # This is a quoted % operator - it is quoted # because it can be used in strings that also # have parameter substitution. # Bitwise operators AND = '&' OR = '|' def __init__(self, children=None, connector=None, negated=False): if children is not None and len(children) > 1 and connector is None: raise TypeError('You have to specify a connector.') super(ExpressionNode, self).__init__(children, connector, negated) def _combine(self, other, connector, reversed, node=None): if isinstance(other, datetime.timedelta): return DateModifierNode([self, other], connector) if reversed: obj = ExpressionNode([other], connector) obj.add(node or self, connector) else: obj = node or ExpressionNode([self], connector) obj.add(other, connector) return obj ################### # VISITOR METHODS # ################### def prepare(self, evaluator, query, allow_joins): return evaluator.prepare_node(self, query, allow_joins) def evaluate(self, evaluator, qn, connection): return evaluator.evaluate_node(self, qn, connection) ############# # OPERATORS # ############# def __add__(self, other): return self._combine(other, self.ADD, False) def __sub__(self, other): return self._combine(other, self.SUB, False) def __mul__(self, other): return self._combine(other, self.MUL, False) def __div__(self, other): return self._combine(other, self.DIV, False) def __mod__(self, other): return self._combine(other, self.MOD, False) def __and__(self, other): return self._combine(other, self.AND, False) def __or__(self, other): return self._combine(other, self.OR, False) def __radd__(self, other): return self._combine(other, self.ADD, True) def __rsub__(self, other): return self._combine(other, self.SUB, True) def __rmul__(self, other): return self._combine(other, self.MUL, True) def __rdiv__(self, other): return 
self._combine(other, self.DIV, True) def __rmod__(self, other): return self._combine(other, self.MOD, True) def __rand__(self, other): return self._combine(other, self.AND, True) def __ror__(self, other): return self._combine(other, self.OR, True) def prepare_database_save(self, unused): return self class F(ExpressionNode): """ An expression representing the value of the given field. """ def __init__(self, name): super(F, self).__init__(None, None, False) self.name = name def __deepcopy__(self, memodict): obj = super(F, self).__deepcopy__(memodict) obj.name = self.name return obj def prepare(self, evaluator, query, allow_joins): return evaluator.prepare_leaf(self, query, allow_joins) def evaluate(self, evaluator, qn, connection): return evaluator.evaluate_leaf(self, qn, connection) class DateModifierNode(ExpressionNode): """ Node that implements the following syntax: filter(end_date__gt=F('start_date') + datetime.timedelta(days=3, seconds=200)) which translates into: POSTGRES: WHERE end_date > (start_date + INTERVAL '3 days 200 seconds') MYSQL: WHERE end_date > (start_date + INTERVAL '3 0:0:200:0' DAY_MICROSECOND) ORACLE: WHERE end_date > (start_date + INTERVAL '3 00:03:20.000000' DAY(1) TO SECOND(6)) SQLITE: WHERE end_date > django_format_dtdelta(start_date, "+" "3", "200", "0") (A custom function is used in order to preserve six digits of fractional second information on sqlite, and to format both date and datetime values.) Note that microsecond comparisons are not well supported with MySQL, since MySQL does not store microsecond information. Only adding and subtracting timedeltas is supported, attempts to use other operations raise a TypeError. 
""" def __init__(self, children, connector, negated=False): if len(children) != 2: raise TypeError('Must specify a node and a timedelta.') if not isinstance(children[1], datetime.timedelta): raise TypeError('Second child must be a timedelta.') if connector not in (self.ADD, self.SUB): raise TypeError('Connector must be + or -, not %s' % connector) super(DateModifierNode, self).__init__(children, connector, negated) def evaluate(self, evaluator, qn, connection): return evaluator.evaluate_date_modifier_node(self, qn, connection)
bsd-3-clause
it-depends/CPSG-ML
books/PCI/Original/PCI_Code/chapter8/numpredict.py
4
4802
from random import random,randint import math def wineprice(rating,age): peak_age=rating-50 # Calculate price based on rating price=rating/2 if age>peak_age: # Past its peak, goes bad in 10 years price=price*(5-(age-peak_age)/2) else: # Increases to 5x original value as it # approaches its peak price=price*(5*((age+1)/peak_age)) if price<0: price=0 return price def wineset1(): rows=[] for i in range(300): # Create a random age and rating rating=random()*50+50 age=random()*50 # Get reference price price=wineprice(rating,age) # Add some noise price*=(random()*0.2+0.9) # Add to the dataset rows.append({'input':(rating,age), 'result':price}) return rows def euclidean(v1,v2): d=0.0 for i in range(len(v1)): d+=(v1[i]-v2[i])**2 return math.sqrt(d) def getdistances(data,vec1): distancelist=[] # Loop over every item in the dataset for i in range(len(data)): vec2=data[i]['input'] # Add the distance and the index distancelist.append((euclidean(vec1,vec2),i)) # Sort by distance distancelist.sort() return distancelist def knnestimate(data,vec1,k=5): # Get sorted distances dlist=getdistances(data,vec1) avg=0.0 # Take the average of the top k results for i in range(k): idx=dlist[i][1] avg+=data[idx]['result'] avg=avg/k return avg def inverseweight(dist,num=1.0,const=0.1): return num/(dist+const) def subtractweight(dist,const=1.0): if dist>const: return 0 else: return const-dist def gaussian(dist,sigma=5.0): return math.e**(-dist**2/(2*sigma**2)) def weightedknn(data,vec1,k=5,weightf=gaussian): # Get distances dlist=getdistances(data,vec1) avg=0.0 totalweight=0.0 # Get weighted average for i in range(k): dist=dlist[i][0] idx=dlist[i][1] weight=weightf(dist) avg+=weight*data[idx]['result'] totalweight+=weight if totalweight==0: return 0 avg=avg/totalweight return avg def dividedata(data,test=0.05): trainset=[] testset=[] for row in data: if random()<test: testset.append(row) else: trainset.append(row) return trainset,testset def testalgorithm(algf,trainset,testset): error=0.0 for 
row in testset: guess=algf(trainset,row['input']) error+=(row['result']-guess)**2 #print row['result'],guess #print error/len(testset) return error/len(testset) def crossvalidate(algf,data,trials=100,test=0.1): error=0.0 for i in range(trials): trainset,testset=dividedata(data,test) error+=testalgorithm(algf,trainset,testset) return error/trials def wineset2(): rows=[] for i in range(300): rating=random()*50+50 age=random()*50 aisle=float(randint(1,20)) bottlesize=[375.0,750.0,1500.0][randint(0,2)] price=wineprice(rating,age) price*=(bottlesize/750) price*=(random()*0.2+0.9) rows.append({'input':(rating,age,aisle,bottlesize), 'result':price}) return rows def rescale(data,scale): scaleddata=[] for row in data: scaled=[scale[i]*row['input'][i] for i in range(len(scale))] scaleddata.append({'input':scaled,'result':row['result']}) return scaleddata def createcostfunction(algf,data): def costf(scale): sdata=rescale(data,scale) return crossvalidate(algf,sdata,trials=20) return costf weightdomain=[(0,10)]*4 def wineset3(): rows=wineset1() for row in rows: if random()<0.5: # Wine was bought at a discount store row['result']*=0.6 return rows def probguess(data,vec1,low,high,k=5,weightf=gaussian): dlist=getdistances(data,vec1) nweight=0.0 tweight=0.0 for i in range(k): dist=dlist[i][0] idx=dlist[i][1] weight=weightf(dist) v=data[idx]['result'] # Is this point in the range? 
if v>=low and v<=high: nweight+=weight tweight+=weight if tweight==0: return 0 # The probability is the weights in the range # divided by all the weights return nweight/tweight from pylab import * def cumulativegraph(data,vec1,high,k=5,weightf=gaussian): t1=arange(0.0,high,0.1) cprob=array([probguess(data,vec1,0,v,k,weightf) for v in t1]) plot(t1,cprob) show() def probabilitygraph(data,vec1,high,k=5,weightf=gaussian,ss=5.0): # Make a range for the prices t1=arange(0.0,high,0.1) # Get the probabilities for the entire range probs=[probguess(data,vec1,v,v+0.1,k,weightf) for v in t1] # Smooth them by adding the gaussian of the nearby probabilites smoothed=[] for i in range(len(probs)): sv=0.0 for j in range(0,len(probs)): dist=abs(i-j)*0.1 weight=gaussian(dist,sigma=ss) sv+=weight*probs[j] smoothed.append(sv) smoothed=array(smoothed) plot(t1,smoothed) show()
mit
xuxiao19910803/edx-platform
openedx/core/djangoapps/content/course_structures/models.py
16
2011
import json import logging from collections import OrderedDict from model_utils.models import TimeStampedModel from util.models import CompressedTextField from xmodule_django.models import CourseKeyField logger = logging.getLogger(__name__) # pylint: disable=invalid-name class CourseStructure(TimeStampedModel): course_id = CourseKeyField(max_length=255, db_index=True, unique=True, verbose_name='Course ID') # Right now the only thing we do with the structure doc is store it and # send it on request. If we need to store a more complex data model later, # we can do so and build a migration. The only problem with a normalized # data model for this is that it will likely involve hundreds of rows, and # we'd have to be careful about caching. structure_json = CompressedTextField(verbose_name='Structure JSON', blank=True, null=True) @property def structure(self): if self.structure_json: return json.loads(self.structure_json) return None @property def ordered_blocks(self): """ Return the blocks in the order with which they're seen in the courseware. Parents are ordered before children. """ if self.structure: ordered_blocks = OrderedDict() self._traverse_tree(self.structure['root'], self.structure['blocks'], ordered_blocks) return ordered_blocks def _traverse_tree(self, block, unordered_structure, ordered_blocks, parent=None): """ Traverses the tree and fills in the ordered_blocks OrderedDict with the blocks in the order that they appear in the course. """ # find the dictionary entry for the current node cur_block = unordered_structure[block] if parent: cur_block['parent'] = parent ordered_blocks[block] = cur_block for child_node in cur_block['children']: self._traverse_tree(child_node, unordered_structure, ordered_blocks, parent=block)
agpl-3.0
Zumium/boxes
boxes/handlers/link.py
1
2685
#Copyright (C) 2016 Zumium [email protected] # # #Licensed to the Apache Software Foundation (ASF) under one #or more contributor license agreements. See the NOTICE file #distributed with this work for additional information #regarding copyright ownership. The ASF licenses this file #to you under the Apache License, Version 2.0 (the #"License"); you may not use this file except in compliance #with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #Unless required by applicable law or agreed to in writing, #software distributed under the License is distributed on an #"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #KIND, either express or implied. See the License for the #specific language governing permissions and limitations #under the License. from boxes import handlerBase class LinkHandler(handlerBase.BaseHandler): def __init__(self): super().__init__() def handle(self): import os import os.path #check number of arguments if self.argumentNum == 0: print('LINK command need at least 1 argument') return if self.argumentNum == 1: #path is not set,use current working directory sas default cwdPath=os.getcwd() if cwdPath[-1] != self.pathSeperator: cwdPath+=self.pathSeperator self.arguments.append({'path':cwdPath,'type':'path'}) #check type if (self.arguments[0]['type'] != 'box' and self.arguments[0]['type'] != 'boxfile') or self.arguments[1]['type'] != 'path': print('usage: box link BOX[:FILE] /path/to/link') return #get link type isBox=None if self.arguments[0]['type'] == 'box': isBox=True else: isBox=False #check if exists if isBox: if not self.checkBoxExists(self.arguments[0]['box']): print('box {} doesn\'t exist'.format(self.arguments[0]['box'])) return else: if not self.checkFileExists(self.arguments[0]['box'],self.arguments[0]['file']): print('file {0} in box {1} doesn\'t exist'.format(self.arguments[0]['file'],self.arguments[0]['box'])) return #get link file name boxName=None fileName=None 
linkFilePath=None linkFileParentPath=os.path.abspath(self.arguments[1]['path']) if linkFileParentPath[-1] != self.pathSeperator: linkFileParentPath+=self.pathSeperator if isBox: boxName=self.arguments[0]['box'] linkFilePath=linkFileParentPath+boxName os.symlink(self.getFullBoxPath(boxName),linkFilePath) else: boxName=self.arguments[0]['box'] fileName=self.arguments[0]['file'] linkFilePath=linkFileParentPath+fileName os.symlink(self.getFilePath(boxName,fileName),linkFilePath) #add link path to record file self.addLink(linkFilePath,boxName,fileName)
apache-2.0
egoldchain/egoldchain-master
test/functional/signrawtransactions.py
22
7190
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test transaction signing using the signrawtransaction RPC."""

from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *


class SignRawTransactionsTest(BitcoinTestFramework):
    """Functional test for the signrawtransaction RPC on a single node."""

    def __init__(self):
        super().__init__()
        # A clean chain with one node is sufficient: all inputs below are
        # synthetic, so no confirmed UTXOs or peers are required.
        self.setup_clean_chain = True
        self.num_nodes = 1

    def successful_signing_test(self):
        """Create and sign a valid raw transaction with one input.

        Expected results:

        1) The transaction has a complete set of signatures
        2) No script verification error occurred"""
        privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N', 'cVKpPfVKSJxKqVpE9awvXNWuLHCa5j5tiE7K6zbUSptFpTEtiFrA']

        inputs = [
            # Valid pay-to-pubkey scripts
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
             'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
            {'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0,
             'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'},
        ]

        outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}

        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        # scriptPubKeys are supplied explicitly, so the node can verify the
        # signatures without the referenced transactions being in its chain.
        rawTxSigned = self.nodes[0].signrawtransaction(rawTx, inputs, privKeys)

        # 1) The transaction has a complete set of signatures
        assert 'complete' in rawTxSigned
        assert_equal(rawTxSigned['complete'], True)

        # 2) No script verification error occurred
        assert 'errors' not in rawTxSigned

    def script_verification_error_test(self):
        """Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.

        Expected results:

        3) The transaction has no complete set of signatures
        4) Two script verification errors occurred
        5) Script verification errors have certain properties ("txid", "vout", "scriptSig", "sequence", "error")
        6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)"""
        privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']

        inputs = [
            # Valid pay-to-pubkey script
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0},
            # Invalid script
            {'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7},
            # Missing scriptPubKey
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 1},
        ]

        # Note: 'scripts' deliberately omits an entry for vin 2 to exercise
        # the missing-scriptPubKey path.
        scripts = [
            # Valid pay-to-pubkey script
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
             'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
            # Invalid script
            {'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7,
             'scriptPubKey': 'badbadbadbad'}
        ]

        outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}

        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)

        # Make sure decoderawtransaction is at least marginally sane
        decodedRawTx = self.nodes[0].decoderawtransaction(rawTx)
        for i, inp in enumerate(inputs):
            assert_equal(decodedRawTx["vin"][i]["txid"], inp["txid"])
            assert_equal(decodedRawTx["vin"][i]["vout"], inp["vout"])

        # Make sure decoderawtransaction throws if there is extra data
        assert_raises(JSONRPCException, self.nodes[0].decoderawtransaction, rawTx + "00")

        rawTxSigned = self.nodes[0].signrawtransaction(rawTx, scripts, privKeys)

        # 3) The transaction has no complete set of signatures
        assert 'complete' in rawTxSigned
        assert_equal(rawTxSigned['complete'], False)

        # 4) Two script verification errors occurred
        assert 'errors' in rawTxSigned
        assert_equal(len(rawTxSigned['errors']), 2)

        # 5) Script verification errors have certain properties
        assert 'txid' in rawTxSigned['errors'][0]
        assert 'vout' in rawTxSigned['errors'][0]
        assert 'witness' in rawTxSigned['errors'][0]
        assert 'scriptSig' in rawTxSigned['errors'][0]
        assert 'sequence' in rawTxSigned['errors'][0]
        assert 'error' in rawTxSigned['errors'][0]

        # 6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)
        assert_equal(rawTxSigned['errors'][0]['txid'], inputs[1]['txid'])
        assert_equal(rawTxSigned['errors'][0]['vout'], inputs[1]['vout'])
        assert_equal(rawTxSigned['errors'][1]['txid'], inputs[2]['txid'])
        assert_equal(rawTxSigned['errors'][1]['vout'], inputs[2]['vout'])
        # Non-witness inputs must report an empty witness.
        assert not rawTxSigned['errors'][0]['witness']

        # Now test signing failure for transaction with input witnesses
        p2wpkh_raw_tx = "01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000"

        rawTxSigned = self.nodes[0].signrawtransaction(p2wpkh_raw_tx)

        # 7) The transaction has no complete set of signatures
        assert 'complete' in rawTxSigned
        assert_equal(rawTxSigned['complete'], False)

        # 8) Two script verification errors occurred
        assert 'errors' in rawTxSigned
        assert_equal(len(rawTxSigned['errors']), 2)

        # 9) Script verification errors have certain properties
        assert 'txid' in rawTxSigned['errors'][0]
        assert 'vout' in rawTxSigned['errors'][0]
        assert 'witness' in rawTxSigned['errors'][0]
        assert 'scriptSig' in rawTxSigned['errors'][0]
        assert 'sequence' in rawTxSigned['errors'][0]
        assert 'error' in rawTxSigned['errors'][0]

        # Non-empty witness checked here
        assert_equal(rawTxSigned['errors'][1]['witness'], ["304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee01", "025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357"])
        assert not rawTxSigned['errors'][0]['witness']

    def run_test(self):
        self.successful_signing_test()
        self.script_verification_error_test()


if __name__ == '__main__':
    SignRawTransactionsTest().main()
mit
wangyum/tensorflow
tensorflow/contrib/keras/python/keras/applications/vgg19.py
30
9353
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=invalid-name
"""VGG19 model for Keras.

# Reference

- [Very Deep Convolutional Networks for Large-Scale Image
  Recognition](https://arxiv.org/abs/1409.1556)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import warnings

from tensorflow.contrib.keras.python.keras import backend as K
from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import _obtain_input_shape
from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import decode_predictions  # pylint: disable=unused-import
from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import preprocess_input  # pylint: disable=unused-import
from tensorflow.contrib.keras.python.keras.engine.topology import get_source_inputs
from tensorflow.contrib.keras.python.keras.layers import Conv2D
from tensorflow.contrib.keras.python.keras.layers import Dense
from tensorflow.contrib.keras.python.keras.layers import Flatten
from tensorflow.contrib.keras.python.keras.layers import GlobalAveragePooling2D
from tensorflow.contrib.keras.python.keras.layers import GlobalMaxPooling2D
from tensorflow.contrib.keras.python.keras.layers import Input
from tensorflow.contrib.keras.python.keras.layers import MaxPooling2D
from tensorflow.contrib.keras.python.keras.models import Model
from tensorflow.contrib.keras.python.keras.utils import layer_utils
from tensorflow.contrib.keras.python.keras.utils.data_utils import get_file


# Pre-trained ImageNet weight files (TensorFlow kernel ordering), with and
# without the three fully-connected classification layers.
WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg19_weights_tf_dim_ordering_tf_kernels.h5'
WEIGHTS_PATH_NO_TOP = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5'


def VGG19(include_top=True,
          weights='imagenet',
          input_tensor=None,
          input_shape=None,
          pooling=None,
          classes=1000):
  """Instantiates the VGG19 architecture.

  Optionally loads weights pre-trained on ImageNet. Note that when using
  TensorFlow, for best performance you should set
  `image_data_format="channels_last"` in your Keras config at
  ~/.keras/keras.json.

  The model and the weights are compatible with both TensorFlow and Theano.
  The data format convention used by the model is the one specified in your
  Keras config file.

  Arguments:
      include_top: whether to include the 3 fully-connected
          layers at the top of the network.
      weights: one of `None` (random initialization)
          or "imagenet" (pre-training on ImageNet).
      input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
          to use as image input for the model.
      input_shape: optional shape tuple, only to be specified
          if `include_top` is False (otherwise the input shape
          has to be `(224, 224, 3)` (with `channels_last` data format)
          or `(3, 224, 224)` (with `channels_first` data format).
          It should have exactly 3 inputs channels,
          and width and height should be no smaller than 48.
          E.g. `(200, 200, 3)` would be one valid value.
      pooling: Optional pooling mode for feature extraction
          when `include_top` is `False`.
          - `None` means that the output of the model will be
              the 4D tensor output of the
              last convolutional layer.
          - `avg` means that global average pooling
              will be applied to the output of the
              last convolutional layer, and thus
              the output of the model will be a 2D tensor.
          - `max` means that global max pooling will
              be applied.
      classes: optional number of classes to classify images
          into, only to be specified if `include_top` is True, and
          if no `weights` argument is specified.

  Returns:
      A Keras model instance.

  Raises:
      ValueError: in case of invalid argument for `weights`,
          or invalid input shape.
  """
  if weights not in {'imagenet', None}:
    raise ValueError('The `weights` argument should be either '
                     '`None` (random initialization) or `imagenet` '
                     '(pre-training on ImageNet).')

  if weights == 'imagenet' and include_top and classes != 1000:
    raise ValueError('If using `weights` as imagenet with `include_top`'
                     ' as true, `classes` should be 1000')
  # Determine proper input shape
  input_shape = _obtain_input_shape(
      input_shape,
      default_size=224,
      min_size=48,
      data_format=K.image_data_format(),
      include_top=include_top)

  if input_tensor is None:
    img_input = Input(shape=input_shape)
  else:
    img_input = Input(tensor=input_tensor, shape=input_shape)
  # Block 1
  x = Conv2D(
      64, (3, 3), activation='relu', padding='same',
      name='block1_conv1')(img_input)
  x = Conv2D(
      64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x)
  x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x)

  # Block 2
  x = Conv2D(
      128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x)
  x = Conv2D(
      128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x)
  x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x)

  # Block 3
  x = Conv2D(
      256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x)
  x = Conv2D(
      256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x)
  x = Conv2D(
      256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x)
  x = Conv2D(
      256, (3, 3), activation='relu', padding='same', name='block3_conv4')(x)
  x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x)

  # Block 4
  x = Conv2D(
      512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x)
  x = Conv2D(
      512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x)
  x = Conv2D(
      512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x)
  x = Conv2D(
      512, (3, 3), activation='relu', padding='same', name='block4_conv4')(x)
  x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x)

  # Block 5
  x = Conv2D(
      512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x)
  x = Conv2D(
      512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x)
  x = Conv2D(
      512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x)
  x = Conv2D(
      512, (3, 3), activation='relu', padding='same', name='block5_conv4')(x)
  x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x)

  if include_top:
    # Classification block
    x = Flatten(name='flatten')(x)
    x = Dense(4096, activation='relu', name='fc1')(x)
    x = Dense(4096, activation='relu', name='fc2')(x)
    x = Dense(classes, activation='softmax', name='predictions')(x)
  else:
    if pooling == 'avg':
      x = GlobalAveragePooling2D()(x)
    elif pooling == 'max':
      x = GlobalMaxPooling2D()(x)

  # Ensure that the model takes into account
  # any potential predecessors of `input_tensor`.
  if input_tensor is not None:
    inputs = get_source_inputs(input_tensor)
  else:
    inputs = img_input
  # Create model.
  model = Model(inputs, x, name='vgg19')

  # load weights
  if weights == 'imagenet':
    if include_top:
      weights_path = get_file(
          'vgg19_weights_tf_dim_ordering_tf_kernels.h5',
          WEIGHTS_PATH,
          cache_subdir='models')
    else:
      weights_path = get_file(
          'vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5',
          WEIGHTS_PATH_NO_TOP,
          cache_subdir='models')
    model.load_weights(weights_path)
    if K.backend() == 'theano':
      # Downloaded weights use TF kernel ordering; convert for Theano.
      layer_utils.convert_all_kernels_in_model(model)

    if K.image_data_format() == 'channels_first':
      if include_top:
        # fc1 weights were saved assuming channels_last flattening; convert
        # them to match the channels_first activation layout.
        maxpool = model.get_layer(name='block5_pool')
        shape = maxpool.output_shape[1:]
        dense = model.get_layer(name='fc1')
        layer_utils.convert_dense_weights_data_format(dense, shape,
                                                      'channels_first')

      if K.backend() == 'tensorflow':
        warnings.warn('You are using the TensorFlow backend, yet you '
                      'are using the Theano '
                      'image data format convention '
                      '(`image_data_format="channels_first"`). '
                      'For best performance, set '
                      '`image_data_format="channels_last"` in '
                      'your Keras config '
                      'at ~/.keras/keras.json.')
  return model
apache-2.0
Ichimonji10/robottelo
robottelo/ui/operatingsys.py
4
8335
# -*- encoding: utf-8 -*-
"""Implements Operating System UI."""
from robottelo.constants import FILTER
from robottelo.ui.base import Base, UIError
from robottelo.ui.locators import common_locators, locators, tab_locators
from robottelo.ui.navigator import Navigator


class OperatingSys(Base):
    """Manipulates Foreman's operating system from UI."""

    def navigate_to_entity(self):
        """Navigate to OS entity page"""
        Navigator(self.browser).go_to_operating_systems()

    def _search_locator(self):
        """Specify locator for OS entity search procedure"""
        return locators['operatingsys.operatingsys_name']

    def _configure_os(self, archs, ptables, mediums, select,
                      minor_version=None, description=None, os_family=None,
                      template=None, arch_list=None, ptable_list=None,
                      medium_list=None):
        """Configures the operating system details.

        Shared by :meth:`create` and :meth:`update`: fills the optional
        fields on the OS form and (de)selects associated architectures,
        partition tables, media and provisioning template via the
        corresponding tabs.
        """
        tab_primary_locator = tab_locators['tab_primary']
        tab_ptable_locator = tab_locators['operatingsys.tab_ptable']
        tab_medium_locator = tab_locators['operatingsys.tab_medium']
        if minor_version:
            if self.wait_until_element(
                    locators['operatingsys.minor_version']):
                self.field_update('operatingsys.minor_version', minor_version)
        if description:
            if self.wait_until_element(
                    locators['operatingsys.description']):
                self.field_update('operatingsys.description', description)
        if os_family:
            self.select(locators['operatingsys.family'], os_family)
        if archs or arch_list:
            self.configure_entity(
                archs,
                FILTER['os_arch'],
                tab_locator=tab_primary_locator,
                new_entity_list=arch_list,
                entity_select=select
            )
        if ptables or ptable_list:
            self.configure_entity(
                ptables,
                FILTER['os_ptable'],
                tab_locator=tab_ptable_locator,
                new_entity_list=ptable_list,
                entity_select=select
            )
        if mediums or medium_list:
            self.configure_entity(
                mediums,
                FILTER['os_medium'],
                tab_locator=tab_medium_locator,
                new_entity_list=medium_list,
                entity_select=select
            )
        if template:
            self.click(tab_locators['operatingsys.tab_templates'])
            self.select(locators['operatingsys.template'], template)

    def create(self, name, major_version=None, minor_version=None,
               description=None, os_family=None, archs=None, ptables=None,
               mediums=None, select=True, template=None):
        """Create operating system from UI.

        Raises UIError when the "new OS" control is missing or when
        ``major_version`` cannot be entered (it is mandatory in Foreman).
        """
        new_os = self.wait_until_element(locators['operatingsys.new'])
        if new_os:
            new_os.click()
            os_name_locator = locators['operatingsys.name']
            os_major_locator = locators['operatingsys.major_version']
            if self.wait_until_element(os_name_locator):
                self.find_element(os_name_locator).send_keys(name)
                if self.wait_until_element(os_major_locator):
                    self.find_element(os_major_locator).send_keys(
                        major_version)
                    self._configure_os(
                        archs, ptables, mediums, select, minor_version,
                        description, os_family, template, arch_list=None,
                        ptable_list=None, medium_list=None
                    )
                    self.click(common_locators['submit'])
                else:
                    raise UIError(u'Could not create OS without major_version')
        else:
            raise UIError(
                u'Could not create new operating system "{0}"'.format(name)
            )

    def delete(self, os_name, really=True):
        """Delete operating system from UI."""
        self.delete_entity(
            os_name,
            really,
            locators['operatingsys.delete']
        )

    def update(self, os_name, new_name=None, major_version=None,
               minor_version=None, description=None, os_family=None,
               archs=None, ptables=None, mediums=None, new_archs=None,
               new_ptables=None, new_mediums=None, select=False,
               template=None):
        """Update all entities(arch, Partition table, medium) of OS from UI."""
        element = self.search(os_name)
        self.click(element)
        if new_name:
            if self.wait_until_element(locators['operatingsys.name']):
                self.field_update('operatingsys.name', new_name)
        if major_version:
            if self.wait_until_element(
                    locators['operatingsys.major_version']):
                self.field_update(
                    'operatingsys.major_version', major_version)
        self._configure_os(
            archs, ptables, mediums, select, minor_version, description,
            os_family, template, arch_list=new_archs,
            ptable_list=new_ptables, medium_list=new_mediums
        )
        self.click(common_locators['submit'])

    def set_os_parameter(self, os_name, param_name, param_value):
        """Add new OS parameter."""
        element = self.search(os_name)
        self.click(element)
        self.set_parameter(param_name, param_value)

    def remove_os_parameter(self, os_name, param_name):
        """Remove selected OS parameter."""
        element = self.search(os_name)
        self.click(element)
        self.remove_parameter(param_name)

    def get_selected_entities(self):
        """Function to get selected elements (either it is a check-box or
        selection list).
        """
        selected_element = self.wait_until_element(
            common_locators['selected_entity'])
        checked_element = self.find_element(common_locators['checked_entity'])
        # Prefer the selection-list value; fall back to the checked box.
        if selected_element:
            entity_value = selected_element.text
        else:
            entity_value = checked_element.text
        return entity_value

    def get_os_entities(self, os_name, entity_name=None):
        """Assert OS name, minor, major_version, os_family, template, media,
        and partition table to validate results.

        Returns a dict with all keys present; only the entity selected via
        ``entity_name`` ('ptable' | 'medium' | 'template') is populated in
        addition to the always-read name/major/minor/os_family fields.
        """
        name_loc = locators['operatingsys.name']
        major_ver_loc = locators['operatingsys.major_version']
        minor_ver_loc = locators['operatingsys.minor_version']
        os_family_loc = locators['operatingsys.fetch_family']
        element = self.search(os_name)
        self.click(element)
        if self.wait_until_element(locators['operatingsys.name']):
            result = dict([('name', None), ('major', None), ('minor', None),
                           ('os_family', None), ('ptable', None),
                           ('template', None), ('medium', None)])
            result['name'] = self.find_element(
                name_loc).get_attribute('value')
            result['major'] = self.find_element(
                major_ver_loc).get_attribute('value')
            result['minor'] = self.find_element(
                minor_ver_loc).get_attribute('value')
            result['os_family'] = self.find_element(os_family_loc).text
            if entity_name == 'ptable':
                self.click(tab_locators['operatingsys.tab_ptable'])
                result['ptable'] = self.get_selected_entities()
            elif entity_name == 'medium':
                self.click(tab_locators['operatingsys.tab_medium'])
                result['medium'] = self.get_selected_entities()
            elif entity_name == 'template':
                self.click(tab_locators['operatingsys.tab_templates'])
                result['template'] = self.find_element(
                    locators['operatingsys.fetch_template']).text
            return result
        else:
            raise UIError(
                u'Could not find the OS name "{0}"'.format(os_name)
            )
gpl-3.0
jurcicek/blocks-testing
s2s/s2s.py
1
8471
#!/usr/bin/env python3
import numpy as np
import h5py

from theano import tensor

from blocks.bricks import Linear, Rectifier, Tanh, Logistic, Softmax, NDimensionalSoftmax, WithExtraDims, application, \
    Brick
from blocks.bricks.cost import CategoricalCrossEntropy, MisclassificationRate, Cost
from blocks.bricks import WEIGHT
from blocks.graph import ComputationGraph
from blocks.filter import VariableFilter
from blocks.initialization import IsotropicGaussian, Constant
from blocks.algorithms import GradientDescent, Scale, RMSProp, AdaGrad
from blocks.extensions.monitoring import DataStreamMonitoring
from blocks.main_loop import MainLoop
from blocks.extensions import FinishAfter, Printing, ProgressBar
from fuel.streams import DataStream
from fuel.schemes import SequentialScheme, ShuffledScheme
from fuel.transformers import Flatten
from fuel.datasets.hdf5 import H5PYDataset

"""
This code shows how to train a sequence to sequence translation model.
"""

# the text data is composed of questions and answers
text_data = [
    ('What color is the sky', 'It is blue.'),
    ('What color is the sun', 'The sun is yellow.'),
    ('What color is the moon', 'It is blue.'),
]


def get_words(sentence):
    # Strip punctuation, uppercase and split into tokens.
    # NOTE(review): the second replace maps ' ' to ' ' (a no-op); this looks
    # like a whitespace-mangled `.replace('  ', ' ')` — confirm the original.
    for c in '?!.,':
        sentence = sentence.replace(c, ' ').replace(' ', ' ')
    return sentence.upper().split()


def normalise(text_data):
    # Tokenise every (question, answer) pair.
    td = []
    for q, a in text_data:
        q_words = get_words(q)
        a_words = get_words(a)
        td.append((q_words, a_words))
    return td


def get_word_index(word_list):
    # Map word -> position in the (sorted) vocabulary.
    return dict([(w, i) for i, w in enumerate(word_list)])


def get_word_list(text_data):
    # Build the vocabulary; always include the special OOV/SOS/EOS symbols.
    words = set(['_OOV_', '_SOS_', '_EOS_'])
    for q, a in text_data:
        words.update(q)
        words.update(a)
    word_list = sorted(words)
    word_list_index = get_word_index(word_list)
    return word_list, word_list_index


def transform_sentence(sentence, word_list_index, max_length):
    # Encode as [SOS, w1, w2, ...] and pad with EOS up to max_length.
    # Unknown words map to the _OOV_ index.
    s = [word_list_index['_SOS_']]
    for w in sentence:
        s.append(word_list_index.get(w, word_list_index['_OOV_']))
    for w in range(0, max_length - len(s)):
        s.append(word_list_index['_EOS_'])
    return s


def transform_data(text_data, word_list_index, max_length):
    # Encode every (question, answer) pair to fixed-length index sequences.
    id = []
    for q, a in text_data:
        qi = transform_sentence(q, word_list_index, max_length)
        ai = transform_sentence(a, word_list_index, max_length)
        id.append((qi, ai))
    return id


def split_q_and_a(index_data):
    # Unzip [(q, a), ...] into ([q, ...], [a, ...]).
    qd = []
    ad = []
    for q, a in index_data:
        qd.append(q)
        ad.append(a)
    return qd, ad


def prepare_dataset(text_data):
    """Tokenise ``text_data``, write it to data/dataset.hdf5 in Fuel's
    H5PYDataset layout, and return (train_set, test_set, word_list,
    word_list_index). Train and test splits contain the same examples."""
    text_data = normalise(text_data)
    word_list, word_list_index = get_word_list(text_data)
    # print(word_list)
    # print(word_list_index)

    max_length = 0
    for q, a in text_data:
        max_length = max(max_length, len(q), len(a))
    # print(max_length)

    # add two for _SOS_ and _EOS_ word symbols
    index_data = transform_data(text_data, word_list_index, max_length + 2)
    print(index_data)

    train_features, train_targets = split_q_and_a(index_data)
    train_features = np.asarray(train_features, dtype=np.int32)
    train_targets = np.asarray(train_targets, dtype=np.int32)
    # Test data mirrors the training data in this toy example.
    test_features = train_features
    test_targets = train_targets

    f = h5py.File('data/dataset.hdf5', mode='w')

    # NOTE(review): np.unicode was removed in NumPy 1.24; on modern stacks
    # this needs h5py.string_dtype() / str instead — confirm target versions.
    dt = h5py.special_dtype(vlen=np.unicode)
    words = f.create_dataset("word_list", (len(word_list),), dtype=dt)
    words.dims[0].label = 'words'
    for i, w in enumerate(word_list):
        words[i] = w

    features = f.create_dataset(
        'features',
        (train_features.shape[0] + test_features.shape[0],
         train_features.shape[1],),
        dtype='int32'
    )
    features.dims[0].label = 'example'
    features.dims[1].label = 'index'
    features[...] = np.vstack([train_features, test_features])

    targets = f.create_dataset(
        'targets',
        (train_targets.shape[0] + test_targets.shape[0],
         train_targets.shape[1],),
        dtype='uint8'
    )
    targets.dims[0].label = 'example'
    targets.dims[1].label = 'index'
    targets[...] = np.vstack([train_targets, test_targets])

    split_dict = {
        'word_list': {'word_list': (0, words.shape[0])},
        'train': {
            'features': (0, train_features.shape[0]),
            'targets': (0, train_targets.shape[0])
        },
        'test': {
            'features': (train_features.shape[0],
                         train_features.shape[0] + test_features.shape[0]),
            'targets': (train_targets.shape[0],
                        train_targets.shape[0] + test_targets.shape[0])
        }
    }
    f.attrs['split'] = H5PYDataset.create_split_array(split_dict)

    f.flush()
    f.close()

    train_set = H5PYDataset('data/dataset.hdf5', which_sets=('train',), load_in_memory=True)
    test_set = H5PYDataset('data/dataset.hdf5', which_sets=('test',), load_in_memory=True)

    word_list = h5py.File('data/dataset.hdf5')['word_list']
    word_list_index = get_word_index(word_list)

    return train_set, test_set, word_list, word_list_index


def train(train_set, test_set, l2_weight=1e-18):
    """Build and train a small MLP with multiple softmax targets.

    NOTE(review): this function is currently dead code (its call in
    ``__main__`` is commented out) and references ``MultiTargetSoftmax`` and
    ``MultiTargetCategoricalCrossEntropy``, which are not defined or imported
    in this file — it would raise NameError if invoked. Confirm where these
    bricks live before re-enabling it.
    """
    x = tensor.matrix('features')
    y = tensor.lmatrix('targets')

    n_classifiers = 3
    n_classes = 2

    l1 = Linear(
        name='l1',
        input_dim=2,
        output_dim=10,
        weights_init=IsotropicGaussian(0.1),
        biases_init=Constant(0)
    )
    l1.initialize()
    h1 = Logistic().apply(l1.apply(x))

    # NOTE(review): this second brick is also named 'l1' (probably meant
    # 'l2'); duplicate brick names make the two layers indistinguishable in
    # the computation graph — confirm and rename.
    l2 = Linear(
        name='l1',
        input_dim=l1.output_dim,
        output_dim=n_classes * n_classifiers,
        weights_init=IsotropicGaussian(0.1),
        biases_init=Constant(0)
    )
    l2.initialize()
    l2 = l2.apply(h1)

    y_hat = MultiTargetSoftmax().apply(l2, n_classes, n_classifiers)

    cost = MultiTargetCategoricalCrossEntropy().apply(y, y_hat)
    error = MisclassificationRate().apply(y, y_hat)
    error.name = 'misclassification_rate'

    cg = ComputationGraph(cost)
    # L2 regularisation over every weight matrix in the graph.
    for w in VariableFilter(roles=[WEIGHT])(cg.variables):
        cost += l2_weight * (w ** 2).sum()
    cost.name = 'cost_with_regularization'

    # print('W1', W1.get_value())
    # print('W2', W2.get_value())

    algorithm = GradientDescent(
        cost=cost,
        parameters=cg.parameters,
        step_rule=RMSProp()
    )

    data_stream_train = Flatten(
        DataStream.default_stream(
            train_set,
            iteration_scheme=ShuffledScheme(train_set.num_examples, batch_size=80)
        )
    )
    data_stream_test = Flatten(
        DataStream.default_stream(
            test_set,
            iteration_scheme=SequentialScheme(test_set.num_examples, batch_size=1)
        )
    )

    monitor = DataStreamMonitoring(
        variables=[cost, error],
        data_stream=data_stream_test,
        prefix="test"
    )

    main_loop = MainLoop(
        data_stream=data_stream_train,
        algorithm=algorithm,
        extensions=[
            monitor,
            FinishAfter(after_n_epochs=100),
            Printing(),
            # ProgressBar()
        ]
    )
    main_loop.run()

    return x, y_hat


def test(x, y_hat):
    """Evaluate the trained predictor on a fixed XOR-like truth table and
    print the raw probabilities plus argmax predictions."""
    features_test = np.array(
        [
            [0, 0],
            [0, 1],
            [1, 0],
            [1, 1],
        ],
        dtype=np.float32
    )
    targets_test = np.array(
        [
            [0, 0, 0],
            [1, 0, 1],
            [1, 0, 1],
            [1, 1, 0],
        ],
        dtype=np.uint8
    )

    y_hat_eval = y_hat.eval({x: features_test})
    y_hat_eval_argmax = y_hat_eval.argmax(axis=1)

    print('Testing example')
    print('-' * 80)
    print('Features')
    print(features_test)
    print()
    print('Targets')
    print(targets_test)
    print()
    print('Probability predictions')
    print(y_hat_eval)
    print()
    print('ArgMax Predictions')
    print(y_hat_eval_argmax)


if __name__ == '__main__':
    train_set, test_set, word_list, word_list_index = prepare_dataset(text_data)

    # x, y_hat = train(train_set, test_set)
    #
    # test(x, y_hat)
apache-2.0
dhutty/ansible
plugins/callbacks/osx_say.py
72
3203
# (C) 2012, Michael DeHaan, <[email protected]>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

import subprocess
import os

# Voices used for the different event categories (see `say -v '?'`).
FAILED_VOICE="Zarvox"
REGULAR_VOICE="Trinoids"
HAPPY_VOICE="Cellos"
LASER_VOICE="Princess"

SAY_CMD="/usr/bin/say"


def say(msg, voice):
    """Speak `msg` aloud via the OS X `say` utility using `voice`."""
    subprocess.call([SAY_CMD, msg, "--voice=%s" % (voice)])


class CallbackModule(object):
    """
    makes Ansible much more exciting on OS X.
    """

    def __init__(self):
        # plugin disable itself if say is not present
        # ansible will not call any callback if disabled is set to True
        if not os.path.exists(SAY_CMD):
            self.disabled = True
            # Parenthesized form so the module also parses under Python 3
            # (the bare `print` statement was Python-2-only).
            print("%s does not exist, plugin %s disabled" %
                  (SAY_CMD, os.path.basename(__file__)))

    def on_any(self, *args, **kwargs):
        pass

    def runner_on_failed(self, host, res, ignore_errors=False):
        say("Failure on host %s" % host, FAILED_VOICE)

    def runner_on_ok(self, host, res):
        say("pew", LASER_VOICE)

    def runner_on_skipped(self, host, item=None):
        say("pew", LASER_VOICE)

    def runner_on_unreachable(self, host, res):
        say("Failure on host %s" % host, FAILED_VOICE)

    def runner_on_no_hosts(self):
        pass

    def runner_on_async_poll(self, host, res, jid, clock):
        pass

    def runner_on_async_ok(self, host, res, jid):
        say("pew", LASER_VOICE)

    def runner_on_async_failed(self, host, res, jid):
        say("Failure on host %s" % host, FAILED_VOICE)

    def playbook_on_start(self):
        say("Running Playbook", REGULAR_VOICE)

    def playbook_on_notify(self, host, handler):
        say("pew", LASER_VOICE)

    def playbook_on_no_hosts_matched(self):
        pass

    def playbook_on_no_hosts_remaining(self):
        pass

    def playbook_on_task_start(self, name, is_conditional):
        if not is_conditional:
            say("Starting task: %s" % name, REGULAR_VOICE)
        else:
            say("Notifying task: %s" % name, REGULAR_VOICE)

    def playbook_on_vars_prompt(self, varname, private=True, prompt=None,
                                encrypt=None, confirm=False, salt_size=None,
                                salt=None, default=None):
        pass

    def playbook_on_setup(self):
        say("Gathering facts", REGULAR_VOICE)

    def playbook_on_import_for_host(self, host, imported_file):
        pass

    def playbook_on_not_import_for_host(self, host, missing_file):
        pass

    def playbook_on_play_start(self, name):
        say("Starting play: %s" % name, HAPPY_VOICE)

    def playbook_on_stats(self, stats):
        say("Play complete", HAPPY_VOICE)
gpl-3.0
wtsi-hgi/hgi-cookie-monster-setup
hgicookiemonster/rules/creation_observed_and_incorrect_human_reference_rule.py
2
2388
import re
from os.path import normpath, dirname, join, realpath

from cookiemonster.common.models import Cookie
from cookiemonster.processor.models import Rule
from hgicommon.data_source import register

from hgicookiemonster.context import HgiContext
from hgicookiemonster.rules.not_cram_rule import NOT_CRAM_RULE_PRIORITY
from hgicookiemonster.shared.common import was_creation_observed, extract_latest_metadata_key_value_known_in_irods
from hgicookiemonster.shared.constants.irods import IRODS_REFERENCE_KEY

CREATION_OBSERVED_AND_INCORRECT_HUMAN_REFERENCE_RULE_ID = "creation_observed_and_incorrect_human_reference"
# Runs just after the not-cram rule in the processing chain.
CREATION_OBSERVED_AND_INCORRECT_HUMAN_REFERENCE_RULE_PRIORITY = NOT_CRAM_RULE_PRIORITY + 1

# One uninteresting (non-human) reference species name per line.
KNOWN_UNINTERESTING_REFERENCES_PATH = normpath(join(dirname(realpath(__file__)), "resources/non-human-species.txt"))

# Captures the species directory name from a ".../references/<species>/..." path.
_REFERENCE_EXTRACTION_PATTERN = re.compile(".*/references/(.*?)/.*", re.IGNORECASE)


def _matches(cookie: Cookie, context: HgiContext) -> bool:
    """
    Matches if the creation of the data object in iRODS has been observed but the reference has been set to one that
    we are not interested in. The order of these events is not considered.
    """
    if not was_creation_observed(cookie.enrichments):
        return False

    reference = extract_latest_metadata_key_value_known_in_irods(cookie.enrichments, IRODS_REFERENCE_KEY)
    if reference is None:
        # No update to reference yet
        return False
    if len(reference) != 1:
        # Multiple values for reference is unexpected
        return False

    reference_species_groups = re.match(_REFERENCE_EXTRACTION_PATTERN, list(reference)[0])
    if reference_species_groups is None:
        # Reference is in unexpected format
        return False
    reference_species = reference_species_groups.group(1)

    # Read uninteresting references from file every time to (easily) support live updates to this list
    with open(KNOWN_UNINTERESTING_REFERENCES_PATH, "r") as file:
        uninteresting_references = file.read().splitlines()

    return reference_species in uninteresting_references


def _action(cookie: Cookie, context: HgiContext) -> bool:
    """Stop further processing."""
    return True


# Register the rule with the data-source framework at import time.
_rule = Rule(_matches, _action, CREATION_OBSERVED_AND_INCORRECT_HUMAN_REFERENCE_RULE_ID,
             CREATION_OBSERVED_AND_INCORRECT_HUMAN_REFERENCE_RULE_PRIORITY)
register(_rule)
gpl-3.0
ryfeus/lambda-packs
Tensorflow_Pandas_Numpy/source3.6/tensorflow/contrib/learn/python/learn/estimators/tensor_signature.py
134
6730
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorSignature class and utilities."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections

from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import parsing_ops


class TensorSignature(collections.namedtuple(
    "TensorSignature", ["dtype", "shape", "is_sparse"])):
  """Signature of the `Tensor` object.

  Useful to check compatibility of tensors.

  Example:

  ```python
  examples = tf.placeholder(...)
  inputs = {'a': var_a, 'b': var_b}
  signatures = tensor_signature.create_signatures(inputs)
  result = tensor_signature.create_example_parser_from_signatures(
      signatures, examples)
  self.assertTrue(tensor_signature.tensors_compatible(result, signatures))
  ```

  Attributes:
    dtype: `DType` object.
    shape: `TensorShape` object.
  """

  def __new__(cls, tensor):
    # Sparse tensors record only the dtype of their values; shape is not
    # tracked for them (set to None).
    if isinstance(tensor, sparse_tensor.SparseTensor):
      return super(TensorSignature, cls).__new__(
          cls, dtype=tensor.values.dtype, shape=None, is_sparse=True)
    return super(TensorSignature, cls).__new__(
        cls, dtype=tensor.dtype, shape=tensor.get_shape(), is_sparse=False)

  def is_compatible_with(self, other):
    """Returns True if signatures are compatible."""

    def _shape_is_compatible_0dim(this, other):
      """Checks that shapes are compatible skipping dim 0."""
      other = tensor_shape.as_shape(other)
      # If shapes are None (unknown) they may be compatible.
      if this.dims is None or other.dims is None:
        return True
      if this.ndims != other.ndims:
        return False
      for dim, (x_dim, y_dim) in enumerate(zip(this.dims, other.dims)):
        # Dim 0 is the batch dimension and may differ between signatures.
        if dim == 0:
          continue
        if not x_dim.is_compatible_with(y_dim):
          return False
      return True

    if other.is_sparse:
      return self.is_sparse and self.dtype.is_compatible_with(other.dtype)
    return (self.dtype.is_compatible_with(other.dtype) and
            _shape_is_compatible_0dim(self.shape, other.shape) and
            not self.is_sparse)

  def get_placeholder(self):
    # Placeholder keeps the batch dimension (dim 0) unconstrained.
    if self.is_sparse:
      return array_ops.sparse_placeholder(dtype=self.dtype)
    return array_ops.placeholder(dtype=self.dtype,
                                 shape=[None] + list(self.shape[1:]))

  def get_feature_spec(self):
    dtype = self.dtype
    # Convert, because example parser only supports float32, int64 and string.
    if dtype == dtypes.int32:
      dtype = dtypes.int64
    if dtype == dtypes.float64:
      dtype = dtypes.float32
    if self.is_sparse:
      return parsing_ops.VarLenFeature(dtype=dtype)
    return parsing_ops.FixedLenFeature(shape=self.shape[1:], dtype=dtype)


def tensors_compatible(tensors, signatures):
  """Check that tensors are compatible with signatures.

  Args:
    tensors: Dict of `Tensor` objects or single `Tensor` object.
    signatures: Dict of `TensorSignature` objects or
                single `TensorSignature` object.

  Returns:
    True if all tensors are compatible, False otherwise.
  """
  # Dict of Tensors as input.
  if tensors is None:
    return signatures is None

  if isinstance(tensors, dict):
    if not isinstance(signatures, dict):
      return False
    for key in signatures:
      if key not in tensors:
        return False
      if not TensorSignature(tensors[key]).is_compatible_with(
          signatures[key]):
        return False
    return True

  # Single tensor as input.
  if signatures is None or isinstance(signatures, dict):
    return False
  return TensorSignature(tensors).is_compatible_with(signatures)


def create_signatures(tensors):
  """Creates TensorSignature objects for given tensors.

  Args:
    tensors: Dict of `Tensor` objects or single `Tensor`.

  Returns:
    Dict of `TensorSignature` objects or single `TensorSignature`.
  """
  if isinstance(tensors, dict):
    return {key: TensorSignature(tensors[key]) for key in tensors}
  if tensors is None:
    return None
  return TensorSignature(tensors)


def create_placeholders_from_signatures(signatures):
  """Creates placeholders from given signatures.

  Args:
    signatures: Dict of `TensorSignature` objects or single `TensorSignature`,
      or `None`.

  Returns:
    Dict of `tf.placeholder` objects or single `tf.placeholder`, or `None`.
  """
  if signatures is None:
    return None
  if not isinstance(signatures, dict):
    return signatures.get_placeholder()
  return {key: signatures[key].get_placeholder() for key in signatures}


def create_example_parser_from_signatures(signatures, examples_batch,
                                          single_feature_name="feature"):
  """Creates example parser from given signatures.

  Args:
    signatures: Dict of `TensorSignature` objects or single `TensorSignature`.
    examples_batch: string `Tensor` of serialized `Example` proto.
    single_feature_name: string, single feature name.

  Returns:
    features: `Tensor` or `dict` of `Tensor` objects.
  """
  feature_spec = {}
  if not isinstance(signatures, dict):
    feature_spec[single_feature_name] = signatures.get_feature_spec()
  else:
    feature_spec = {key: signatures[key].get_feature_spec()
                    for key in signatures}
  features = parsing_ops.parse_example(examples_batch, feature_spec)
  if not isinstance(signatures, dict):
    # Returns single feature, casts if needed.
    features = features[single_feature_name]
    if not signatures.dtype.is_compatible_with(features.dtype):
      features = math_ops.cast(features, signatures.dtype)
    return features
  # Returns dict of features, casts if needed.
  for name in features:
    if not signatures[name].dtype.is_compatible_with(features[name].dtype):
      features[name] = math_ops.cast(features[name], signatures[name].dtype)
  return features
mit
dednal/chromium.src
third_party/protobuf/python/google/protobuf/text_format.py
162
22004
# Protocol Buffers - Google's data interchange format # Copyright 2008 Google Inc. All rights reserved. # http://code.google.com/p/protobuf/ # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""Contains routines for printing protocol messages in text format.""" __author__ = '[email protected] (Kenton Varda)' import cStringIO import re from collections import deque from google.protobuf.internal import type_checkers from google.protobuf import descriptor __all__ = [ 'MessageToString', 'PrintMessage', 'PrintField', 'PrintFieldValue', 'Merge' ] _INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), type_checkers.Int32ValueChecker(), type_checkers.Uint64ValueChecker(), type_checkers.Int64ValueChecker()) _FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?', re.IGNORECASE) _FLOAT_NAN = re.compile('nanf?', re.IGNORECASE) class ParseError(Exception): """Thrown in case of ASCII parsing error.""" def MessageToString(message, as_utf8=False, as_one_line=False): out = cStringIO.StringIO() PrintMessage(message, out, as_utf8=as_utf8, as_one_line=as_one_line) result = out.getvalue() out.close() if as_one_line: return result.rstrip() return result def PrintMessage(message, out, indent=0, as_utf8=False, as_one_line=False): for field, value in message.ListFields(): if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: for element in value: PrintField(field, element, out, indent, as_utf8, as_one_line) else: PrintField(field, value, out, indent, as_utf8, as_one_line) def PrintField(field, value, out, indent=0, as_utf8=False, as_one_line=False): """Print a single field name/value pair. For repeated fields, the value should be a single element.""" out.write(' ' * indent); if field.is_extension: out.write('[') if (field.containing_type.GetOptions().message_set_wire_format and field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and field.message_type == field.extension_scope and field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): out.write(field.message_type.full_name) else: out.write(field.full_name) out.write(']') elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: # For groups, use the capitalized name. 
out.write(field.message_type.name) else: out.write(field.name) if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE: # The colon is optional in this case, but our cross-language golden files # don't include it. out.write(': ') PrintFieldValue(field, value, out, indent, as_utf8, as_one_line) if as_one_line: out.write(' ') else: out.write('\n') def PrintFieldValue(field, value, out, indent=0, as_utf8=False, as_one_line=False): """Print a single field value (not including name). For repeated fields, the value should be a single element.""" if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: if as_one_line: out.write(' { ') PrintMessage(value, out, indent, as_utf8, as_one_line) out.write('}') else: out.write(' {\n') PrintMessage(value, out, indent + 2, as_utf8, as_one_line) out.write(' ' * indent + '}') elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: enum_value = field.enum_type.values_by_number.get(value, None) if enum_value is not None: out.write(enum_value.name) else: out.write(str(value)) elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: out.write('\"') if type(value) is unicode: out.write(_CEscape(value.encode('utf-8'), as_utf8)) else: out.write(_CEscape(value, as_utf8)) out.write('\"') elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: if value: out.write("true") else: out.write("false") else: out.write(str(value)) def Merge(text, message): """Merges an ASCII representation of a protocol message into a message. Args: text: Message ASCII representation. message: A protocol buffer message to merge into. Raises: ParseError: On ASCII parsing problems. """ tokenizer = _Tokenizer(text) while not tokenizer.AtEnd(): _MergeField(tokenizer, message) def _MergeField(tokenizer, message): """Merges a single protocol message field into a message. Args: tokenizer: A tokenizer to parse the field name and values. message: A protocol message to record the data. 
Raises: ParseError: In case of ASCII parsing problems. """ message_descriptor = message.DESCRIPTOR if tokenizer.TryConsume('['): name = [tokenizer.ConsumeIdentifier()] while tokenizer.TryConsume('.'): name.append(tokenizer.ConsumeIdentifier()) name = '.'.join(name) if not message_descriptor.is_extendable: raise tokenizer.ParseErrorPreviousToken( 'Message type "%s" does not have extensions.' % message_descriptor.full_name) field = message.Extensions._FindExtensionByName(name) if not field: raise tokenizer.ParseErrorPreviousToken( 'Extension "%s" not registered.' % name) elif message_descriptor != field.containing_type: raise tokenizer.ParseErrorPreviousToken( 'Extension "%s" does not extend message type "%s".' % ( name, message_descriptor.full_name)) tokenizer.Consume(']') else: name = tokenizer.ConsumeIdentifier() field = message_descriptor.fields_by_name.get(name, None) # Group names are expected to be capitalized as they appear in the # .proto file, which actually matches their type names, not their field # names. if not field: field = message_descriptor.fields_by_name.get(name.lower(), None) if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: field = None if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and field.message_type.name != name): field = None if not field: raise tokenizer.ParseErrorPreviousToken( 'Message type "%s" has no field named "%s".' 
% ( message_descriptor.full_name, name)) if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: tokenizer.TryConsume(':') if tokenizer.TryConsume('<'): end_token = '>' else: tokenizer.Consume('{') end_token = '}' if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: if field.is_extension: sub_message = message.Extensions[field].add() else: sub_message = getattr(message, field.name).add() else: if field.is_extension: sub_message = message.Extensions[field] else: sub_message = getattr(message, field.name) sub_message.SetInParent() while not tokenizer.TryConsume(end_token): if tokenizer.AtEnd(): raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token)) _MergeField(tokenizer, sub_message) else: _MergeScalarField(tokenizer, message, field) def _MergeScalarField(tokenizer, message, field): """Merges a single protocol message scalar field into a message. Args: tokenizer: A tokenizer to parse the field value. message: A protocol message to record the data. field: The descriptor of the field to be merged. Raises: ParseError: In case of ASCII parsing problems. RuntimeError: On runtime errors. 
""" tokenizer.Consume(':') value = None if field.type in (descriptor.FieldDescriptor.TYPE_INT32, descriptor.FieldDescriptor.TYPE_SINT32, descriptor.FieldDescriptor.TYPE_SFIXED32): value = tokenizer.ConsumeInt32() elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, descriptor.FieldDescriptor.TYPE_SINT64, descriptor.FieldDescriptor.TYPE_SFIXED64): value = tokenizer.ConsumeInt64() elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, descriptor.FieldDescriptor.TYPE_FIXED32): value = tokenizer.ConsumeUint32() elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, descriptor.FieldDescriptor.TYPE_FIXED64): value = tokenizer.ConsumeUint64() elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, descriptor.FieldDescriptor.TYPE_DOUBLE): value = tokenizer.ConsumeFloat() elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: value = tokenizer.ConsumeBool() elif field.type == descriptor.FieldDescriptor.TYPE_STRING: value = tokenizer.ConsumeString() elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: value = tokenizer.ConsumeByteString() elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: value = tokenizer.ConsumeEnum(field) else: raise RuntimeError('Unknown field type %d' % field.type) if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: if field.is_extension: message.Extensions[field].append(value) else: getattr(message, field.name).append(value) else: if field.is_extension: message.Extensions[field] = value else: setattr(message, field.name, value) class _Tokenizer(object): """Protocol buffer ASCII representation tokenizer. This class handles the lower level string parsing by splitting it into meaningful tokens. It was directly ported from the Java protocol buffer API. 
""" _WHITESPACE = re.compile('(\\s|(#.*$))+', re.MULTILINE) _TOKEN = re.compile( '[a-zA-Z_][0-9a-zA-Z_+-]*|' # an identifier '[0-9+-][0-9a-zA-Z_.+-]*|' # a number '\"([^\"\n\\\\]|\\\\.)*(\"|\\\\?$)|' # a double-quoted string '\'([^\'\n\\\\]|\\\\.)*(\'|\\\\?$)') # a single-quoted string _IDENTIFIER = re.compile('\w+') def __init__(self, text_message): self._text_message = text_message self._position = 0 self._line = -1 self._column = 0 self._token_start = None self.token = '' self._lines = deque(text_message.split('\n')) self._current_line = '' self._previous_line = 0 self._previous_column = 0 self._SkipWhitespace() self.NextToken() def AtEnd(self): """Checks the end of the text was reached. Returns: True iff the end was reached. """ return self.token == '' def _PopLine(self): while len(self._current_line) <= self._column: if not self._lines: self._current_line = '' return self._line += 1 self._column = 0 self._current_line = self._lines.popleft() def _SkipWhitespace(self): while True: self._PopLine() match = self._WHITESPACE.match(self._current_line, self._column) if not match: break length = len(match.group(0)) self._column += length def TryConsume(self, token): """Tries to consume a given piece of text. Args: token: Text to consume. Returns: True iff the text was consumed. """ if self.token == token: self.NextToken() return True return False def Consume(self, token): """Consumes a piece of text. Args: token: Text to consume. Raises: ParseError: If the text couldn't be consumed. """ if not self.TryConsume(token): raise self._ParseError('Expected "%s".' % token) def ConsumeIdentifier(self): """Consumes protocol message field identifier. Returns: Identifier string. Raises: ParseError: If an identifier couldn't be consumed. """ result = self.token if not self._IDENTIFIER.match(result): raise self._ParseError('Expected identifier.') self.NextToken() return result def ConsumeInt32(self): """Consumes a signed 32bit integer number. Returns: The integer parsed. 
Raises: ParseError: If a signed 32bit integer couldn't be consumed. """ try: result = ParseInteger(self.token, is_signed=True, is_long=False) except ValueError, e: raise self._ParseError(str(e)) self.NextToken() return result def ConsumeUint32(self): """Consumes an unsigned 32bit integer number. Returns: The integer parsed. Raises: ParseError: If an unsigned 32bit integer couldn't be consumed. """ try: result = ParseInteger(self.token, is_signed=False, is_long=False) except ValueError, e: raise self._ParseError(str(e)) self.NextToken() return result def ConsumeInt64(self): """Consumes a signed 64bit integer number. Returns: The integer parsed. Raises: ParseError: If a signed 64bit integer couldn't be consumed. """ try: result = ParseInteger(self.token, is_signed=True, is_long=True) except ValueError, e: raise self._ParseError(str(e)) self.NextToken() return result def ConsumeUint64(self): """Consumes an unsigned 64bit integer number. Returns: The integer parsed. Raises: ParseError: If an unsigned 64bit integer couldn't be consumed. """ try: result = ParseInteger(self.token, is_signed=False, is_long=True) except ValueError, e: raise self._ParseError(str(e)) self.NextToken() return result def ConsumeFloat(self): """Consumes an floating point number. Returns: The number parsed. Raises: ParseError: If a floating point number couldn't be consumed. """ try: result = ParseFloat(self.token) except ValueError, e: raise self._ParseError(str(e)) self.NextToken() return result def ConsumeBool(self): """Consumes a boolean value. Returns: The bool parsed. Raises: ParseError: If a boolean value couldn't be consumed. """ try: result = ParseBool(self.token) except ValueError, e: raise self._ParseError(str(e)) self.NextToken() return result def ConsumeString(self): """Consumes a string value. Returns: The string parsed. Raises: ParseError: If a string value couldn't be consumed. 
""" bytes = self.ConsumeByteString() try: return unicode(bytes, 'utf-8') except UnicodeDecodeError, e: raise self._StringParseError(e) def ConsumeByteString(self): """Consumes a byte array value. Returns: The array parsed (as a string). Raises: ParseError: If a byte array value couldn't be consumed. """ list = [self._ConsumeSingleByteString()] while len(self.token) > 0 and self.token[0] in ('\'', '"'): list.append(self._ConsumeSingleByteString()) return "".join(list) def _ConsumeSingleByteString(self): """Consume one token of a string literal. String literals (whether bytes or text) can come in multiple adjacent tokens which are automatically concatenated, like in C or Python. This method only consumes one token. """ text = self.token if len(text) < 1 or text[0] not in ('\'', '"'): raise self._ParseError('Expected string.') if len(text) < 2 or text[-1] != text[0]: raise self._ParseError('String missing ending quote.') try: result = _CUnescape(text[1:-1]) except ValueError, e: raise self._ParseError(str(e)) self.NextToken() return result def ConsumeEnum(self, field): try: result = ParseEnum(field, self.token) except ValueError, e: raise self._ParseError(str(e)) self.NextToken() return result def ParseErrorPreviousToken(self, message): """Creates and *returns* a ParseError for the previously read token. Args: message: A message to set for the exception. Returns: A ParseError instance. 
""" return ParseError('%d:%d : %s' % ( self._previous_line + 1, self._previous_column + 1, message)) def _ParseError(self, message): """Creates and *returns* a ParseError for the current token.""" return ParseError('%d:%d : %s' % ( self._line + 1, self._column + 1, message)) def _StringParseError(self, e): return self._ParseError('Couldn\'t parse string: ' + str(e)) def NextToken(self): """Reads the next meaningful token.""" self._previous_line = self._line self._previous_column = self._column self._column += len(self.token) self._SkipWhitespace() if not self._lines and len(self._current_line) <= self._column: self.token = '' return match = self._TOKEN.match(self._current_line, self._column) if match: token = match.group(0) self.token = token else: self.token = self._current_line[self._column] # text.encode('string_escape') does not seem to satisfy our needs as it # encodes unprintable characters using two-digit hex escapes whereas our # C++ unescaping function allows hex escapes to be any length. So, # "\0011".encode('string_escape') ends up being "\\x011", which will be # decoded in C++ as a single-character string with char code 0x11. def _CEscape(text, as_utf8): def escape(c): o = ord(c) if o == 10: return r"\n" # optional escape if o == 13: return r"\r" # optional escape if o == 9: return r"\t" # optional escape if o == 39: return r"\'" # optional escape if o == 34: return r'\"' # necessary escape if o == 92: return r"\\" # necessary escape # necessary escapes if not as_utf8 and (o >= 127 or o < 32): return "\\%03o" % o return c return "".join([escape(c) for c in text]) _CUNESCAPE_HEX = re.compile('\\\\x([0-9a-fA-F]{2}|[0-9a-fA-F])') def _CUnescape(text): def ReplaceHex(m): return chr(int(m.group(0)[2:], 16)) # This is required because the 'string_escape' encoding doesn't # allow single-digit hex escapes (like '\xf'). 
result = _CUNESCAPE_HEX.sub(ReplaceHex, text) return result.decode('string_escape') def ParseInteger(text, is_signed=False, is_long=False): """Parses an integer. Args: text: The text to parse. is_signed: True if a signed integer must be parsed. is_long: True if a long integer must be parsed. Returns: The integer value. Raises: ValueError: Thrown Iff the text is not a valid integer. """ # Do the actual parsing. Exception handling is propagated to caller. try: result = int(text, 0) except ValueError: raise ValueError('Couldn\'t parse integer: %s' % text) # Check if the integer is sane. Exceptions handled by callers. checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] checker.CheckValue(result) return result def ParseFloat(text): """Parse a floating point number. Args: text: Text to parse. Returns: The number parsed. Raises: ValueError: If a floating point number couldn't be parsed. """ try: # Assume Python compatible syntax. return float(text) except ValueError: # Check alternative spellings. if _FLOAT_INFINITY.match(text): if text[0] == '-': return float('-inf') else: return float('inf') elif _FLOAT_NAN.match(text): return float('nan') else: # assume '1.0f' format try: return float(text.rstrip('f')) except ValueError: raise ValueError('Couldn\'t parse float: %s' % text) def ParseBool(text): """Parse a boolean value. Args: text: Text to parse. Returns: Boolean values parsed Raises: ValueError: If text is not a valid boolean. """ if text in ('true', 't', '1'): return True elif text in ('false', 'f', '0'): return False else: raise ValueError('Expected "true" or "false".') def ParseEnum(field, value): """Parse an enum value. The value can be specified by a number (the enum value), or by a string literal (the enum name). Args: field: Enum field descriptor. value: String value. Returns: Enum value number. Raises: ValueError: If the enum value could not be parsed. """ enum_descriptor = field.enum_type try: number = int(value, 0) except ValueError: # Identifier. 
enum_value = enum_descriptor.values_by_name.get(value, None) if enum_value is None: raise ValueError( 'Enum type "%s" has no value named %s.' % ( enum_descriptor.full_name, value)) else: # Numeric value. enum_value = enum_descriptor.values_by_number.get(number, None) if enum_value is None: raise ValueError( 'Enum type "%s" has no value with number %d.' % ( enum_descriptor.full_name, number)) return enum_value.number
bsd-3-clause
jazkarta/edx-platform
cms/envs/dev_with_worker.py
127
1180
""" This config file follows the dev enviroment, but adds the requirement of a celery worker running in the background to process celery tasks. The worker can be executed using: django_admin.py celery worker """ # We intentionally define lots of variables that aren't used, and # want to import all variables from base settings files # pylint: disable=wildcard-import, unused-wildcard-import from dev import * ################################# CELERY ###################################### # Requires a separate celery worker CELERY_ALWAYS_EAGER = False # Use django db as the broker and result store BROKER_URL = 'django://' INSTALLED_APPS += ('djcelery.transport', ) CELERY_RESULT_BACKEND = 'database' DJKOMBU_POLLING_INTERVAL = 1.0 # Disable transaction management because we are using a worker. Views # that request a task and wait for the result will deadlock otherwise. MIDDLEWARE_CLASSES = tuple( c for c in MIDDLEWARE_CLASSES if c != 'django.middleware.transaction.TransactionMiddleware') # Note: other alternatives for disabling transactions don't work in 1.4 # https://code.djangoproject.com/ticket/2304 # https://code.djangoproject.com/ticket/16039
agpl-3.0
TNT-Samuel/Coding-Projects
DNS Server/Source/Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py
56
4550
from __future__ import absolute_import, division, unicode_literals

from collections import OrderedDict
import re

from pip._vendor.six import string_types

from . import base
from .._utils import moduleFactoryFactory

# Splits a Clark-notation tag "{namespace}localname" into its two parts.
tag_regexp = re.compile("{([^}]*)}(.*)")


def getETreeBuilder(ElementTreeImplementation):
    # Builds a TreeWalker class bound to one particular ElementTree
    # implementation and returns the resulting namespace via locals();
    # getETreeModule below wraps this in a module factory.
    ElementTree = ElementTreeImplementation
    # The comment factory returns the sentinel used as .tag on comment nodes.
    ElementTreeCommentType = ElementTree.Comment("asd").tag

    class TreeWalker(base.NonRecursiveTreeWalker):  # pylint:disable=unused-variable
        """Given the particular ElementTree representation, this implementation,
        to avoid using recursion, returns "nodes" as tuples with the following
        content:

        1. The current element

        2. The index of the element relative to its parent

        3. A stack of ancestor elements

        4. A flag "text", "tail" or None to indicate if the current node is a
           text node; either the text or tail of the current element (1)
        """
        def getNodeDetails(self, node):
            if isinstance(node, tuple):  # It might be the root Element
                elt, _, _, flag = node
                if flag in ("text", "tail"):
                    return base.TEXT, getattr(elt, flag)
                else:
                    node = elt

            # An ElementTree (as opposed to an Element) has no .tag; unwrap
            # it to its root element.
            if not(hasattr(node, "tag")):
                node = node.getroot()

            if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"):
                return (base.DOCUMENT,)

            elif node.tag == "<!DOCTYPE>":
                return (base.DOCTYPE, node.text,
                        node.get("publicId"), node.get("systemId"))

            elif node.tag == ElementTreeCommentType:
                return base.COMMENT, node.text

            else:
                assert isinstance(node.tag, string_types), type(node.tag)
                # This is assumed to be an ordinary element
                match = tag_regexp.match(node.tag)
                if match:
                    namespace, tag = match.groups()
                else:
                    namespace = None
                    tag = node.tag
                # Attribute names may also be in Clark notation; key the dict
                # by (namespace, localname) pairs.
                attrs = OrderedDict()
                for name, value in list(node.attrib.items()):
                    match = tag_regexp.match(name)
                    if match:
                        attrs[(match.group(1), match.group(2))] = value
                    else:
                        attrs[(None, name)] = value
                return (base.ELEMENT, namespace, tag,
                        attrs, len(node) or node.text)

        def getFirstChild(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                # Bare element (the traversal root): start with fresh state.
                element, key, parents, flag = node, None, [], None

            if flag in ("text", "tail"):
                # Text nodes never have children.
                return None
            else:
                if element.text:
                    return element, key, parents, "text"
                elif len(element):
                    parents.append(element)
                    return element[0], 0, parents, None
                else:
                    return None

        def getNextSibling(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                return None

            if flag == "text":
                # After the leading text comes the element's first child.
                if len(element):
                    parents.append(element)
                    return element[0], 0, parents, None
                else:
                    return None
            else:
                # `key` is the element's index within parents[-1].
                if element.tail and flag != "tail":
                    return element, key, parents, "tail"
                elif key < len(parents[-1]) - 1:
                    return parents[-1][key + 1], key + 1, parents, None
                else:
                    return None

        def getParentNode(self, node):
            if isinstance(node, tuple):
                element, key, parents, flag = node
            else:
                return None

            if flag == "text":
                if not parents:
                    return element
                else:
                    return element, key, parents, None
            else:
                parent = parents.pop()
                if not parents:
                    return parent
                else:
                    # index() is only correct if the grandparent holds exactly
                    # one reference to this parent element.
                    assert list(parents[-1]).count(parent) == 1
                    return parent, list(parents[-1]).index(parent), parents, None

    return locals()


getETreeModule = moduleFactoryFactory(getETreeBuilder)
gpl-3.0
Perferom/android_external_chromium_org
chrome/browser/nacl_host/test/debug_stub_browser_tests.py
97
3312
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import re
import sys
import xml.etree.ElementTree


def GetTargetArch(connection):
  """Get the CPU architecture of the NaCl application.

  Args:
    connection: An object with an RspRequest(packet) method speaking the GDB
        remote serial protocol to the NaCl debug stub.

  Returns:
    The architecture name from the stub's target.xml description
    (e.g. 'i386', 'i386:x86-64' or 'iwmmxt').
  """
  reply = connection.RspRequest('qXfer:features:read:target.xml:0,fff')
  # 'l' marks the last (here: only) chunk of the qXfer transfer.
  assert reply[0] == 'l', reply
  tree = xml.etree.ElementTree.fromstring(reply[1:])
  arch_tag = tree.find('architecture')
  assert arch_tag is not None, reply
  return arch_tag.text.strip()


def ReverseBytes(byte_string):
  """Reverse bytes in the hex string: '09ab' -> 'ab09'.

  This converts little-endian number in the hex string to its normal
  string representation.
  """
  assert len(byte_string) % 2 == 0, byte_string
  return ''.join([byte_string[i - 2 : i]
                  for i in range(len(byte_string), 0, -2)])


def GetProgCtrString(connection, arch):
  """Get current execution point (program counter) as a big-endian hex string.

  The 'g' reply is the concatenation of all registers as little-endian hex:
  8 hex digits per register on 32-bit targets, 16 on x86-64.
  """
  registers = connection.RspRequest('g')
  # PC register indices can be found in
  # native_client/src/trusted/debug_stub/abi.cc in AbiInit function.
  if arch == 'i386':
    # eip index is 8; registers are 8 hex digits wide.
    return ReverseBytes(registers[8 * 8 : 8 * 8 + 8])
  if arch == 'i386:x86-64':
    # rip index is 16; registers are 16 hex digits wide.  Read all 16 digits:
    # taking only 8 would silently truncate rip to its low 32 bits and make
    # the breakpoint packets below target the wrong address.
    return ReverseBytes(registers[16 * 16 : 16 * 16 + 16])
  if arch == 'iwmmxt':
    # pc index is 15; registers are 8 hex digits wide.
    return ReverseBytes(registers[15 * 8 : 15 * 8 + 8])
  raise AssertionError('Unknown architecture: %s' % arch)


def TestContinue(connection):
  """Check that the NaCl module runs to completion when simply resumed."""
  result = connection.RspRequest('vCont;c')
  # Once the NaCl test module reports that the test passed, the NaCl <embed>
  # element is removed from the page and so the NaCl module is killed by
  # the browser what is reported as exit due to SIGKILL (X09).
  assert result == 'X09', result


def TestBreakpoint(connection):
  """Check that breakpoints and single-stepping work through the stub."""
  # Breakpoints and single-stepping might interfere with Chrome sandbox. So we
  # check that they work properly in this test.
  arch = GetTargetArch(connection)
  registers = connection.RspRequest('g')
  pc = GetProgCtrString(connection, arch)
  # Set breakpoint ('Z0' = software breakpoint of length 1).
  result = connection.RspRequest('Z0,%s,1' % pc)
  assert result == 'OK', result
  # Check that we stopped at breakpoint (T05 = SIGTRAP stop reply).
  result = connection.RspRequest('vCont;c')
  stop_reply = re.compile(r'T05thread:(\d+);')
  match = stop_reply.match(result)
  assert match, result
  thread = match.group(1)
  # Check that registers haven't changed.
  result = connection.RspRequest('g')
  assert result == registers, (result, registers)
  # Remove breakpoint.
  result = connection.RspRequest('z0,%s,1' % pc)
  assert result == 'OK', result
  # Check single stepping: same stop reply, but the pc must have advanced.
  result = connection.RspRequest('vCont;s:%s' % thread)
  assert result == 'T05thread:%s;' % thread, result
  assert pc != GetProgCtrString(connection, arch)
  # Check that we terminate normally.
  result = connection.RspRequest('vCont;c')
  assert result == 'X09', result


def Main(args):
  """Entry point: Main([port, test_name]) runs the named test.

  Raises:
    AssertionError: On protocol mismatches or an unknown test name.
  """
  # Imported lazily so the protocol helpers above can be reused/tested
  # without the NaCl-test-only gdb_rsp module on sys.path.
  import gdb_rsp
  port = int(args[0])
  name = args[1]
  connection = gdb_rsp.GdbRspConnection(('localhost', port))
  if name == 'continue':
    TestContinue(connection)
  elif name == 'breakpoint':
    TestBreakpoint(connection)
  else:
    raise AssertionError('Unknown test name: %r' % name)


if __name__ == '__main__':
  Main(sys.argv[1:])
bsd-3-clause
ccastell/Transfer-System
Website/env/lib/python3.5/site-packages/django/contrib/gis/gdal/raster/band.py
108
8086
from ctypes import byref, c_double, c_int, c_void_p

from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.prototypes import raster as capi
from django.contrib.gis.shortcuts import numpy
from django.utils import six
from django.utils.encoding import force_text
from django.utils.six.moves import range

from .const import GDAL_INTEGER_TYPES, GDAL_PIXEL_TYPES, GDAL_TO_CTYPES


class GDALBand(GDALBase):
    """
    Wraps a GDAL raster band, needs to be obtained from a GDALRaster object.
    """
    def __init__(self, source, index):
        # Keep the parent raster and fetch this band's handle from its
        # GDAL dataset pointer.  `index` is passed straight to the C API,
        # which uses 1-based band numbering (see BandList below).
        self.source = source
        self._ptr = capi.get_ds_raster_band(source._ptr, index)

    def _flush(self):
        """
        Call the flush method on the Band's parent raster and force
        a refresh of the statistics attribute when requested the next time.
        """
        self.source._flush()
        self._stats_refresh = True

    @property
    def description(self):
        """
        Returns the description string of the band.
        """
        return force_text(capi.get_band_description(self._ptr))

    @property
    def width(self):
        """
        Width (X axis) in pixels of the band.
        """
        return capi.get_band_xsize(self._ptr)

    @property
    def height(self):
        """
        Height (Y axis) in pixels of the band.
        """
        return capi.get_band_ysize(self._ptr)

    @property
    def pixel_count(self):
        """
        Returns the total number of pixels in this band.
        """
        return self.width * self.height

    # Class-level default; flipped to True by _flush() so the next
    # statistics() call recomputes instead of using cached values.
    _stats_refresh = False

    def statistics(self, refresh=False, approximate=False):
        """
        Compute statistics on the pixel values of this band.

        The return value is a tuple with the following structure:
        (minimum, maximum, mean, standard deviation).

        If approximate=True, the statistics may be computed based on
        overviews or a subset of image tiles.

        If refresh=True, the statistics will be computed from the data
        directly, and the cache will be updated where applicable.

        For empty bands (where all pixel values are nodata), all statistics
        values are returned as None.

        For raster formats using Persistent Auxiliary Metadata (PAM)
        services, the statistics might be cached in an auxiliary file.
        """
        # Prepare array with arguments for capi function.  The four
        # c_double out-parameters receive min/max/mean/std by reference.
        smin, smax, smean, sstd = c_double(), c_double(), c_double(), c_double()
        stats_args = [
            self._ptr, c_int(approximate), byref(smin), byref(smax),
            byref(smean), byref(sstd), c_void_p(), c_void_p(),
        ]

        if refresh or self._stats_refresh:
            func = capi.compute_band_statistics
        else:
            # Add additional argument to force computation if there is no
            # existing PAM file to take the values from.
            # NOTE: inserted at position 2, i.e. between the approximate
            # flag and the byref(smin) out-parameter, to match the
            # get_band_statistics prototype.
            force = True
            stats_args.insert(2, c_int(force))
            func = capi.get_band_statistics

        # Computation of statistics fails for empty bands.
        try:
            func(*stats_args)
            result = smin.value, smax.value, smean.value, sstd.value
        except GDALException:
            result = (None, None, None, None)

        self._stats_refresh = False

        return result

    @property
    def min(self):
        """
        Return the minimum pixel value for this band.
        """
        return self.statistics()[0]

    @property
    def max(self):
        """
        Return the maximum pixel value for this band.
        """
        return self.statistics()[1]

    @property
    def mean(self):
        """
        Return the mean of all pixel values of this band.
        """
        return self.statistics()[2]

    @property
    def std(self):
        """
        Return the standard deviation of all pixel values of this band.
        """
        return self.statistics()[3]

    @property
    def nodata_value(self):
        """
        Returns the nodata value for this band, or None if it isn't set.
        """
        # Get value and nodata exists flag.  The c_int is filled in by the
        # C call and is falsy (zero) when no nodata value is set.
        nodata_exists = c_int()
        value = capi.get_band_nodata_value(self._ptr, nodata_exists)
        if not nodata_exists:
            value = None
        # If the pixeltype is an integer, convert to int
        elif self.datatype() in GDAL_INTEGER_TYPES:
            value = int(value)
        return value

    @nodata_value.setter
    def nodata_value(self, value):
        """
        Sets the nodata value for this band.

        Passing None deletes the nodata value (requires GDAL >= 2.1);
        otherwise the value must be numeric.
        """
        if value is None:
            # delete_band_nodata_value is falsy when the underlying GDAL
            # library does not provide the deletion entry point.
            if not capi.delete_band_nodata_value:
                raise ValueError('GDAL >= 2.1 required to delete nodata values.')
            capi.delete_band_nodata_value(self._ptr)
        elif not isinstance(value, (int, float)):
            raise ValueError('Nodata value must be numeric or None.')
        else:
            capi.set_band_nodata_value(self._ptr, value)
        self._flush()

    def datatype(self, as_string=False):
        """
        Returns the GDAL Pixel Datatype for this band.

        With as_string=True, the numeric type code is translated through
        the GDAL_PIXEL_TYPES lookup table.
        """
        dtype = capi.get_band_datatype(self._ptr)
        if as_string:
            dtype = GDAL_PIXEL_TYPES[dtype]
        return dtype

    def data(self, data=None, offset=None, size=None, shape=None, as_memoryview=False):
        """
        Reads or writes pixel values for this band. Blocks of data can
        be accessed by specifying the width, height and offset of the
        desired block. The same specification can be used to update
        parts of a raster by providing an array of values.

        Allowed input data types are bytes, memoryview, list, tuple, and array.
        """
        # Default to the full band, starting at the origin.
        if not offset:
            offset = (0, 0)
        if not size:
            size = (self.width - offset[0], self.height - offset[1])
        if not shape:
            shape = size
        # A non-positive size here means the offset pushed the block
        # beyond the band's extent.
        if any(x <= 0 for x in size):
            raise ValueError('Offset too big for this raster.')

        if size[0] > self.width or size[1] > self.height:
            raise ValueError('Size is larger than raster.')

        # Create ctypes type array generator
        ctypes_array = GDAL_TO_CTYPES[self.datatype()] * (shape[0] * shape[1])

        if data is None:
            # Set read mode
            access_flag = 0
            # Prepare empty ctypes array
            data_array = ctypes_array()
        else:
            # Set write mode
            access_flag = 1
            # Instantiate ctypes array holding the input data
            if isinstance(data, (bytes, six.memoryview)) or (numpy and isinstance(data, numpy.ndarray)):
                data_array = ctypes_array.from_buffer_copy(data)
            else:
                data_array = ctypes_array(*data)

        # Access band
        capi.band_io(self._ptr, access_flag, offset[0], offset[1], size[0],
                     size[1], byref(data_array), shape[0], shape[1],
                     self.datatype(), 0, 0)

        # Return data as numpy array if possible, otherwise as list
        if data is None:
            if as_memoryview:
                return memoryview(data_array)
            elif numpy:
                # reshape() needs a reshape parameter with the height first.
                return numpy.frombuffer(
                    data_array, dtype=numpy.dtype(data_array)
                ).reshape(tuple(reversed(size)))
            else:
                return list(data_array)
        else:
            # Write path returns nothing; flush so the change is persisted
            # and cached statistics are invalidated.
            self._flush()


class BandList(list):
    # Lazy, list-like view over the bands of a raster; bands are
    # instantiated on access rather than stored.
    def __init__(self, source):
        self.source = source
        list.__init__(self)

    def __iter__(self):
        # GDAL band indices are 1-based, hence the shifted range.
        for idx in range(1, len(self) + 1):
            yield GDALBand(self.source, idx)

    def __len__(self):
        return capi.get_ds_raster_count(self.source._ptr)

    def __getitem__(self, index):
        # Translate the 0-based Python index to GDAL's 1-based index.
        try:
            return GDALBand(self.source, index + 1)
        except GDALException:
            raise GDALException('Unable to get band index %d' % index)
apache-2.0
dannyboi104/SickRage
lib/requests/packages/chardet/big5freq.py
3133
82594
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Big5 frequency table # by Taiwan's Mandarin Promotion Council # <http://www.edu.tw:81/mandr/> # # 128 --> 0.42261 # 256 --> 0.57851 # 512 --> 0.74851 # 1024 --> 0.89384 # 2048 --> 0.97583 # # Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 # Random Distribution Ration = 512/(5401-512)=0.105 # # Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 #Char to FreqOrder table BIG5_TABLE_SIZE = 5376 Big5CharToFreqOrder = ( 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 3682, 
3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 
214,1709,4521, 804, 35, 707, # 448 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 
749,1837, # 816 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 4555,2915,5114,4238,1726, 320,5115,3996,3046, 
788,2996,5116,2831,1774,1327,2873, # 1184 3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 
652,4272,4273, # 1904 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 
866, 740,1694,5264,2204,2843, # 2272 3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 1238,2586,3109,1259,5361, 
700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 
388,4363,5526,2491, 802, # 4064 5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 5594,2814,2032,1014,4121, 
213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 
636,4791,1856,3940, 760, # 5152 1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512 #Everything below is of no interest for detection purpose 2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392 2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408 5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424 5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440 5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456 5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472 5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488 
5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504 5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520 5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536 5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552 5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568 5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584 5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600 6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616 6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632 6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648 6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664 6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680 6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696 6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712 6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728 6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744 6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760 6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776 6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792 6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808 6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824 6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840 
6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856 6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872 6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888 6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904 6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920 6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936 6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952 6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968 6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984 6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000 6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016 6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032 6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048 6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064 6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080 6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096 6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112 6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128 6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144 6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160 6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176 6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192 
6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208 6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224 6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240 6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256 3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272 6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288 6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304 3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320 6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336 6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352 6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368 6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384 6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400 6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416 6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432 4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448 6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464 6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480 3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496 6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512 6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528 6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544 
6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560 6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576 6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592 6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608 6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624 6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640 6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656 6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672 7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688 7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704 7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720 7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736 7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752 7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768 7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784 7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800 7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816 7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832 7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848 7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864 7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880 7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896 
7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912 7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928 7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944 7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960 7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976 7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992 7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008 7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024 7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040 7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056 7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072 7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088 7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104 7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120 7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136 7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152 7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168 7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184 7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200 7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216 7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232 7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248 
7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264 7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280 7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296 7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312 7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328 7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344 7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360 7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376 7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392 7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408 7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424 7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440 3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456 7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472 7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488 7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504 7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520 4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536 7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552 7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568 7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584 7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600 
7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616 7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632 7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648 7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664 7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680 7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696 7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712 8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728 8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744 8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760 8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776 8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792 8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808 8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824 8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840 8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856 8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872 8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888 8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904 8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920 8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936 8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952 
8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968 8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984 8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000 8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016 8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032 8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048 8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064 8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080 8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096 8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112 8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128 8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144 8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160 8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176 8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192 8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208 8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224 8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240 8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256 8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272 8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288 8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304 
8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320 8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336 8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352 8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368 8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384 8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400 8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416 8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432 8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448 8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464 8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480 8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496 8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512 8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528 8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544 8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560 8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576 8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592 8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608 8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624 8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640 8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656 
8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672 8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688 4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704 8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720 8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736 8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752 8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768 9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784 9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800 9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816 9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832 9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848 9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864 9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880 9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896 9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912 9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928 9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944 9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960 9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976 9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992 9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008 
9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024 9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040 9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056 9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072 9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088 9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104 9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120 9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136 9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152 9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168 9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184 9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200 9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216 9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232 9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248 9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264 9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280 9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296 9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312 9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328 9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344 9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360 
9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376 3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392 9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408 9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424 9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440 4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456 9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472 9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488 9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504 9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520 9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536 9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552 9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568 9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584 9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600 9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616 9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632 9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648 9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664 9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680 9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696 9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712 
9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728 9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744 9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760 9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776 9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792 9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808 9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824 10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840 10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856 10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872 10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888 10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904 10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920 10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936 10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952 10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968 4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984 10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000 10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016 10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032 
10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048 10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064 10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080 10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096 10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112 4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128 10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144 10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160 10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176 10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192 10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208 10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224 10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240 10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256 10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272 10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288 10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304 10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320 10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336 
10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352 10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368 10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384 10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400 4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416 10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432 10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448 10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464 10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480 10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496 10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512 10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528 10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544 10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560 10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576 10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592 10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608 10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624 10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640 
10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656 10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672 10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688 10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704 10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720 10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736 10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752 10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768 10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784 10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800 10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816 10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832 10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848 10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864 10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880 10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896 11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912 11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928 11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944 
4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960 11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976 11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992 11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008 11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024 11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040 11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056 11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072 11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088 11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104 11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120 11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136 11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152 11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168 11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184 11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200 11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216 11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232 11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248 
11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264 11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280 11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296 11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312 11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328 11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344 11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360 11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376 11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392 11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408 11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424 11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440 11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456 11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472 4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488 11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504 11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520 11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536 11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552 
11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568 11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584 11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600 11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616 11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632 11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648 11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664 11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680 11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696 11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712 11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728 11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744 11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760 11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776 11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792 11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808 11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824 11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840 11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856 
11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872 11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888 11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904 11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920 11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936 12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952 12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968 12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984 12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000 12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016 12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032 12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048 12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064 12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080 12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096 12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112 12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128 12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144 12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160 
12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176 4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192 4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208 4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224 12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240 12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256 12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272 12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288 12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304 12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320 12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336 12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352 12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368 12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384 12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400 12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416 12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432 12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448 12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464 
12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480 12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496 12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512 12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528 12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544 12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560 12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576 12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592 12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608 12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624 12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640 12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656 12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672 12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688 12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704 12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720 12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736 12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752 12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768 
12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784 12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800 12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816 12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832 12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848 12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864 12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880 12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896 12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912 12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928 12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944 12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960 12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976 4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992 13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008 13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024 13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040 13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056 13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072 
13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088 13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104 4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120 13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136 13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152 13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168 13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184 13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200 13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216 13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232 13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248 13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264 13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280 13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296 13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312 13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328 13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344 13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360 5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376 
13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392 13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408 13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424 13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440 13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456 13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472 13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488 13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504 13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520 13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536 13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552 13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568 13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584 13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600 13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616 13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632 13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648 13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664 13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680 
13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696 13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712 13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728 13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744 13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760 13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776 13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792 13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808 13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824 13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840 13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856 13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872 13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888 13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904 13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920 13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936 13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952 13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968 13968,13969,13970,13971,13972) #13973 # flake8: noqa
gpl-3.0
dpendl00/headphones
lib/unidecode/x022.py
165
4329
# Transliteration table for the U+22xx block (Mathematical Operators).
# Most code points have no ASCII rendering and map to the '[?]'
# placeholder; only a handful of operators get a readable substitute.
_ASCII_SUBSTITUTES = {
    0x12: '-',    # MINUS SIGN
    0x15: '/',    # DIVISION SLASH
    0x16: '\\',   # SET MINUS
    0x17: '*',    # ASTERISK OPERATOR
    0x23: '|',    # DIVIDES
    0x36: ':',    # RATIO
    0x3c: '~',    # TILDE OPERATOR
    0x64: '<=',   # LESS-THAN OR EQUAL TO
    0x65: '>=',   # GREATER-THAN OR EQUAL TO
    0x66: '<=',   # LESS-THAN OVER EQUAL TO
    0x67: '>=',   # GREATER-THAN OVER EQUAL TO
}

# Expand the sparse overrides into the flat 255-entry tuple expected by
# the unidecode lookup machinery (entries 0x00 through 0xfe).
data = tuple(_ASCII_SUBSTITUTES.get(i, '[?]') for i in range(0xff))
gpl-3.0
Ashaba/rms
rmslocalenv/lib/python2.7/site-packages/django/db/models/fields/subclassing.py
44
2031
""" Convenience routines for creating non-trivial Field subclasses, as well as backwards compatibility utilities. Add SubfieldBase as the metaclass for your Field subclass, implement to_python() and the other necessary methods and everything will work seamlessly. """ import warnings from django.utils.deprecation import RemovedInDjango110Warning class SubfieldBase(type): """ A metaclass for custom Field subclasses. This ensures the model's attribute has the descriptor protocol attached to it. """ def __new__(cls, name, bases, attrs): warnings.warn("SubfieldBase has been deprecated. Use Field.from_db_value instead.", RemovedInDjango110Warning, stacklevel=2) new_class = super(SubfieldBase, cls).__new__(cls, name, bases, attrs) new_class.contribute_to_class = make_contrib( new_class, attrs.get('contribute_to_class') ) return new_class class Creator(object): """ A placeholder class that provides a way to set the attribute on the model. """ def __init__(self, field): self.field = field def __get__(self, obj, type=None): if obj is None: return self return obj.__dict__[self.field.name] def __set__(self, obj, value): obj.__dict__[self.field.name] = self.field.to_python(value) def make_contrib(superclass, func=None): """ Returns a suitable contribute_to_class() method for the Field subclass. If 'func' is passed in, it is the existing contribute_to_class() method on the subclass and it is called before anything else. It is assumed in this case that the existing contribute_to_class() calls all the necessary superclass methods. """ def contribute_to_class(self, cls, name, **kwargs): if func: func(self, cls, name, **kwargs) else: super(superclass, self).contribute_to_class(cls, name, **kwargs) setattr(cls, self.name, Creator(self)) return contribute_to_class
mit
ishank08/scikit-learn
sklearn/__init__.py
28
3073
""" Machine learning module for Python ================================== sklearn is a Python module integrating classical machine learning algorithms in the tightly-knit world of scientific Python packages (numpy, scipy, matplotlib). It aims to provide simple and efficient solutions to learning problems that are accessible to everybody and reusable in various contexts: machine-learning as a versatile tool for science and engineering. See http://scikit-learn.org for complete documentation. """ import sys import re import warnings # Make sure that DeprecationWarning within this package always gets printed warnings.filterwarnings('always', category=DeprecationWarning, module='^{0}\.'.format(re.escape(__name__))) # PEP0440 compatible formatted version, see: # https://www.python.org/dev/peps/pep-0440/ # # Generic release markers: # X.Y # X.Y.Z # For bugfix releases # # Admissible pre-release markers: # X.YaN # Alpha release # X.YbN # Beta release # X.YrcN # Release Candidate # X.Y # Final release # # Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer. # 'X.Y.dev0' is the canonical version of 'X.Y.dev' # __version__ = '0.19.dev0' try: # This variable is injected in the __builtins__ by the build # process. It used to enable importing subpackages of sklearn when # the binaries are not built __SKLEARN_SETUP__ except NameError: __SKLEARN_SETUP__ = False if __SKLEARN_SETUP__: sys.stderr.write('Partial import of sklearn during the build process.\n') # We are not importing the rest of the scikit during the build # process, as it may not be compiled yet else: from . 
import __check_build from .base import clone __check_build # avoid flakes unused variable error __all__ = ['calibration', 'cluster', 'covariance', 'cross_decomposition', 'cross_validation', 'datasets', 'decomposition', 'dummy', 'ensemble', 'exceptions', 'externals', 'feature_extraction', 'feature_selection', 'gaussian_process', 'grid_search', 'isotonic', 'kernel_approximation', 'kernel_ridge', 'learning_curve', 'linear_model', 'manifold', 'metrics', 'mixture', 'model_selection', 'multiclass', 'multioutput', 'naive_bayes', 'neighbors', 'neural_network', 'pipeline', 'preprocessing', 'random_projection', 'semi_supervised', 'svm', 'tree', 'discriminant_analysis', # Non-modules: 'clone'] def setup_module(module): """Fixture for the tests to assure globally controllable seeding of RNGs""" import os import numpy as np import random # It could have been provided in the environment _random_seed = os.environ.get('SKLEARN_SEED', None) if _random_seed is None: _random_seed = np.random.uniform() * (2 ** 31 - 1) _random_seed = int(_random_seed) print("I: Seeding RNGs with %r" % _random_seed) np.random.seed(_random_seed) random.seed(_random_seed)
bsd-3-clause
atosorigin/ansible
lib/ansible/module_utils/six/__init__.py
22
33552
# This code is strewn with things that are not defined on Python3 (unicode, # long, etc) but they are all shielded by version checks. This is also an # upstream vendored file that we're not going to modify on our own # pylint: disable=undefined-variable # Copyright (c) 2010-2019 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. """Utilities for writing code that runs on Python 2 and 3""" from __future__ import absolute_import import functools import itertools import operator import sys import types # The following makes it easier for us to script updates of the bundled code. It is not part of # upstream six # CANT_UPDATE due to py2.6 drop: https://github.com/benjaminp/six/pull/314 _BUNDLED_METADATA = {"pypi_name": "six", "version": "1.13.0"} __author__ = "Benjamin Peterson <[email protected]>" __version__ = "1.13.0" # Useful for very coarse version differentiation. 
PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 PY34 = sys.version_info[0:2] >= (3, 4) if PY3: string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes MAXSIZE = sys.maxsize else: string_types = basestring, integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str if sys.platform.startswith("java"): # Jython always uses 32 bits. MAXSIZE = int((1 << 31) - 1) else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). class X(object): def __len__(self): return 1 << 31 try: len(X()) except OverflowError: # 32-bit MAXSIZE = int((1 << 31) - 1) else: # 64-bit MAXSIZE = int((1 << 63) - 1) del X def _add_doc(func, doc): """Add documentation to a function.""" func.__doc__ = doc def _import_module(name): """Import module, returning the module after the last dot.""" __import__(name) return sys.modules[name] class _LazyDescr(object): def __init__(self, name): self.name = name def __get__(self, obj, tp): result = self._resolve() setattr(obj, self.name, result) # Invokes __set__. try: # This is a bit ugly, but it avoids running this again by # removing this descriptor. 
delattr(obj.__class__, self.name) except AttributeError: pass return result class MovedModule(_LazyDescr): def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: if new is None: new = name self.mod = new else: self.mod = old def _resolve(self): return _import_module(self.mod) def __getattr__(self, attr): _module = self._resolve() value = getattr(_module, attr) setattr(self, attr, value) return value class _LazyModule(types.ModuleType): def __init__(self, name): super(_LazyModule, self).__init__(name) self.__doc__ = self.__class__.__doc__ def __dir__(self): attrs = ["__doc__", "__name__"] attrs += [attr.name for attr in self._moved_attributes] return attrs # Subclasses should override this _moved_attributes = [] class MovedAttribute(_LazyDescr): def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr def _resolve(self): module = _import_module(self.mod) return getattr(module, self.attr) class _SixMetaPathImporter(object): """ A meta path importer to import six.moves and its submodules. This class implements a PEP302 finder and loader. It should be compatible with Python 2.5 and all existing versions of Python3 """ def __init__(self, six_module_name): self.name = six_module_name self.known_modules = {} def _add_module(self, mod, *fullnames): for fullname in fullnames: self.known_modules[self.name + "." + fullname] = mod def _get_module(self, fullname): return self.known_modules[self.name + "." 
+ fullname] def find_module(self, fullname, path=None): if fullname in self.known_modules: return self return None def __get_module(self, fullname): try: return self.known_modules[fullname] except KeyError: raise ImportError("This loader does not know module " + fullname) def load_module(self, fullname): try: # in case of a reload return sys.modules[fullname] except KeyError: pass mod = self.__get_module(fullname) if isinstance(mod, MovedModule): mod = mod._resolve() else: mod.__loader__ = self sys.modules[fullname] = mod return mod def is_package(self, fullname): """ Return true, if the named module is a package. We need this method to get correct spec objects with Python 3.4 (see PEP451) """ return hasattr(self.__get_module(fullname), "__path__") def get_code(self, fullname): """Return None Required, if is_package is implemented""" self.__get_module(fullname) # eventually raises ImportError return None get_source = get_code # same as get_code _importer = _SixMetaPathImporter(__name__) class _MovedItems(_LazyModule): """Lazy loading of moved objects""" __path__ = [] # mark as package _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), MovedAttribute("getoutput", "commands", "subprocess"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), 
MovedAttribute("StringIO", "StringIO", "io"), MovedAttribute("UserDict", "UserDict", "collections"), MovedAttribute("UserList", "UserList", "collections"), MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), MovedModule("cPickle", "cPickle", "pickle"), MovedModule("queue", "Queue"), MovedModule("reprlib", "repr"), MovedModule("socketserver", "SocketServer"), MovedModule("_thread", "thread", "_thread"), MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), 
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), ] # Add windows specific modules. if sys.platform == "win32": _moved_attributes += [ MovedModule("winreg", "_winreg"), ] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) if isinstance(attr, MovedModule): _importer._add_module(attr, "moves." 
+ attr.name) del attr _MovedItems._moved_attributes = _moved_attributes moves = _MovedItems(__name__ + ".moves") _importer._add_module(moves, "moves") class Module_six_moves_urllib_parse(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_parse""" _urllib_parse_moved_attributes = [ MovedAttribute("ParseResult", "urlparse", "urllib.parse"), MovedAttribute("SplitResult", "urlparse", "urllib.parse"), MovedAttribute("parse_qs", "urlparse", "urllib.parse"), MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), MovedAttribute("urldefrag", "urlparse", "urllib.parse"), MovedAttribute("urljoin", "urlparse", "urllib.parse"), MovedAttribute("urlparse", "urlparse", "urllib.parse"), MovedAttribute("urlsplit", "urlparse", "urllib.parse"), MovedAttribute("urlunparse", "urlparse", "urllib.parse"), MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), MovedAttribute("quote", "urllib", "urllib.parse"), MovedAttribute("quote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), MovedAttribute("splittag", "urllib", "urllib.parse"), MovedAttribute("splituser", "urllib", "urllib.parse"), MovedAttribute("splitvalue", "urllib", "urllib.parse"), MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), MovedAttribute("uses_params", "urlparse", "urllib.parse"), MovedAttribute("uses_query", "urlparse", "urllib.parse"), MovedAttribute("uses_relative", "urlparse", "urllib.parse"), ] for attr in _urllib_parse_moved_attributes: setattr(Module_six_moves_urllib_parse, attr.name, attr) del attr Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes 
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), "moves.urllib_parse", "moves.urllib.parse") class Module_six_moves_urllib_error(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_error""" _urllib_error_moved_attributes = [ MovedAttribute("URLError", "urllib2", "urllib.error"), MovedAttribute("HTTPError", "urllib2", "urllib.error"), MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), ] for attr in _urllib_error_moved_attributes: setattr(Module_six_moves_urllib_error, attr.name, attr) del attr Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), "moves.urllib_error", "moves.urllib.error") class Module_six_moves_urllib_request(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_request""" _urllib_request_moved_attributes = [ MovedAttribute("urlopen", "urllib2", "urllib.request"), MovedAttribute("install_opener", "urllib2", "urllib.request"), MovedAttribute("build_opener", "urllib2", "urllib.request"), MovedAttribute("pathname2url", "urllib", "urllib.request"), MovedAttribute("url2pathname", "urllib", "urllib.request"), MovedAttribute("getproxies", "urllib", "urllib.request"), MovedAttribute("Request", "urllib2", "urllib.request"), MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), MovedAttribute("BaseHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPBasicAuthHandler", 
"urllib2", "urllib.request"), MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), MovedAttribute("FileHandler", "urllib2", "urllib.request"), MovedAttribute("FTPHandler", "urllib2", "urllib.request"), MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), MovedAttribute("urlretrieve", "urllib", "urllib.request"), MovedAttribute("urlcleanup", "urllib", "urllib.request"), MovedAttribute("URLopener", "urllib", "urllib.request"), MovedAttribute("FancyURLopener", "urllib", "urllib.request"), MovedAttribute("proxy_bypass", "urllib", "urllib.request"), MovedAttribute("parse_http_list", "urllib2", "urllib.request"), MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) del attr Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), "moves.urllib_request", "moves.urllib.request") class Module_six_moves_urllib_response(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_response""" _urllib_response_moved_attributes = [ MovedAttribute("addbase", "urllib", "urllib.response"), MovedAttribute("addclosehook", "urllib", "urllib.response"), MovedAttribute("addinfo", "urllib", "urllib.response"), MovedAttribute("addinfourl", "urllib", "urllib.response"), ] for attr in _urllib_response_moved_attributes: setattr(Module_six_moves_urllib_response, 
attr.name, attr) del attr Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), "moves.urllib_response", "moves.urllib.response") class Module_six_moves_urllib_robotparser(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_robotparser""" _urllib_robotparser_moved_attributes = [ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), ] for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), "moves.urllib_robotparser", "moves.urllib.robotparser") class Module_six_moves_urllib(types.ModuleType): """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" __path__ = [] # mark as package parse = _importer._get_module("moves.urllib_parse") error = _importer._get_module("moves.urllib_error") request = _importer._get_module("moves.urllib_request") response = _importer._get_module("moves.urllib_response") robotparser = _importer._get_module("moves.urllib_robotparser") def __dir__(self): return ['parse', 'error', 'request', 'response', 'robotparser'] _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib") def add_move(move): """Add an item to six.moves.""" setattr(_MovedItems, move.name, move) def remove_move(name): """Remove item from six.moves.""" try: delattr(_MovedItems, name) except AttributeError: try: del moves.__dict__[name] except KeyError: raise AttributeError("no such move, %r" % (name,)) if PY3: _meth_func = "__func__" _meth_self = "__self__" _func_closure = "__closure__" _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" else: _meth_func = "im_func" _meth_self = 
"im_self" _func_closure = "func_closure" _func_code = "func_code" _func_defaults = "func_defaults" _func_globals = "func_globals" try: advance_iterator = next except NameError: def advance_iterator(it): return it.next() next = advance_iterator try: callable = callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) if PY3: def get_unbound_function(unbound): return unbound create_bound_method = types.MethodType def create_unbound_method(func, cls): return func Iterator = object else: def get_unbound_function(unbound): return unbound.im_func def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) def create_unbound_method(func, cls): return types.MethodType(func, None, cls) class Iterator(object): def next(self): return type(self).__next__(self) callable = callable _add_doc(get_unbound_function, """Get the function out of a possibly unbound function""") get_method_function = operator.attrgetter(_meth_func) get_method_self = operator.attrgetter(_meth_self) get_function_closure = operator.attrgetter(_func_closure) get_function_code = operator.attrgetter(_func_code) get_function_defaults = operator.attrgetter(_func_defaults) get_function_globals = operator.attrgetter(_func_globals) if PY3: def iterkeys(d, **kw): return iter(d.keys(**kw)) def itervalues(d, **kw): return iter(d.values(**kw)) def iteritems(d, **kw): return iter(d.items(**kw)) def iterlists(d, **kw): return iter(d.lists(**kw)) viewkeys = operator.methodcaller("keys") viewvalues = operator.methodcaller("values") viewitems = operator.methodcaller("items") else: def iterkeys(d, **kw): return d.iterkeys(**kw) def itervalues(d, **kw): return d.itervalues(**kw) def iteritems(d, **kw): return d.iteritems(**kw) def iterlists(d, **kw): return d.iterlists(**kw) viewkeys = operator.methodcaller("viewkeys") viewvalues = operator.methodcaller("viewvalues") viewitems = operator.methodcaller("viewitems") _add_doc(iterkeys, "Return 
an iterator over the keys of a dictionary.") _add_doc(itervalues, "Return an iterator over the values of a dictionary.") _add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") _add_doc(iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary.") if PY3: def b(s): return s.encode("latin-1") def u(s): return s unichr = chr import struct int2byte = struct.Struct(">B").pack del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io StringIO = io.StringIO BytesIO = io.BytesIO del io _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" else: def b(s): return s # Workaround for standalone backslash def u(s): return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr def byte2int(bs): return ord(bs[0]) def indexbytes(buf, i): return ord(buf[i]) iterbytes = functools.partial(itertools.imap, ord) import StringIO StringIO = BytesIO = StringIO.StringIO _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") def assertCountEqual(self, *args, **kwargs): return getattr(self, _assertCountEqual)(*args, **kwargs) def assertRaisesRegex(self, *args, **kwargs): return getattr(self, _assertRaisesRegex)(*args, **kwargs) def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): try: if value is None: value = tp() if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value finally: value = None tb = None else: def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: frame = sys._getframe(1) _globs_ = 
frame.f_globals if _locs_ is None: _locs_ = frame.f_locals del frame elif _locs_ is None: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): try: raise tp, value, tb finally: tb = None """) if sys.version_info[:2] == (3, 2): exec_("""def raise_from(value, from_value): try: if from_value is None: raise value raise value from from_value finally: value = None """) elif sys.version_info[:2] > (3, 2): exec_("""def raise_from(value, from_value): try: raise value from from_value finally: value = None """) else: def raise_from(value, from_value): raise value print_ = getattr(moves.builtins, "print", None) if print_ is None: def print_(*args, **kwargs): """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) if fp is None: return def write(data): if not isinstance(data, basestring): data = str(data) # If the file has an encoding, encode unicode with it. if (isinstance(fp, file) and isinstance(data, unicode) and fp.encoding is not None): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" data = data.encode(fp.encoding, errors) fp.write(data) want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: if isinstance(sep, unicode): want_unicode = True elif not isinstance(sep, str): raise TypeError("sep must be None or a string") end = kwargs.pop("end", None) if end is not None: if isinstance(end, unicode): want_unicode = True elif not isinstance(end, str): raise TypeError("end must be None or a string") if kwargs: raise TypeError("invalid keyword arguments to print()") if not want_unicode: for arg in args: if isinstance(arg, unicode): want_unicode = True break if want_unicode: newline = unicode("\n") space = unicode(" ") else: newline = "\n" space = " " if sep is None: sep = space if end is None: end = newline for i, arg in enumerate(args): if i: write(sep) write(arg) write(end) if sys.version_info[:2] < (3, 3): _print = print_ def print_(*args, **kwargs): 
fp = kwargs.get("file", sys.stdout) flush = kwargs.pop("flush", False) _print(*args, **kwargs) if flush and fp is not None: fp.flush() _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): def wrapper(f): f = functools.wraps(wrapped, assigned, updated)(f) f.__wrapped__ = wrapped return f return wrapper else: wraps = functools.wraps def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(type): def __new__(cls, name, this_bases, d): if sys.version_info[:2] >= (3, 7): # This version introduced PEP 560 that requires a bit # of extra care (we mimic what is done by __build_class__). resolved_bases = types.resolve_bases(bases) if resolved_bases is not bases: d['__orig_bases__'] = bases else: resolved_bases = bases return meta(name, resolved_bases, d) @classmethod def __prepare__(cls, name, this_bases): return meta.__prepare__(name, bases) return type.__new__(metaclass, 'temporary_class', (), {}) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) if hasattr(cls, '__qualname__'): orig_vars['__qualname__'] = cls.__qualname__ return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper def ensure_binary(s, encoding='utf-8', errors='strict'): """Coerce **s** to six.binary_type. 
For Python 2: - `unicode` -> encoded to `str` - `str` -> `str` For Python 3: - `str` -> encoded to `bytes` - `bytes` -> `bytes` """ if isinstance(s, text_type): return s.encode(encoding, errors) elif isinstance(s, binary_type): return s else: raise TypeError("not expecting type '%s'" % type(s)) def ensure_str(s, encoding='utf-8', errors='strict'): """Coerce *s* to `str`. For Python 2: - `unicode` -> encoded to `str` - `str` -> `str` For Python 3: - `str` -> `str` - `bytes` -> decoded to `str` """ if not isinstance(s, (text_type, binary_type)): raise TypeError("not expecting type '%s'" % type(s)) if PY2 and isinstance(s, text_type): s = s.encode(encoding, errors) elif PY3 and isinstance(s, binary_type): s = s.decode(encoding, errors) return s def ensure_text(s, encoding='utf-8', errors='strict'): """Coerce *s* to six.text_type. For Python 2: - `unicode` -> `unicode` - `str` -> `unicode` For Python 3: - `str` -> `str` - `bytes` -> decoded to `str` """ if isinstance(s, binary_type): return s.decode(encoding, errors) elif isinstance(s, text_type): return s else: raise TypeError("not expecting type '%s'" % type(s)) def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if PY2: if '__str__' not in klass.__dict__: raise ValueError("@python_2_unicode_compatible cannot be applied " "to %s because it doesn't define __str__()." % klass.__name__) klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass # Complete the moves implementation. # This code is at the end of this module to speed up module loading. # Turn this module into a package. 
__path__ = [] # required for PEP 302 and PEP 451 __package__ = __name__ # see PEP 366 @ReservedAssignment if globals().get("__spec__") is not None: __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable # Remove other six meta path importers, since they cause problems. This can # happen if six is removed from sys.modules and then reloaded. (Setuptools does # this for some reason.) if sys.meta_path: for i, importer in enumerate(sys.meta_path): # Here's some real nastiness: Another "instance" of the six module might # be floating around. Therefore, we can't use isinstance() to check for # the six meta path importer, since the other six instance will have # inserted an importer with different class. if (type(importer).__name__ == "_SixMetaPathImporter" and importer.name == __name__): del sys.meta_path[i] break del i, importer # Finally, add the importer to the meta path import hook. sys.meta_path.append(_importer)
gpl-3.0
ksmit799/Toontown-Source
toontown/safezone/ButterflyGlobals.py
1
7922
from pandac.PandaModules import *
import random

# Butterfly activity states; `states` maps them to their string names.
OFF = 0
FLYING = 1
LANDED = 2
states = {OFF: 'off',
 FLYING: 'Flying',
 LANDED: 'Landed'}

# Per-playground tuning tables, indexed by the playground ids below
# (TTC, DG, ESTATE).
NUM_BUTTERFLIES = (6, 36, 5)
NUM_BUTTERFLY_AREAS = (4, 1, 4)
BUTTERFLY_SPEED = 2.0
BUTTERFLY_HEIGHT = (2.2, 3.2, 2.2)
BUTTERFLY_TAKEOFF = (1.4, 1.8, 1.4)
BUTTERFLY_LANDING = (1.4, 1.8, 1.4)
MAX_LANDED_TIME = 20.0

# Playground ids used to index the tables above and ButterflyPoints.
TTC = 0
DG = 1
ESTATE = 2

# ButterflyPoints[playground][area] is a tuple of candidate landing spots.
ButterflyPoints = (
    (
        (Point3(84.0, -116.0, 3.5), Point3(95.0, -144.0, 2.6), Point3(94.0, -145.0, 2.6),
         Point3(95.0, -149.0, 2.6), Point3(50.0, -155.0, 2.6), Point3(51.0, -147.0, 2.6),
         Point3(51.0, -145.0, 2.6), Point3(14.0, -99.0, 3.1), Point3(17.0, -94.0, 3.1),
         Point3(50.0, -79.0, 3.1), Point3(47.0, -86.0, 3.1), Point3(54.0, -127.0, 2.6),
         Point3(84.0, -113.0, 3.8)),
        (Point3(-57.0, -70.0, 0.1), Point3(-55.0, -68.0, 0.1), Point3(-90.0, -77.0, 0.6),
         Point3(-90.0, -72.0, 0.1), Point3(-133.0, -50.0, 0.6), Point3(-129.0, -48.0, 0.6),
         Point3(-127.0, -25.0, 0.1), Point3(-125.0, -22.0, 0.1), Point3(-123.0, -22.0, 0.1),
         Point3(-103.0, -10.0, -3.0), Point3(-104.0, -13.0, -2.5), Point3(-100.0, -28.0, -2.7),
         Point3(-89.0, -41.0, -4.4), Point3(-58.0, -34.0, -4.1), Point3(-69.0, -18.0, -1.9),
         Point3(-65.0, -19.0, -1.9), Point3(-65.0, -16.0, -1.9), Point3(6.0, -49.0, -0.1),
         Point3(2.6, -47.0, 0.1), Point3(-33.6, -43.0, 0.0)),
        (Point3(-53.0, 3.0, -1.8), Point3(-58.0, 2.0, -1.8), Point3(-58.0, 2.0, -1.8),
         Point3(-76.0, 2.0, -1.8), Point3(-69.0, 11.0, -1.8), Point3(-100.0, 14.0, -4.1),
         Point3(-104.0, 17.0, -2.6), Point3(-125.0, 34.0, 0.1), Point3(-124.0, 30.0, 0.1),
         Point3(-113.0, 73.0, 0.6), Point3(-33.0, 78.0, 0.1), Point3(-65.0, 48.0, -3.0),
         Point3(-51.0, 33.0, -3.0), Point3(-30.0, 71.0, 0.1), Point3(-26.0, 71.0, 0.1),
         Point3(-23.0, 69.0, 0.1), Point3(-23.0, 64.0, 0.1), Point3(-5.0, 42.0, 0.1),
         Point3(-22.0, 22.0, 0.1), Point3(-27.0, 22.0, 0.1)),
        (Point3(14.0, 93.0, 3.1), Point3(17.0, 93.0, 3.1), Point3(20.0, 122.0, 2.6),
         Point3(21.0, 127.0, 2.6), Point3(23.0, 123.0, 2.6), Point3(32.0, 130.0, 2.6),
         Point3(48.0, 148.0, 2.6), Point3(64.0, 111.0, 2.6), Point3(32.0, 82.0, 2.6),
         Point3(63.0, 90.0, 3.1), Point3(68.0, 85.0, 3.1), Point3(65.0, 85.0, 3.1),
         Point3(70.0, 95.0, 3.1))),
    (
        (Point3(-7.9, 22.9, 0.05), Point3(-8.0, 17.0, 2.1), Point3(-7.5, 18.0, 2.1),
         Point3(-27.5, 70.7, 0.05), Point3(-30.0, 70.0, 1.0), Point3(-31.0, 69.0, 1.0),
         Point3(-1.0, 53.0, 2.2), Point3(-0.5, 53.0, 2.2), Point3(35.0, 71.5, 1.0),
         Point3(33.0, 69.0, 0.05), Point3(45.0, 61.0, 0.05), Point3(55.0, 62.0, 0.05),
         Point3(80.0, 74.0, 0.05), Point3(80.0, 73.0, 0.05), Point3(76.0, 46.0, 0.05),
         Point3(76.0, 45.0, 0.05), Point3(77.0, 41.0, 0.05), Point3(62.0, 28.0, 0.05),
         Point3(48.0, 24.0, 0.05), Point3(83.0, 122.0, 0.05), Point3(82.0, 123.0, 0.05),
         Point3(81.0, 81.0, 0.05), Point3(38.0, 77.0, 0.05), Point3(-26.0, 69.0, 0.05),
         Point3(-26.0, 70.0, 0.05), Point3(-61.0, 71.0, 0.05), Point3(-61.0, 70.0, 0.05),
         Point3(-78.0, 79.0, 0.05), Point3(-99.0, 106.0, 0.05), Point3(-99.0, 108.0, 0.05),
         Point3(-80.0, 123.0, 0.05), Point3(-77.0, 125.0, 0.05), Point3(-32.0, 162.0, 0.05),
         Point3(-3.0, 186.5, 2.2), Point3(-3.2, 186.8, 2.2), Point3(-1.0, 185.0, 2.2),
         Point3(39.0, 165.0, 0.05), Point3(42.0, 162.0, 0.05), Point3(62.0, 145.0, 0.05),
         Point3(64.0, 145.0, 0.05), Point3(59.0, 102.0, 0.05), Point3(32.7, 93.7, 0.05),
         Point3(31.2, 90.8, 0.05), Point3(29.8, 140.1, 0.05), Point3(16.5, 146.3, 0.05),
         Point3(15.3, 146.9, 0.05), Point3(-24.3, 128.6, 0.05), Point3(-67.9, 117.9, 0.05),
         Point3(-41.6, 88.4, 0.05), Point3(-13.6, 120.3, 0.05), Point3(26.0, 117.8, 0.05),
         Point3(22.6, 112.3, 0.05), Point3(-8.2, 107.9, 0.05), Point3(-18.1, 97.0, 0.05),
         Point3(-21.4, 92.9, 0.05), Point3(-2.1, 74.0, 0.05), Point3(19.8, 93.5, 0.05),
         Point3(21.4, 95.4, 0.05), Point3(19.2, 97.5, 0.05), Point3(-10.7, 143.3, 0.05),
         Point3(38.2, 120.7, 0.05), Point3(34.1, 101.5, 0.05), Point3(32.4, 96.5, 0.05),
         Point3(72.9, 121.8, 0.05)),),
    (
        (Point3(-40, -137, 0.025), Point3(2.35, -167.95, 0.025), Point3(70.8, -125.3, 0.025),
         Point3(63.49, -67.4, 0.025), Point3(17.5, -59.25, 0.623), Point3(-51.87, -107.0, 0.723),
         Point3(-20.325, -48.716, 4.884), Point3(51.03, -67.244, 0.244), Point3(20.02, -34.271, 7.105),
         Point3(24.731, -20.905, 9.247)),
        (Point3(88, -57.4, 0.025), Point3(92.347, -7.71, 0.169), Point3(129.39, 0.85, 0.025),
         Point3(121.14, 37, 0.025), Point3(126, 30.3, 0.025), Point3(100.3, 21.2, 0.05),
         Point3(103.42, 1.544, 0.025), Point3(82.37, -45, 0.025), Point3(103.8, 4.306, 0.05),
         Point3(119.195, -42.042, 0.025)),
        (Point3(10, 98.5, -0.028), Point3(11.65, 92.52, -0.079), Point3(-16.25, 86.67, 0.216),
         Point3(-65.3, 67.8, 0.025), Point3(-41.6, 67.0, 0.025), Point3(-34.8, 68.9, 0.025),
         Point3(-32.272, 56.65, 1.192), Point3(-63.956, 39.678, 0.281), Point3(-79.65, 36.99, 0.025),
         Point3(-14.769, 72.399, 0.244)),
        (Point3(-79.6, 36.9, 0.025), Point3(-57.6, 27.24, 2.355), Point3(-69.642, -28.137, 3.98),
         Point3(-111, -58.1, 0.025), Point3(-152.223, 25.627, 0.025), Point3(-104.4, 43.5, 0.278),
         Point3(-85.25, 10.513, 0.111), Point3(-43.6, 1.644, 3.838), Point3(-48.993, -21.968, 3.98),
         Point3(-30.088, -5.987, 7.025))))

# doId -> (unusedIndexes, usedIndexes). Each element is a per-area list of
# point indexes into ButterflyPoints[playground][area]: slot [0] holds the
# indexes not currently occupied, slot [1] the indexes handed out.
allocatedIndexes = {}


def generateIndexes(doId, playground):
    """Allocate fresh landing-point index pools for one butterfly group.

    Initially every index is in the unused pool and the used pool is
    empty.  The pools are stored as (unused, used), matching how the
    accessor functions below read the tuple.  (The original code built
    the same tuple but with the two local names swapped, which was
    misleading.)
    """
    unusedI = []
    usedI = []
    for area in ButterflyPoints[playground]:
        # list() is a no-op under Python 2 but keeps the pool mutable
        # (remove/append below) if this ever runs under Python 3.
        unusedI.append(list(range(0, len(area))))
        usedI.append([])
    allocatedIndexes[doId] = (unusedI, usedI)


def clearIndexes(doId):
    """Release the index pools for doId (no-op if none were allocated)."""
    if doId in allocatedIndexes:
        del allocatedIndexes[doId]


def getFirstRoute(playground, area, doId):
    """Compute a butterfly's first hop in the given playground area.

    Returns (curPos, curIndex, destPos, destIndex, flightTime).
    """
    curPos, curIndex = __getCurrentPos(playground, area, doId)
    destPos, destIndex, time = getNextPos(curPos, playground, area, doId)
    return (curPos, curIndex, destPos, destIndex, time)


def __getCurrentPos(playground, area, doId):
    """Pick a starting point, preferring indexes not yet handed out.

    Returns (point, index).  Falls back to point 0 when no pools were
    allocated for doId.
    """
    if doId in allocatedIndexes:
        unusedI = allocatedIndexes[doId][0][area]
        usedI = allocatedIndexes[doId][1][area]
    else:
        return (ButterflyPoints[playground][area][0], 0)
    if len(unusedI) == 0:
        # Every point is occupied: share an already-used point.
        index = random.choice(usedI)
        return (ButterflyPoints[playground][area][index], index)
    index = random.choice(unusedI)
    unusedI.remove(index)
    usedI.append(index)
    return (ButterflyPoints[playground][area][index], index)


def getNextPos(currentPos, playground, area, doId):
    """Choose the next landing point for a butterfly at currentPos.

    Returns (point, index, flightTime); flightTime includes takeoff and
    landing overhead for the playground.  Falls back to (point 0, 0, 4.0)
    when no pools were allocated for doId.
    """
    if doId in allocatedIndexes:
        unusedI = allocatedIndexes[doId][0][area]
        usedI = allocatedIndexes[doId][1][area]
    else:
        return (ButterflyPoints[playground][area][0], 0, 4.0)
    nextPos = currentPos
    # NOTE(review): if the only reachable point equals currentPos this loop
    # cannot terminate; the point tables above are assumed to avoid that.
    while nextPos == currentPos:
        if len(unusedI) == 0:
            index = random.choice(usedI)
            nextPos = ButterflyPoints[playground][area][index]
        else:
            index = random.choice(unusedI)
            nextPos = ButterflyPoints[playground][area][index]
            if nextPos != currentPos:
                # Claim the index only once we know we are moving there.
                unusedI.remove(index)
                usedI.append(index)
    dist = Vec3(nextPos - currentPos).length()
    time = dist / BUTTERFLY_SPEED + BUTTERFLY_TAKEOFF[playground] + BUTTERFLY_LANDING[playground]
    return (nextPos, index, time)


def recycleIndex(index, playground, area, doId):
    """Return a landing-point index to the unused pool for doId."""
    if doId in allocatedIndexes:
        unusedI = allocatedIndexes[doId][0][area]
        usedI = allocatedIndexes[doId][1][area]
    else:
        return None
    if index in usedI:
        usedI.remove(index)
    if index not in unusedI:
        unusedI.append(index)
    return None
mit
kieferbonk/xbmc-finnish-tv
plugin.video.ruutu/bs4/__init__.py
417
15401
"""Beautiful Soup Elixir and Tonic "The Screen-Scraper's Friend" http://www.crummy.com/software/BeautifulSoup/ Beautiful Soup uses a pluggable XML or HTML parser to parse a (possibly invalid) document into a tree representation. Beautiful Soup provides provides methods and Pythonic idioms that make it easy to navigate, search, and modify the parse tree. Beautiful Soup works with Python 2.6 and up. It works better if lxml and/or html5lib is installed. For more than you ever wanted to know about Beautiful Soup, see the documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/ """ __author__ = "Leonard Richardson ([email protected])" __version__ = "4.3.2" __copyright__ = "Copyright (c) 2004-2013 Leonard Richardson" __license__ = "MIT" __all__ = ['BeautifulSoup'] import os import re import warnings from .builder import builder_registry, ParserRejectedMarkup from .dammit import UnicodeDammit from .element import ( CData, Comment, DEFAULT_OUTPUT_ENCODING, Declaration, Doctype, NavigableString, PageElement, ProcessingInstruction, ResultSet, SoupStrainer, Tag, ) # The very first thing we do is give a useful error if someone is # running this code under Python 3 without converting it. syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).' class BeautifulSoup(Tag): """ This class defines the basic interface called by the tree builders. 
These methods will be called by the parser: reset() feed(markup) The tree builder may call these methods from its feed() implementation: handle_starttag(name, attrs) # See note about return value handle_endtag(name) handle_data(data) # Appends to the current data node endData(containerClass=NavigableString) # Ends the current data node No matter how complicated the underlying parser is, you should be able to build a tree using 'start tag' events, 'end tag' events, 'data' events, and "done with data" events. If you encounter an empty-element tag (aka a self-closing tag, like HTML's <br> tag), call handle_starttag and then handle_endtag. """ ROOT_TAG_NAME = u'[document]' # If the end-user gives no indication which tree builder they # want, look for one with these features. DEFAULT_BUILDER_FEATURES = ['html', 'fast'] ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' def __init__(self, markup="", features=None, builder=None, parse_only=None, from_encoding=None, **kwargs): """The Soup object is initialized as the 'root tag', and the provided markup (which can be a string or a file-like object) is fed into the underlying parser.""" if 'convertEntities' in kwargs: warnings.warn( "BS4 does not respect the convertEntities argument to the " "BeautifulSoup constructor. Entities are always converted " "to Unicode characters.") if 'markupMassage' in kwargs: del kwargs['markupMassage'] warnings.warn( "BS4 does not respect the markupMassage argument to the " "BeautifulSoup constructor. The tree builder is responsible " "for any necessary markup massage.") if 'smartQuotesTo' in kwargs: del kwargs['smartQuotesTo'] warnings.warn( "BS4 does not respect the smartQuotesTo argument to the " "BeautifulSoup constructor. Smart quotes are always converted " "to Unicode characters.") if 'selfClosingTags' in kwargs: del kwargs['selfClosingTags'] warnings.warn( "BS4 does not respect the selfClosingTags argument to the " "BeautifulSoup constructor. 
The tree builder is responsible " "for understanding self-closing tags.") if 'isHTML' in kwargs: del kwargs['isHTML'] warnings.warn( "BS4 does not respect the isHTML argument to the " "BeautifulSoup constructor. You can pass in features='html' " "or features='xml' to get a builder capable of handling " "one or the other.") def deprecated_argument(old_name, new_name): if old_name in kwargs: warnings.warn( 'The "%s" argument to the BeautifulSoup constructor ' 'has been renamed to "%s."' % (old_name, new_name)) value = kwargs[old_name] del kwargs[old_name] return value return None parse_only = parse_only or deprecated_argument( "parseOnlyThese", "parse_only") from_encoding = from_encoding or deprecated_argument( "fromEncoding", "from_encoding") if len(kwargs) > 0: arg = kwargs.keys().pop() raise TypeError( "__init__() got an unexpected keyword argument '%s'" % arg) if builder is None: if isinstance(features, basestring): features = [features] if features is None or len(features) == 0: features = self.DEFAULT_BUILDER_FEATURES builder_class = builder_registry.lookup(*features) if builder_class is None: raise FeatureNotFound( "Couldn't find a tree builder with the features you " "requested: %s. Do you need to install a parser library?" % ",".join(features)) builder = builder_class() self.builder = builder self.is_xml = builder.is_xml self.builder.soup = self self.parse_only = parse_only if hasattr(markup, 'read'): # It's a file-type object. markup = markup.read() elif len(markup) <= 256: # Print out warnings for a couple beginner problems # involving passing non-markup to Beautiful Soup. # Beautiful Soup will still parse the input as markup, # just in case that's what the user really wants. 
if (isinstance(markup, unicode) and not os.path.supports_unicode_filenames): possible_filename = markup.encode("utf8") else: possible_filename = markup is_file = False try: is_file = os.path.exists(possible_filename) except Exception, e: # This is almost certainly a problem involving # characters not valid in filenames on this # system. Just let it go. pass if is_file: warnings.warn( '"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup) if markup[:5] == "http:" or markup[:6] == "https:": # TODO: This is ugly but I couldn't get it to work in # Python 3 otherwise. if ((isinstance(markup, bytes) and not b' ' in markup) or (isinstance(markup, unicode) and not u' ' in markup)): warnings.warn( '"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup) for (self.markup, self.original_encoding, self.declared_html_encoding, self.contains_replacement_characters) in ( self.builder.prepare_markup(markup, from_encoding)): self.reset() try: self._feed() break except ParserRejectedMarkup: pass # Clear out the markup and remove the builder's circular # reference to this object. self.markup = None self.builder.soup = None def _feed(self): # Convert the document to Unicode. self.builder.reset() self.builder.feed(self.markup) # Close out any unfinished strings and close all the open tags. 
self.endData() while self.currentTag.name != self.ROOT_TAG_NAME: self.popTag() def reset(self): Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME) self.hidden = 1 self.builder.reset() self.current_data = [] self.currentTag = None self.tagStack = [] self.preserve_whitespace_tag_stack = [] self.pushTag(self) def new_tag(self, name, namespace=None, nsprefix=None, **attrs): """Create a new tag associated with this soup.""" return Tag(None, self.builder, name, namespace, nsprefix, attrs) def new_string(self, s, subclass=NavigableString): """Create a new NavigableString associated with this soup.""" navigable = subclass(s) navigable.setup() return navigable def insert_before(self, successor): raise NotImplementedError("BeautifulSoup objects don't support insert_before().") def insert_after(self, successor): raise NotImplementedError("BeautifulSoup objects don't support insert_after().") def popTag(self): tag = self.tagStack.pop() if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]: self.preserve_whitespace_tag_stack.pop() #print "Pop", tag.name if self.tagStack: self.currentTag = self.tagStack[-1] return self.currentTag def pushTag(self, tag): #print "Push", tag.name if self.currentTag: self.currentTag.contents.append(tag) self.tagStack.append(tag) self.currentTag = self.tagStack[-1] if tag.name in self.builder.preserve_whitespace_tags: self.preserve_whitespace_tag_stack.append(tag) def endData(self, containerClass=NavigableString): if self.current_data: current_data = u''.join(self.current_data) # If whitespace is not preserved, and this string contains # nothing but ASCII spaces, replace it with a single space # or newline. if not self.preserve_whitespace_tag_stack: strippable = True for i in current_data: if i not in self.ASCII_SPACES: strippable = False break if strippable: if '\n' in current_data: current_data = '\n' else: current_data = ' ' # Reset the data collector. 
self.current_data = [] # Should we add this string to the tree at all? if self.parse_only and len(self.tagStack) <= 1 and \ (not self.parse_only.text or \ not self.parse_only.search(current_data)): return o = containerClass(current_data) self.object_was_parsed(o) def object_was_parsed(self, o, parent=None, most_recent_element=None): """Add an object to the parse tree.""" parent = parent or self.currentTag most_recent_element = most_recent_element or self._most_recent_element o.setup(parent, most_recent_element) if most_recent_element is not None: most_recent_element.next_element = o self._most_recent_element = o parent.contents.append(o) def _popToTag(self, name, nsprefix=None, inclusivePop=True): """Pops the tag stack up to and including the most recent instance of the given tag. If inclusivePop is false, pops the tag stack up to but *not* including the most recent instqance of the given tag.""" #print "Popping to %s" % name if name == self.ROOT_TAG_NAME: # The BeautifulSoup object itself can never be popped. return most_recently_popped = None stack_size = len(self.tagStack) for i in range(stack_size - 1, 0, -1): t = self.tagStack[i] if (name == t.name and nsprefix == t.prefix): if inclusivePop: most_recently_popped = self.popTag() break most_recently_popped = self.popTag() return most_recently_popped def handle_starttag(self, name, namespace, nsprefix, attrs): """Push a start tag on to the stack. If this method returns None, the tag was rejected by the SoupStrainer. You should proceed as if the tag had not occured in the document. For instance, if this was a self-closing tag, don't call handle_endtag. 
""" # print "Start tag %s: %s" % (name, attrs) self.endData() if (self.parse_only and len(self.tagStack) <= 1 and (self.parse_only.text or not self.parse_only.search_tag(name, attrs))): return None tag = Tag(self, self.builder, name, namespace, nsprefix, attrs, self.currentTag, self._most_recent_element) if tag is None: return tag if self._most_recent_element: self._most_recent_element.next_element = tag self._most_recent_element = tag self.pushTag(tag) return tag def handle_endtag(self, name, nsprefix=None): #print "End tag: " + name self.endData() self._popToTag(name, nsprefix) def handle_data(self, data): self.current_data.append(data) def decode(self, pretty_print=False, eventual_encoding=DEFAULT_OUTPUT_ENCODING, formatter="minimal"): """Returns a string or Unicode representation of this document. To get Unicode, pass None for encoding.""" if self.is_xml: # Print the XML declaration encoding_part = '' if eventual_encoding != None: encoding_part = ' encoding="%s"' % eventual_encoding prefix = u'<?xml version="1.0"%s?>\n' % encoding_part else: prefix = u'' if not pretty_print: indent_level = None else: indent_level = 0 return prefix + super(BeautifulSoup, self).decode( indent_level, eventual_encoding, formatter) # Alias to make it easier to type import: 'from bs4 import _soup' _s = BeautifulSoup _soup = BeautifulSoup class BeautifulStoneSoup(BeautifulSoup): """Deprecated interface to an XML parser.""" def __init__(self, *args, **kwargs): kwargs['features'] = 'xml' warnings.warn( 'The BeautifulStoneSoup class is deprecated. Instead of using ' 'it, pass features="xml" into the BeautifulSoup constructor.') super(BeautifulStoneSoup, self).__init__(*args, **kwargs) class StopParsing(Exception): pass class FeatureNotFound(ValueError): pass #By default, act as an HTML pretty-printer. if __name__ == '__main__': import sys soup = BeautifulSoup(sys.stdin) print soup.prettify()
gpl-3.0
cselis86/edx-platform
lms/djangoapps/survey/tests/test_views.py
62
5051
""" Python tests for the Survey views """ import json from collections import OrderedDict from django.test.client import Client from django.contrib.auth.models import User from django.core.urlresolvers import reverse from survey.models import SurveyForm from xmodule.modulestore.tests.factories import CourseFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase class SurveyViewsTests(ModuleStoreTestCase): """ All tests for the views.py file """ def setUp(self): """ Set up the test data used in the specific tests """ super(SurveyViewsTests, self).setUp() self.client = Client() # Create two accounts self.password = 'abc' self.student = User.objects.create_user('student', '[email protected]', self.password) self.test_survey_name = 'TestSurvey' self.test_form = '<input name="field1" /><input name="field2" /><select name="ddl"><option>1</option></select>' self.student_answers = OrderedDict({ u'field1': u'value1', u'field2': u'value2', u'ddl': u'1', }) self.course = CourseFactory.create( course_survey_required=True, course_survey_name=self.test_survey_name ) self.survey = SurveyForm.create(self.test_survey_name, self.test_form) self.view_url = reverse('view_survey', args=[self.test_survey_name]) self.postback_url = reverse('submit_answers', args=[self.test_survey_name]) self.client.login(username=self.student.username, password=self.password) def test_unauthenticated_survey_view(self): """ Asserts that an unauthenticated user cannot access a survey """ anon_user = Client() resp = anon_user.get(self.view_url) self.assertEquals(resp.status_code, 302) def test_survey_not_found(self): """ Asserts that if we ask for a Survey that does not exist, then we get a 302 redirect """ resp = self.client.get(reverse('view_survey', args=['NonExisting'])) self.assertEquals(resp.status_code, 302) def test_authenticated_survey_view(self): """ Asserts that an authenticated user can see the survey """ resp = self.client.get(self.view_url) 
self.assertEquals(resp.status_code, 200) # is the SurveyForm html present in the HTML response? self.assertIn(self.test_form, resp.content) def test_unautneticated_survey_postback(self): """ Asserts that an anonymous user cannot answer a survey """ anon_user = Client() resp = anon_user.post( self.postback_url, self.student_answers ) self.assertEquals(resp.status_code, 302) def test_survey_postback_to_nonexisting_survey(self): """ Asserts that any attempts to post back to a non existing survey returns a 404 """ resp = self.client.post( reverse('submit_answers', args=['NonExisting']), self.student_answers ) self.assertEquals(resp.status_code, 404) def test_survey_postback(self): """ Asserts that a well formed postback of survey answers is properly stored in the database """ resp = self.client.post( self.postback_url, self.student_answers ) self.assertEquals(resp.status_code, 200) data = json.loads(resp.content) self.assertIn('redirect_url', data) answers = self.survey.get_answers(self.student) self.assertEquals(answers[self.student.id], self.student_answers) def test_strip_extra_fields(self): """ Verify that any not expected field name in the post-back is not stored in the database """ data = dict.copy(self.student_answers) data['csrfmiddlewaretoken'] = 'foo' data['_redirect_url'] = 'bar' resp = self.client.post( self.postback_url, data ) self.assertEquals(resp.status_code, 200) answers = self.survey.get_answers(self.student) self.assertNotIn('csrfmiddlewaretoken', answers[self.student.id]) self.assertNotIn('_redirect_url', answers[self.student.id]) def test_encoding_answers(self): """ Verify that if some potentially harmful input data is sent, that is is properly HTML encoded """ data = dict.copy(self.student_answers) data['field1'] = '<script type="javascript">alert("Deleting filesystem...")</script>' resp = self.client.post( self.postback_url, data ) self.assertEquals(resp.status_code, 200) answers = self.survey.get_answers(self.student) self.assertEqual( 
'&lt;script type=&quot;javascript&quot;&gt;alert(&quot;Deleting filesystem...&quot;)&lt;/script&gt;', answers[self.student.id]['field1'] )
agpl-3.0
Microsoft/Tocino
src/sixlowpan/bindings/modulegen__gcc_ILP32.py
38
320495
# NOTE(review): machine-generated pybindgen API-definition code for the ns-3
# 'sixlowpan' Python bindings (GCC/ILP32 variant), produced by the ns-3
# bindings scanner.  Do not edit by hand — regenerate with the scanner instead.
# The code below is kept byte-identical; only these review comments were added.
# It defines module_init()/register_types()/register_methods() hooks that
# pybindgen calls to emit the C++ wrapper sources.
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers import pybindgen.settings import warnings class ErrorHandler(pybindgen.settings.ErrorHandler): def handle_error(self, wrapper, exception, traceback_): warnings.warn("exception %r in wrapper %s" % (exception, wrapper)) return True pybindgen.settings.error_handler = ErrorHandler() import sys def module_init(): root_module = Module('ns.sixlowpan', cpp_namespace='::ns3') return root_module def register_types(module): root_module = module.get_root() ## address.h (module 'network'): ns3::Address [class] module.add_class('Address', import_from_module='ns.network') ## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration] module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network') ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class] module.add_class('AttributeConstructionList', import_from_module='ns.core') ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct] module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList']) ## buffer.h (module 'network'): ns3::Buffer [class] module.add_class('Buffer', import_from_module='ns.network') ## buffer.h (module 'network'): ns3::Buffer::Iterator [class] module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer']) ## packet.h (module 'network'): ns3::ByteTagIterator [class] module.add_class('ByteTagIterator', import_from_module='ns.network') ## packet.h (module 'network'): ns3::ByteTagIterator::Item [class] module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator']) ## byte-tag-list.h (module 'network'): ns3::ByteTagList [class] module.add_class('ByteTagList', import_from_module='ns.network') ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class] 
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList']) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct] module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator']) ## callback.h (module 'core'): ns3::CallbackBase [class] module.add_class('CallbackBase', import_from_module='ns.core') ## event-id.h (module 'core'): ns3::EventId [class] module.add_class('EventId', import_from_module='ns.core') ## hash.h (module 'core'): ns3::Hasher [class] module.add_class('Hasher', import_from_module='ns.core') ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class] module.add_class('Ipv4Address', import_from_module='ns.network') ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class] root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address']) ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class] module.add_class('Ipv4Mask', import_from_module='ns.network') ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class] module.add_class('Ipv6Address', import_from_module='ns.network') ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class] root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address']) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class] module.add_class('Ipv6Prefix', import_from_module='ns.network') ## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class] module.add_class('NetDeviceContainer', import_from_module='ns.network') ## object-base.h (module 'core'): ns3::ObjectBase [class] module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core') ## object.h (module 'core'): ns3::ObjectDeleter [struct] module.add_class('ObjectDeleter', import_from_module='ns.core') ## object-factory.h (module 'core'): ns3::ObjectFactory [class] module.add_class('ObjectFactory', import_from_module='ns.core') ## packet-metadata.h 
(module 'network'): ns3::PacketMetadata [class] module.add_class('PacketMetadata', import_from_module='ns.network') ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct] module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata']) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration] module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network') ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class] module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata']) ## packet.h (module 'network'): ns3::PacketTagIterator [class] module.add_class('PacketTagIterator', import_from_module='ns.network') ## packet.h (module 'network'): ns3::PacketTagIterator::Item [class] module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator']) ## packet-tag-list.h (module 'network'): ns3::PacketTagList [class] module.add_class('PacketTagList', import_from_module='ns.network') ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct] module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList']) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData_e [enumeration] module.add_enum('TagData_e', ['MAX_SIZE'], outer_class=root_module['ns3::PacketTagList::TagData'], import_from_module='ns.network') ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', 
decref_method='Unref', peekref_method='GetReferenceCount')) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch [class] module.add_class('SixLowPanDispatch') ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::Dispatch_e [enumeration] module.add_enum('Dispatch_e', ['LOWPAN_NALP', 'LOWPAN_NALP_N', 'LOWPAN_IPv6', 'LOWPAN_HC1', 'LOWPAN_BC0', 'LOWPAN_IPHC', 'LOWPAN_IPHC_N', 'LOWPAN_MESH', 'LOWPAN_MESH_N', 'LOWPAN_FRAG1', 'LOWPAN_FRAG1_N', 'LOWPAN_FRAGN', 'LOWPAN_FRAGN_N', 'LOWPAN_UNSUPPORTED'], outer_class=root_module['ns3::SixLowPanDispatch']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::NhcDispatch_e [enumeration] module.add_enum('NhcDispatch_e', ['LOWPAN_NHC', 'LOWPAN_NHC_N', 'LOWPAN_UDPNHC', 'LOWPAN_UDPNHC_N', 'LOWPAN_NHCUNSUPPORTED'], outer_class=root_module['ns3::SixLowPanDispatch']) ## sixlowpan-helper.h (module 'sixlowpan'): ns3::SixLowPanHelper [class] module.add_class('SixLowPanHelper') ## tag.h (module 'network'): ns3::Tag [class] module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase']) ## tag-buffer.h (module 'network'): ns3::TagBuffer [class] module.add_class('TagBuffer', import_from_module='ns.network') ## nstime.h (module 'core'): ns3::TimeWithUnit [class] module.add_class('TimeWithUnit', import_from_module='ns.core') ## type-id.h (module 'core'): ns3::TypeId [class] module.add_class('TypeId', import_from_module='ns.core') ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration] module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core') ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct] module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId']) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct] module.add_class('TraceSourceInformation', 
import_from_module='ns.core', outer_class=root_module['ns3::TypeId']) ## empty.h (module 'core'): ns3::empty [class] module.add_class('empty', import_from_module='ns.core') ## int64x64-double.h (module 'core'): ns3::int64x64_t [class] module.add_class('int64x64_t', import_from_module='ns.core') ## int64x64-double.h (module 'core'): ns3::int64x64_t::impl_type [enumeration] module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core') ## chunk.h (module 'network'): ns3::Chunk [class] module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase']) ## header.h (module 'network'): ns3::Header [class] module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk']) ## ipv6-header.h (module 'internet'): ns3::Ipv6Header [class] module.add_class('Ipv6Header', import_from_module='ns.internet', parent=root_module['ns3::Header']) ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::NextHeader_e [enumeration] module.add_enum('NextHeader_e', ['IPV6_EXT_HOP_BY_HOP', 'IPV6_IPV4', 'IPV6_TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet') ## object.h (module 'core'): ns3::Object [class] module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >']) ## object.h (module 'core'): ns3::Object::AggregateIterator [class] module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object']) ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream [class] module.add_class('RandomVariableStream', import_from_module='ns.core', 
parent=root_module['ns3::Object']) ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable [class] module.add_class('SequentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class] module.add_class('SimpleRefCount', 
automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class] 
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class] module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFrag1 [class] module.add_class('SixLowPanFrag1', parent=root_module['ns3::Header']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFragN [class] module.add_class('SixLowPanFragN', parent=root_module['ns3::Header']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1 [class] module.add_class('SixLowPanHc1', parent=root_module['ns3::Header']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::LowPanHc1Addr_e [enumeration] module.add_enum('LowPanHc1Addr_e', ['HC1_PIII', 'HC1_PIIC', 'HC1_PCII', 'HC1_PCIC'], outer_class=root_module['ns3::SixLowPanHc1']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::LowPanHc1NextHeader_e [enumeration] module.add_enum('LowPanHc1NextHeader_e', ['HC1_NC', 'HC1_UDP', 'HC1_ICMP', 'HC1_TCP'], outer_class=root_module['ns3::SixLowPanHc1']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc [class] module.add_class('SixLowPanIphc', parent=root_module['ns3::Header']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::TrafficClassFlowLabel_e 
[enumeration] module.add_enum('TrafficClassFlowLabel_e', ['TF_FULL', 'TF_DSCP_ELIDED', 'TF_FL_ELIDED', 'TF_ELIDED'], outer_class=root_module['ns3::SixLowPanIphc']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::Hlim_e [enumeration] module.add_enum('Hlim_e', ['HLIM_INLINE', 'HLIM_COMPR_1', 'HLIM_COMPR_64', 'HLIM_COMPR_255'], outer_class=root_module['ns3::SixLowPanIphc']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::HeaderCompression_e [enumeration] module.add_enum('HeaderCompression_e', ['HC_INLINE', 'HC_COMPR_64', 'HC_COMPR_16', 'HC_COMPR_0'], outer_class=root_module['ns3::SixLowPanIphc']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIpv6 [class] module.add_class('SixLowPanIpv6', parent=root_module['ns3::Header']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanNhcExtension [class] module.add_class('SixLowPanNhcExtension', parent=root_module['ns3::Header']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanNhcExtension::Eid_e [enumeration] module.add_enum('Eid_e', ['EID_HOPBYHOP_OPTIONS_H', 'EID_ROUTING_H', 'EID_FRAGMENTATION_H', 'EID_DESTINATION_OPTIONS_H', 'EID_MOBILITY_H', 'EID_IPv6_H'], outer_class=root_module['ns3::SixLowPanNhcExtension']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanUdpNhcExtension [class] module.add_class('SixLowPanUdpNhcExtension', parent=root_module['ns3::Header']) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanUdpNhcExtension::Ports_e [enumeration] module.add_enum('Ports_e', ['PORTS_INLINE', 'PORTS_ALL_SRC_LAST_DST', 'PORTS_LAST_SRC_ALL_DST', 'PORTS_LAST_SRC_LAST_DST'], outer_class=root_module['ns3::SixLowPanUdpNhcExtension']) ## nstime.h (module 'core'): ns3::Time [class] module.add_class('Time', import_from_module='ns.core') ## nstime.h (module 'core'): ns3::Time::Unit [enumeration] module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core') ## 
nstime.h (module 'core'): ns3::Time [class] root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t']) ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class] module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >']) ## trailer.h (module 'network'): ns3::Trailer [class] module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk']) ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable [class] module.add_class('TriangularRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable [class] module.add_class('UniformRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable [class] module.add_class('WeibullRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable [class] module.add_class('ZetaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable [class] module.add_class('ZipfRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## attribute.h (module 'core'): ns3::AttributeAccessor [class] module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >']) ## attribute.h (module 'core'): ns3::AttributeChecker [class] module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', 
parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >']) ## attribute.h (module 'core'): ns3::AttributeValue [class] module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >']) ## callback.h (module 'core'): ns3::CallbackChecker [class] module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## callback.h (module 'core'): ns3::CallbackImplBase [class] module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >']) ## callback.h (module 'core'): ns3::CallbackValue [class] module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable [class] module.add_class('ConstantRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable [class] module.add_class('DeterministicRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable [class] module.add_class('EmpiricalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class] module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable [class] module.add_class('ErlangRandomVariable', import_from_module='ns.core', 
parent=root_module['ns3::RandomVariableStream']) ## event-impl.h (module 'core'): ns3::EventImpl [class] module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >']) ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable [class] module.add_class('ExponentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable [class] module.add_class('GammaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class] module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class] module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class] module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class] module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class] module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class] module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class] module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) ## ipv6-address.h (module 'network'): 
ns3::Ipv6PrefixValue [class] module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable [class] module.add_class('LogNormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## net-device.h (module 'network'): ns3::NetDevice [class] module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object']) ## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration] module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network') ## nix-vector.h (module 'network'): ns3::NixVector [class] module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >']) ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable [class] module.add_class('NormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class] module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class] module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## packet.h (module 'network'): ns3::Packet [class] module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >']) ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable [class] module.add_class('ParetoRandomVariable', 
import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) ## sixlowpan-net-device.h (module 'sixlowpan'): ns3::SixLowPanNetDevice [class] module.add_class('SixLowPanNetDevice', parent=root_module['ns3::NetDevice']) ## sixlowpan-net-device.h (module 'sixlowpan'): ns3::SixLowPanNetDevice::DropReason [enumeration] module.add_enum('DropReason', ['DROP_FRAGMENT_TIMEOUT', 'DROP_FRAGMENT_BUFFER_FULL', 'DROP_UNKNOWN_EXTENSION'], outer_class=root_module['ns3::SixLowPanNetDevice']) ## nstime.h (module 'core'): ns3::TimeValue [class] module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## type-id.h (module 'core'): ns3::TypeIdChecker [class] module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) ## type-id.h (module 'core'): ns3::TypeIdValue [class] module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) ## address.h (module 'network'): ns3::AddressChecker [class] module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) ## address.h (module 'network'): ns3::AddressValue [class] module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) ## Register a nested module for the namespace FatalImpl nested_module = module.add_cpp_namespace('FatalImpl') register_types_ns3_FatalImpl(nested_module) ## Register a nested module for the namespace Hash nested_module = module.add_cpp_namespace('Hash') register_types_ns3_Hash(nested_module) def register_types_ns3_FatalImpl(module): root_module = module.get_root() def register_types_ns3_Hash(module): root_module = module.get_root() ## hash-function.h (module 'core'): ns3::Hash::Implementation [class] module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, 
ns3::DefaultDeleter<ns3::Hash::Implementation> >']) typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash32Function_ptr') typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash32Function_ptr*') typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash32Function_ptr&') typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash64Function_ptr') typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash64Function_ptr*') typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash64Function_ptr&') ## Register a nested module for the namespace Function nested_module = module.add_cpp_namespace('Function') register_types_ns3_Hash_Function(nested_module) def register_types_ns3_Hash_Function(module): root_module = module.get_root() ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class] module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation']) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class] module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation']) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class] module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation']) ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class] module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation']) def register_methods(root_module): register_Ns3Address_methods(root_module, root_module['ns3::Address']) register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList']) register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item']) 
# NOTE(review): register_methods() continues below — one register_Ns3*_methods
# call per wrapped class; the function body extends past the end of this chunk.
register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer']) register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator']) register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator']) register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item']) register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList']) register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator']) register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item']) register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase']) register_Ns3EventId_methods(root_module, root_module['ns3::EventId']) register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher']) register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address']) register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask']) register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address']) register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix']) register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer']) register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase']) register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter']) register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory']) register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata']) register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item']) register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator']) register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator']) register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item']) register_Ns3PacketTagList_methods(root_module, 
root_module['ns3::PacketTagList']) register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData']) register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >']) register_Ns3SixLowPanDispatch_methods(root_module, root_module['ns3::SixLowPanDispatch']) register_Ns3SixLowPanHelper_methods(root_module, root_module['ns3::SixLowPanHelper']) register_Ns3Tag_methods(root_module, root_module['ns3::Tag']) register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer']) register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit']) register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId']) register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation']) register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation']) register_Ns3Empty_methods(root_module, root_module['ns3::empty']) register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t']) register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk']) register_Ns3Header_methods(root_module, root_module['ns3::Header']) register_Ns3Ipv6Header_methods(root_module, root_module['ns3::Ipv6Header']) register_Ns3Object_methods(root_module, root_module['ns3::Object']) register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator']) register_Ns3RandomVariableStream_methods(root_module, root_module['ns3::RandomVariableStream']) register_Ns3SequentialRandomVariable_methods(root_module, root_module['ns3::SequentialRandomVariable']) register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >']) 
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >']) register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >']) register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >']) register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >']) register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >']) register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >']) register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >']) register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >']) register_Ns3SixLowPanFrag1_methods(root_module, root_module['ns3::SixLowPanFrag1']) 
register_Ns3SixLowPanFragN_methods(root_module, root_module['ns3::SixLowPanFragN']) register_Ns3SixLowPanHc1_methods(root_module, root_module['ns3::SixLowPanHc1']) register_Ns3SixLowPanIphc_methods(root_module, root_module['ns3::SixLowPanIphc']) register_Ns3SixLowPanIpv6_methods(root_module, root_module['ns3::SixLowPanIpv6']) register_Ns3SixLowPanNhcExtension_methods(root_module, root_module['ns3::SixLowPanNhcExtension']) register_Ns3SixLowPanUdpNhcExtension_methods(root_module, root_module['ns3::SixLowPanUdpNhcExtension']) register_Ns3Time_methods(root_module, root_module['ns3::Time']) register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor']) register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer']) register_Ns3TriangularRandomVariable_methods(root_module, root_module['ns3::TriangularRandomVariable']) register_Ns3UniformRandomVariable_methods(root_module, root_module['ns3::UniformRandomVariable']) register_Ns3WeibullRandomVariable_methods(root_module, root_module['ns3::WeibullRandomVariable']) register_Ns3ZetaRandomVariable_methods(root_module, root_module['ns3::ZetaRandomVariable']) register_Ns3ZipfRandomVariable_methods(root_module, root_module['ns3::ZipfRandomVariable']) register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor']) register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker']) register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue']) register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker']) register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase']) register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue']) register_Ns3ConstantRandomVariable_methods(root_module, root_module['ns3::ConstantRandomVariable']) register_Ns3DeterministicRandomVariable_methods(root_module, root_module['ns3::DeterministicRandomVariable']) 
register_Ns3EmpiricalRandomVariable_methods(root_module, root_module['ns3::EmpiricalRandomVariable']) register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue']) register_Ns3ErlangRandomVariable_methods(root_module, root_module['ns3::ErlangRandomVariable']) register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl']) register_Ns3ExponentialRandomVariable_methods(root_module, root_module['ns3::ExponentialRandomVariable']) register_Ns3GammaRandomVariable_methods(root_module, root_module['ns3::GammaRandomVariable']) register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker']) register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue']) register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker']) register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue']) register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker']) register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue']) register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker']) register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue']) register_Ns3LogNormalRandomVariable_methods(root_module, root_module['ns3::LogNormalRandomVariable']) register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice']) register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector']) register_Ns3NormalRandomVariable_methods(root_module, root_module['ns3::NormalRandomVariable']) register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker']) register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue']) register_Ns3Packet_methods(root_module, root_module['ns3::Packet']) register_Ns3ParetoRandomVariable_methods(root_module, root_module['ns3::ParetoRandomVariable']) 
# NOTE(review): auto-generated by pybindgen. Below are the final registrations
# of register_methods(), followed by the per-class register_Ns3*_methods(cls)
# helpers (Address, AttributeConstructionList, Buffer, Buffer::Iterator,
# ByteTagIterator/List, CallbackBase, EventId, Hasher, Ipv4Address, ...).
# Each helper mirrors one C++ class: cls.add_constructor/add_method/
# add_instance_attribute/add_binary_comparison_operator calls whose
# signatures are copied verbatim from the ns-3 headers named in the ##
# comments (e.g. address.h, buffer.h). Regenerate via the bindings scan
# rather than editing by hand, so the wrappers stay in sync with the headers.
register_Ns3SixLowPanNetDevice_methods(root_module, root_module['ns3::SixLowPanNetDevice']) register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue']) register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker']) register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue']) register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker']) register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue']) register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation']) register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a']) register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32']) register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64']) register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3']) return def register_Ns3Address_methods(root_module, cls): cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('!=') cls.add_output_stream_operator() cls.add_binary_comparison_operator('==') ## address.h (module 'network'): ns3::Address::Address() [constructor] cls.add_constructor([]) ## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor] cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')]) ## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor] cls.add_constructor([param('ns3::Address const &', 'address')]) ## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function] cls.add_method('CheckCompatible', 'bool', [param('uint8_t', 'type'), param('uint8_t', 'len')], is_const=True) ## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, 
uint8_t len) [member function] cls.add_method('CopyAllFrom', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')]) ## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function] cls.add_method('CopyAllTo', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint8_t', 'len')], is_const=True) ## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function] cls.add_method('CopyFrom', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')]) ## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function] cls.add_method('CopyTo', 'uint32_t', [param('uint8_t *', 'buffer')], is_const=True) ## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function] cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'buffer')]) ## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function] cls.add_method('GetLength', 'uint8_t', [], is_const=True) ## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True) ## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function] cls.add_method('IsInvalid', 'bool', [], is_const=True) ## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function] cls.add_method('IsMatchingType', 'bool', [param('uint8_t', 'type')], is_const=True) ## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function] cls.add_method('Register', 'uint8_t', [], is_static=True) ## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function] cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'buffer')], is_const=True) return def 
register_Ns3AttributeConstructionList_methods(root_module, cls): ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor] cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')]) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor] cls.add_constructor([]) ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function] cls.add_method('Add', 'void', [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')]) ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function] cls.add_method('Begin', 'std::_List_const_iterator< ns3::AttributeConstructionList::Item >', [], is_const=True) ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function] cls.add_method('End', 'std::_List_const_iterator< ns3::AttributeConstructionList::Item >', [], is_const=True) ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('Find', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True) return def register_Ns3AttributeConstructionListItem_methods(root_module, cls): ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor] cls.add_constructor([]) ## attribute-construction-list.h 
(module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor] cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')]) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable] cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable] cls.add_instance_attribute('name', 'std::string', is_const=False) ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable] cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False) return def register_Ns3Buffer_methods(root_module, cls): ## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor] cls.add_constructor([]) ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor] cls.add_constructor([param('uint32_t', 'dataSize')]) ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor] cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')]) ## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [copy constructor] cls.add_constructor([param('ns3::Buffer const &', 'o')]) ## buffer.h (module 'network'): bool ns3::Buffer::AddAtEnd(uint32_t end) [member function] cls.add_method('AddAtEnd', 'bool', [param('uint32_t', 'end')]) ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function] cls.add_method('AddAtEnd', 'void', [param('ns3::Buffer const &', 'o')]) ## buffer.h (module 'network'): bool ns3::Buffer::AddAtStart(uint32_t start) [member function] cls.add_method('AddAtStart', 'bool', [param('uint32_t', 'start')]) ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function] 
cls.add_method('Begin', 'ns3::Buffer::Iterator', [], is_const=True) ## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function] cls.add_method('CopyData', 'void', [param('std::ostream *', 'os'), param('uint32_t', 'size')], is_const=True) ## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function] cls.add_method('CopyData', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], is_const=True) ## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function] cls.add_method('CreateFragment', 'ns3::Buffer', [param('uint32_t', 'start'), param('uint32_t', 'length')], is_const=True) ## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFullCopy() const [member function] cls.add_method('CreateFullCopy', 'ns3::Buffer', [], is_const=True) ## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function] cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')]) ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function] cls.add_method('End', 'ns3::Buffer::Iterator', [], is_const=True) ## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentEndOffset() const [member function] cls.add_method('GetCurrentEndOffset', 'int32_t', [], is_const=True) ## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentStartOffset() const [member function] cls.add_method('GetCurrentStartOffset', 'int32_t', [], is_const=True) ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True) ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function] cls.add_method('GetSize', 'uint32_t', [], is_const=True) ## buffer.h 
(module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function] cls.add_method('PeekData', 'uint8_t const *', [], is_const=True) ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function] cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')]) ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function] cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')]) ## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function] cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True) return def register_Ns3BufferIterator_methods(root_module, cls): ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [copy constructor] cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')]) ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor] cls.add_constructor([]) ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function] cls.add_method('CalculateIpChecksum', 'uint16_t', [param('uint16_t', 'size')]) ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function] cls.add_method('CalculateIpChecksum', 'uint16_t', [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')]) ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function] cls.add_method('GetDistanceFrom', 'uint32_t', [param('ns3::Buffer::Iterator const &', 'o')], is_const=True) ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function] cls.add_method('GetSize', 'uint32_t', [], is_const=True) ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() 
const [member function] cls.add_method('IsEnd', 'bool', [], is_const=True) ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function] cls.add_method('IsStart', 'bool', [], is_const=True) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function] cls.add_method('Next', 'void', []) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function] cls.add_method('Next', 'void', [param('uint32_t', 'delta')]) ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::PeekU8() [member function] cls.add_method('PeekU8', 'uint8_t', []) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function] cls.add_method('Prev', 'void', []) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function] cls.add_method('Prev', 'void', [param('uint32_t', 'delta')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function] cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(ns3::Buffer::Iterator start, uint32_t size) [member function] cls.add_method('Read', 'void', [param('ns3::Buffer::Iterator', 'start'), param('uint32_t', 'size')]) ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function] cls.add_method('ReadLsbtohU16', 'uint16_t', []) ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function] cls.add_method('ReadLsbtohU32', 'uint32_t', []) ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function] cls.add_method('ReadLsbtohU64', 'uint64_t', []) ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function] cls.add_method('ReadNtohU16', 'uint16_t', []) ## buffer.h (module 'network'): uint32_t 
ns3::Buffer::Iterator::ReadNtohU32() [member function] cls.add_method('ReadNtohU32', 'uint32_t', []) ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function] cls.add_method('ReadNtohU64', 'uint64_t', []) ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function] cls.add_method('ReadU16', 'uint16_t', []) ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function] cls.add_method('ReadU32', 'uint32_t', []) ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function] cls.add_method('ReadU64', 'uint64_t', []) ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function] cls.add_method('ReadU8', 'uint8_t', []) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function] cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function] cls.add_method('Write', 'void', [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function] cls.add_method('WriteHtolsbU16', 'void', [param('uint16_t', 'data')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function] cls.add_method('WriteHtolsbU32', 'void', [param('uint32_t', 'data')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function] cls.add_method('WriteHtolsbU64', 'void', [param('uint64_t', 'data')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function] cls.add_method('WriteHtonU16', 'void', [param('uint16_t', 'data')]) ## buffer.h (module 'network'): void 
ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function] cls.add_method('WriteHtonU32', 'void', [param('uint32_t', 'data')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function] cls.add_method('WriteHtonU64', 'void', [param('uint64_t', 'data')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function] cls.add_method('WriteU16', 'void', [param('uint16_t', 'data')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function] cls.add_method('WriteU32', 'void', [param('uint32_t', 'data')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function] cls.add_method('WriteU64', 'void', [param('uint64_t', 'data')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function] cls.add_method('WriteU8', 'void', [param('uint8_t', 'data')]) ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function] cls.add_method('WriteU8', 'void', [param('uint8_t', 'data'), param('uint32_t', 'len')]) return def register_Ns3ByteTagIterator_methods(root_module, cls): ## packet.h (module 'network'): ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const & arg0) [copy constructor] cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')]) ## packet.h (module 'network'): bool ns3::ByteTagIterator::HasNext() const [member function] cls.add_method('HasNext', 'bool', [], is_const=True) ## packet.h (module 'network'): ns3::ByteTagIterator::Item ns3::ByteTagIterator::Next() [member function] cls.add_method('Next', 'ns3::ByteTagIterator::Item', []) return def register_Ns3ByteTagIteratorItem_methods(root_module, cls): ## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [copy constructor] cls.add_constructor([param('ns3::ByteTagIterator::Item const 
&', 'arg0')]) ## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function] cls.add_method('GetEnd', 'uint32_t', [], is_const=True) ## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function] cls.add_method('GetStart', 'uint32_t', [], is_const=True) ## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function] cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True) ## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True) return def register_Ns3ByteTagList_methods(root_module, cls): ## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor] cls.add_constructor([]) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [copy constructor] cls.add_constructor([param('ns3::ByteTagList const &', 'o')]) ## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function] cls.add_method('Add', 'ns3::TagBuffer', [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')]) ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function] cls.add_method('Add', 'void', [param('ns3::ByteTagList const &', 'o')]) ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t adjustment, int32_t appendOffset) [member function] cls.add_method('AddAtEnd', 'void', [param('int32_t', 'adjustment'), param('int32_t', 'appendOffset')]) ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t adjustment, int32_t prependOffset) [member function] cls.add_method('AddAtStart', 'void', [param('int32_t', 'adjustment'), param('int32_t', 
'prependOffset')]) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function] cls.add_method('Begin', 'ns3::ByteTagList::Iterator', [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')], is_const=True) ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function] cls.add_method('RemoveAll', 'void', []) return def register_Ns3ByteTagListIterator_methods(root_module, cls): ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Iterator(ns3::ByteTagList::Iterator const & arg0) [copy constructor] cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')]) ## byte-tag-list.h (module 'network'): uint32_t ns3::ByteTagList::Iterator::GetOffsetStart() const [member function] cls.add_method('GetOffsetStart', 'uint32_t', [], is_const=True) ## byte-tag-list.h (module 'network'): bool ns3::ByteTagList::Iterator::HasNext() const [member function] cls.add_method('HasNext', 'bool', [], is_const=True) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item ns3::ByteTagList::Iterator::Next() [member function] cls.add_method('Next', 'ns3::ByteTagList::Iterator::Item', []) return def register_Ns3ByteTagListIteratorItem_methods(root_module, cls): ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::ByteTagList::Iterator::Item const & arg0) [copy constructor] cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')]) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::TagBuffer buf) [constructor] cls.add_constructor([param('ns3::TagBuffer', 'buf')]) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::buf [variable] cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::end [variable] cls.add_instance_attribute('end', 
'int32_t', is_const=False) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::size [variable] cls.add_instance_attribute('size', 'uint32_t', is_const=False) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::start [variable] cls.add_instance_attribute('start', 'int32_t', is_const=False) ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::tid [variable] cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False) return def register_Ns3CallbackBase_methods(root_module, cls): ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor] cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')]) ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor] cls.add_constructor([]) ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function] cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True) ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor] cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')], visibility='protected') ## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function] cls.add_method('Demangle', 'std::string', [param('std::string const &', 'mangled')], is_static=True, visibility='protected') return def register_Ns3EventId_methods(root_module, cls): cls.add_binary_comparison_operator('!=') cls.add_binary_comparison_operator('==') ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor] cls.add_constructor([param('ns3::EventId const &', 'arg0')]) ## event-id.h (module 'core'): ns3::EventId::EventId() [constructor] cls.add_constructor([]) ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t 
context, uint32_t uid) [constructor] cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')]) ## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function] cls.add_method('Cancel', 'void', []) ## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function] cls.add_method('GetContext', 'uint32_t', [], is_const=True) ## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function] cls.add_method('GetTs', 'uint64_t', [], is_const=True) ## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function] cls.add_method('GetUid', 'uint32_t', [], is_const=True) ## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function] cls.add_method('IsExpired', 'bool', [], is_const=True) ## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function] cls.add_method('IsRunning', 'bool', [], is_const=True) ## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function] cls.add_method('PeekEventImpl', 'ns3::EventImpl *', [], is_const=True) return def register_Ns3Hasher_methods(root_module, cls): ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hasher const &', 'arg0')]) ## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor] cls.add_constructor([]) ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor] cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')]) ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')]) ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function] 
cls.add_method('GetHash32', 'uint32_t', [param('std::string const', 's')]) ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')]) ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function] cls.add_method('GetHash64', 'uint64_t', [param('std::string const', 's')]) ## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function] cls.add_method('clear', 'ns3::Hasher &', []) return def register_Ns3Ipv4Address_methods(root_module, cls): cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('!=') cls.add_output_stream_operator() cls.add_binary_comparison_operator('==') ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor] cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')]) ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor] cls.add_constructor([]) ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor] cls.add_constructor([param('uint32_t', 'address')]) ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor] cls.add_constructor([param('char const *', 'address')]) ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function] cls.add_method('CombineMask', 'ns3::Ipv4Address', [param('ns3::Ipv4Mask const &', 'mask')], is_const=True) ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function] cls.add_method('ConvertFrom', 'ns3::Ipv4Address', [param('ns3::Address const &', 'address')], is_static=True) ## ipv4-address.h (module 'network'): static ns3::Ipv4Address 
ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function] cls.add_method('Deserialize', 'ns3::Ipv4Address', [param('uint8_t const *', 'buf')], is_static=True) ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function] cls.add_method('Get', 'uint32_t', [], is_const=True) ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function] cls.add_method('GetAny', 'ns3::Ipv4Address', [], is_static=True) ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function] cls.add_method('GetBroadcast', 'ns3::Ipv4Address', [], is_static=True) ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function] cls.add_method('GetLoopback', 'ns3::Ipv4Address', [], is_static=True) ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function] cls.add_method('GetSubnetDirectedBroadcast', 'ns3::Ipv4Address', [param('ns3::Ipv4Mask const &', 'mask')], is_const=True) ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function] cls.add_method('GetZero', 'ns3::Ipv4Address', [], is_static=True) ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function] cls.add_method('IsBroadcast', 'bool', [], is_const=True) ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function] cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Address const &', 'other')], is_const=True) ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function] cls.add_method('IsLocalMulticast', 'bool', [], is_const=True) ## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function] 
cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True) ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function] cls.add_method('IsMulticast', 'bool', [], is_const=True) ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function] cls.add_method('IsSubnetDirectedBroadcast', 'bool', [param('ns3::Ipv4Mask const &', 'mask')], is_const=True) ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True) ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function] cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True) ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function] cls.add_method('Set', 'void', [param('uint32_t', 'address')]) ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function] cls.add_method('Set', 'void', [param('char const *', 'address')]) return def register_Ns3Ipv4Mask_methods(root_module, cls): cls.add_binary_comparison_operator('!=') cls.add_output_stream_operator() cls.add_binary_comparison_operator('==') ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor] cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')]) ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor] cls.add_constructor([]) ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor] cls.add_constructor([param('uint32_t', 'mask')]) ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor] cls.add_constructor([param('char const *', 'mask')]) ## ipv4-address.h (module 'network'): 
uint32_t ns3::Ipv4Mask::Get() const [member function] cls.add_method('Get', 'uint32_t', [], is_const=True) ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function] cls.add_method('GetInverse', 'uint32_t', [], is_const=True) ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function] cls.add_method('GetLoopback', 'ns3::Ipv4Mask', [], is_static=True) ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function] cls.add_method('GetOnes', 'ns3::Ipv4Mask', [], is_static=True) ## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function] cls.add_method('GetPrefixLength', 'uint16_t', [], is_const=True) ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function] cls.add_method('GetZero', 'ns3::Ipv4Mask', [], is_static=True) ## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function] cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')], is_const=True) ## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function] cls.add_method('IsMatch', 'bool', [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')], is_const=True) ## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True) ## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function] cls.add_method('Set', 'void', [param('uint32_t', 'mask')]) return def register_Ns3Ipv6Address_methods(root_module, cls): cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('!=') cls.add_output_stream_operator() cls.add_binary_comparison_operator('==') ## ipv6-address.h (module 'network'): 
ns3::Ipv6Address::Ipv6Address() [constructor] cls.add_constructor([]) ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor] cls.add_constructor([param('char const *', 'address')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor] cls.add_constructor([param('uint8_t *', 'address')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor] cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor] cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function] cls.add_method('CombinePrefix', 'ns3::Ipv6Address', [param('ns3::Ipv6Prefix const &', 'prefix')]) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function] cls.add_method('ConvertFrom', 'ns3::Ipv6Address', [param('ns3::Address const &', 'address')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function] cls.add_method('Deserialize', 'ns3::Ipv6Address', [param('uint8_t const *', 'buf')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function] cls.add_method('GetAllHostsMulticast', 'ns3::Ipv6Address', [], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function] cls.add_method('GetAllNodesMulticast', 'ns3::Ipv6Address', [], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function] 
cls.add_method('GetAllRoutersMulticast', 'ns3::Ipv6Address', [], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function] cls.add_method('GetAny', 'ns3::Ipv6Address', [], is_static=True) ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function] cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True) ## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function] cls.add_method('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], is_const=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function] cls.add_method('GetLoopback', 'ns3::Ipv6Address', [], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function] cls.add_method('GetOnes', 'ns3::Ipv6Address', [], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function] cls.add_method('GetZero', 'ns3::Ipv6Address', [], is_static=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function] cls.add_method('IsAllHostsMulticast', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function] cls.add_method('IsAllNodesMulticast', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function] cls.add_method('IsAllRoutersMulticast', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function] cls.add_method('IsAny', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsDocumentation() const [member function] cls.add_method('IsDocumentation', 'bool', [], is_const=True) ## 
ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function] cls.add_method('IsEqual', 'bool', [param('ns3::Ipv6Address const &', 'other')], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() const [member function] cls.add_method('IsIpv4MappedAddress', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function] cls.add_method('IsLinkLocal', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function] cls.add_method('IsLinkLocalMulticast', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function] cls.add_method('IsLocalhost', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function] cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function] cls.add_method('IsMulticast', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function] cls.add_method('IsSolicitedMulticast', 'bool', [], is_const=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac16Address addr, ns3::Ipv6Address prefix) [member function] cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function] cls.add_method('MakeAutoconfiguredAddress', 
'ns3::Ipv6Address', [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac64Address addr, ns3::Ipv6Address prefix) [member function] cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac16Address mac) [member function] cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac16Address', 'mac')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function] cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac48Address', 'mac')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac64Address mac) [member function] cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac64Address', 'mac')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function] cls.add_method('MakeIpv4MappedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv4Address', 'addr')], is_static=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function] cls.add_method('MakeSolicitedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv6Address', 'addr')], is_static=True) ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True) ## 
ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function] cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True) ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function] cls.add_method('Set', 'void', [param('char const *', 'address')]) ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function] cls.add_method('Set', 'void', [param('uint8_t *', 'address')]) return def register_Ns3Ipv6Prefix_methods(root_module, cls): cls.add_binary_comparison_operator('!=') cls.add_output_stream_operator() cls.add_binary_comparison_operator('==') ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor] cls.add_constructor([]) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor] cls.add_constructor([param('uint8_t *', 'prefix')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor] cls.add_constructor([param('char const *', 'prefix')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor] cls.add_constructor([param('uint8_t', 'prefix')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor] cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')]) ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor] cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')]) ## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function] cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function] cls.add_method('GetLoopback', 'ns3::Ipv6Prefix', [], is_static=True) ## 
ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function] cls.add_method('GetOnes', 'ns3::Ipv6Prefix', [], is_static=True) ## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function] cls.add_method('GetPrefixLength', 'uint8_t', [], is_const=True) ## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function] cls.add_method('GetZero', 'ns3::Ipv6Prefix', [], is_static=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function] cls.add_method('IsEqual', 'bool', [param('ns3::Ipv6Prefix const &', 'other')], is_const=True) ## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function] cls.add_method('IsMatch', 'bool', [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')], is_const=True) ## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True) return def register_Ns3NetDeviceContainer_methods(root_module, cls): ## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & arg0) [copy constructor] cls.add_constructor([param('ns3::NetDeviceContainer const &', 'arg0')]) ## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer() [constructor] cls.add_constructor([]) ## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::Ptr<ns3::NetDevice> dev) [constructor] cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev')]) ## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(std::string devName) [constructor] cls.add_constructor([param('std::string', 'devName')]) ## net-device-container.h (module 
'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & a, ns3::NetDeviceContainer const & b) [constructor] cls.add_constructor([param('ns3::NetDeviceContainer const &', 'a'), param('ns3::NetDeviceContainer const &', 'b')]) ## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::NetDeviceContainer other) [member function] cls.add_method('Add', 'void', [param('ns3::NetDeviceContainer', 'other')]) ## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::Ptr<ns3::NetDevice> device) [member function] cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'device')]) ## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(std::string deviceName) [member function] cls.add_method('Add', 'void', [param('std::string', 'deviceName')]) ## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::Begin() const [member function] cls.add_method('Begin', '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >', [], is_const=True) ## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::End() const [member function] cls.add_method('End', '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >', [], is_const=True) ## net-device-container.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::NetDeviceContainer::Get(uint32_t i) const [member function] cls.add_method('Get', 'ns3::Ptr< ns3::NetDevice >', [param('uint32_t', 'i')], is_const=True) ## net-device-container.h (module 'network'): uint32_t ns3::NetDeviceContainer::GetN() const [member function] 
cls.add_method('GetN', 'uint32_t', [], is_const=True) return def register_Ns3ObjectBase_methods(root_module, cls): ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor] cls.add_constructor([]) ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')]) ## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function] cls.add_method('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True) ## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & value) const [member function] cls.add_method('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True) ## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function] cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function] cls.add_method('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function] cls.add_method('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member 
function] cls.add_method('TraceConnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceConnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceDisconnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('TraceDisconnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function] cls.add_method('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], visibility='protected') ## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function] cls.add_method('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True) return def register_Ns3ObjectDeleter_methods(root_module, cls): ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor] cls.add_constructor([]) ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')]) ## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function] cls.add_method('Delete', 'void', 
[param('ns3::Object *', 'object')], is_static=True) return def register_Ns3ObjectFactory_methods(root_module, cls): cls.add_output_stream_operator() ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')]) ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor] cls.add_constructor([]) ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor] cls.add_constructor([param('std::string', 'typeId')]) ## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function] cls.add_method('Create', 'ns3::Ptr< ns3::Object >', [], is_const=True) ## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True) ## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function] cls.add_method('Set', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function] cls.add_method('SetTypeId', 'void', [param('ns3::TypeId', 'tid')]) ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function] cls.add_method('SetTypeId', 'void', [param('char const *', 'tid')]) ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function] cls.add_method('SetTypeId', 'void', [param('std::string', 'tid')]) return def register_Ns3PacketMetadata_methods(root_module, cls): ## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(uint64_t uid, uint32_t size) [constructor] cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')]) ## 
packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(ns3::PacketMetadata const & o) [copy constructor] cls.add_constructor([param('ns3::PacketMetadata const &', 'o')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddAtEnd(ns3::PacketMetadata const & o) [member function] cls.add_method('AddAtEnd', 'void', [param('ns3::PacketMetadata const &', 'o')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddHeader(ns3::Header const & header, uint32_t size) [member function] cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddPaddingAtEnd(uint32_t end) [member function] cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'end')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddTrailer(ns3::Trailer const & trailer, uint32_t size) [member function] cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::PacketMetadata::BeginItem(ns3::Buffer buffer) const [member function] cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [param('ns3::Buffer', 'buffer')], is_const=True) ## packet-metadata.h (module 'network'): ns3::PacketMetadata ns3::PacketMetadata::CreateFragment(uint32_t start, uint32_t end) const [member function] cls.add_method('CreateFragment', 'ns3::PacketMetadata', [param('uint32_t', 'start'), param('uint32_t', 'end')], is_const=True) ## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Deserialize(uint8_t const * buffer, uint32_t size) [member function] cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::Enable() [member function] cls.add_method('Enable', 'void', [], 
is_static=True) ## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::EnableChecking() [member function] cls.add_method('EnableChecking', 'void', [], is_static=True) ## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True) ## packet-metadata.h (module 'network'): uint64_t ns3::PacketMetadata::GetUid() const [member function] cls.add_method('GetUid', 'uint64_t', [], is_const=True) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtEnd(uint32_t end) [member function] cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtStart(uint32_t start) [member function] cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveHeader(ns3::Header const & header, uint32_t size) [member function] cls.add_method('RemoveHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveTrailer(ns3::Trailer const & trailer, uint32_t size) [member function] cls.add_method('RemoveTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')]) ## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function] cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True) return def register_Ns3PacketMetadataItem_methods(root_module, cls): ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item() [constructor] cls.add_constructor([]) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item(ns3::PacketMetadata::Item const & arg0) [copy constructor] 
cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')]) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::current [variable] cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentSize [variable] cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromEnd [variable] cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromStart [variable] cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::isFragment [variable] cls.add_instance_attribute('isFragment', 'bool', is_const=False) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::tid [variable] cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False) return def register_Ns3PacketMetadataItemIterator_methods(root_module, cls): ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata::ItemIterator const & arg0) [copy constructor] cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')]) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata const * metadata, ns3::Buffer buffer) [constructor] cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')]) ## packet-metadata.h (module 'network'): bool ns3::PacketMetadata::ItemIterator::HasNext() const [member function] cls.add_method('HasNext', 'bool', [], is_const=True) ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item ns3::PacketMetadata::ItemIterator::Next() [member function] cls.add_method('Next', 'ns3::PacketMetadata::Item', []) 
return def register_Ns3PacketTagIterator_methods(root_module, cls): ## packet.h (module 'network'): ns3::PacketTagIterator::PacketTagIterator(ns3::PacketTagIterator const & arg0) [copy constructor] cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')]) ## packet.h (module 'network'): bool ns3::PacketTagIterator::HasNext() const [member function] cls.add_method('HasNext', 'bool', [], is_const=True) ## packet.h (module 'network'): ns3::PacketTagIterator::Item ns3::PacketTagIterator::Next() [member function] cls.add_method('Next', 'ns3::PacketTagIterator::Item', []) return def register_Ns3PacketTagIteratorItem_methods(root_module, cls): ## packet.h (module 'network'): ns3::PacketTagIterator::Item::Item(ns3::PacketTagIterator::Item const & arg0) [copy constructor] cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')]) ## packet.h (module 'network'): void ns3::PacketTagIterator::Item::GetTag(ns3::Tag & tag) const [member function] cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True) ## packet.h (module 'network'): ns3::TypeId ns3::PacketTagIterator::Item::GetTypeId() const [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True) return def register_Ns3PacketTagList_methods(root_module, cls): ## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList() [constructor] cls.add_constructor([]) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList(ns3::PacketTagList const & o) [copy constructor] cls.add_constructor([param('ns3::PacketTagList const &', 'o')]) ## packet-tag-list.h (module 'network'): void ns3::PacketTagList::Add(ns3::Tag const & tag) const [member function] cls.add_method('Add', 'void', [param('ns3::Tag const &', 'tag')], is_const=True) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData const * ns3::PacketTagList::Head() const [member function] cls.add_method('Head', 'ns3::PacketTagList::TagData const *', [], 
is_const=True) ## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Peek(ns3::Tag & tag) const [member function] cls.add_method('Peek', 'bool', [param('ns3::Tag &', 'tag')], is_const=True) ## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Remove(ns3::Tag & tag) [member function] cls.add_method('Remove', 'bool', [param('ns3::Tag &', 'tag')]) ## packet-tag-list.h (module 'network'): void ns3::PacketTagList::RemoveAll() [member function] cls.add_method('RemoveAll', 'void', []) ## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Replace(ns3::Tag & tag) [member function] cls.add_method('Replace', 'bool', [param('ns3::Tag &', 'tag')]) return def register_Ns3PacketTagListTagData_methods(root_module, cls): ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData() [constructor] cls.add_constructor([]) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData(ns3::PacketTagList::TagData const & arg0) [copy constructor] cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')]) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::count [variable] cls.add_instance_attribute('count', 'uint32_t', is_const=False) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::data [variable] cls.add_instance_attribute('data', 'uint8_t [ 20 ]', is_const=False) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::next [variable] cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False) ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::tid [variable] cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False) return def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls): ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor] cls.add_constructor([]) ## 
simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor] cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')]) ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function] cls.add_method('Cleanup', 'void', [], is_static=True) return def register_Ns3SixLowPanDispatch_methods(root_module, cls): ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::SixLowPanDispatch(ns3::SixLowPanDispatch const & arg0) [copy constructor] cls.add_constructor([param('ns3::SixLowPanDispatch const &', 'arg0')]) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::SixLowPanDispatch() [constructor] cls.add_constructor([]) ## sixlowpan-header.h (module 'sixlowpan'): static ns3::SixLowPanDispatch::Dispatch_e ns3::SixLowPanDispatch::GetDispatchType(uint8_t dispatch) [member function] cls.add_method('GetDispatchType', 'ns3::SixLowPanDispatch::Dispatch_e', [param('uint8_t', 'dispatch')], is_static=True) ## sixlowpan-header.h (module 'sixlowpan'): static ns3::SixLowPanDispatch::NhcDispatch_e ns3::SixLowPanDispatch::GetNhcDispatchType(uint8_t dispatch) [member function] cls.add_method('GetNhcDispatchType', 'ns3::SixLowPanDispatch::NhcDispatch_e', [param('uint8_t', 'dispatch')], is_static=True) return def register_Ns3SixLowPanHelper_methods(root_module, cls): ## sixlowpan-helper.h (module 'sixlowpan'): ns3::SixLowPanHelper::SixLowPanHelper(ns3::SixLowPanHelper const & arg0) [copy constructor] cls.add_constructor([param('ns3::SixLowPanHelper const &', 'arg0')]) ## sixlowpan-helper.h (module 'sixlowpan'): ns3::SixLowPanHelper::SixLowPanHelper() [constructor] cls.add_constructor([]) ## sixlowpan-helper.h (module 'sixlowpan'): int64_t 
ns3::SixLowPanHelper::AssignStreams(ns3::NetDeviceContainer c, int64_t stream) [member function] cls.add_method('AssignStreams', 'int64_t', [param('ns3::NetDeviceContainer', 'c'), param('int64_t', 'stream')]) ## sixlowpan-helper.h (module 'sixlowpan'): ns3::NetDeviceContainer ns3::SixLowPanHelper::Install(ns3::NetDeviceContainer c) [member function] cls.add_method('Install', 'ns3::NetDeviceContainer', [param('ns3::NetDeviceContainer', 'c')]) ## sixlowpan-helper.h (module 'sixlowpan'): void ns3::SixLowPanHelper::SetDeviceAttribute(std::string n1, ns3::AttributeValue const & v1) [member function] cls.add_method('SetDeviceAttribute', 'void', [param('std::string', 'n1'), param('ns3::AttributeValue const &', 'v1')]) return def register_Ns3Tag_methods(root_module, cls): ## tag.h (module 'network'): ns3::Tag::Tag() [constructor] cls.add_constructor([]) ## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [copy constructor] cls.add_constructor([param('ns3::Tag const &', 'arg0')]) ## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function] cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_virtual=True) ## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True) ## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function] cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_const=True, is_virtual=True) return def 
register_Ns3TagBuffer_methods(root_module, cls): ## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor] cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')]) ## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor] cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')]) ## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function] cls.add_method('CopyFrom', 'void', [param('ns3::TagBuffer', 'o')]) ## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function] cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')]) ## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function] cls.add_method('ReadDouble', 'double', []) ## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function] cls.add_method('ReadU16', 'uint16_t', []) ## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function] cls.add_method('ReadU32', 'uint32_t', []) ## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function] cls.add_method('ReadU64', 'uint64_t', []) ## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function] cls.add_method('ReadU8', 'uint8_t', []) ## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function] cls.add_method('TrimAtEnd', 'void', [param('uint32_t', 'trim')]) ## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function] cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')]) ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function] cls.add_method('WriteDouble', 'void', [param('double', 'v')]) ## tag-buffer.h (module 
'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function] cls.add_method('WriteU16', 'void', [param('uint16_t', 'data')]) ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function] cls.add_method('WriteU32', 'void', [param('uint32_t', 'data')]) ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function] cls.add_method('WriteU64', 'void', [param('uint64_t', 'v')]) ## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function] cls.add_method('WriteU8', 'void', [param('uint8_t', 'v')]) return def register_Ns3TimeWithUnit_methods(root_module, cls): cls.add_output_stream_operator() ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::TimeWithUnit const & arg0) [copy constructor] cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')]) ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit) [constructor] cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')]) return def register_Ns3TypeId_methods(root_module, cls): cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('!=') cls.add_output_stream_operator() cls.add_binary_comparison_operator('==') ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor] cls.add_constructor([param('char const *', 'name')]) ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor] cls.add_constructor([param('ns3::TypeId const &', 'o')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('AddAttribute', 'ns3::TypeId', 
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function] cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')], deprecated=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback) [member function] cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback')]) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function] cls.add_method('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('uint32_t', 'i')], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function] cls.add_method('GetAttributeFullName', 
'std::string', [param('uint32_t', 'i')], is_const=True) ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function] cls.add_method('GetAttributeN', 'uint32_t', [], is_const=True) ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function] cls.add_method('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function] cls.add_method('GetGroupName', 'std::string', [], is_const=True) ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function] cls.add_method('GetHash', 'uint32_t', [], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function] cls.add_method('GetName', 'std::string', [], is_const=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function] cls.add_method('GetParent', 'ns3::TypeId', [], is_const=True) ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function] cls.add_method('GetRegistered', 'ns3::TypeId', [param('uint32_t', 'i')], is_static=True) ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function] cls.add_method('GetRegisteredN', 'uint32_t', [], is_static=True) ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function] cls.add_method('GetSize', 'std::size_t', [], is_const=True) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function] cls.add_method('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('uint32_t', 'i')], is_const=True) ## type-id.h (module 
'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function] cls.add_method('GetTraceSourceN', 'uint32_t', [], is_const=True) ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function] cls.add_method('GetUid', 'uint16_t', [], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function] cls.add_method('HasConstructor', 'bool', [], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function] cls.add_method('HasParent', 'bool', [], is_const=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function] cls.add_method('HideFromDocumentation', 'ns3::TypeId', []) ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function] cls.add_method('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function] cls.add_method('LookupAttributeByName', 'bool', [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], is_const=True) ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function] cls.add_method('LookupByHash', 'ns3::TypeId', [param('uint32_t', 'hash')], is_static=True) ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function] cls.add_method('LookupByHashFailSafe', 'bool', [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')], is_static=True) ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function] cls.add_method('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True) ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> 
ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function] cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function] cls.add_method('MustHideFromDocumentation', 'bool', [], is_const=True) ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function] cls.add_method('SetAttributeInitialValue', 'bool', [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function] cls.add_method('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function] cls.add_method('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')]) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function] cls.add_method('SetSize', 'ns3::TypeId', [param('std::size_t', 'size')]) ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function] cls.add_method('SetUid', 'void', [param('uint16_t', 'tid')]) return def register_Ns3TypeIdAttributeInformation_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')]) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable] cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False) ## type-id.h 
(module 'core'): ns3::TypeId::AttributeInformation::checker [variable] cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable] cls.add_instance_attribute('flags', 'uint32_t', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable] cls.add_instance_attribute('help', 'std::string', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable] cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable] cls.add_instance_attribute('name', 'std::string', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable] cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False) return def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')]) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable] cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable] cls.add_instance_attribute('callback', 'std::string', is_const=False) ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable] cls.add_instance_attribute('help', 'std::string', is_const=False) ## type-id.h (module 'core'): 
ns3::TypeId::TraceSourceInformation::name [variable] cls.add_instance_attribute('name', 'std::string', is_const=False) return def register_Ns3Empty_methods(root_module, cls): ## empty.h (module 'core'): ns3::empty::empty() [constructor] cls.add_constructor([]) ## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor] cls.add_constructor([param('ns3::empty const &', 'arg0')]) return def register_Ns3Int64x64_t_methods(root_module, cls): cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right')) cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right')) cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right')) cls.add_unary_numeric_operator('-') cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right')) cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('>') cls.add_binary_comparison_operator('!=') cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right')) cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right')) cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right')) cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right')) cls.add_output_stream_operator() cls.add_binary_comparison_operator('<=') cls.add_binary_comparison_operator('==') cls.add_binary_comparison_operator('>=') ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor] cls.add_constructor([]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor] cls.add_constructor([param('double', 'v')]) ## int64x64-double.h (module 'core'): 
ns3::int64x64_t::int64x64_t(long double v) [constructor] cls.add_constructor([param('long double', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor] cls.add_constructor([param('int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor] cls.add_constructor([param('long int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor] cls.add_constructor([param('long long int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor] cls.add_constructor([param('unsigned int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor] cls.add_constructor([param('long unsigned int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor] cls.add_constructor([param('long long unsigned int', 'v')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor] cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor] cls.add_constructor([param('ns3::int64x64_t const &', 'o')]) ## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function] cls.add_method('GetDouble', 'double', [], is_const=True) ## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function] cls.add_method('GetHigh', 'int64_t', [], is_const=True) ## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function] cls.add_method('GetLow', 'uint64_t', [], is_const=True) ## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function] cls.add_method('Invert', 'ns3::int64x64_t', [param('uint64_t', 'v')], is_static=True) 
## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function] cls.add_method('MulByInvert', 'void', [param('ns3::int64x64_t const &', 'o')]) ## int64x64-double.h (module 'core'): ns3::int64x64_t::implementation [variable] cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True) return def register_Ns3Chunk_methods(root_module, cls): ## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor] cls.add_constructor([]) ## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [copy constructor] cls.add_constructor([param('ns3::Chunk const &', 'arg0')]) ## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function] cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_virtual=True) ## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3Header_methods(root_module, cls): cls.add_output_stream_operator() ## header.h (module 'network'): ns3::Header::Header() [constructor] cls.add_constructor([]) ## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [copy constructor] cls.add_constructor([param('ns3::Header const &', 'arg0')]) ## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function] cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_virtual=True) ## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], 
is_pure_virtual=True, is_const=True, is_virtual=True) ## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True) ## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function] cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3Ipv6Header_methods(root_module, cls): ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::Ipv6Header(ns3::Ipv6Header const & arg0) [copy constructor] cls.add_constructor([param('ns3::Ipv6Header const &', 'arg0')]) ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::Ipv6Header() [constructor] cls.add_constructor([]) ## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::Deserialize(ns3::Buffer::Iterator start) [member function] cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True) ## ipv6-header.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6Header::GetDestinationAddress() const [member function] cls.add_method('GetDestinationAddress', 'ns3::Ipv6Address', [], is_const=True) ## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::GetFlowLabel() const [member function] cls.add_method('GetFlowLabel', 'uint32_t', [], is_const=True) ## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetHopLimit() const [member function] cls.add_method('GetHopLimit', 'uint8_t', [], is_const=True) ## ipv6-header.h (module 'internet'): ns3::TypeId ns3::Ipv6Header::GetInstanceTypeId() const [member function] cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True) ## ipv6-header.h (module 
'internet'): uint8_t ns3::Ipv6Header::GetNextHeader() const [member function] cls.add_method('GetNextHeader', 'uint8_t', [], is_const=True) ## ipv6-header.h (module 'internet'): uint16_t ns3::Ipv6Header::GetPayloadLength() const [member function] cls.add_method('GetPayloadLength', 'uint16_t', [], is_const=True) ## ipv6-header.h (module 'internet'): uint32_t ns3::Ipv6Header::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True) ## ipv6-header.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6Header::GetSourceAddress() const [member function] cls.add_method('GetSourceAddress', 'ns3::Ipv6Address', [], is_const=True) ## ipv6-header.h (module 'internet'): uint8_t ns3::Ipv6Header::GetTrafficClass() const [member function] cls.add_method('GetTrafficClass', 'uint8_t', [], is_const=True) ## ipv6-header.h (module 'internet'): static ns3::TypeId ns3::Ipv6Header::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True) ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::Serialize(ns3::Buffer::Iterator start) const [member function] cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True) ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetDestinationAddress(ns3::Ipv6Address dst) [member function] cls.add_method('SetDestinationAddress', 'void', [param('ns3::Ipv6Address', 'dst')]) ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetFlowLabel(uint32_t flow) [member function] cls.add_method('SetFlowLabel', 'void', [param('uint32_t', 'flow')]) ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetHopLimit(uint8_t limit) [member function] cls.add_method('SetHopLimit', 'void', 
[param('uint8_t', 'limit')]) ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetNextHeader(uint8_t next) [member function] cls.add_method('SetNextHeader', 'void', [param('uint8_t', 'next')]) ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetPayloadLength(uint16_t len) [member function] cls.add_method('SetPayloadLength', 'void', [param('uint16_t', 'len')]) ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetSourceAddress(ns3::Ipv6Address src) [member function] cls.add_method('SetSourceAddress', 'void', [param('ns3::Ipv6Address', 'src')]) ## ipv6-header.h (module 'internet'): void ns3::Ipv6Header::SetTrafficClass(uint8_t traffic) [member function] cls.add_method('SetTrafficClass', 'void', [param('uint8_t', 'traffic')]) return def register_Ns3Object_methods(root_module, cls): ## object.h (module 'core'): ns3::Object::Object() [constructor] cls.add_constructor([]) ## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function] cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')]) ## object.h (module 'core'): void ns3::Object::Dispose() [member function] cls.add_method('Dispose', 'void', []) ## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function] cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True) ## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function] cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True) ## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## object.h (module 'core'): void ns3::Object::Initialize() [member function] cls.add_method('Initialize', 'void', []) ## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor] 
    # NOTE(review): PyBindGen-style generated binding code — prefer regenerating from the API scan over hand edits.
    cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected')
    ## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register Python bindings for ns3::Object::AggregateIterator (generated code)."""
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
    cls.add_method('HasNext', 'bool', [], is_const=True)
    ## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return
def register_Ns3RandomVariableStream_methods(root_module, cls):
    """Register Python bindings for the abstract base ns3::RandomVariableStream (generated code)."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::RandomVariableStream::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream::RandomVariableStream() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetStream(int64_t stream) [member function]
    cls.add_method('SetStream', 'void', [param('int64_t', 'stream')])
    ## random-variable-stream.h (module 'core'): int64_t ns3::RandomVariableStream::GetStream() const [member function]
    cls.add_method('GetStream', 'int64_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetAntithetic(bool isAntithetic) [member function]
    cls.add_method('SetAntithetic', 'void', [param('bool', 'isAntithetic')])
    ## random-variable-stream.h (module 'core'): bool ns3::RandomVariableStream::IsAntithetic() const [member function]
    cls.add_method('IsAntithetic', 'bool', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::RandomVariableStream::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_pure_virtual=True, is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::RandomVariableStream::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_pure_virtual=True, is_virtual=True)
    ## random-variable-stream.h (module 'core'): ns3::RngStream * ns3::RandomVariableStream::Peek() const [member function]
    cls.add_method('Peek', 'ns3::RngStream *', [], is_const=True, visibility='protected')
    return
def register_Ns3SequentialRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::SequentialRandomVariable (generated code)."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::SequentialRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable::SequentialRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMin() const [member function]
    cls.add_method('GetMin', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMax() const [member function]
    cls.add_method('GetMax', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): ns3::Ptr<ns3::RandomVariableStream> ns3::SequentialRandomVariable::GetIncrement() const [member function]
    cls.add_method('GetIncrement', 'ns3::Ptr< ns3::RandomVariableStream >', [], is_const=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetConsecutive() const [member function]
    cls.add_method('GetConsecutive', 'uint32_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<AttributeAccessor> template instantiation (generated code)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<AttributeChecker> template instantiation (generated code)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<AttributeValue> template instantiation (generated code)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<CallbackImplBase> template instantiation (generated code)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<EventImpl> template instantiation (generated code)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<Hash::Implementation> template instantiation (generated code)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<NixVector> template instantiation (generated code)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount(ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<Packet> template instantiation (generated code)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<TraceSourceAccessor> template instantiation (generated code)."""
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SixLowPanFrag1_methods(root_module, cls):
    """Register bindings for ns3::SixLowPanFrag1, the 6LoWPAN first-fragment header (generated code)."""
    cls.add_output_stream_operator()
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFrag1::SixLowPanFrag1(ns3::SixLowPanFrag1 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SixLowPanFrag1 const &', 'arg0')])
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFrag1::SixLowPanFrag1() [constructor]
    cls.add_constructor([])
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanFrag1::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanFrag1::GetDatagramSize() const [member function]
    cls.add_method('GetDatagramSize', 'uint16_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanFrag1::GetDatagramTag() const [member function]
    cls.add_method('GetDatagramTag', 'uint16_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanFrag1::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanFrag1::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanFrag1::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFrag1::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFrag1::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFrag1::SetDatagramSize(uint16_t datagramSize) [member function]
    cls.add_method('SetDatagramSize', 'void', [param('uint16_t', 'datagramSize')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFrag1::SetDatagramTag(uint16_t datagramTag) [member function]
    cls.add_method('SetDatagramTag', 'void', [param('uint16_t', 'datagramTag')])
    return
def register_Ns3SixLowPanFragN_methods(root_module, cls):
    """Register bindings for ns3::SixLowPanFragN, the 6LoWPAN subsequent-fragment header (generated code)."""
    cls.add_output_stream_operator()
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFragN::SixLowPanFragN(ns3::SixLowPanFragN const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SixLowPanFragN const &', 'arg0')])
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanFragN::SixLowPanFragN() [constructor]
    cls.add_constructor([])
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanFragN::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanFragN::GetDatagramOffset() const [member function]
    cls.add_method('GetDatagramOffset', 'uint8_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanFragN::GetDatagramSize() const [member function]
    cls.add_method('GetDatagramSize', 'uint16_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanFragN::GetDatagramTag() const [member function]
    cls.add_method('GetDatagramTag', 'uint16_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanFragN::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanFragN::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanFragN::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFragN::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFragN::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFragN::SetDatagramOffset(uint8_t datagramOffset) [member function]
    cls.add_method('SetDatagramOffset', 'void', [param('uint8_t', 'datagramOffset')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFragN::SetDatagramSize(uint16_t datagramSize) [member function]
    cls.add_method('SetDatagramSize', 'void', [param('uint16_t', 'datagramSize')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanFragN::SetDatagramTag(uint16_t datagramTag) [member function]
    cls.add_method('SetDatagramTag', 'void', [param('uint16_t', 'datagramTag')])
    return
def register_Ns3SixLowPanHc1_methods(root_module, cls):
    """Register bindings for ns3::SixLowPanHc1, the HC1 compressed-header class (generated code)."""
    cls.add_output_stream_operator()
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::SixLowPanHc1(ns3::SixLowPanHc1 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SixLowPanHc1 const &', 'arg0')])
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::SixLowPanHc1() [constructor]
    cls.add_constructor([])
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanHc1::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::LowPanHc1Addr_e ns3::SixLowPanHc1::GetDstCompression() const [member function]
    cls.add_method('GetDstCompression', 'ns3::SixLowPanHc1::LowPanHc1Addr_e', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t const * ns3::SixLowPanHc1::GetDstInterface() const [member function]
    cls.add_method('GetDstInterface', 'uint8_t const *', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t const * ns3::SixLowPanHc1::GetDstPrefix() const [member function]
    cls.add_method('GetDstPrefix', 'uint8_t const *', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanHc1::GetFlowLabel() const [member function]
    cls.add_method('GetFlowLabel', 'uint32_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanHc1::GetHopLimit() const [member function]
    cls.add_method('GetHopLimit', 'uint8_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanHc1::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanHc1::GetNextHeader() const [member function]
    cls.add_method('GetNextHeader', 'uint8_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanHc1::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanHc1::LowPanHc1Addr_e ns3::SixLowPanHc1::GetSrcCompression() const [member function]
    cls.add_method('GetSrcCompression', 'ns3::SixLowPanHc1::LowPanHc1Addr_e', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t const * ns3::SixLowPanHc1::GetSrcInterface() const [member function]
    cls.add_method('GetSrcInterface', 'uint8_t const *', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t const * ns3::SixLowPanHc1::GetSrcPrefix() const [member function]
    cls.add_method('GetSrcPrefix', 'uint8_t const *', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanHc1::GetTrafficClass() const [member function]
    cls.add_method('GetTrafficClass', 'uint8_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanHc1::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanHc1::IsHc2HeaderPresent() const [member function]
    cls.add_method('IsHc2HeaderPresent', 'bool', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanHc1::IsTcflCompression() const [member function]
    cls.add_method('IsTcflCompression', 'bool', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetDstCompression(ns3::SixLowPanHc1::LowPanHc1Addr_e dstCompression) [member function]
    cls.add_method('SetDstCompression', 'void', [param('ns3::SixLowPanHc1::LowPanHc1Addr_e', 'dstCompression')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetDstInterface(uint8_t const * dstInterface) [member function]
    cls.add_method('SetDstInterface', 'void', [param('uint8_t const *', 'dstInterface')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetDstPrefix(uint8_t const * dstPrefix) [member function]
    cls.add_method('SetDstPrefix', 'void', [param('uint8_t const *', 'dstPrefix')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetFlowLabel(uint32_t flowLabel) [member function]
    cls.add_method('SetFlowLabel', 'void', [param('uint32_t', 'flowLabel')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetHc2HeaderPresent(bool hc2HeaderPresent) [member function]
    cls.add_method('SetHc2HeaderPresent', 'void', [param('bool', 'hc2HeaderPresent')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetHopLimit(uint8_t limit) [member function]
    cls.add_method('SetHopLimit', 'void', [param('uint8_t', 'limit')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetNextHeader(uint8_t nextHeader) [member function]
    cls.add_method('SetNextHeader', 'void', [param('uint8_t', 'nextHeader')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetSrcCompression(ns3::SixLowPanHc1::LowPanHc1Addr_e srcCompression) [member function]
    cls.add_method('SetSrcCompression', 'void', [param('ns3::SixLowPanHc1::LowPanHc1Addr_e', 'srcCompression')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetSrcInterface(uint8_t const * srcInterface) [member function]
    cls.add_method('SetSrcInterface', 'void', [param('uint8_t const *', 'srcInterface')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetSrcPrefix(uint8_t const * srcPrefix) [member function]
    cls.add_method('SetSrcPrefix', 'void', [param('uint8_t const *', 'srcPrefix')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetTcflCompression(bool tcflCompression) [member function]
    cls.add_method('SetTcflCompression', 'void', [param('bool', 'tcflCompression')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanHc1::SetTrafficClass(uint8_t trafficClass) [member function]
    cls.add_method('SetTrafficClass', 'void', [param('uint8_t', 'trafficClass')])
    return
def register_Ns3SixLowPanIphc_methods(root_module, cls):
    """Register bindings for ns3::SixLowPanIphc, the IPHC compressed-header class (generated code)."""
    cls.add_output_stream_operator()
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::SixLowPanIphc(ns3::SixLowPanIphc const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SixLowPanIphc const &', 'arg0')])
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::SixLowPanIphc() [constructor]
    cls.add_constructor([])
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::SixLowPanIphc(uint8_t dispatch) [constructor]
    cls.add_constructor([param('uint8_t', 'dispatch')])
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanIphc::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanIphc::GetCid() const [member function]
    cls.add_method('GetCid', 'bool', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanIphc::GetDac() const [member function]
    cls.add_method('GetDac', 'bool', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::HeaderCompression_e ns3::SixLowPanIphc::GetDam() const [member function]
    cls.add_method('GetDam', 'ns3::SixLowPanIphc::HeaderCompression_e', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetDscp() const [member function]
    cls.add_method('GetDscp', 'uint8_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::Ipv6Address ns3::SixLowPanIphc::GetDstAddress() const [member function]
    cls.add_method('GetDstAddress', 'ns3::Ipv6Address', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetDstContextId() const [member function]
    cls.add_method('GetDstContextId', 'uint8_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetEcn() const [member function]
    cls.add_method('GetEcn', 'uint8_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanIphc::GetFlowLabel() const [member function]
    cls.add_method('GetFlowLabel', 'uint32_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::Hlim_e ns3::SixLowPanIphc::GetHlim() const [member function]
    cls.add_method('GetHlim', 'ns3::SixLowPanIphc::Hlim_e', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetHopLimit() const [member function]
    cls.add_method('GetHopLimit', 'uint8_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanIphc::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanIphc::GetM() const [member function]
    cls.add_method('GetM', 'bool', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetNextHeader() const [member function]
    cls.add_method('GetNextHeader', 'uint8_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanIphc::GetNh() const [member function]
    cls.add_method('GetNh', 'bool', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanIphc::GetSac() const [member function]
    cls.add_method('GetSac', 'bool', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::HeaderCompression_e ns3::SixLowPanIphc::GetSam() const [member function]
    cls.add_method('GetSam', 'ns3::SixLowPanIphc::HeaderCompression_e', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanIphc::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::Ipv6Address ns3::SixLowPanIphc::GetSrcAddress() const [member function]
    cls.add_method('GetSrcAddress', 'ns3::Ipv6Address', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanIphc::GetSrcContextId() const [member function]
    cls.add_method('GetSrcContextId', 'uint8_t', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIphc::TrafficClassFlowLabel_e ns3::SixLowPanIphc::GetTf() const [member function]
    cls.add_method('GetTf', 'ns3::SixLowPanIphc::TrafficClassFlowLabel_e', [], is_const=True)
    ## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanIphc::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetCid(bool cidField) [member function]
    cls.add_method('SetCid', 'void', [param('bool', 'cidField')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetDac(bool dacField) [member function]
    cls.add_method('SetDac', 'void', [param('bool', 'dacField')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetDam(ns3::SixLowPanIphc::HeaderCompression_e damField) [member function]
    cls.add_method('SetDam', 'void', [param('ns3::SixLowPanIphc::HeaderCompression_e', 'damField')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetDscp(uint8_t dscp) [member function]
    cls.add_method('SetDscp', 'void', [param('uint8_t', 'dscp')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetDstAddress(ns3::Ipv6Address dstAddress) [member function]
    cls.add_method('SetDstAddress', 'void', [param('ns3::Ipv6Address', 'dstAddress')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetDstContextId(uint8_t dstContextId) [member function]
    cls.add_method('SetDstContextId', 'void', [param('uint8_t', 'dstContextId')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetEcn(uint8_t ecn) [member function]
    cls.add_method('SetEcn', 'void', [param('uint8_t', 'ecn')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetFlowLabel(uint32_t flowLabel) [member function]
    cls.add_method('SetFlowLabel', 'void', [param('uint32_t', 'flowLabel')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetHlim(ns3::SixLowPanIphc::Hlim_e hlimField) [member function]
    cls.add_method('SetHlim', 'void', [param('ns3::SixLowPanIphc::Hlim_e', 'hlimField')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetHopLimit(uint8_t hopLimit) [member function]
    cls.add_method('SetHopLimit', 'void', [param('uint8_t', 'hopLimit')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetM(bool mField) [member function]
    cls.add_method('SetM', 'void', [param('bool', 'mField')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetNextHeader(uint8_t nextHeader) [member function]
    cls.add_method('SetNextHeader', 'void', [param('uint8_t', 'nextHeader')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetNh(bool nhField) [member function]
    cls.add_method('SetNh', 'void', [param('bool', 'nhField')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetSac(bool sacField) [member function]
    cls.add_method('SetSac', 'void', [param('bool', 'sacField')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetSam(ns3::SixLowPanIphc::HeaderCompression_e samField) [member function]
    cls.add_method('SetSam', 'void', [param('ns3::SixLowPanIphc::HeaderCompression_e', 'samField')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetSrcAddress(ns3::Ipv6Address srcAddress) [member function]
    cls.add_method('SetSrcAddress', 'void', [param('ns3::Ipv6Address', 'srcAddress')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetSrcContextId(uint8_t srcContextId) [member function]
    cls.add_method('SetSrcContextId', 'void', [param('uint8_t', 'srcContextId')])
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIphc::SetTf(ns3::SixLowPanIphc::TrafficClassFlowLabel_e tfField) [member function]
    cls.add_method('SetTf', 'void', [param('ns3::SixLowPanIphc::TrafficClassFlowLabel_e', 'tfField')])
    return
def register_Ns3SixLowPanIpv6_methods(root_module, cls):
    """Register bindings for ns3::SixLowPanIpv6, the uncompressed-IPv6 dispatch header (generated code)."""
    cls.add_output_stream_operator()
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIpv6::SixLowPanIpv6(ns3::SixLowPanIpv6 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SixLowPanIpv6 const &', 'arg0')])
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanIpv6::SixLowPanIpv6() [constructor]
    cls.add_constructor([])
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanIpv6::Deserialize(ns3::Buffer::Iterator start) [member function]
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanIpv6::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanIpv6::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanIpv6::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIpv6::Print(std::ostream & os) const [member function]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanIpv6::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True)
    return
def register_Ns3SixLowPanNhcExtension_methods(root_module, cls):
    """Register bindings for ns3::SixLowPanNhcExtension, the NHC extension-header class (generated code)."""
    cls.add_output_stream_operator()
    ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanNhcExtension::SixLowPanNhcExtension(ns3::SixLowPanNhcExtension const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SixLowPanNhcExtension const &', 'arg0')]) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanNhcExtension::SixLowPanNhcExtension() [constructor] cls.add_constructor([]) ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanNhcExtension::CopyBlob(uint8_t * blob, uint32_t size) const [member function] cls.add_method('CopyBlob', 'uint32_t', [param('uint8_t *', 'blob'), param('uint32_t', 'size')], is_const=True) ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanNhcExtension::Deserialize(ns3::Buffer::Iterator start) [member function] cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanNhcExtension::Eid_e ns3::SixLowPanNhcExtension::GetEid() const [member function] cls.add_method('GetEid', 'ns3::SixLowPanNhcExtension::Eid_e', [], is_const=True) ## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanNhcExtension::GetInstanceTypeId() const [member function] cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): uint8_t ns3::SixLowPanNhcExtension::GetNextHeader() const [member function] cls.add_method('GetNextHeader', 'uint8_t', [], is_const=True) ## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanNhcExtension::GetNh() const [member function] cls.add_method('GetNh', 'bool', [], is_const=True) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::NhcDispatch_e ns3::SixLowPanNhcExtension::GetNhcDispatchType() const [member function] cls.add_method('GetNhcDispatchType', 'ns3::SixLowPanDispatch::NhcDispatch_e', [], is_const=True, is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanNhcExtension::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True) ## sixlowpan-header.h (module 
'sixlowpan'): static ns3::TypeId ns3::SixLowPanNhcExtension::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::Serialize(ns3::Buffer::Iterator start) const [member function] cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::SetBlob(uint8_t const * blob, uint32_t size) [member function] cls.add_method('SetBlob', 'void', [param('uint8_t const *', 'blob'), param('uint32_t', 'size')]) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::SetEid(ns3::SixLowPanNhcExtension::Eid_e extensionHeaderType) [member function] cls.add_method('SetEid', 'void', [param('ns3::SixLowPanNhcExtension::Eid_e', 'extensionHeaderType')]) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::SetNextHeader(uint8_t nextHeader) [member function] cls.add_method('SetNextHeader', 'void', [param('uint8_t', 'nextHeader')]) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanNhcExtension::SetNh(bool nhField) [member function] cls.add_method('SetNh', 'void', [param('bool', 'nhField')]) return def register_Ns3SixLowPanUdpNhcExtension_methods(root_module, cls): cls.add_output_stream_operator() ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanUdpNhcExtension::SixLowPanUdpNhcExtension(ns3::SixLowPanUdpNhcExtension const & arg0) [copy constructor] cls.add_constructor([param('ns3::SixLowPanUdpNhcExtension const &', 'arg0')]) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanUdpNhcExtension::SixLowPanUdpNhcExtension() [constructor] cls.add_constructor([]) ## 
sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanUdpNhcExtension::Deserialize(ns3::Buffer::Iterator start) [member function] cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): bool ns3::SixLowPanUdpNhcExtension::GetC() const [member function] cls.add_method('GetC', 'bool', [], is_const=True) ## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanUdpNhcExtension::GetChecksum() const [member function] cls.add_method('GetChecksum', 'uint16_t', [], is_const=True) ## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanUdpNhcExtension::GetDstPort() const [member function] cls.add_method('GetDstPort', 'uint16_t', [], is_const=True) ## sixlowpan-header.h (module 'sixlowpan'): ns3::TypeId ns3::SixLowPanUdpNhcExtension::GetInstanceTypeId() const [member function] cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanDispatch::NhcDispatch_e ns3::SixLowPanUdpNhcExtension::GetNhcDispatchType() const [member function] cls.add_method('GetNhcDispatchType', 'ns3::SixLowPanDispatch::NhcDispatch_e', [], is_const=True, is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): ns3::SixLowPanUdpNhcExtension::Ports_e ns3::SixLowPanUdpNhcExtension::GetPorts() const [member function] cls.add_method('GetPorts', 'ns3::SixLowPanUdpNhcExtension::Ports_e', [], is_const=True) ## sixlowpan-header.h (module 'sixlowpan'): uint32_t ns3::SixLowPanUdpNhcExtension::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): uint16_t ns3::SixLowPanUdpNhcExtension::GetSrcPort() const [member function] cls.add_method('GetSrcPort', 'uint16_t', [], is_const=True) ## sixlowpan-header.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanUdpNhcExtension::GetTypeId() 
[member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::Serialize(ns3::Buffer::Iterator start) const [member function] cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::SetC(bool cField) [member function] cls.add_method('SetC', 'void', [param('bool', 'cField')]) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::SetChecksum(uint16_t checksum) [member function] cls.add_method('SetChecksum', 'void', [param('uint16_t', 'checksum')]) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::SetDstPort(uint16_t port) [member function] cls.add_method('SetDstPort', 'void', [param('uint16_t', 'port')]) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::SetPorts(ns3::SixLowPanUdpNhcExtension::Ports_e port) [member function] cls.add_method('SetPorts', 'void', [param('ns3::SixLowPanUdpNhcExtension::Ports_e', 'port')]) ## sixlowpan-header.h (module 'sixlowpan'): void ns3::SixLowPanUdpNhcExtension::SetSrcPort(uint16_t port) [member function] cls.add_method('SetSrcPort', 'void', [param('uint16_t', 'port')]) return def register_Ns3Time_methods(root_module, cls): cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right')) cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right')) cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right')) 
cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right')) cls.add_binary_comparison_operator('<') cls.add_binary_comparison_operator('>') cls.add_binary_comparison_operator('!=') cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right')) cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right')) cls.add_output_stream_operator() cls.add_binary_comparison_operator('<=') cls.add_binary_comparison_operator('==') cls.add_binary_comparison_operator('>=') ## nstime.h (module 'core'): ns3::Time::Time() [constructor] cls.add_constructor([]) ## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor] cls.add_constructor([param('ns3::Time const &', 'o')]) ## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor] cls.add_constructor([param('double', 'v')]) ## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor] cls.add_constructor([param('int', 'v')]) ## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor] cls.add_constructor([param('long int', 'v')]) ## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor] cls.add_constructor([param('long long int', 'v')]) ## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor] cls.add_constructor([param('unsigned int', 'v')]) ## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor] cls.add_constructor([param('long unsigned int', 'v')]) ## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor] cls.add_constructor([param('long long unsigned int', 'v')]) ## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & v) [constructor] cls.add_constructor([param('ns3::int64x64_t const &', 'v')]) ## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor] cls.add_constructor([param('std::string const &', 's')]) ## nstime.h (module 'core'): ns3::TimeWithUnit 
ns3::Time::As(ns3::Time::Unit const unit) const [member function] cls.add_method('As', 'ns3::TimeWithUnit', [param('ns3::Time::Unit const', 'unit')], is_const=True) ## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function] cls.add_method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True) ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function] cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value')], is_static=True) ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value, ns3::Time::Unit unit) [member function] cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True) ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit unit) [member function] cls.add_method('FromDouble', 'ns3::Time', [param('double', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True) ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit unit) [member function] cls.add_method('FromInteger', 'ns3::Time', [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True) ## nstime.h (module 'core'): double ns3::Time::GetDays() const [member function] cls.add_method('GetDays', 'double', [], is_const=True) ## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function] cls.add_method('GetDouble', 'double', [], is_const=True) ## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function] cls.add_method('GetFemtoSeconds', 'int64_t', [], is_const=True) ## nstime.h (module 'core'): double ns3::Time::GetHours() const [member function] cls.add_method('GetHours', 'double', [], is_const=True) ## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function] cls.add_method('GetInteger', 'int64_t', 
[], is_const=True) ## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function] cls.add_method('GetMicroSeconds', 'int64_t', [], is_const=True) ## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function] cls.add_method('GetMilliSeconds', 'int64_t', [], is_const=True) ## nstime.h (module 'core'): double ns3::Time::GetMinutes() const [member function] cls.add_method('GetMinutes', 'double', [], is_const=True) ## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function] cls.add_method('GetNanoSeconds', 'int64_t', [], is_const=True) ## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function] cls.add_method('GetPicoSeconds', 'int64_t', [], is_const=True) ## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function] cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True) ## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function] cls.add_method('GetSeconds', 'double', [], is_const=True) ## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function] cls.add_method('GetTimeStep', 'int64_t', [], is_const=True) ## nstime.h (module 'core'): double ns3::Time::GetYears() const [member function] cls.add_method('GetYears', 'double', [], is_const=True) ## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function] cls.add_method('IsNegative', 'bool', [], is_const=True) ## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function] cls.add_method('IsPositive', 'bool', [], is_const=True) ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function] cls.add_method('IsStrictlyNegative', 'bool', [], is_const=True) ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function] cls.add_method('IsStrictlyPositive', 'bool', [], is_const=True) ## nstime.h (module 'core'): bool 
ns3::Time::IsZero() const [member function] cls.add_method('IsZero', 'bool', [], is_const=True) ## nstime.h (module 'core'): static ns3::Time ns3::Time::Max() [member function] cls.add_method('Max', 'ns3::Time', [], is_static=True) ## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function] cls.add_method('Min', 'ns3::Time', [], is_static=True) ## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function] cls.add_method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True) ## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function] cls.add_method('StaticInit', 'bool', [], is_static=True) ## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit unit) const [member function] cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'unit')], is_const=True) ## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit unit) const [member function] cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'unit')], is_const=True) ## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit unit) const [member function] cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'unit')], is_const=True) return def register_Ns3TraceSourceAccessor_methods(root_module, cls): ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor] cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')]) ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor] cls.add_constructor([]) ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function] cls.add_method('Connect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), 
param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True) ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function] cls.add_method('ConnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True) ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function] cls.add_method('Disconnect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True) ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function] cls.add_method('DisconnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3Trailer_methods(root_module, cls): cls.add_output_stream_operator() ## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor] cls.add_constructor([]) ## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [copy constructor] cls.add_constructor([param('ns3::Trailer const &', 'arg0')]) ## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function] cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'end')], is_pure_virtual=True, is_virtual=True) ## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function] 
cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True) ## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True) ## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function] cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_const=True, is_virtual=True) return def register_Ns3TriangularRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::TriangularRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable::TriangularRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMean() const [member function] cls.add_method('GetMean', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMin() const [member function] cls.add_method('GetMin', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMax() const [member function] cls.add_method('GetMax', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue(double mean, double min, double max) [member function] cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'min'), param('double', 'max')]) ## random-variable-stream.h (module 'core'): uint32_t 
ns3::TriangularRandomVariable::GetInteger(uint32_t mean, uint32_t min, uint32_t max) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'min'), param('uint32_t', 'max')]) ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3UniformRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::UniformRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable::UniformRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMin() const [member function] cls.add_method('GetMin', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMax() const [member function] cls.add_method('GetMax', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue(double min, double max) [member function] cls.add_method('GetValue', 'double', [param('double', 'min'), param('double', 'max')]) ## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger(uint32_t min, uint32_t max) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'min'), param('uint32_t', 'max')]) ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t 
ns3::UniformRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3WeibullRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::WeibullRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable::WeibullRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetScale() const [member function] cls.add_method('GetScale', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetShape() const [member function] cls.add_method('GetShape', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetBound() const [member function] cls.add_method('GetBound', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue(double scale, double shape, double bound) [member function] cls.add_method('GetValue', 'double', [param('double', 'scale'), param('double', 'shape'), param('double', 'bound')]) ## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger(uint32_t scale, uint32_t shape, uint32_t bound) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')]) ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3ZetaRandomVariable_methods(root_module, cls): ## 
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZetaRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable::ZetaRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue(double alpha) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'alpha')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger(uint32_t alpha) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha')])
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3ZipfRandomVariable_methods(root_module, cls):
    """Register the ns3::ZipfRandomVariable wrapper methods (generated by pybindgen)."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZipfRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable::ZipfRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetN() const [member function]
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue(uint32_t n, double alpha) [member function]
    cls.add_method('GetValue', 'double', [param('uint32_t', 'n'), param('double', 'alpha')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger(uint32_t n, uint32_t alpha) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'n'), param('uint32_t', 'alpha')])
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register the ns3::AttributeAccessor wrapper methods (generated by pybindgen)."""
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register the ns3::AttributeChecker wrapper methods (generated by pybindgen)."""
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::AttributeValue const &', 'value')], is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3AttributeValue_methods(root_module, cls):
    """Register the ns3::AttributeValue wrapper methods (generated by pybindgen)."""
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register the ns3::CallbackChecker wrapper constructors (generated by pybindgen)."""
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
    return

def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register the ns3::CallbackImplBase wrapper methods (generated by pybindgen)."""
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
    cls.add_method('IsEqual', 'bool', [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3CallbackValue_methods(root_module, cls):
    """Register the ns3::CallbackValue wrapper methods (generated by pybindgen)."""
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
    cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')])
    return

def register_Ns3ConstantRandomVariable_methods(root_module, cls):
    """Register the ns3::ConstantRandomVariable wrapper methods (generated by pybindgen)."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ConstantRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable::ConstantRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetConstant() const [member function]
    cls.add_method('GetConstant', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue(double constant) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'constant')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger(uint32_t constant) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'constant')])
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3DeterministicRandomVariable_methods(root_module, cls):
    """Register the ns3::DeterministicRandomVariable wrapper methods (generated by pybindgen)."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::DeterministicRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable::DeterministicRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): void ns3::DeterministicRandomVariable::SetValueArray(double * values, uint64_t length) [member function]
    cls.add_method('SetValueArray', 'void', [param('double *', 'values'), param('uint64_t', 'length')])
    ## random-variable-stream.h (module 'core'): double ns3::DeterministicRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::DeterministicRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3EmpiricalRandomVariable_methods(root_module, cls):
    """Register the ns3::EmpiricalRandomVariable wrapper methods (generated by pybindgen)."""
    ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable::EmpiricalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::CDF(double v, double c) [member function]
    cls.add_method('CDF', 'void', [param('double', 'v'), param('double', 'c')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::EmpiricalRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::EmpiricalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::Interpolate(double arg0, double arg1, double arg2, double arg3, double arg4) [member function]
    cls.add_method('Interpolate', 'double', [param('double', 'arg0'), param('double', 'arg1'), param('double', 'arg2'), param('double', 'arg3'), param('double', 'arg4')], visibility='private', is_virtual=True)
    ## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::Validate() [member function]
    cls.add_method('Validate', 'void', [], visibility='private', is_virtual=True)
    return

def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register the ns3::EmptyAttributeValue wrapper methods (generated by pybindgen)."""
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, visibility='private', is_virtual=True)
    return

def register_Ns3ErlangRandomVariable_methods(root_module, cls):
    """Register the ns3::ErlangRandomVariable wrapper methods (generated by pybindgen)."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ErlangRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable::ErlangRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetK() const [member function]
    cls.add_method('GetK', 'uint32_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetLambda() const [member function]
    cls.add_method('GetLambda', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue(uint32_t k, double lambda) [member function]
    cls.add_method('GetValue', 'double', [param('uint32_t', 'k'), param('double', 'lambda')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger(uint32_t k, uint32_t lambda) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'k'), param('uint32_t', 'lambda')])
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3EventImpl_methods(root_module, cls):
    """Register the ns3::EventImpl wrapper methods (generated by pybindgen)."""
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
    cls.add_constructor([])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
    cls.add_method('Cancel', 'void', [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
    cls.add_method('Invoke', 'void', [])
    ## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
    cls.add_method('IsCancelled', 'bool', [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
    cls.add_method('Notify', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    return

def register_Ns3ExponentialRandomVariable_methods(root_module, cls):
    """Register the ns3::ExponentialRandomVariable wrapper methods (generated by pybindgen)."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ExponentialRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable::ExponentialRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue(double mean, double bound) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger(uint32_t mean, uint32_t bound) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3GammaRandomVariable_methods(root_module, cls):
    """Register the ns3::GammaRandomVariable wrapper methods (generated by pybindgen)."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::GammaRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable::GammaRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetBeta() const [member function]
    cls.add_method('GetBeta', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue(double alpha, double beta) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'alpha'), param('double', 'beta')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger(uint32_t alpha, uint32_t beta) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha'), param('uint32_t', 'beta')])
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Register the ns3::Ipv4AddressChecker wrapper constructors (generated by pybindgen)."""
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
    return

def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Register the ns3::Ipv4AddressValue wrapper methods (generated by pybindgen)."""
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
    cls.add_method('Get', 'ns3::Ipv4Address', [], is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
    cls.add_method('Set', 'void', [param('ns3::Ipv4Address const &', 'value')])
    return

def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Register the ns3::Ipv4MaskChecker wrapper constructors (generated by pybindgen)."""
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
    return

def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Register the ns3::Ipv4MaskValue wrapper methods (generated by pybindgen)."""
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
    cls.add_method('Get', 'ns3::Ipv4Mask', [], is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
    cls.add_method('Set', 'void', [param('ns3::Ipv4Mask const &', 'value')])
    return

def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Register the ns3::Ipv6AddressChecker wrapper constructors (generated by pybindgen)."""
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
    return

def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Register the ns3::Ipv6AddressValue wrapper methods (generated by pybindgen)."""
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
    cls.add_method('Get', 'ns3::Ipv6Address', [], is_const=True)
    ## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
    cls.add_method('Set', 'void', [param('ns3::Ipv6Address const &', 'value')])
    return

def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Register the ns3::Ipv6PrefixChecker wrapper constructors (generated by pybindgen)."""
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
    return

def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Register the ns3::Ipv6PrefixValue wrapper methods (generated by pybindgen)."""
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
    cls.add_method('Get', 'ns3::Ipv6Prefix', [], is_const=True)
    ## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
    cls.add_method('Set', 'void', [param('ns3::Ipv6Prefix const &', 'value')])
    return

def register_Ns3LogNormalRandomVariable_methods(root_module, cls):
    """Register the ns3::LogNormalRandomVariable wrapper methods (generated by pybindgen)."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::LogNormalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable::LogNormalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetMu() const [member function]
    cls.add_method('GetMu', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetSigma() const [member function]
    cls.add_method('GetSigma', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue(double mu, double sigma) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'mu'), param('double', 'sigma')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger(uint32_t mu, uint32_t sigma) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mu'), param('uint32_t', 'sigma')])
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3NetDevice_methods(root_module, cls):
    """Register the ns3::NetDevice abstract-base wrapper methods (generated by pybindgen)."""
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
    cls.add_constructor([])
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    ## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback', 'void', [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')], is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress', 'ns3::Address', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast', 'ns3::Address', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel', 'ns3::Ptr< ns3::Channel >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu', 'uint16_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast', 'ns3::Address', [param('ns3::Ipv4Address', 'multicastGroup')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast', 'ns3::Address', [param('ns3::Ipv6Address', 'addr')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
    cls.add_method('GetNode', 'ns3::Ptr< ns3::Node >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send', 'bool', [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')], is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom', 'bool', [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')], is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress', 'void', [param('ns3::Address', 'address')], is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex', 'void', [param('uint32_t const', 'index')], is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu', 'bool', [param('uint16_t const', 'mtu')], is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode', 'void', [param('ns3::Ptr< ns3::Node >', 'node')], is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetPromiscReceiveCallback', 'void', [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')], is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetReceiveCallback', 'void', [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')], is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3NixVector_methods(root_module, cls):
    """Register the ns3::NixVector wrapper methods (generated by pybindgen)."""
    cls.add_output_stream_operator()
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
    cls.add_constructor([])
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [copy constructor]
    cls.add_constructor([param('ns3::NixVector const &', 'o')])
    ## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
    cls.add_method('AddNeighborIndex', 'void', [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
    cls.add_method('BitCount', 'uint32_t', [param('uint32_t', 'numberOfNeighbors')], is_const=True)
    ## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::NixVector >', [], is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize', 'uint32_t', [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
    cls.add_method('ExtractNeighborIndex', 'uint32_t', [param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
    cls.add_method('GetRemainingBits', 'uint32_t', [])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    ## nix-vector.h (module
'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function] cls.add_method('Serialize', 'uint32_t', [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True) return def register_Ns3NormalRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::INFINITE_VALUE [variable] cls.add_static_attribute('INFINITE_VALUE', 'double const', is_const=True) ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::NormalRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::NormalRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetMean() const [member function] cls.add_method('GetMean', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetVariance() const [member function] cls.add_method('GetVariance', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetBound() const [member function] cls.add_method('GetBound', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue(double mean, double variance, double bound=ns3::NormalRandomVariable::INFINITE_VALUE) [member function] cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'variance'), param('double', 'bound', default_value='ns3::NormalRandomVariable::INFINITE_VALUE')]) ## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger(uint32_t mean, uint32_t variance, uint32_t bound) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'variance'), param('uint32_t', 'bound')]) ## random-variable-stream.h (module 
'core'): double ns3::NormalRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3ObjectFactoryChecker_methods(root_module, cls): ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor] cls.add_constructor([]) ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')]) return def register_Ns3ObjectFactoryValue_methods(root_module, cls): ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor] cls.add_constructor([]) ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')]) ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor] cls.add_constructor([param('ns3::ObjectFactory const &', 'value')]) ## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function] cls.add_method('Get', 
'ns3::ObjectFactory', [], is_const=True) ## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function] cls.add_method('Set', 'void', [param('ns3::ObjectFactory const &', 'value')]) return def register_Ns3Packet_methods(root_module, cls): cls.add_output_stream_operator() ## packet.h (module 'network'): ns3::Packet::Packet() [constructor] cls.add_constructor([]) ## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [copy constructor] cls.add_constructor([param('ns3::Packet const &', 'o')]) ## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor] cls.add_constructor([param('uint32_t', 'size')]) ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor] cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')]) ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor] cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')]) ## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function] cls.add_method('AddAtEnd', 'void', [param('ns3::Ptr< ns3::Packet const >', 'packet')]) ## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function] cls.add_method('AddByteTag', 'void', [param('ns3::Tag const &', 'tag')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function] cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 
'header')]) ## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function] cls.add_method('AddPacketTag', 'void', [param('ns3::Tag const &', 'tag')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function] cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'size')]) ## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function] cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer')]) ## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function] cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [], is_const=True) ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::Packet >', [], is_const=True) ## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function] cls.add_method('CopyData', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function] cls.add_method('CopyData', 'void', [param('std::ostream *', 'os'), param('uint32_t', 'size')], is_const=True) ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function] cls.add_method('CreateFragment', 'ns3::Ptr< ns3::Packet >', [param('uint32_t', 'start'), param('uint32_t', 'length')], is_const=True) ## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function] cls.add_method('EnableChecking', 'void', [], is_static=True) ## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function] cls.add_method('EnablePrinting', 'void', [], is_static=True) ## packet.h (module 
'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function] cls.add_method('FindFirstMatchingByteTag', 'bool', [param('ns3::Tag &', 'tag')], is_const=True) ## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function] cls.add_method('GetByteTagIterator', 'ns3::ByteTagIterator', [], is_const=True) ## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function] cls.add_method('GetNixVector', 'ns3::Ptr< ns3::NixVector >', [], is_const=True) ## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function] cls.add_method('GetPacketTagIterator', 'ns3::PacketTagIterator', [], is_const=True) ## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function] cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True) ## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function] cls.add_method('GetSize', 'uint32_t', [], is_const=True) ## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function] cls.add_method('GetUid', 'uint64_t', [], is_const=True) ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function] cls.add_method('PeekHeader', 'uint32_t', [param('ns3::Header &', 'header')], is_const=True) ## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function] cls.add_method('PeekPacketTag', 'bool', [param('ns3::Tag &', 'tag')], is_const=True) ## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function] cls.add_method('PeekTrailer', 'uint32_t', [param('ns3::Trailer &', 'trailer')]) ## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function] cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True) ## packet.h 
(module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function] cls.add_method('PrintByteTags', 'void', [param('std::ostream &', 'os')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function] cls.add_method('PrintPacketTags', 'void', [param('std::ostream &', 'os')], is_const=True) ## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function] cls.add_method('RemoveAllByteTags', 'void', []) ## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function] cls.add_method('RemoveAllPacketTags', 'void', []) ## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function] cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'size')]) ## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function] cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'size')]) ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function] cls.add_method('RemoveHeader', 'uint32_t', [param('ns3::Header &', 'header')]) ## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function] cls.add_method('RemovePacketTag', 'bool', [param('ns3::Tag &', 'tag')]) ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function] cls.add_method('RemoveTrailer', 'uint32_t', [param('ns3::Trailer &', 'trailer')]) ## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function] cls.add_method('ReplacePacketTag', 'bool', [param('ns3::Tag &', 'tag')]) ## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function] cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True) ## packet.h (module 'network'): void 
ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> nixVector) [member function] cls.add_method('SetNixVector', 'void', [param('ns3::Ptr< ns3::NixVector >', 'nixVector')]) ## packet.h (module 'network'): std::string ns3::Packet::ToString() const [member function] cls.add_method('ToString', 'std::string', [], is_const=True) return def register_Ns3ParetoRandomVariable_methods(root_module, cls): ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ParetoRandomVariable::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable::ParetoRandomVariable() [constructor] cls.add_constructor([]) ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetMean() const [member function] cls.add_method('GetMean', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetShape() const [member function] cls.add_method('GetShape', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetBound() const [member function] cls.add_method('GetBound', 'double', [], is_const=True) ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue(double mean, double shape, double bound) [member function] cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'shape'), param('double', 'bound')]) ## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger(uint32_t mean, uint32_t shape, uint32_t bound) [member function] cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'shape'), param('uint32_t', 'bound')]) ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue() [member function] cls.add_method('GetValue', 'double', [], is_virtual=True) ## random-variable-stream.h (module 'core'): uint32_t 
ns3::ParetoRandomVariable::GetInteger() [member function] cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True) return def register_Ns3SixLowPanNetDevice_methods(root_module, cls): ## sixlowpan-net-device.h (module 'sixlowpan'): ns3::SixLowPanNetDevice::SixLowPanNetDevice() [constructor] cls.add_constructor([]) ## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function] cls.add_method('AddLinkChangeCallback', 'void', [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')], is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): int64_t ns3::SixLowPanNetDevice::AssignStreams(int64_t stream) [member function] cls.add_method('AssignStreams', 'int64_t', [param('int64_t', 'stream')]) ## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Address ns3::SixLowPanNetDevice::GetAddress() const [member function] cls.add_method('GetAddress', 'ns3::Address', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Address ns3::SixLowPanNetDevice::GetBroadcast() const [member function] cls.add_method('GetBroadcast', 'ns3::Address', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Ptr<ns3::Channel> ns3::SixLowPanNetDevice::GetChannel() const [member function] cls.add_method('GetChannel', 'ns3::Ptr< ns3::Channel >', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): uint32_t ns3::SixLowPanNetDevice::GetIfIndex() const [member function] cls.add_method('GetIfIndex', 'uint32_t', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): uint16_t ns3::SixLowPanNetDevice::GetMtu() const [member function] cls.add_method('GetMtu', 'uint16_t', [], is_const=True, 
is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Address ns3::SixLowPanNetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function] cls.add_method('GetMulticast', 'ns3::Address', [param('ns3::Ipv4Address', 'multicastGroup')], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Address ns3::SixLowPanNetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function] cls.add_method('GetMulticast', 'ns3::Address', [param('ns3::Ipv6Address', 'addr')], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Ptr<ns3::NetDevice> ns3::SixLowPanNetDevice::GetNetDevice() const [member function] cls.add_method('GetNetDevice', 'ns3::Ptr< ns3::NetDevice >', [], is_const=True) ## sixlowpan-net-device.h (module 'sixlowpan'): ns3::Ptr<ns3::Node> ns3::SixLowPanNetDevice::GetNode() const [member function] cls.add_method('GetNode', 'ns3::Ptr< ns3::Node >', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): static ns3::TypeId ns3::SixLowPanNetDevice::GetTypeId() [member function] cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) ## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::IsBridge() const [member function] cls.add_method('IsBridge', 'bool', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::IsBroadcast() const [member function] cls.add_method('IsBroadcast', 'bool', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::IsLinkUp() const [member function] cls.add_method('IsLinkUp', 'bool', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::IsMulticast() const [member function] cls.add_method('IsMulticast', 'bool', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): bool 
ns3::SixLowPanNetDevice::IsPointToPoint() const [member function] cls.add_method('IsPointToPoint', 'bool', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::NeedsArp() const [member function] cls.add_method('NeedsArp', 'bool', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function] cls.add_method('Send', 'bool', [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')], is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function] cls.add_method('SendFrom', 'bool', [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')], is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetAddress(ns3::Address address) [member function] cls.add_method('SetAddress', 'void', [param('ns3::Address', 'address')], is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetIfIndex(uint32_t const index) [member function] cls.add_method('SetIfIndex', 'void', [param('uint32_t const', 'index')], is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::SetMtu(uint16_t const mtu) [member function] cls.add_method('SetMtu', 'bool', [param('uint16_t const', 'mtu')], is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetNetDevice(ns3::Ptr<ns3::NetDevice> device) [member function] cls.add_method('SetNetDevice', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'device')]) ## 
sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function] cls.add_method('SetNode', 'void', [param('ns3::Ptr< ns3::Node >', 'node')], is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function] cls.add_method('SetPromiscReceiveCallback', 'void', [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')], is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function] cls.add_method('SetReceiveCallback', 'void', [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')], is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): bool ns3::SixLowPanNetDevice::SupportsSendFrom() const [member function] cls.add_method('SupportsSendFrom', 'bool', [], is_const=True, is_virtual=True) ## sixlowpan-net-device.h (module 'sixlowpan'): void ns3::SixLowPanNetDevice::DoDispose() [member function] cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True) return def register_Ns3TimeValue_methods(root_module, cls): ## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor] cls.add_constructor([]) ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue 
const & arg0) [copy constructor] cls.add_constructor([param('ns3::TimeValue const &', 'arg0')]) ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor] cls.add_constructor([param('ns3::Time const &', 'value')]) ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function] cls.add_method('Get', 'ns3::Time', [], is_const=True) ## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Time const &', 'value')]) return def register_Ns3TypeIdChecker_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')]) return def register_Ns3TypeIdValue_methods(root_module, cls): ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor] cls.add_constructor([]) ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor] 
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')]) ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor] cls.add_constructor([param('ns3::TypeId const &', 'value')]) ## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function] cls.add_method('Get', 'ns3::TypeId', [], is_const=True) ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function] cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')]) return def register_Ns3AddressChecker_methods(root_module, cls): ## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor] cls.add_constructor([]) ## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')]) return def register_Ns3AddressValue_methods(root_module, cls): ## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor] cls.add_constructor([]) ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy 
constructor] cls.add_constructor([param('ns3::AddressValue const &', 'arg0')]) ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor] cls.add_constructor([param('ns3::Address const &', 'value')]) ## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function] cls.add_method('Get', 'ns3::Address', [], is_const=True) ## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Address const &', 'value')]) return def register_Ns3HashImplementation_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor] cls.add_constructor([]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char 
const *', 'buffer'), param('size_t const', 'size')], is_pure_virtual=True, is_virtual=True) ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function] cls.add_method('clear', 'void', [], is_pure_virtual=True, is_virtual=True) return def register_Ns3HashFunctionFnv1a_methods(root_module, cls): ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')]) ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor] cls.add_constructor([]) ## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionHash32_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor] 
cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionHash64_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor] cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionMurmur3_methods(root_module, cls): ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')]) ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor] 
cls.add_constructor([]) ## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_functions(root_module): module = root_module register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module) register_functions_ns3_Hash(module.get_submodule('Hash'), root_module) return def register_functions_ns3_FatalImpl(module, root_module): return def register_functions_ns3_Hash(module, root_module): register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module) return def register_functions_ns3_Hash_Function(module, root_module): return def main(): out = FileCodeSink(sys.stdout) root_module = module_init() register_types(root_module) register_methods(root_module) register_functions(root_module) root_module.generate(out) if __name__ == '__main__': main()
gpl-2.0
vasyarv/edx-ora2
openassessment/assessment/migrations/0001_initial.py
7
13026
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'Rubric' db.create_table('assessment_rubric', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('content_hash', self.gf('django.db.models.fields.CharField')(unique=True, max_length=40, db_index=True)), )) db.send_create_signal('assessment', ['Rubric']) # Adding model 'Criterion' db.create_table('assessment_criterion', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('rubric', self.gf('django.db.models.fields.related.ForeignKey')(related_name='criteria', to=orm['assessment.Rubric'])), ('name', self.gf('django.db.models.fields.CharField')(max_length=100)), ('order_num', self.gf('django.db.models.fields.PositiveIntegerField')()), ('prompt', self.gf('django.db.models.fields.TextField')(max_length=10000)), )) db.send_create_signal('assessment', ['Criterion']) # Adding model 'CriterionOption' db.create_table('assessment_criterionoption', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('criterion', self.gf('django.db.models.fields.related.ForeignKey')(related_name='options', to=orm['assessment.Criterion'])), ('order_num', self.gf('django.db.models.fields.PositiveIntegerField')()), ('points', self.gf('django.db.models.fields.PositiveIntegerField')()), ('name', self.gf('django.db.models.fields.CharField')(max_length=100)), ('explanation', self.gf('django.db.models.fields.TextField')(max_length=10000, blank=True)), )) db.send_create_signal('assessment', ['CriterionOption']) # Adding model 'Assessment' db.create_table('assessment_assessment', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('submission_uuid', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)), ('rubric', 
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['assessment.Rubric'])), ('scored_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, db_index=True)), ('scorer_id', self.gf('django.db.models.fields.CharField')(max_length=40, db_index=True)), ('score_type', self.gf('django.db.models.fields.CharField')(max_length=2)), ('feedback', self.gf('django.db.models.fields.TextField')(default='', max_length=10000, blank=True)), )) db.send_create_signal('assessment', ['Assessment']) # Adding model 'AssessmentPart' db.create_table('assessment_assessmentpart', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('assessment', self.gf('django.db.models.fields.related.ForeignKey')(related_name='parts', to=orm['assessment.Assessment'])), ('option', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['assessment.CriterionOption'])), )) db.send_create_signal('assessment', ['AssessmentPart']) # Adding model 'AssessmentFeedback' db.create_table('assessment_assessmentfeedback', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('submission_uuid', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128, db_index=True)), ('helpfulness', self.gf('django.db.models.fields.IntegerField')(default=2)), ('feedback', self.gf('django.db.models.fields.TextField')(default='', max_length=10000)), )) db.send_create_signal('assessment', ['AssessmentFeedback']) # Adding M2M table for field assessments on 'AssessmentFeedback' db.create_table('assessment_assessmentfeedback_assessments', ( ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), ('assessmentfeedback', models.ForeignKey(orm['assessment.assessmentfeedback'], null=False)), ('assessment', models.ForeignKey(orm['assessment.assessment'], null=False)) )) db.create_unique('assessment_assessmentfeedback_assessments', ['assessmentfeedback_id', 'assessment_id']) # Adding model 'PeerWorkflow' 
db.create_table('assessment_peerworkflow', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('student_id', self.gf('django.db.models.fields.CharField')(max_length=40, db_index=True)), ('item_id', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)), ('course_id', self.gf('django.db.models.fields.CharField')(max_length=40, db_index=True)), ('submission_uuid', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128, db_index=True)), ('created_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, db_index=True)), ('completed_at', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True)), )) db.send_create_signal('assessment', ['PeerWorkflow']) # Adding model 'PeerWorkflowItem' db.create_table('assessment_peerworkflowitem', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('scorer', self.gf('django.db.models.fields.related.ForeignKey')(related_name='graded', to=orm['assessment.PeerWorkflow'])), ('author', self.gf('django.db.models.fields.related.ForeignKey')(related_name='graded_by', to=orm['assessment.PeerWorkflow'])), ('submission_uuid', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)), ('started_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, db_index=True)), ('assessment', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['assessment.Assessment'], null=True)), ('scored', self.gf('django.db.models.fields.BooleanField')(default=False)), )) db.send_create_signal('assessment', ['PeerWorkflowItem']) def backwards(self, orm): # Deleting model 'Rubric' db.delete_table('assessment_rubric') # Deleting model 'Criterion' db.delete_table('assessment_criterion') # Deleting model 'CriterionOption' db.delete_table('assessment_criterionoption') # Deleting model 'Assessment' db.delete_table('assessment_assessment') # Deleting model 'AssessmentPart' 
db.delete_table('assessment_assessmentpart') # Deleting model 'AssessmentFeedback' db.delete_table('assessment_assessmentfeedback') # Removing M2M table for field assessments on 'AssessmentFeedback' db.delete_table('assessment_assessmentfeedback_assessments') # Deleting model 'PeerWorkflow' db.delete_table('assessment_peerworkflow') # Deleting model 'PeerWorkflowItem' db.delete_table('assessment_peerworkflowitem') models = { 'assessment.assessment': { 'Meta': {'ordering': "['-scored_at', '-id']", 'object_name': 'Assessment'}, 'feedback': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '10000', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'rubric': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['assessment.Rubric']"}), 'score_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}), 'scored_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'scorer_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}), 'submission_uuid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}) }, 'assessment.assessmentfeedback': { 'Meta': {'object_name': 'AssessmentFeedback'}, 'assessments': ('django.db.models.fields.related.ManyToManyField', [], {'default': 'None', 'related_name': "'assessment_feedback'", 'symmetrical': 'False', 'to': "orm['assessment.Assessment']"}), 'feedback': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '10000'}), 'helpfulness': ('django.db.models.fields.IntegerField', [], {'default': '2'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'submission_uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}) }, 'assessment.assessmentpart': { 'Meta': {'object_name': 'AssessmentPart'}, 'assessment': 
('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parts'", 'to': "orm['assessment.Assessment']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['assessment.CriterionOption']"}) }, 'assessment.criterion': { 'Meta': {'ordering': "['rubric', 'order_num']", 'object_name': 'Criterion'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'order_num': ('django.db.models.fields.PositiveIntegerField', [], {}), 'prompt': ('django.db.models.fields.TextField', [], {'max_length': '10000'}), 'rubric': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'criteria'", 'to': "orm['assessment.Rubric']"}) }, 'assessment.criterionoption': { 'Meta': {'ordering': "['criterion', 'order_num']", 'object_name': 'CriterionOption'}, 'criterion': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['assessment.Criterion']"}), 'explanation': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'order_num': ('django.db.models.fields.PositiveIntegerField', [], {}), 'points': ('django.db.models.fields.PositiveIntegerField', [], {}) }, 'assessment.peerworkflow': { 'Meta': {'ordering': "['created_at', 'id']", 'object_name': 'PeerWorkflow'}, 'completed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'course_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'item_id': 
('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}), 'student_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}), 'submission_uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}) }, 'assessment.peerworkflowitem': { 'Meta': {'ordering': "['started_at', 'id']", 'object_name': 'PeerWorkflowItem'}, 'assessment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['assessment.Assessment']", 'null': 'True'}), 'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'graded_by'", 'to': "orm['assessment.PeerWorkflow']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'scored': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'scorer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'graded'", 'to': "orm['assessment.PeerWorkflow']"}), 'started_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'submission_uuid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}) }, 'assessment.rubric': { 'Meta': {'object_name': 'Rubric'}, 'content_hash': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) } } complete_apps = ['assessment']
agpl-3.0
mnahm5/django-estore
Lib/site-packages/nose/plugins/__init__.py
97
6291
""" Writing Plugins --------------- nose supports plugins for test collection, selection, observation and reporting. There are two basic rules for plugins: * Plugin classes should subclass :class:`nose.plugins.Plugin`. * Plugins may implement any of the methods described in the class :doc:`IPluginInterface <interface>` in nose.plugins.base. Please note that this class is for documentary purposes only; plugins may not subclass IPluginInterface. Hello World =========== Here's a basic plugin. It doesn't do much so read on for more ideas or dive into the :doc:`IPluginInterface <interface>` to see all available hooks. .. code-block:: python import logging import os from nose.plugins import Plugin log = logging.getLogger('nose.plugins.helloworld') class HelloWorld(Plugin): name = 'helloworld' def options(self, parser, env=os.environ): super(HelloWorld, self).options(parser, env=env) def configure(self, options, conf): super(HelloWorld, self).configure(options, conf) if not self.enabled: return def finalize(self, result): log.info('Hello pluginized world!') Registering =========== .. Note:: Important note: the following applies only to the default plugin manager. Other plugin managers may use different means to locate and load plugins. For nose to find a plugin, it must be part of a package that uses setuptools_, and the plugin must be included in the entry points defined in the setup.py for the package: .. code-block:: python setup(name='Some plugin', # ... entry_points = { 'nose.plugins.0.10': [ 'someplugin = someplugin:SomePlugin' ] }, # ... ) Once the package is installed with install or develop, nose will be able to load the plugin. .. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools Registering a plugin without setuptools ======================================= It is currently possible to register a plugin programmatically by creating a custom nose runner like this : .. 
code-block:: python import nose from yourplugin import YourPlugin if __name__ == '__main__': nose.main(addplugins=[YourPlugin()]) Defining options ================ All plugins must implement the methods ``options(self, parser, env)`` and ``configure(self, options, conf)``. Subclasses of nose.plugins.Plugin that want the standard options should call the superclass methods. nose uses optparse.OptionParser from the standard library to parse arguments. A plugin's ``options()`` method receives a parser instance. It's good form for a plugin to use that instance only to add additional arguments that take only long arguments (--like-this). Most of nose's built-in arguments get their default value from an environment variable. A plugin's ``configure()`` method receives the parsed ``OptionParser`` options object, as well as the current config object. Plugins should configure their behavior based on the user-selected settings, and may raise exceptions if the configured behavior is nonsensical. Logging ======= nose uses the logging classes from the standard library. To enable users to view debug messages easily, plugins should use ``logging.getLogger()`` to acquire a logger in the ``nose.plugins`` namespace. Recipes ======= * Writing a plugin that monitors or controls test result output Implement any or all of ``addError``, ``addFailure``, etc., to monitor test results. If you also want to monitor output, implement ``setOutputStream`` and keep a reference to the output stream. If you want to prevent the builtin ``TextTestResult`` output, implement ``setOutputSteam`` and *return a dummy stream*. The default output will go to the dummy stream, while you send your desired output to the real stream. Example: `examples/html_plugin/htmlplug.py`_ * Writing a plugin that handles exceptions Subclass :doc:`ErrorClassPlugin <errorclasses>`. 
Examples: :doc:`nose.plugins.deprecated <deprecated>`, :doc:`nose.plugins.skip <skip>` * Writing a plugin that adds detail to error reports Implement ``formatError`` and/or ``formatFailure``. The error tuple you return (error class, error message, traceback) will replace the original error tuple. Examples: :doc:`nose.plugins.capture <capture>`, :doc:`nose.plugins.failuredetail <failuredetail>` * Writing a plugin that loads tests from files other than python modules Implement ``wantFile`` and ``loadTestsFromFile``. In ``wantFile``, return True for files that you want to examine for tests. In ``loadTestsFromFile``, for those files, return an iterable containing TestCases (or yield them as you find them; ``loadTestsFromFile`` may also be a generator). Example: :doc:`nose.plugins.doctests <doctests>` * Writing a plugin that prints a report Implement ``begin`` if you need to perform setup before testing begins. Implement ``report`` and output your report to the provided stream. Examples: :doc:`nose.plugins.cover <cover>`, :doc:`nose.plugins.prof <prof>` * Writing a plugin that selects or rejects tests Implement any or all ``want*`` methods. Return False to reject the test candidate, True to accept it -- which means that the test candidate will pass through the rest of the system, so you must be prepared to load tests from it if tests can't be loaded by the core loader or another plugin -- and None if you don't care. Examples: :doc:`nose.plugins.attrib <attrib>`, :doc:`nose.plugins.doctests <doctests>`, :doc:`nose.plugins.testid <testid>` More Examples ============= See any builtin plugin or example plugin in the examples_ directory in the nose source distribution. There is a list of third-party plugins `on jottit`_. .. _examples/html_plugin/htmlplug.py: http://python-nose.googlecode.com/svn/trunk/examples/html_plugin/htmlplug.py .. _examples: http://python-nose.googlecode.com/svn/trunk/examples .. 
_on jottit: http://nose-plugins.jottit.com/ """ from nose.plugins.base import Plugin from nose.plugins.manager import * from nose.plugins.plugintest import PluginTester if __name__ == '__main__': import doctest doctest.testmod()
mit
alsotoes/vsphere-examples
python/.venv/lib/python2.6/site-packages/setuptools/tests/test_packageindex.py
377
7625
"""Package Index Tests """ import sys import os import unittest import pkg_resources from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url import distutils.errors import setuptools.package_index from setuptools.tests.server import IndexServer class TestPackageIndex(unittest.TestCase): def test_bad_url_bad_port(self): index = setuptools.package_index.PackageIndex() url = 'http://127.0.0.1:0/nonesuch/test_package_index' try: v = index.open_url(url) except Exception: v = sys.exc_info()[1] self.assertTrue(url in str(v)) else: self.assertTrue(isinstance(v, HTTPError)) def test_bad_url_typo(self): # issue 16 # easy_install inquant.contentmirror.plone breaks because of a typo # in its home URL index = setuptools.package_index.PackageIndex( hosts=('www.example.com',) ) url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk' try: v = index.open_url(url) except Exception: v = sys.exc_info()[1] self.assertTrue(url in str(v)) else: self.assertTrue(isinstance(v, HTTPError)) def test_bad_url_bad_status_line(self): index = setuptools.package_index.PackageIndex( hosts=('www.example.com',) ) def _urlopen(*args): raise httplib.BadStatusLine('line') index.opener = _urlopen url = 'http://example.com' try: v = index.open_url(url) except Exception: v = sys.exc_info()[1] self.assertTrue('line' in str(v)) else: raise AssertionError('Should have raise here!') def test_bad_url_double_scheme(self): """ A bad URL with a double scheme should raise a DistutilsError. 
""" index = setuptools.package_index.PackageIndex( hosts=('www.example.com',) ) # issue 20 url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk' try: index.open_url(url) except distutils.errors.DistutilsError: error = sys.exc_info()[1] msg = unicode(error) assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg return raise RuntimeError("Did not raise") def test_bad_url_screwy_href(self): index = setuptools.package_index.PackageIndex( hosts=('www.example.com',) ) # issue #160 if sys.version_info[0] == 2 and sys.version_info[1] == 7: # this should not fail url = 'http://example.com' page = ('<a href="http://www.famfamfam.com](' 'http://www.famfamfam.com/">') index.process_index(url, page) def test_url_ok(self): index = setuptools.package_index.PackageIndex( hosts=('www.example.com',) ) url = 'file:///tmp/test_package_index' self.assertTrue(index.url_ok(url, True)) def test_links_priority(self): """ Download links from the pypi simple index should be used before external download links. https://bitbucket.org/tarek/distribute/issue/163 Usecase : - someone uploads a package on pypi, a md5 is generated - someone manually copies this link (with the md5 in the url) onto an external page accessible from the package page. - someone reuploads the package (with a different md5) - while easy_installing, an MD5 error occurs because the external link is used -> Setuptools should use the link from pypi, not the external one. 
""" if sys.platform.startswith('java'): # Skip this test on jython because binding to :0 fails return # start an index server server = IndexServer() server.start() index_url = server.base_url() + 'test_links_priority/simple/' # scan a test index pi = setuptools.package_index.PackageIndex(index_url) requirement = pkg_resources.Requirement.parse('foobar') pi.find_packages(requirement) server.stop() # the distribution has been found self.assertTrue('foobar' in pi) # we have only one link, because links are compared without md5 self.assertTrue(len(pi['foobar'])==1) # the link should be from the index self.assertTrue('correct_md5' in pi['foobar'][0].location) def test_parse_bdist_wininst(self): self.assertEqual(setuptools.package_index.parse_bdist_wininst( 'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32')) self.assertEqual(setuptools.package_index.parse_bdist_wininst( 'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32')) self.assertEqual(setuptools.package_index.parse_bdist_wininst( 'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64')) self.assertEqual(setuptools.package_index.parse_bdist_wininst( 'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64')) def test__vcs_split_rev_from_url(self): """ Test the basic usage of _vcs_split_rev_from_url """ vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url url, rev = vsrfu('https://example.com/bar@2995') self.assertEqual(url, 'https://example.com/bar') self.assertEqual(rev, '2995') def test_local_index(self): """ local_open should be able to read an index from the file system. 
""" f = open('index.html', 'w') f.write('<div>content</div>') f.close() try: url = 'file:' + pathname2url(os.getcwd()) + '/' res = setuptools.package_index.local_open(url) finally: os.remove('index.html') assert 'content' in res.read() class TestContentCheckers(unittest.TestCase): def test_md5(self): checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') checker.feed('You should probably not be using MD5'.encode('ascii')) self.assertEqual(checker.hash.hexdigest(), 'f12895fdffbd45007040d2e44df98478') self.assertTrue(checker.is_valid()) def test_other_fragment(self): "Content checks should succeed silently if no hash is present" checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#something%20completely%20different') checker.feed('anything'.encode('ascii')) self.assertTrue(checker.is_valid()) def test_blank_md5(self): "Content checks should succeed if a hash is empty" checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=') checker.feed('anything'.encode('ascii')) self.assertTrue(checker.is_valid()) def test_get_hash_name_md5(self): checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') self.assertEqual(checker.hash_name, 'md5') def test_report(self): checker = setuptools.package_index.HashChecker.from_url( 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') rep = checker.report(lambda x: x, 'My message about %s') self.assertEqual(rep, 'My message about md5')
gpl-3.0
alilotfi/django
django/template/library.py
348
12752
# Django template tag/filter registration machinery (django/template/library.py).
import functools
import warnings

from importlib import import_module

from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.html import conditional_escape
from django.utils.inspect import getargspec
from django.utils.itercompat import is_iterable

from .base import Node, Template, token_kwargs
from .exceptions import TemplateSyntaxError


class InvalidTemplateLibrary(Exception):
    # Raised when a template tag module cannot be imported or lacks `register`.
    pass


class Library(object):
    """
    A class for registering template tags and filters. Compiled filter and
    template tag functions are stored in the filters and tags attributes.
    The filter, simple_tag, and inclusion_tag methods provide a convenient
    way to register callables as tags.
    """
    def __init__(self):
        self.filters = {}
        self.tags = {}

    def tag(self, name=None, compile_function=None):
        # Supports four invocation forms; each branch below is one of them.
        if name is None and compile_function is None:
            # @register.tag()
            return self.tag_function
        elif name is not None and compile_function is None:
            if callable(name):
                # @register.tag
                return self.tag_function(name)
            else:
                # @register.tag('somename') or @register.tag(name='somename')
                def dec(func):
                    return self.tag(name, func)
                return dec
        elif name is not None and compile_function is not None:
            # register.tag('somename', somefunc)
            self.tags[name] = compile_function
            return compile_function
        else:
            raise ValueError(
                "Unsupported arguments to Library.tag: (%r, %r)" %
                (name, compile_function),
            )

    def tag_function(self, func):
        # Register under the wrapped function's original name when decorated.
        self.tags[getattr(func, "_decorated_function", func).__name__] = func
        return func

    def filter(self, name=None, filter_func=None, **flags):
        """
        Register a callable as a template filter. Example:

        @register.filter
        def lower(value):
            return value.lower()
        """
        if name is None and filter_func is None:
            # @register.filter()
            def dec(func):
                return self.filter_function(func, **flags)
            return dec
        elif name is not None and filter_func is None:
            if callable(name):
                # @register.filter
                return self.filter_function(name, **flags)
            else:
                # @register.filter('somename') or @register.filter(name='somename')
                def dec(func):
                    return self.filter(name, func, **flags)
                return dec
        elif name is not None and filter_func is not None:
            # register.filter('somename', somefunc)
            self.filters[name] = filter_func
            for attr in ('expects_localtime', 'is_safe', 'needs_autoescape'):
                if attr in flags:
                    value = flags[attr]
                    # set the flag on the filter for FilterExpression.resolve
                    setattr(filter_func, attr, value)
                    # set the flag on the innermost decorated function
                    # for decorators that need it, e.g. stringfilter
                    if hasattr(filter_func, "_decorated_function"):
                        setattr(filter_func._decorated_function, attr, value)
            filter_func._filter_name = name
            return filter_func
        else:
            raise ValueError(
                "Unsupported arguments to Library.filter: (%r, %r)" %
                (name, filter_func),
            )

    def filter_function(self, func, **flags):
        name = getattr(func, "_decorated_function", func).__name__
        return self.filter(name, func, **flags)

    def simple_tag(self, func=None, takes_context=None, name=None):
        """
        Register a callable as a compiled template tag. Example:

        @register.simple_tag
        def hello(*args, **kwargs):
            return 'world'
        """
        def dec(func):
            params, varargs, varkw, defaults = getargspec(func)
            function_name = (name or getattr(func, '_decorated_function', func).__name__)

            @functools.wraps(func)
            def compile_func(parser, token):
                bits = token.split_contents()[1:]
                # Trailing "as <var>" stores the output instead of rendering it.
                target_var = None
                if len(bits) >= 2 and bits[-2] == 'as':
                    target_var = bits[-1]
                    bits = bits[:-2]
                args, kwargs = parse_bits(parser, bits, params,
                    varargs, varkw, defaults, takes_context, function_name)
                return SimpleNode(func, takes_context, args, kwargs, target_var)
            self.tag(function_name, compile_func)
            return func

        if func is None:
            # @register.simple_tag(...)
            return dec
        elif callable(func):
            # @register.simple_tag
            return dec(func)
        else:
            raise ValueError("Invalid arguments provided to simple_tag")

    def assignment_tag(self, func=None, takes_context=None, name=None):
        # Deprecated alias: simple_tag gained "as var" support, making this redundant.
        warnings.warn(
            "assignment_tag() is deprecated. Use simple_tag() instead",
            RemovedInDjango20Warning,
            stacklevel=2,
        )
        return self.simple_tag(func, takes_context, name)

    def inclusion_tag(self, filename, func=None, takes_context=None, name=None):
        """
        Register a callable as an inclusion tag:

        @register.inclusion_tag('results.html')
        def show_results(poll):
            choices = poll.choice_set.all()
            return {'choices': choices}
        """
        def dec(func):
            params, varargs, varkw, defaults = getargspec(func)
            function_name = (name or getattr(func, '_decorated_function', func).__name__)

            @functools.wraps(func)
            def compile_func(parser, token):
                bits = token.split_contents()[1:]
                args, kwargs = parse_bits(
                    parser, bits, params, varargs, varkw, defaults,
                    takes_context, function_name,
                )
                return InclusionNode(
                    func, takes_context, args, kwargs, filename,
                )
            self.tag(function_name, compile_func)
            return func
        return dec


class TagHelperNode(Node):
    """
    Base class for tag helper nodes such as SimpleNode and InclusionNode.
    Manages the positional and keyword arguments to be passed to
    the decorated function.
    """
    def __init__(self, func, takes_context, args, kwargs):
        self.func = func
        self.takes_context = takes_context
        self.args = args
        self.kwargs = kwargs

    def get_resolved_arguments(self, context):
        # Resolve each stored FilterExpression against the current context;
        # prepend the context itself when takes_context=True was requested.
        resolved_args = [var.resolve(context) for var in self.args]
        if self.takes_context:
            resolved_args = [context] + resolved_args
        resolved_kwargs = {k: v.resolve(context) for k, v in self.kwargs.items()}
        return resolved_args, resolved_kwargs


class SimpleNode(TagHelperNode):

    def __init__(self, func, takes_context, args, kwargs, target_var):
        super(SimpleNode, self).__init__(func, takes_context, args, kwargs)
        self.target_var = target_var

    def render(self, context):
        resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
        output = self.func(*resolved_args, **resolved_kwargs)
        if self.target_var is not None:
            # "as var" form: store the result and render nothing.
            context[self.target_var] = output
            return ''
        if context.autoescape:
            output = conditional_escape(output)
        return output


class InclusionNode(TagHelperNode):

    def __init__(self, func, takes_context, args, kwargs, filename):
        super(InclusionNode, self).__init__(func, takes_context, args, kwargs)
        self.filename = filename

    def render(self, context):
        """
        Render the specified template and context. Cache the template object
        in render_context to avoid reparsing and loading when used in a for
        loop.
        """
        resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
        _dict = self.func(*resolved_args, **resolved_kwargs)

        t = context.render_context.get(self)
        if t is None:
            # `filename` may be a Template, an object wrapping one, a list of
            # candidate names, or a single template name.
            if isinstance(self.filename, Template):
                t = self.filename
            elif isinstance(getattr(self.filename, 'template', None), Template):
                t = self.filename.template
            elif not isinstance(self.filename, six.string_types) and is_iterable(self.filename):
                t = context.template.engine.select_template(self.filename)
            else:
                t = context.template.engine.get_template(self.filename)
            context.render_context[self] = t
        new_context = context.new(_dict)
        # Copy across the CSRF token, if present, because inclusion tags are
        # often used for forms, and we need instructions for using CSRF
        # protection to be as simple as possible.
        csrf_token = context.get('csrf_token')
        if csrf_token is not None:
            new_context['csrf_token'] = csrf_token
        return t.render(new_context)


def parse_bits(parser, bits, params, varargs, varkw, defaults,
               takes_context, name):
    """
    Parse bits for template tag helpers simple_tag and inclusion_tag, in
    particular by detecting syntax errors and by extracting positional and
    keyword arguments.
    """
    if takes_context:
        if params[0] == 'context':
            params = params[1:]
        else:
            raise TemplateSyntaxError(
                "'%s' is decorated with takes_context=True so it must "
                "have a first argument of 'context'" % name)
    args = []
    kwargs = {}
    unhandled_params = list(params)
    for bit in bits:
        # First we try to extract a potential kwarg from the bit
        kwarg = token_kwargs([bit], parser)
        if kwarg:
            # The kwarg was successfully extracted
            param, value = kwarg.popitem()
            if param not in params and varkw is None:
                # An unexpected keyword argument was supplied
                raise TemplateSyntaxError(
                    "'%s' received unexpected keyword argument '%s'" %
                    (name, param))
            elif param in kwargs:
                # The keyword argument has already been supplied once
                raise TemplateSyntaxError(
                    "'%s' received multiple values for keyword argument '%s'" %
                    (name, param))
            else:
                # All good, record the keyword argument
                kwargs[str(param)] = value
                if param in unhandled_params:
                    # If using the keyword syntax for a positional arg, then
                    # consume it.
                    unhandled_params.remove(param)
        else:
            if kwargs:
                raise TemplateSyntaxError(
                    "'%s' received some positional argument(s) after some "
                    "keyword argument(s)" % name)
            else:
                # Record the positional argument
                args.append(parser.compile_filter(bit))
                try:
                    # Consume from the list of expected positional arguments
                    unhandled_params.pop(0)
                except IndexError:
                    if varargs is None:
                        raise TemplateSyntaxError(
                            "'%s' received too many positional arguments" %
                            name)
    if defaults is not None:
        # Consider the last n params handled, where n is the
        # number of defaults.
        unhandled_params = unhandled_params[:-len(defaults)]
    if unhandled_params:
        # Some positional arguments were not supplied
        raise TemplateSyntaxError(
            "'%s' did not receive value(s) for the argument(s): %s" %
            (name, ", ".join("'%s'" % p for p in unhandled_params)))
    return args, kwargs


def import_library(name):
    """
    Load a Library object from a template tag module.
    """
    try:
        module = import_module(name)
    except ImportError as e:
        raise InvalidTemplateLibrary(
            "Invalid template library specified. ImportError raised when "
            "trying to load '%s': %s" % (name, e)
        )
    try:
        return module.register
    except AttributeError:
        raise InvalidTemplateLibrary(
            "Module  %s does not have a variable named 'register'" % name,
        )
bsd-3-clause
jazkarta/edx-platform-for-isc
common/djangoapps/terrain/stubs/xqueue.py
123
8708
""" Stub implementation of XQueue for acceptance tests. Configuration values: "default" (dict): Default response to be sent to LMS as a grade for a submission "<submission>" (dict): Grade response to return for submissions containing the text <submission> "register_submission_url" (str): URL to send grader payloads when we receive a submission If no grade response is configured, a default response will be returned. """ from .http import StubHttpRequestHandler, StubHttpService, require_params import json import copy from requests import post from threading import Timer class StubXQueueHandler(StubHttpRequestHandler): """ A handler for XQueue POST requests. """ DEFAULT_RESPONSE_DELAY = 2 DEFAULT_GRADE_RESPONSE = {'correct': True, 'score': 1, 'msg': ''} @require_params('POST', 'xqueue_body', 'xqueue_header') def do_POST(self): """ Handle a POST request from the client Sends back an immediate success/failure response. It then POSTS back to the client with grading results. """ msg = "XQueue received POST request {0} to path {1}".format(self.post_dict, self.path) self.log_message(msg) # Respond only to grading requests if self._is_grade_request(): # If configured, send the grader payload to other services. self._register_submission(self.post_dict['xqueue_body']) try: xqueue_header = json.loads(self.post_dict['xqueue_header']) callback_url = xqueue_header['lms_callback_url'] except KeyError: # If the message doesn't have a header or body, # then it's malformed. 
Respond with failure error_msg = "XQueue received invalid grade request" self._send_immediate_response(False, message=error_msg) except ValueError: # If we could not decode the body or header, # respond with failure error_msg = "XQueue could not decode grade request" self._send_immediate_response(False, message=error_msg) else: # Send an immediate response of success # The grade request is formed correctly self._send_immediate_response(True) # Wait a bit before POSTing back to the callback url with the # grade result configured by the server # Otherwise, the problem will not realize it's # queued and it will keep waiting for a response indefinitely delayed_grade_func = lambda: self._send_grade_response( callback_url, xqueue_header, self.post_dict['xqueue_body'] ) delay = self.server.config.get('response_delay', self.DEFAULT_RESPONSE_DELAY) Timer(delay, delayed_grade_func).start() # If we get a request that's not to the grading submission # URL, return an error else: self._send_immediate_response(False, message="Invalid request URL") def _send_immediate_response(self, success, message=""): """ Send an immediate success/failure message back to the client """ # Send the response indicating success/failure response_str = json.dumps( {'return_code': 0 if success else 1, 'content': message} ) if self._is_grade_request(): self.send_response( 200, content=response_str, headers={'Content-type': 'text/plain'} ) self.log_message("XQueue: sent response {0}".format(response_str)) else: self.send_response(500) def _send_grade_response(self, postback_url, xqueue_header, xqueue_body_json): """ POST the grade response back to the client using the response provided by the server configuration. 
Uses the server configuration to determine what response to send: 1) Specific response for submissions containing matching text in `xqueue_body` 2) Default submission configured by client 3) Default submission `postback_url` is the URL the client told us to post back to `xqueue_header` (dict) is the full header the client sent us, which we will send back to the client so it can authenticate us. `xqueue_body_json` (json-encoded string) is the body of the submission the client sent us. """ # First check if we have a configured response that matches the submission body grade_response = None # This matches the pattern against the JSON-encoded xqueue_body # This is very simplistic, but sufficient to associate a student response # with a grading response. # There is a danger here that a submission will match multiple response patterns. # Rather than fail silently (which could cause unpredictable behavior in tests) # we abort and log a debugging message. for pattern, response in self.server.queue_responses: if pattern in xqueue_body_json: if grade_response is None: grade_response = response # Multiple matches, so abort and log an error else: self.log_error( "Multiple response patterns matched '{0}'".format(xqueue_body_json), ) return # Fall back to the default grade response configured for this queue, # then to the default response. 
if grade_response is None: grade_response = self.server.config.get( 'default', copy.deepcopy(self.DEFAULT_GRADE_RESPONSE) ) # Wrap the message in <div> tags to ensure that it is valid XML if isinstance(grade_response, dict) and 'msg' in grade_response: grade_response['msg'] = "<div>{0}</div>".format(grade_response['msg']) data = { 'xqueue_header': json.dumps(xqueue_header), 'xqueue_body': json.dumps(grade_response) } post(postback_url, data=data) self.log_message("XQueue: sent grading response {0} to {1}".format(data, postback_url)) def _register_submission(self, xqueue_body_json): """ If configured, send the submission's grader payload to another service. """ url = self.server.config.get('register_submission_url') # If not configured, do not need to send anything if url is not None: try: xqueue_body = json.loads(xqueue_body_json) except ValueError: self.log_error( "Could not decode XQueue body as JSON: '{0}'".format(xqueue_body_json)) else: # Retrieve the grader payload, which should be a JSON-encoded dict. # We pass the payload directly to the service we are notifying, without # inspecting the contents. grader_payload = xqueue_body.get('grader_payload') if grader_payload is not None: response = post(url, data={'grader_payload': grader_payload}) if not response.ok: self.log_error( "Could register submission at URL '{0}'. Status was {1}".format( url, response.status_code)) else: self.log_message( "XQueue body is missing 'grader_payload' key: '{0}'".format(xqueue_body) ) def _is_grade_request(self): """ Return a boolean indicating whether the requested URL indicates a submission. """ return 'xqueue/submit' in self.path class StubXQueueService(StubHttpService): """ A stub XQueue grading server that responds to POST requests to localhost. 
""" HANDLER_CLASS = StubXQueueHandler NON_QUEUE_CONFIG_KEYS = ['default', 'register_submission_url'] @property def queue_responses(self): """ Returns a list of (pattern, response) tuples, where `pattern` is a pattern to match in the XQueue body, and `response` is a dictionary to return as the response from the grader. Every configuration key is a queue name, except for 'default' and 'register_submission_url' which have special meaning """ return { key: value for key, value in self.config.iteritems() if key not in self.NON_QUEUE_CONFIG_KEYS }.items()
agpl-3.0
hn8841182/20150623-test02
static/Brython3.1.1-20150328-091302/Lib/site-packages/pygame/pkgdata.py
603
2146
"""pkgdata is a simple, extensible way for a package to acquire data file resources. The getResource function is equivalent to the standard idioms, such as the following minimal implementation:: import sys, os def getResource(identifier, pkgname=__name__): pkgpath = os.path.dirname(sys.modules[pkgname].__file__) path = os.path.join(pkgpath, identifier) return file(os.path.normpath(path), mode='rb') When a __loader__ is present on the module given by __name__, it will defer getResource to its get_data implementation and return it as a file-like object (such as StringIO). """ __all__ = ['getResource'] import sys import os #from cStringIO import StringIO from io import StringIO try: # Try to use setuptools if available. from pkg_resources import resource_stream _have_resource_stream = True except ImportError: _have_resource_stream = False def getResource(identifier, pkgname=__name__): """Acquire a readable object for a given package name and identifier. An IOError will be raised if the resource can not be found. For example:: mydata = getResource('mypkgdata.jpg').read() Note that the package name must be fully qualified, if given, such that it would be found in sys.modules. In some cases, getResource will return a real file object. In that case, it may be useful to use its name attribute to get the path rather than use it as a file-like object. For example, you may be handing data off to a C API. """ # Prefer setuptools if _have_resource_stream: return resource_stream(pkgname, identifier) mod = sys.modules[pkgname] fn = getattr(mod, '__file__', None) if fn is None: raise IOError("%r has no __file__!") path = os.path.join(os.path.dirname(fn), identifier) loader = getattr(mod, '__loader__', None) if loader is not None: try: data = loader.get_data(path) except IOError: pass else: return StringIO(data) #return file(os.path.normpath(path), 'rb') return open(os.path.normpath(path), 'rb')
gpl-3.0
alphagov/backdrop
backdrop/core/storage/postgres.py
1
6697
from datetime import datetime

import dateutil.parser
import dateutil.tz
import psycopg2
import psycopg2.extras
import pytz
import logging
from uuid import uuid4

from sql_query_factory import (
    create_sql_query,
    create_data_set_exists_query,
    create_delete_data_set_query,
    create_get_last_updated_query,
    create_find_record_query,
    create_update_record_query,
    create_delete_record_query,
    create_batch_last_updated_query,
    CREATE_TABLE_SQL,
    DROP_TABLE_SQL,
)

from .. import timeutils


logger = logging.getLogger(__name__)


class PostgresStorageEngine(object):
    """Storage engine backed by a single PostgreSQL table of records."""

    def __init__(self, datatbase_url):
        # NOTE(review): parameter name "datatbase_url" is a typo for
        # "database_url"; kept as-is because callers may pass it by keyword.
        self.connection = psycopg2.connect(datatbase_url)

    def create_table_and_indices(self):
        """
        This is probably only going to be used by the tests, or run manually
        when setting up the database for the first time. Database migrations
        are for losers (it is in fact us, the people who support this project,
        who are the losers).
        """
        with self.connection.cursor() as cursor:
            query = CREATE_TABLE_SQL
            logger.debug(
                'create_table_and_indices - executing sql query: ' + query)
            cursor.execute(query)
            self.connection.commit()

    def drop_table_and_indices(self):
        """
        As with the create above, this is likely only used during tests.
        """
        with self.connection.cursor() as cursor:
            query = DROP_TABLE_SQL
            logger.debug(
                'drop_table_and_indices - executing sql query: ' + query)
            cursor.execute(query)
            self.connection.commit()

    def data_set_exists(self, data_set_id):
        # This is slightly different to the mongo implementation
        # in that it will return False if `create_data_set` has
        # been called, but no records have been saved.
        with self.connection.cursor() as cursor:
            query = create_data_set_exists_query(cursor.mogrify, data_set_id)
            logger.debug('data_set_exists - executing sql query: ' + query)
            cursor.execute(query)
            return cursor.rowcount > 0

    def create_data_set(self, data_set_id, size):
        # Data sets are implicit in Postgres: a set "exists" once it has
        # records, so there is nothing to create up front.
        pass

    def delete_data_set(self, data_set_id):
        """Remove every record belonging to the data set."""
        with self.connection.cursor() as cursor:
            query = create_delete_data_set_query(cursor.mogrify, data_set_id)
            logger.debug('delete_data_set - executing sql query: ' + query)
            cursor.execute(query)
            self.connection.commit()

    def get_last_updated(self, data_set_id):
        """Return the data set's most recent `_updated_at`, or None if empty."""
        with self.connection.cursor() as cursor:
            query = create_get_last_updated_query(cursor.mogrify, data_set_id)
            logger.debug('get_last_updated - executing sql query: ' + query)
            cursor.execute(query)
            if cursor.rowcount == 0:
                return None
            (record,) = cursor.fetchone()
            return _parse_datetime_fields(record)['_updated_at']

    def batch_last_updated(self, data_sets):
        """
        Populate `_last_updated` on each data set in one round trip.
        """
        collections = [collection.name for collection in data_sets]
        with self.connection.cursor() as cursor:
            query = create_batch_last_updated_query(
                cursor.mogrify, collections)
            logger.debug('batch_last_updated - executing sql query: ' + query)
            cursor.execute(query)
            results = cursor.fetchall()
            timestamp_by_collection = {
                collection: max_timestamp
                for [collection, max_timestamp] in results}
            for data_set in data_sets:
                # Bug fix: previously indexed with [] which raised KeyError
                # for any data set absent from the query results (e.g. one
                # with no records). .get() treats it as "never updated".
                max_timestamp = timestamp_by_collection.get(data_set.name)
                if max_timestamp is not None:
                    data_set._last_updated = max_timestamp.replace(
                        tzinfo=pytz.UTC)
                else:
                    data_set._last_updated = None

    def empty_data_set(self, data_set_id):
        # Emptying and deleting are equivalent here (see create_data_set).
        self.delete_data_set(data_set_id)

    def save_record(self, data_set_id, record):
        if '_id' not in record:
            # Mongo used to add an _id field automatically.
            # Postgres doesn't, so we're replicating the functionality here:
            record['_id'] = str(uuid4())
        self.update_record(data_set_id, record['_id'], record)

    def find_record(self, data_set_id, record_id):
        with self.connection.cursor() as cursor:
            query = create_find_record_query(
                cursor.mogrify, data_set_id, record_id)
            logger.debug('find_record - executing sql query: ' + query)
            cursor.execute(query)
            (record,) = cursor.fetchone()
            return _parse_datetime_fields(record)

    def update_record(self, data_set_id, record_id, record):
        # Stamp the record; fall back to the update time as its timestamp.
        updated_at = timeutils.now()
        record['_updated_at'] = updated_at
        ts = record['_timestamp'] if '_timestamp' in record else updated_at
        with self.connection.cursor() as cursor:
            query = create_update_record_query(
                cursor.mogrify, data_set_id, record, record_id, ts, updated_at)
            logger.debug('update_record - executing sql query: ' + query)
            cursor.execute(query)
            self.connection.commit()

    def delete_record(self, data_set_id, record_id):
        with self.connection.cursor() as cursor:
            query = create_delete_record_query(
                cursor.mogrify, data_set_id, record_id)
            logger.debug('delete_record - executing sql query: ' + query)
            cursor.execute(query)
            self.connection.commit()

    def execute_query(self, data_set_id, query):
        with self.connection.cursor() as cursor:
            query, convert_query_result_to_dictionaries = create_sql_query(
                cursor.mogrify, data_set_id, query)
            logger.debug('execute_query - executing sql query: ' + query)
            cursor.execute(query)
            records = convert_query_result_to_dictionaries(cursor.fetchall())
        return [_parse_datetime_fields(record) for record in records]


def _parse_datetime_fields(obj):
    """
    The code expects _updated_at to be a datetime, but it's stored as a string
    >>> _parse_datetime_fields({'_updated_at':'1988-01-20T00:00:00'})
    {'_updated_at': datetime.datetime(1988, 1, 20, 0, 0, tzinfo=<UTC>)}
    """
    obj_copy = obj.copy()
    # Known string-encoded datetime fields are parsed and pinned to UTC.
    for field in ['_updated_at', '_timestamp']:
        if field in obj:
            obj_copy[field] = dateutil.parser.parse(
                obj[field]).replace(tzinfo=pytz.UTC)
    # Any value already deserialized as a datetime is also pinned to UTC.
    # .items() (not py2-only .iteritems()) behaves identically on Python 2
    # and keeps this module importable on Python 3.
    for key, value in obj.items():
        if isinstance(value, datetime):
            obj_copy[key] = value.replace(tzinfo=pytz.UTC)
    return obj_copy
mit
AaronGeist/Llama
main.py
1
2976
import sys
import time

from biz.pt.mteam import AdultAlert, UploadCheck, UserCrawl, CandidateVote, MessageReader, NormalAlert
from biz.ipnotifier import IpNotifier
from biz.life.weather import WeatherReport
from biz.miui import Miui
from biz.pt.putao import FreeFeedAlert, MagicPointChecker, UploadMonitor
from biz.pt.putao_watchdog import PuTaoWatchDog
from biz.pt.ttg_rss import TTGRSS
from biz.rpi import CpuTemperature, Memory, Thread
from biz.watchlist.ShuHuiWatchDog import ShuHuiWatchDog
from core.seedmanager import SeedManager
from core.tts import TextToSpeech

# Maps CLI sub-command names to the callable that implements each command.
# NOTE(review): each entry instantiates its class eagerly at import time;
# kept as-is because the constructors' side effects are not visible here.
cmd_map = {
    "seed_ls": SeedManager.parse_current_seeds,
    "pt_seed_check": PuTaoWatchDog().check,
    "pt_seed_add": PuTaoWatchDog().manual_add_seed,
    "pt_seed_ls": PuTaoWatchDog().crawl,
    "pt_seed_ignore": PuTaoWatchDog().ignore,
    "pt_stat": PuTaoWatchDog().stat,
    "ttg_rss_check": TTGRSS().check,
    "feed_check": FreeFeedAlert().check,
    "mp_check": MagicPointChecker().check,
    "mp_monitor": MagicPointChecker().monitor,
    "up_monitor": UploadMonitor().monitor,
    "cpu_temp_monitor": CpuTemperature().monitor,
    "memory_monitor": Memory().monitor,
    "thread_monitor": Thread().monitor_db,
    "weather": WeatherReport.report_weather,
    "tts": TextToSpeech.convert_and_play,
    "mt_normal": NormalAlert().check,
    "mt_adult": AdultAlert().check,
    "mt_adult_init": AdultAlert().init,
    "mt_add_normal": NormalAlert().add_seed,
    "mt_add": AdultAlert().add_seed,
    "mt_up_init": UploadCheck().init,
    "mt_up_check": UploadCheck().check,
    "mt_up_check_not_store": UploadCheck().check_not_store,
    "mt_user_init": UserCrawl().crawl,
    "mt_user_refresh": UserCrawl().refresh,
    "mt_user_warn": UserCrawl().warn,
    "mt_id": UserCrawl().load_by_id,
    "mt_name": UserCrawl().load_by_name,
    "mt_order": UserCrawl().order,
    "mt_vote": CandidateVote().check,
    "mt_clean": SeedManager.seed_file_clean_up,
    "mt_page_ls": AdultAlert().crawl,
    "mt_msg": MessageReader().get_cmd,
    "miui_keep_alive": Miui().check_in,
    "miui_water": Miui().water_copy,
    "miui_zz": Miui().zz,
    "miui_zz_copy": Miui().zz_copy,
    "miui_vote": Miui().vote,
    "miui_sign": Miui().async_sign,
    "ip_change_check": IpNotifier.check_change,
    "shu_hui_update_check": ShuHuiWatchDog.check_and_notify
}


def usage():
    """Print the list of supported sub-commands."""
    print("please choose argument below:")
    for cmd in cmd_map.keys():
        print("  -- " + cmd)


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: must have one argument")
        usage()
        # Fix: quit() is injected by the interactive `site` module and is not
        # guaranteed in scripts (e.g. under python -S); sys.exit() is.
        sys.exit()

    cmd = sys.argv[1]
    # Membership test directly on the dict (no .keys() needed).
    if cmd in cmd_map:
        now = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
        print("%s Processing %s" % (now, cmd))
        # Dispatch with up to two positional CLI arguments.
        if len(sys.argv) > 3:
            cmd_map[cmd](sys.argv[2], sys.argv[3])
        elif len(sys.argv) > 2:
            cmd_map[cmd](sys.argv[2])
        else:
            cmd_map[cmd]()
    else:
        usage()
gpl-3.0
okolisny/integration_tests
cfme/cloud/provider/__init__.py
1
8467
""" A model of a Cloud Provider in CFME """ from functools import partial from navmazing import NavigateToSibling, NavigateToAttribute from widgetastic_manageiq import TimelinesView from cfme.base.login import BaseLoggedInPage from cfme.common import TagPageView from cfme.common.provider_views import (CloudProviderAddView, CloudProviderEditView, CloudProviderDetailsView, CloudProvidersView, CloudProvidersDiscoverView, ProvidersManagePoliciesView ) import cfme.fixtures.pytest_selenium as sel from cfme.common.provider import CloudInfraProvider from cfme.web_ui import InfoBlock, match_location from cfme.utils.appliance import Navigatable from cfme.utils.appliance.implementations.ui import navigator, navigate_to, CFMENavigateStep from cfme.utils.log import logger from cfme.utils.wait import wait_for from cfme.utils.pretty import Pretty match_page = partial(match_location, controller='ems_cloud', title='Cloud Providers') class CloudProviderTimelinesView(TimelinesView, BaseLoggedInPage): @property def is_displayed(self): return self.logged_in_as_current_user and \ self.navigation.currently_selected == ['Compute', 'Clouds', 'Providers'] and \ super(TimelinesView, self).is_displayed class CloudProvider(Pretty, CloudInfraProvider): """ Abstract model of a cloud provider in cfme. See EC2Provider or OpenStackProvider. Args: name: Name of the provider. endpoints: one or several provider endpoints like DefaultEndpoint. it should be either dict in format dict{endpoint.name, endpoint, endpoint_n.name, endpoint_n}, list of endpoints or mere one endpoint key: The CFME key of the provider in the yaml. 
Usage: credentials = Credential(principal='bad', secret='reallybad') endpoint = DefaultEndpoint(hostname='some_host', region='us-west', credentials=credentials) myprov = VMwareProvider(name='foo', endpoints=endpoint) myprov.create() """ provider_types = {} category = "cloud" pretty_attrs = ['name', 'credentials', 'zone', 'key'] STATS_TO_MATCH = ['num_template', 'num_vm'] string_name = "Cloud" page_name = "clouds" templates_destination_name = "Images" vm_name = "Instances" template_name = "Images" db_types = ["CloudManager"] def __init__(self, name=None, endpoints=None, zone=None, key=None, appliance=None): Navigatable.__init__(self, appliance=appliance) self.name = name self.zone = zone self.key = key self.endpoints = self._prepare_endpoints(endpoints) def as_fill_value(self): return self.name @property def view_value_mapping(self): """Maps values to view attrs""" return {'name': self.name} @staticmethod def discover_dict(credential): """Returns the discovery credentials dictionary, needs overiding""" raise NotImplementedError("This provider doesn't support discovery") @navigator.register(CloudProvider, 'All') class All(CFMENavigateStep): VIEW = CloudProvidersView prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn') def step(self): self.prerequisite_view.navigation.select('Compute', 'Clouds', 'Providers') def resetter(self): tb = self.view.toolbar paginator = self.view.entities.paginator if 'Grid View' not in tb.view_selector.selected: tb.view_selector.select('Grid View') if paginator.exists: paginator.check_all() paginator.uncheck_all() @navigator.register(CloudProvider, 'Add') class New(CFMENavigateStep): VIEW = CloudProviderAddView prerequisite = NavigateToSibling('All') def step(self): self.prerequisite_view.toolbar.configuration.item_select('Add a New Cloud Provider') @navigator.register(CloudProvider, 'Discover') class Discover(CFMENavigateStep): VIEW = CloudProvidersDiscoverView prerequisite = NavigateToSibling('All') def step(self): 
self.prerequisite_view.toolbar.configuration.item_select('Discover Cloud Providers') @navigator.register(CloudProvider, 'Details') class Details(CFMENavigateStep): VIEW = CloudProviderDetailsView prerequisite = NavigateToSibling('All') def step(self): self.prerequisite_view.entities.get_entity(by_name=self.obj.name, surf_pages=True).click() @navigator.register(CloudProvider, 'Edit') class Edit(CFMENavigateStep): VIEW = CloudProviderEditView prerequisite = NavigateToSibling('All') def step(self): self.prerequisite_view.entities.get_entity(by_name=self.obj.name, surf_pages=True).check() self.prerequisite_view.toolbar.configuration.item_select('Edit Selected Cloud Provider') @navigator.register(CloudProvider, 'EditFromDetails') class EditFromDetails(CFMENavigateStep): VIEW = CloudProviderEditView prerequisite = NavigateToSibling('Details') def step(self): self.prerequisite_view.toolbar.configuration.item_select('Edit this Cloud Provider') @navigator.register(CloudProvider, 'ManagePolicies') class ManagePolicies(CFMENavigateStep): VIEW = ProvidersManagePoliciesView prerequisite = NavigateToSibling('All') def step(self): self.prerequisite_view.entities.get_entity(by_name=self.obj.name, surf_pages=True).check() self.prerequisite_view.toolbar.policy.item_select('Manage Policies') @navigator.register(CloudProvider, 'ManagePoliciesFromDetails') class ManagePoliciesFromDetails(CFMENavigateStep): VIEW = ProvidersManagePoliciesView prerequisite = NavigateToSibling('Details') def step(self): self.prerequisite_view.toolbar.policy.item_select('Manage Policies') @navigator.register(CloudProvider, 'EditTags') class EditTags(CFMENavigateStep): VIEW = TagPageView prerequisite = NavigateToSibling('All') def step(self): self.prerequisite_view.entities.get_entity(by_name=self.obj.name, surf_pages=True).check() self.prerequisite_view.toolbar.policy.item_select('Edit Tags') @navigator.register(CloudProvider, 'Timelines') class Timelines(CFMENavigateStep): VIEW = CloudProviderTimelinesView 
prerequisite = NavigateToSibling('Details') def step(self): mon = self.prerequisite_view.toolbar.monitoring mon.item_select('Timelines') @navigator.register(CloudProvider, 'Instances') class Instances(CFMENavigateStep): prerequisite = NavigateToSibling('Details') def am_i_here(self): return match_page(summary='{} (All Instances)'.format(self.obj.name)) def step(self, *args, **kwargs): sel.click(InfoBlock.element('Relationships', 'Instances')) @navigator.register(CloudProvider, 'Images') class Images(CFMENavigateStep): prerequisite = NavigateToSibling('Details') def am_i_here(self): return match_page(summary='{} (All Images)'.format(self.obj.name)) def step(self, *args, **kwargs): sel.click(InfoBlock.element('Relationships', 'Images')) def get_all_providers(): """Returns list of all providers""" view = navigate_to(CloudProvider, 'All') return [item.name for item in view.entities.get_all(surf_pages=True)] def discover(credential, discover_cls, cancel=False): """ Discover cloud providers. Note: only starts discovery, doesn't wait for it to finish. Args: credential (cfme.base.credential.Credential): Discovery credentials. cancel (boolean): Whether to cancel out of the discover UI. discover_cls: class of the discovery item """ view = navigate_to(CloudProvider, 'Discover') if discover_cls: view.fill({'discover_type': discover_cls.discover_name}) view.fields.fill(discover_cls.discover_dict(credential)) if cancel: view.cancel.click() else: view.start.click() def wait_for_a_provider(): view = navigate_to(CloudProvider, 'All') logger.info('Waiting for a provider to appear...') wait_for(lambda: int(view.entities.paginator.items_amount), fail_condition=0, message="Wait for any provider to appear", num_sec=1000, fail_func=view.browser.refresh)
gpl-2.0
Exa-Networks/exaddos
lib/exaddos/application.py
2
9124
""" application.py Created by Thomas Mangin on 2014-02-06. Copyright (c) 2014-2014 Exa Networks. All rights reserved. """ import os import sys import pwd import socket import errno import atexit from log import log,err,silence from exaddos import reactor def __exit(memory,code): if memory: from exaddos.leak import objgraph print "memory utilisation" print print objgraph.show_most_common_types(limit=20) print print print "generating memory utilisation graph" print obj = objgraph.by_type('run') objgraph.show_backrefs([obj], max_depth=10) sys.exit(code) def __drop_privileges (user): """returns true if we are left with insecure privileges""" try: user = pwd.getpwnam(user) nuid = int(user.pw_uid) ngid = int(user.pw_gid) except KeyError: return False uid = os.getuid() gid = os.getgid() # not sure you can change your gid if you do not have a pid of zero try: # we must change the GID first otherwise it may fail after change UID if not gid: os.setgid(ngid) if not uid: os.setuid(nuid) cuid = os.getuid() ceid = os.geteuid() cgid = os.getgid() if cuid < 0: cuid = (1<<32) + cuid if cgid < 0: cgid = (1<<32) + cgid if ceid < 0: ceid = (1<<32) + ceid if nuid != cuid or nuid != ceid or ngid != cgid: return False except OSError: return False return True def drop_privileges (configuration): # os.name can be ['posix', 'nt', 'os2', 'ce', 'java', 'riscos'] if os.name not in ['posix',]: return True if os.getuid() != 0: err('not running as root, not changing UID') return True users = [configuration.daemon.user,'nobody'] for user in users: if __drop_privileges(user): return True return False def daemonise (daemonize): if not daemonize: return def fork_exit (): try: pid = os.fork() if pid > 0: os._exit(0) except OSError, e: err('Can not fork, errno %d : %s' % (e.errno,e.strerror)) def mute (): # closing more would close the log file too if open maxfd = 3 for fd in range(0, maxfd): try: os.close(fd) except OSError: pass os.open("/dev/null", os.O_RDWR) os.dup2(0, 1) os.dup2(0, 2) def 
is_socket (fd): try: s = socket.fromfd(fd, socket.AF_INET, socket.SOCK_RAW) except ValueError,e: # The file descriptor is closed return False try: s.getsockopt(socket.SOL_SOCKET, socket.SO_TYPE) except socket.error, e: # It is look like one but it is not a socket ... if e.args[0] == errno.ENOTSOCK: return False return True # do not detach if we are already supervised or run by init like process if is_socket(sys.__stdin__.fileno()) or os.getppid() == 1: return fork_exit() os.setsid() fork_exit() mute() silence() def savepid (location): if not location: return ownid = os.getpid() flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY mode = ((os.R_OK | os.W_OK) << 6) | (os.R_OK << 3) | os.R_OK try: fd = os.open(location,flags,mode) except OSError: err("PIDfile already exists, not updated %s" % location) return False try: f = os.fdopen(fd,'w') line = "%d\n" % ownid f.write(line) f.close() except IOError: err("Can not create PIDfile %s" % location) return False log("Created PIDfile %s with value %d" % (location,ownid)) atexit.register(removepid,location) return True def removepid (location): if not location: return try: os.remove(location) except OSError, e: if e.errno == errno.ENOENT: pass else: err("Can not remove PIDfile %s" % location) return log("Removed PIDfile %s" % location) def help (): sys.stdout.write('usage:\n exaddos [options]\n') sys.stdout.write('\n') sys.stdout.write(' -h, --help : this help\n') sys.stdout.write(' -c, --conf-file : configuration file to use (ini format)\n') sys.stdout.write(' -i, --ini : display the configuration using the ini format\n') sys.stdout.write(' -e, --env : display the configuration using the env format\n') sys.stdout.write(' -di, --diff-ini : display non-default configurations values using the ini format\n') sys.stdout.write(' -de, --diff-env : display non-default configurations values using the env format\n') sys.stdout.write(' -d, --debug : shortcut to turn on all subsystems debugging to LOG_DEBUG\n') sys.stdout.write(' -p, --pdb : 
start the python debugger on serious logging and on SIGTERM\n') sys.stdout.write(' -m, --memory : display memory usage information on exit\n') sys.stdout.write('\n') sys.stdout.write('iEnum will automatically look for its configuration file (in windows ini format)\n') sys.stdout.write(' - in the etc/exaddos folder located within the extracted tar.gz \n') sys.stdout.write(' - in /etc/exaddos/exaddos.conf\n') sys.stdout.write('\n') sys.stdout.write('Every configuration value has a sensible built-in default\n') sys.stdout.write('\n') sys.stdout.write('Individual configuration options can be set using environment variables, such as :\n') sys.stdout.write(' > env exaddos.http.port=39200 ./sbin/exaddos\n') sys.stdout.write('or > env exaddos_http_port=39200 ./sbin/exaddos\n') sys.stdout.write('or > export exaddos_http_port=39200; ./sbin/exaddos\n') sys.stdout.write('\n') sys.stdout.write('Multiple environment values can be set\n') sys.stdout.write('and the order of preference is :\n') sys.stdout.write(' - 1 : command line env value using dot separated notation\n') sys.stdout.write(' - 2 : exported value from the shell using dot separated notation\n') sys.stdout.write(' - 3 : command line env value using underscore separated notation\n') sys.stdout.write(' - 4 : exported value from the shell using underscore separated notation\n') sys.stdout.write(' - 5 : the value in the ini configuration file\n') sys.stdout.write('\n') sys.stdout.write('Valid configuration options are :\n') sys.stdout.write('\n') for line in default(): sys.stdout.write(' - %s\n' % line) sys.stdout.write('\n') def version_warning (): sys.stderr.write('This version of python is not supported\n') if __name__ == '__main__': main = int(sys.version[0]) secondary = int(sys.version[2]) if main != 2 or secondary < 4: sys.exit('This program can not work (is not tested) with your python version (< 2.4 or >= 3.0)') if main == 2 and secondary == 4: version_warning() try: from pysnmp.smi import builder 
builder.MibBuilder().loadModules('SNMPv2-MIB', 'IF-MIB') except: sys.exit('This program requires python netsnmp\n> pip install pysnmp\n> pip install pysnmp_mibs') try: from pysnmp.proto.rfc1905 import NoSuchInstance except: # some version of pysnmp do not have this API :-( sys.exit( 'This program requires a version of pysnmp which is not compatible with the one installed\n' 'You _may_ be able to replace the installed version with the lastest one on pypi\n' '> pip install pysnmp\n' '> pip install pysnmp_mibs' ) from exaddos.configuration import ConfigurationError,load,ini,env,default next = '' arguments = { 'configuration' : '', } for arg in sys.argv[1:]: if next: arguments[next] = arg next = '' continue if arg in ['-c','--conf-file']: next = 'configuration' for arg in sys.argv[1:]: if arg in ['--',]: break if arg in ['-h','--help']: help() sys.exit(0) try: configuration = load(arguments['configuration']) except ConfigurationError,e: err('configuration issue, %s' % str(e)) sys.exit(1) for arg in sys.argv[1:]: if arg in ['--',]: break if arg in ['-h','--help']: help() sys.exit(0) if arg in ['-i','--ini']: ini() sys.exit(0) if arg in ['-e','--env']: env() sys.exit(0) if arg in ['-di','--diff-ini']: ini(True) sys.exit(0) if arg in ['-de','--diff-env']: env(True) sys.exit(0) if arg in ['-p','--pdb']: # The following may fail on old version of python (but is required for debug.py) os.environ['PDB'] = 'true' configuration.debug.pdb = True if arg in ['-D']: configuration.daemon.daemonize = True if arg in ['-m','--memory']: configuration.debug.memory = True # check the database is well only 400 by the user we use # start web server :) reactor.setup(configuration) if not drop_privileges(configuration): err('could not drop privileges') __exit(configuration.debug.memory,0) daemonise(configuration.daemon.daemonize) savepid(configuration.daemon.pidfile) if not configuration.profile.enable: try: reactor.run() except socket.error,e: # XXXX: Look at ExaBGP code fore better handling 
if e.errno == errno.EADDRINUSE: err('can not bind to %s:%d (port already/still in use)' % (configuration.http.host, configuration.http.port)) if e.errno == errno.EADDRNOTAVAIL: err('can not bind to %s:%d (IP unavailable)' % (configuration.http.host, configuration.http.port)) __exit(configuration.debug.memory,0) try: import cProfile as profile except: try: import profile except: err('could not perform profiling') class profile (object): @staticmethod def run (function): eval(function) profile.run('reactor.run()') __exit(configuration.debug.memory,0)
bsd-3-clause
dragondjf/tornado_nginx_supervisor
ptest/main.py
1
1231
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web

from tornado.options import define, options

# Command-line / config option: TCP port the HTTP server listens on.
define("port", default=8888, help="run on the given port", type=int)


class MainHandler(tornado.web.RequestHandler):
    """Handler for the root URL: answers every GET with a fixed greeting."""

    def get(self):
        self.write("Hello, world")


def main():
    """Parse command-line options, build the app and serve forever."""
    tornado.options.parse_command_line()

    routes = [
        (r"/", MainHandler),
    ]
    app = tornado.web.Application(routes)

    server = tornado.httpserver.HTTPServer(app)
    server.listen(options.port)

    # Blocks until the IOLoop is stopped (e.g. by Ctrl-C).
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
gpl-2.0
CentOS-PaaS-SIG/linch-pin
linchpin/fetch/fetch_git.py
3
2306
from __future__ import absolute_import
import os
import subprocess
import tempfile

from .fetch import Fetch
from linchpin.exceptions import LinchpinError


class FetchGit(Fetch):
    """Fetch a linchpin workspace from a git repository, with local caching.

    Each distinct src/ref pair is cloned once into a directory under
    ``<cache_dir>/git`` and recorded in the fetch config, so later fetches
    of the same source reuse (and ``git pull``) the existing clone.
    """

    def __init__(self, ctx, fetch_type, src, dest, cache_dir, root='',
                 root_ws='', ref=None):
        # src: git URL to clone; ref: optional branch/ref to check out.
        super(FetchGit, self).__init__(ctx, fetch_type, dest, root=root,
                                       root_ws=root_ws, ref=ref)
        self.src = src
        self.cache_dir = os.path.join(cache_dir, "git")
        # NOTE(review): os.mkdir assumes cache_dir itself already exists;
        # presumably guaranteed by the caller -- confirm.
        if not os.path.exists(self.cache_dir):
            os.mkdir(self.cache_dir)

    def fetch_files(self):
        """Ensure the repository is present locally and record where it is."""
        # The key cannot contain ':' since linchpin does not support python 3.
        # Configparser uses ':' as a delimiter, which poses problem when using
        # urls in key during parsing. Delimiters can be specified when
        # initializing a configparser object in python 3 so this does not
        # become an issue.
        ref = 'None'
        if self.ref:
            ref = self.ref
        key = "{0}|{1}".format(self.src.replace(':', ''), ref)
        # Directory of a previous clone of this src/ref, or None on first use.
        fetch_dir = self.cfgs["git"].get(key, None)
        self.td = self.call_clone(fetch_dir)
        self.td_w_root = '{0}/{1}'.format(self.td, self.root)
        if not fetch_dir:
            # First fetch of this src/ref: remember the clone location.
            self.write_cfg("git", key, self.td)

    def call_clone(self, fetch_dir=None):
        """Clone self.src (or pull when a cached clone exists); return its dir.

        :param fetch_dir: previously cached clone directory, or None
        :raises LinchpinError: when the git command exits non-zero
        """
        ref = None
        src = self.src
        if self.ref:
            ref = self.ref
            # src with the ref appended is only used in the error message
            src = '{0}@{1}'.format(self.src, ref)
        if fetch_dir and os.path.exists(fetch_dir):
            # Cached clone present: refresh it in place.
            cmd = ['git', 'pull', '--quiet']
            retval = subprocess.call(cmd, stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE, cwd=fetch_dir)
        else:
            if not fetch_dir:
                fetch_dir = tempfile.mkdtemp(prefix="git_", dir=self.cache_dir)
            cmd = ['git', 'clone', '--quiet', self.src]
            if ref:
                cmd.extend(['-b', ref])
            cmd.append(fetch_dir)
            retval = subprocess.call(cmd, stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
        if retval != 0:
            # Raised for both failed clone and failed pull.
            raise LinchpinError("Unable to clone {0}".format(src))
        return fetch_dir
gpl-3.0
tersmitten/ansible
test/units/modules/network/netvisor/test_pn_admin_service.py
15
2421
# Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json

from units.compat.mock import patch
from ansible.modules.network.netvisor import pn_admin_service
from units.modules.utils import set_module_args
from .nvos_module import TestNvosModule, load_fixture


class TestAdminServiceModule(TestNvosModule):
    """Unit tests for the pn_admin_service module.

    run_cli is patched out, so no switch is contacted; each test only
    asserts on the exact CLI command string the module builds.
    """

    module = pn_admin_service

    def setUp(self):
        # Replace the module's run_cli with a mock for the duration of a test.
        self.mock_run_nvos_commands = patch('ansible.modules.network.netvisor.pn_admin_service.run_cli')
        self.run_nvos_commands = self.mock_run_nvos_commands.start()

    def tearDown(self):
        self.mock_run_nvos_commands.stop()

    def run_cli_patch(self, module, cli, state_map):
        # Stand-in for run_cli: instead of executing on a switch, hand the
        # built CLI string back through exit_json so tests can assert on it.
        if state_map['update'] == 'admin-service-modify':
            results = dict(
                changed=True,
                cli_cmd=cli
            )
            module.exit_json(**results)

    def load_fixtures(self, commands=None, state=None, transport='cli'):
        self.run_nvos_commands.side_effect = self.run_cli_patch

    def test_admin_service_modify_t1(self):
        # pn_web=False translates into the 'no-web' CLI switch.
        set_module_args({'pn_cliswitch': 'sw01', 'pn__if': 'mgmt', 'pn_web': 'False',
                         'state': 'update'})
        result = self.execute_module(changed=True, state='update')
        expected_cmd = ' switch sw01 admin-service-modify if mgmt no-web '
        self.assertEqual(result['cli_cmd'], expected_cmd)

    def test_admin_service_modify_t2(self):
        # Several boolean services enabled in a single modify call.
        set_module_args({'pn_cliswitch': 'sw01', 'pn__if': 'mgmt', 'pn_snmp': 'True',
                         'pn_net_api': 'True', 'pn_ssh': 'True', 'state': 'update'})
        result = self.execute_module(changed=True, state='update')
        expected_cmd = ' switch sw01 admin-service-modify if mgmt snmp ssh net-api '
        self.assertEqual(result['cli_cmd'], expected_cmd)

    def test_admin_service_modify_t3(self):
        # Value-carrying option (web-port) mixed with boolean flags.
        set_module_args({'pn_cliswitch': 'sw01', 'pn__if': 'data', 'pn_web_port': '8080',
                         'pn_net_api': 'True', 'pn_web_log': 'True', 'state': 'update'})
        result = self.execute_module(changed=True, state='update')
        expected_cmd = ' switch sw01 admin-service-modify if data web-port 8080 net-api web-log '
        self.assertEqual(result['cli_cmd'], expected_cmd)
gpl-3.0
nzavagli/UnrealPy
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/python-dateutil-2.4.2/dateutil/parser.py
102
44085
# -*- coding:iso-8859-1 -*- """ This module offers a generic date/time string parser which is able to parse most known formats to represent a date and/or time. Additional resources about date/time string formats can be found below: - `A summary of the international standard date and time notation <http://www.cl.cam.ac.uk/~mgk25/iso-time.html>`_ - `W3C Date and Time Formats <http://www.w3.org/TR/NOTE-datetime>`_ - `Time Formats (Planetary Rings Node) <http://pds-rings.seti.org/tools/time_formats.html>`_ - `CPAN ParseDate module <http://search.cpan.org/~muir/Time-modules-2013.0912/lib/Time/ParseDate.pm>`_ - `Java SimpleDateFormat Class <https://docs.oracle.com/javase/6/docs/api/java/text/SimpleDateFormat.html>`_ """ from __future__ import unicode_literals import datetime import string import time import collections from io import StringIO from six import text_type, binary_type, integer_types from . import relativedelta from . import tz __all__ = ["parse", "parserinfo"] class _timelex(object): def __init__(self, instream): if isinstance(instream, text_type): instream = StringIO(instream) self.instream = instream self.wordchars = ('abcdfeghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_' 'ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ' 'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ') self.numchars = '0123456789' self.whitespace = ' \t\r\n' self.charstack = [] self.tokenstack = [] self.eof = False def get_token(self): """ This function breaks the time string into lexical units (tokens), which can be parsed by the parser. Lexical units are demarcated by changes in the character set, so any continuous string of letters is considered one unit, any continuous string of numbers is considered one unit. The main complication arises from the fact that dots ('.') can be used both as separators (e.g. "Sep.20.2009") or decimal points (e.g. "4:30:21.447"). 
As such, it is necessary to read the full context of any dot-separated strings before breaking it into tokens; as such, this function maintains a "token stack", for when the ambiguous context demands that multiple tokens be parsed at once. """ if self.tokenstack: return self.tokenstack.pop(0) seenletters = False token = None state = None wordchars = self.wordchars numchars = self.numchars whitespace = self.whitespace while not self.eof: # We only realize that we've reached the end of a token when we find # a character that's not part of the current token - since that # character may be part of the next token, it's stored in the # charstack. if self.charstack: nextchar = self.charstack.pop(0) else: nextchar = self.instream.read(1) while nextchar == '\x00': nextchar = self.instream.read(1) if not nextchar: self.eof = True break elif not state: # First character of the token - determines if we're starting # to parse a word, a number or something else. token = nextchar if nextchar in wordchars: state = 'a' elif nextchar in numchars: state = '0' elif nextchar in whitespace: token = ' ' break # emit token else: break # emit token elif state == 'a': # If we've already started reading a word, we keep reading # letters until we find something that's not part of a word. seenletters = True if nextchar in wordchars: token += nextchar elif nextchar == '.': token += nextchar state = 'a.' else: self.charstack.append(nextchar) break # emit token elif state == '0': # If we've already started reading a number, we keep reading # numbers until we find something that doesn't fit. if nextchar in numchars: token += nextchar elif nextchar == '.': token += nextchar state = '0.' else: self.charstack.append(nextchar) break # emit token elif state == 'a.': # If we've seen some letters and a dot separator, continue # parsing, and the tokens will be broken up later. seenletters = True if nextchar == '.' 
or nextchar in wordchars: token += nextchar elif nextchar in numchars and token[-1] == '.': token += nextchar state = '0.' else: self.charstack.append(nextchar) break # emit token elif state == '0.': # If we've seen at least one dot separator, keep going, we'll # break up the tokens later. if nextchar == '.' or nextchar in numchars: token += nextchar elif nextchar in wordchars and token[-1] == '.': token += nextchar state = 'a.' else: self.charstack.append(nextchar) break # emit token if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or token[-1] == '.')): l = token.split('.') token = l[0] for tok in l[1:]: self.tokenstack.append('.') if tok: self.tokenstack.append(tok) return token def __iter__(self): return self def __next__(self): token = self.get_token() if token is None: raise StopIteration return token def next(self): return self.__next__() # Python 2.x support def split(cls, s): return list(cls(s)) split = classmethod(split) class _resultbase(object): def __init__(self): for attr in self.__slots__: setattr(self, attr, None) def _repr(self, classname): l = [] for attr in self.__slots__: value = getattr(self, attr) if value is not None: l.append("%s=%s" % (attr, repr(value))) return "%s(%s)" % (classname, ", ".join(l)) def __repr__(self): return self._repr(self.__class__.__name__) class parserinfo(object): """ Class which handles what inputs are accepted. Subclass this to customize the language and acceptable values for each parameter. :param dayfirst: Whether to interpret the first value in an ambiguous 3-integer date (e.g. 01/05/09) as the day (`True`) or month (`False`). If `yearfirst` is set to `True`, this distinguishes between YDM and YMD. Default is `False`. :param yearfirst: Whether to interpret the first value in an ambiguous 3-integer date (e.g. 01/05/09) as the year. If `True`, the first number is taken to be the year, otherwise the last number is taken to be the year. Default is `False`. 
""" # m from a.m/p.m, t from ISO T separator JUMP = [" ", ".", ",", ";", "-", "/", "'", "at", "on", "and", "ad", "m", "t", "of", "st", "nd", "rd", "th"] WEEKDAYS = [("Mon", "Monday"), ("Tue", "Tuesday"), ("Wed", "Wednesday"), ("Thu", "Thursday"), ("Fri", "Friday"), ("Sat", "Saturday"), ("Sun", "Sunday")] MONTHS = [("Jan", "January"), ("Feb", "February"), ("Mar", "March"), ("Apr", "April"), ("May", "May"), ("Jun", "June"), ("Jul", "July"), ("Aug", "August"), ("Sep", "Sept", "September"), ("Oct", "October"), ("Nov", "November"), ("Dec", "December")] HMS = [("h", "hour", "hours"), ("m", "minute", "minutes"), ("s", "second", "seconds")] AMPM = [("am", "a"), ("pm", "p")] UTCZONE = ["UTC", "GMT", "Z"] PERTAIN = ["of"] TZOFFSET = {} def __init__(self, dayfirst=False, yearfirst=False): self._jump = self._convert(self.JUMP) self._weekdays = self._convert(self.WEEKDAYS) self._months = self._convert(self.MONTHS) self._hms = self._convert(self.HMS) self._ampm = self._convert(self.AMPM) self._utczone = self._convert(self.UTCZONE) self._pertain = self._convert(self.PERTAIN) self.dayfirst = dayfirst self.yearfirst = yearfirst self._year = time.localtime().tm_year self._century = self._year // 100*100 def _convert(self, lst): dct = {} for i, v in enumerate(lst): if isinstance(v, tuple): for v in v: dct[v.lower()] = i else: dct[v.lower()] = i return dct def jump(self, name): return name.lower() in self._jump def weekday(self, name): if len(name) >= 3: try: return self._weekdays[name.lower()] except KeyError: pass return None def month(self, name): if len(name) >= 3: try: return self._months[name.lower()]+1 except KeyError: pass return None def hms(self, name): try: return self._hms[name.lower()] except KeyError: return None def ampm(self, name): try: return self._ampm[name.lower()] except KeyError: return None def pertain(self, name): return name.lower() in self._pertain def utczone(self, name): return name.lower() in self._utczone def tzoffset(self, name): if name in 
self._utczone: return 0 return self.TZOFFSET.get(name) def convertyear(self, year): if year < 100: year += self._century if abs(year-self._year) >= 50: if year < self._year: year += 100 else: year -= 100 return year def validate(self, res): # move to info if res.year is not None: res.year = self.convertyear(res.year) if res.tzoffset == 0 and not res.tzname or res.tzname == 'Z': res.tzname = "UTC" res.tzoffset = 0 elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): res.tzoffset = 0 return True class parser(object): def __init__(self, info=None): self.info = info or parserinfo() def parse(self, timestr, default=None, ignoretz=False, tzinfos=None, **kwargs): """ Parse the date/time string into a datetime object. :param timestr: Any date/time string using the supported formats. :param default: The default datetime object, if this is a datetime object and not `None`, elements specified in `timestr` replace elements in the default object. :param ignoretz: Whether or not to ignore the time zone. :param tzinfos: A time zone, to be applied to the date, if `ignoretz` is `True`. This can be either a subclass of `tzinfo`, a time zone string or an integer offset. :param **kwargs: Keyword arguments as passed to `_parse()`. :return: Returns a `datetime.datetime` object or, if the `fuzzy_with_tokens` option is `True`, returns a tuple, the first element being a `datetime.datetime` object, the second a tuple containing the fuzzy tokens. :raises ValueError: Raised for invalid or unknown string format, if the provided `tzinfo` is not in a valid format, or if an invalid date would be created. :raises OverFlowError: Raised if the parsed date exceeds the largest valid C integer on your system. 
""" default_specified = default is not None if not default_specified: default = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) if kwargs.get('fuzzy_with_tokens', False): res, skipped_tokens = self._parse(timestr, **kwargs) else: res = self._parse(timestr, **kwargs) if res is None: raise ValueError("Unknown string format") repl = {} for attr in ["year", "month", "day", "hour", "minute", "second", "microsecond"]: value = getattr(res, attr) if value is not None: repl[attr] = value ret = default.replace(**repl) if res.weekday is not None and not res.day: ret = ret+relativedelta.relativedelta(weekday=res.weekday) if not ignoretz: if (isinstance(tzinfos, collections.Callable) or tzinfos and res.tzname in tzinfos): if isinstance(tzinfos, collections.Callable): tzdata = tzinfos(res.tzname, res.tzoffset) else: tzdata = tzinfos.get(res.tzname) if isinstance(tzdata, datetime.tzinfo): tzinfo = tzdata elif isinstance(tzdata, text_type): tzinfo = tz.tzstr(tzdata) elif isinstance(tzdata, integer_types): tzinfo = tz.tzoffset(res.tzname, tzdata) else: raise ValueError("Offset must be tzinfo subclass, " "tz string, or int offset.") ret = ret.replace(tzinfo=tzinfo) elif res.tzname and res.tzname in time.tzname: ret = ret.replace(tzinfo=tz.tzlocal()) elif res.tzoffset == 0: ret = ret.replace(tzinfo=tz.tzutc()) elif res.tzoffset: ret = ret.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) if kwargs.get('fuzzy_with_tokens', False): return ret, skipped_tokens else: return ret class _result(_resultbase): __slots__ = ["year", "month", "day", "weekday", "hour", "minute", "second", "microsecond", "tzname", "tzoffset", "ampm"] def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, fuzzy_with_tokens=False): """ Private method which performs the heavy lifting of parsing, called from `parse()`, which passes on its `kwargs` to this function. :param timestr: The string to parse. 
:param dayfirst: Whether to interpret the first value in an ambiguous 3-integer date (e.g. 01/05/09) as the day (`True`) or month (`False`). If `yearfirst` is set to `True`, this distinguishes between YDM and YMD. If set to `None`, this value is retrieved from the current `parserinfo` object (which itself defaults to `False`). :param yearfirst: Whether to interpret the first value in an ambiguous 3-integer date (e.g. 01/05/09) as the year. If `True`, the first number is taken to be the year, otherwise the last number is taken to be the year. If this is set to `None`, the value is retrieved from the current `parserinfo` object (which itself defaults to `False`). :param fuzzy: Whether to allow fuzzy parsing, allowing for string like "Today is January 1, 2047 at 8:21:00AM". :param fuzzy_with_tokens: If `True`, `fuzzy` is automatically set to True, and the parser will return a tuple where the first element is the parsed `datetime.datetime` datetimestamp and the second element is a tuple containing the portions of the string which were ignored, e.g. "Today is January 1, 2047 at 8:21:00AM" should return `(datetime.datetime(2011, 1, 1, 8, 21), (u'Today is ', u' ', u'at '))` """ if fuzzy_with_tokens: fuzzy = True info = self.info if dayfirst is None: dayfirst = info.dayfirst if yearfirst is None: yearfirst = info.yearfirst res = self._result() l = _timelex.split(timestr) # Splits the timestr into tokens # keep up with the last token skipped so we can recombine # consecutively skipped tokens (-2 for when i begins at 0). 
last_skipped_token_i = -2 skipped_tokens = list() try: # year/month/day list ymd = [] # Index of the month string in ymd mstridx = -1 len_l = len(l) i = 0 while i < len_l: # Check if it's a number try: value_repr = l[i] value = float(value_repr) except ValueError: value = None if value is not None: # Token is a number len_li = len(l[i]) i += 1 if (len(ymd) == 3 and len_li in (2, 4) and res.hour is None and (i >= len_l or (l[i] != ':' and info.hms(l[i]) is None))): # 19990101T23[59] s = l[i-1] res.hour = int(s[:2]) if len_li == 4: res.minute = int(s[2:]) elif len_li == 6 or (len_li > 6 and l[i-1].find('.') == 6): # YYMMDD or HHMMSS[.ss] s = l[i-1] if not ymd and l[i-1].find('.') == -1: ymd.append(info.convertyear(int(s[:2]))) ymd.append(int(s[2:4])) ymd.append(int(s[4:])) else: # 19990101T235959[.59] res.hour = int(s[:2]) res.minute = int(s[2:4]) res.second, res.microsecond = _parsems(s[4:]) elif len_li == 8: # YYYYMMDD s = l[i-1] ymd.append(int(s[:4])) ymd.append(int(s[4:6])) ymd.append(int(s[6:])) elif len_li in (12, 14): # YYYYMMDDhhmm[ss] s = l[i-1] ymd.append(int(s[:4])) ymd.append(int(s[4:6])) ymd.append(int(s[6:8])) res.hour = int(s[8:10]) res.minute = int(s[10:12]) if len_li == 14: res.second = int(s[12:]) elif ((i < len_l and info.hms(l[i]) is not None) or (i+1 < len_l and l[i] == ' ' and info.hms(l[i+1]) is not None)): # HH[ ]h or MM[ ]m or SS[.ss][ ]s if l[i] == ' ': i += 1 idx = info.hms(l[i]) while True: if idx == 0: res.hour = int(value) if value % 1: res.minute = int(60*(value % 1)) elif idx == 1: res.minute = int(value) if value % 1: res.second = int(60*(value % 1)) elif idx == 2: res.second, res.microsecond = \ _parsems(value_repr) i += 1 if i >= len_l or idx == 2: break # 12h00 try: value_repr = l[i] value = float(value_repr) except ValueError: break else: i += 1 idx += 1 if i < len_l: newidx = info.hms(l[i]) if newidx is not None: idx = newidx elif (i == len_l and l[i-2] == ' ' and info.hms(l[i-3]) is not None): # X h MM or X m SS idx = 
info.hms(l[i-3]) + 1 if idx == 1: res.minute = int(value) if value % 1: res.second = int(60*(value % 1)) elif idx == 2: res.second, res.microsecond = \ _parsems(value_repr) i += 1 elif i+1 < len_l and l[i] == ':': # HH:MM[:SS[.ss]] res.hour = int(value) i += 1 value = float(l[i]) res.minute = int(value) if value % 1: res.second = int(60*(value % 1)) i += 1 if i < len_l and l[i] == ':': res.second, res.microsecond = _parsems(l[i+1]) i += 2 elif i < len_l and l[i] in ('-', '/', '.'): sep = l[i] ymd.append(int(value)) i += 1 if i < len_l and not info.jump(l[i]): try: # 01-01[-01] ymd.append(int(l[i])) except ValueError: # 01-Jan[-01] value = info.month(l[i]) if value is not None: ymd.append(value) assert mstridx == -1 mstridx = len(ymd)-1 else: return None i += 1 if i < len_l and l[i] == sep: # We have three members i += 1 value = info.month(l[i]) if value is not None: ymd.append(value) mstridx = len(ymd)-1 assert mstridx == -1 else: ymd.append(int(l[i])) i += 1 elif i >= len_l or info.jump(l[i]): if i+1 < len_l and info.ampm(l[i+1]) is not None: # 12 am res.hour = int(value) if res.hour < 12 and info.ampm(l[i+1]) == 1: res.hour += 12 elif res.hour == 12 and info.ampm(l[i+1]) == 0: res.hour = 0 i += 1 else: # Year, month or day ymd.append(int(value)) i += 1 elif info.ampm(l[i]) is not None: # 12am res.hour = int(value) if res.hour < 12 and info.ampm(l[i]) == 1: res.hour += 12 elif res.hour == 12 and info.ampm(l[i]) == 0: res.hour = 0 i += 1 elif not fuzzy: return None else: i += 1 continue # Check weekday value = info.weekday(l[i]) if value is not None: res.weekday = value i += 1 continue # Check month name value = info.month(l[i]) if value is not None: ymd.append(value) assert mstridx == -1 mstridx = len(ymd)-1 i += 1 if i < len_l: if l[i] in ('-', '/'): # Jan-01[-99] sep = l[i] i += 1 ymd.append(int(l[i])) i += 1 if i < len_l and l[i] == sep: # Jan-01-99 i += 1 ymd.append(int(l[i])) i += 1 elif (i+3 < len_l and l[i] == l[i+2] == ' ' and info.pertain(l[i+1])): # Jan 
of 01 # In this case, 01 is clearly year try: value = int(l[i+3]) except ValueError: # Wrong guess pass else: # Convert it here to become unambiguous ymd.append(info.convertyear(value)) i += 4 continue # Check am/pm value = info.ampm(l[i]) if value is not None: # For fuzzy parsing, 'a' or 'am' (both valid English words) # may erroneously trigger the AM/PM flag. Deal with that # here. val_is_ampm = True # If there's already an AM/PM flag, this one isn't one. if fuzzy and res.ampm is not None: val_is_ampm = False # If AM/PM is found and hour is not, raise a ValueError if res.hour is None: if fuzzy: val_is_ampm = False else: raise ValueError('No hour specified with ' + 'AM or PM flag.') elif not 0 <= res.hour <= 12: # If AM/PM is found, it's a 12 hour clock, so raise # an error for invalid range if fuzzy: val_is_ampm = False else: raise ValueError('Invalid hour specified for ' + '12-hour clock.') if val_is_ampm: if value == 1 and res.hour < 12: res.hour += 12 elif value == 0 and res.hour == 12: res.hour = 0 res.ampm = value i += 1 continue # Check for a timezone name if (res.hour is not None and len(l[i]) <= 5 and res.tzname is None and res.tzoffset is None and not [x for x in l[i] if x not in string.ascii_uppercase]): res.tzname = l[i] res.tzoffset = info.tzoffset(res.tzname) i += 1 # Check for something like GMT+3, or BRST+3. Notice # that it doesn't mean "I am 3 hours after GMT", but # "my time +3 is GMT". If found, we reverse the # logic so that timezone parsing code will get it # right. if i < len_l and l[i] in ('+', '-'): l[i] = ('+', '-')[l[i] == '+'] res.tzoffset = None if info.utczone(res.tzname): # With something like GMT+3, the timezone # is *not* GMT. 
res.tzname = None continue # Check for a numbered timezone if res.hour is not None and l[i] in ('+', '-'): signal = (-1, 1)[l[i] == '+'] i += 1 len_li = len(l[i]) if len_li == 4: # -0300 res.tzoffset = int(l[i][:2])*3600+int(l[i][2:])*60 elif i+1 < len_l and l[i+1] == ':': # -03:00 res.tzoffset = int(l[i])*3600+int(l[i+2])*60 i += 2 elif len_li <= 2: # -[0]3 res.tzoffset = int(l[i][:2])*3600 else: return None i += 1 res.tzoffset *= signal # Look for a timezone name between parenthesis if (i+3 < len_l and info.jump(l[i]) and l[i+1] == '(' and l[i+3] == ')' and 3 <= len(l[i+2]) <= 5 and not [x for x in l[i+2] if x not in string.ascii_uppercase]): # -0300 (BRST) res.tzname = l[i+2] i += 4 continue # Check jumps if not (info.jump(l[i]) or fuzzy): return None if last_skipped_token_i == i - 1: # recombine the tokens skipped_tokens[-1] += l[i] else: # just append skipped_tokens.append(l[i]) last_skipped_token_i = i i += 1 # Process year/month/day len_ymd = len(ymd) if len_ymd > 3: # More than three members!? return None elif len_ymd == 1 or (mstridx != -1 and len_ymd == 2): # One member, or two members with a month string if mstridx != -1: res.month = ymd[mstridx] del ymd[mstridx] if len_ymd > 1 or mstridx == -1: if ymd[0] > 31: res.year = ymd[0] else: res.day = ymd[0] elif len_ymd == 2: # Two members with numbers if ymd[0] > 31: # 99-01 res.year, res.month = ymd elif ymd[1] > 31: # 01-99 res.month, res.year = ymd elif dayfirst and ymd[1] <= 12: # 13-01 res.day, res.month = ymd else: # 01-13 res.month, res.day = ymd elif len_ymd == 3: # Three members if mstridx == 0: res.month, res.day, res.year = ymd elif mstridx == 1: if ymd[0] > 31 or (yearfirst and ymd[2] <= 31): # 99-Jan-01 res.year, res.month, res.day = ymd else: # 01-Jan-01 # Give precendence to day-first, since # two-digit years is usually hand-written. res.day, res.month, res.year = ymd elif mstridx == 2: # WTF!? 
if ymd[1] > 31: # 01-99-Jan res.day, res.year, res.month = ymd else: # 99-01-Jan res.year, res.day, res.month = ymd else: if ymd[0] > 31 or \ (yearfirst and ymd[1] <= 12 and ymd[2] <= 31): # 99-01-01 res.year, res.month, res.day = ymd elif ymd[0] > 12 or (dayfirst and ymd[1] <= 12): # 13-01-01 res.day, res.month, res.year = ymd else: # 01-13-01 res.month, res.day, res.year = ymd except (IndexError, ValueError, AssertionError): return None if not info.validate(res): return None if fuzzy_with_tokens: return res, tuple(skipped_tokens) else: return res DEFAULTPARSER = parser() def parse(timestr, parserinfo=None, **kwargs): """ Parse a string in one of the supported formats, using the `parserinfo` parameters. :param timestr: A string containing a date/time stamp. :param parserinfo: A :class:`parserinfo` object containing parameters for the parser. If `None`, the default arguments to the `parserinfo` constructor are used. The `**kwargs` parameter takes the following keyword arguments: :param default: The default datetime object, if this is a datetime object and not `None`, elements specified in `timestr` replace elements in the default object. :param ignoretz: Whether or not to ignore the time zone (boolean). :param tzinfos: A time zone, to be applied to the date, if `ignoretz` is `True`. This can be either a subclass of `tzinfo`, a time zone string or an integer offset. :param dayfirst: Whether to interpret the first value in an ambiguous 3-integer date (e.g. 01/05/09) as the day (`True`) or month (`False`). If `yearfirst` is set to `True`, this distinguishes between YDM and YMD. If set to `None`, this value is retrieved from the current :class:`parserinfo` object (which itself defaults to `False`). :param yearfirst: Whether to interpret the first value in an ambiguous 3-integer date (e.g. 01/05/09) as the year. If `True`, the first number is taken to be the year, otherwise the last number is taken to be the year. 
If this is set to `None`, the value is retrieved from the current :class:`parserinfo` object (which itself defaults to `False`). :param fuzzy: Whether to allow fuzzy parsing, allowing for string like "Today is January 1, 2047 at 8:21:00AM". :param fuzzy_with_tokens: If `True`, `fuzzy` is automatically set to True, and the parser will return a tuple where the first element is the parsed `datetime.datetime` datetimestamp and the second element is a tuple containing the portions of the string which were ignored, e.g. "Today is January 1, 2047 at 8:21:00AM" should return `(datetime.datetime(2011, 1, 1, 8, 21), (u'Today is ', u' ', u'at '))` """ # Python 2.x support: datetimes return their string presentation as # bytes in 2.x and unicode in 3.x, so it's reasonable to expect that # the parser will get both kinds. Internally we use unicode only. if isinstance(timestr, binary_type): timestr = timestr.decode() if parserinfo: return parser(parserinfo).parse(timestr, **kwargs) else: return DEFAULTPARSER.parse(timestr, **kwargs) class _tzparser(object): class _result(_resultbase): __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", "start", "end"] class _attr(_resultbase): __slots__ = ["month", "week", "weekday", "yday", "jyday", "day", "time"] def __repr__(self): return self._repr("") def __init__(self): _resultbase.__init__(self) self.start = self._attr() self.end = self._attr() def parse(self, tzstr): # Python 2.x compatibility: tzstr should be converted to unicode before # being passed to _timelex. 
if isinstance(tzstr, binary_type): tzstr = tzstr.decode() res = self._result() l = _timelex.split(tzstr) try: len_l = len(l) i = 0 while i < len_l: # BRST+3[BRDT[+2]] j = i while j < len_l and not [x for x in l[j] if x in "0123456789:,-+"]: j += 1 if j != i: if not res.stdabbr: offattr = "stdoffset" res.stdabbr = "".join(l[i:j]) else: offattr = "dstoffset" res.dstabbr = "".join(l[i:j]) i = j if (i < len_l and (l[i] in ('+', '-') or l[i][0] in "0123456789")): if l[i] in ('+', '-'): # Yes, that's right. See the TZ variable # documentation. signal = (1, -1)[l[i] == '+'] i += 1 else: signal = -1 len_li = len(l[i]) if len_li == 4: # -0300 setattr(res, offattr, (int(l[i][:2])*3600 + int(l[i][2:])*60)*signal) elif i+1 < len_l and l[i+1] == ':': # -03:00 setattr(res, offattr, (int(l[i])*3600+int(l[i+2])*60)*signal) i += 2 elif len_li <= 2: # -[0]3 setattr(res, offattr, int(l[i][:2])*3600*signal) else: return None i += 1 if res.dstabbr: break else: break if i < len_l: for j in range(i, len_l): if l[j] == ';': l[j] = ',' assert l[i] == ',' i += 1 if i >= len_l: pass elif (8 <= l.count(',') <= 9 and not [y for x in l[i:] if x != ',' for y in x if y not in "0123456789"]): # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] for x in (res.start, res.end): x.month = int(l[i]) i += 2 if l[i] == '-': value = int(l[i+1])*-1 i += 1 else: value = int(l[i]) i += 2 if value: x.week = value x.weekday = (int(l[i])-1) % 7 else: x.day = int(l[i]) i += 2 x.time = int(l[i]) i += 2 if i < len_l: if l[i] in ('-', '+'): signal = (-1, 1)[l[i] == "+"] i += 1 else: signal = 1 res.dstoffset = (res.stdoffset+int(l[i]))*signal elif (l.count(',') == 2 and l[i:].count('/') <= 2 and not [y for x in l[i:] if x not in (',', '/', 'J', 'M', '.', '-', ':') for y in x if y not in "0123456789"]): for x in (res.start, res.end): if l[i] == 'J': # non-leap year day (1 based) i += 1 x.jyday = int(l[i]) elif l[i] == 'M': # month[-.]week[-.]weekday i += 1 x.month = int(l[i]) i += 1 assert l[i] in ('-', '.') i += 1 x.week = 
int(l[i]) if x.week == 5: x.week = -1 i += 1 assert l[i] in ('-', '.') i += 1 x.weekday = (int(l[i])-1) % 7 else: # year day (zero based) x.yday = int(l[i])+1 i += 1 if i < len_l and l[i] == '/': i += 1 # start time len_li = len(l[i]) if len_li == 4: # -0300 x.time = (int(l[i][:2])*3600+int(l[i][2:])*60) elif i+1 < len_l and l[i+1] == ':': # -03:00 x.time = int(l[i])*3600+int(l[i+2])*60 i += 2 if i+1 < len_l and l[i+1] == ':': i += 2 x.time += int(l[i]) elif len_li <= 2: # -[0]3 x.time = (int(l[i][:2])*3600) else: return None i += 1 assert i == len_l or l[i] == ',' i += 1 assert i >= len_l except (IndexError, ValueError, AssertionError): return None return res DEFAULTTZPARSER = _tzparser() def _parsetz(tzstr): return DEFAULTTZPARSER.parse(tzstr) def _parsems(value): """Parse a I[.F] seconds value into (seconds, microseconds).""" if "." not in value: return int(value), 0 else: i, f = value.split(".") return int(i), int(f.ljust(6, "0")[:6]) # vim:ts=4:sw=4:et
mit
kenshay/ImageScripter
ProgramData/SystemFiles/Python/Lib/ctypes/test/test_find.py
9
2116
import unittest
import os.path
import sys
from test import test_support
from ctypes import *
from ctypes.util import find_library
from ctypes.test import is_resource_enabled

# Resolve the platform-specific OpenGL library names up front; the
# results drive the skipUnless decorators on the tests below.  None
# means "not found / not applicable on this platform".
if sys.platform == "win32":
    lib_gl = find_library("OpenGL32")
    lib_glu = find_library("Glu32")
    lib_gle = None
elif sys.platform == "darwin":
    # On OS X a single framework provides both GL and GLU.
    lib_gl = lib_glu = find_library("OpenGL")
    lib_gle = None
else:
    lib_gl = find_library("GL")
    lib_glu = find_library("GLU")
    lib_gle = find_library("gle")

## print, for debugging
if is_resource_enabled("printing"):
    if lib_gl or lib_glu or lib_gle:
        print "OpenGL libraries:"
        for item in (("GL", lib_gl),
                     ("GLU", lib_glu),
                     ("gle", lib_gle)):
            print "\t", item


# On some systems, loading the OpenGL libraries needs the RTLD_GLOBAL mode.
class Test_OpenGL_libs(unittest.TestCase):
    def setUp(self):
        # Load whichever libraries were found; a failed load leaves the
        # attribute as None and the corresponding test body is a no-op.
        self.gl = self.glu = self.gle = None
        if lib_gl:
            try:
                self.gl = CDLL(lib_gl, mode=RTLD_GLOBAL)
            except OSError:
                pass
        if lib_glu:
            try:
                # NOTE(review): mode is passed positionally here but by
                # keyword above -- same parameter, style inconsistency.
                self.glu = CDLL(lib_glu, RTLD_GLOBAL)
            except OSError:
                pass
        if lib_gle:
            try:
                self.gle = CDLL(lib_gle)
            except OSError:
                pass

    def tearDown(self):
        self.gl = self.glu = self.gle = None

    @unittest.skipUnless(lib_gl, 'lib_gl not available')
    def test_gl(self):
        # Bare attribute access suffices: it raises if the symbol is
        # missing from the loaded library.
        if self.gl:
            self.gl.glClearIndex

    @unittest.skipUnless(lib_glu, 'lib_glu not available')
    def test_glu(self):
        if self.glu:
            self.glu.gluBeginCurve

    @unittest.skipUnless(lib_gle, 'lib_gle not available')
    def test_gle(self):
        if self.gle:
            self.gle.gleGetJoinStyle

    def test_shell_injection(self):
        # find_library() must not feed its argument through a shell:
        # if it did, this command would create TESTFN as a side effect.
        result = find_library('; echo Hello shell > ' + test_support.TESTFN)
        self.assertFalse(os.path.lexists(test_support.TESTFN))
        self.assertIsNone(result)

if __name__ == "__main__":
    unittest.main()
gpl-3.0
mrphrazer/miasm
test/core/utils.py
5
1161
#! /usr/bin/env python2
#-*- coding:utf-8 -*-

from __future__ import print_function
from builtins import range
import unittest


class TestUtils(unittest.TestCase):
    """Unit tests for miasm.core.utils helpers."""

    def test_boundedDict(self):
        """A 5/2 BoundedDict caps itself at 5 entries, trims back to 2
        on overflow, fires its delete callback on eviction, and keeps
        the most-used entries."""
        from miasm.core.utils import BoundedDict

        # Eviction callback: report each key as it is dropped
        def on_delete(key):
            print("DELETE", key)

        # Bounded dictionary: at most 5 entries, trimmed back to 2
        cache = BoundedDict(5, 2, initialdata={"element": "value"},
                            delete_cb=on_delete)
        cache["element2"] = "value2"

        # Both the initial and the freshly inserted entries are present
        assert "element" in cache
        assert "element2" in cache
        self.assertEqual(cache["element"], "value")
        self.assertEqual(cache["element2"], "value2")

        # Read 'element2' once more so its use count outranks the rest
        _ = cache["element2"]

        # Overflow the dictionary and trigger evictions
        for idx in range(6):
            cache[idx] = idx
            print("Insert %d -> %s" % (idx, cache))

        # Only the low-water-mark count survives, 'element2' among them
        assert len(cache) == 2
        assert "element2" in cache
        self.assertEqual(cache["element2"], "value2")


if __name__ == '__main__':
    loader = unittest.TestLoader()
    runner = unittest.TextTestRunner(verbosity=2)
    outcome = runner.run(loader.loadTestsFromTestCase(TestUtils))
    exit(len(outcome.errors + outcome.failures))
gpl-2.0
chb/indivo_server
indivo/tests/unit/models/audit.py
4
1558
from indivo.tests.internal_tests import InternalTests
from indivo.models import Audit
import datetime

class AuditModelUnitTests(InternalTests):
    """Unit tests for constructing Audit model rows."""

    def setUp(self):
        super(AuditModelUnitTests, self).setUp()

    def tearDown(self):
        super(AuditModelUnitTests, self).tearDown()

    def test_construction(self):
        """An Audit row must save and reload both with a full set of
        fields and with only the required ones."""
        now = datetime.datetime.now()

        # A fully populated audit record.
        args = {
            'datetime': now,
            'view_func': 'create_record',
            'request_successful': True,
            'effective_principal_email': '[email protected]',
            'proxied_by_email': '[email protected]',
            'carenet_id': 'abcdefg',
            'record_id': 'abcdef',
            'pha_id': '[email protected]',
            'document_id': 'abcdef',
            'external_id': 'EXTERNAL',
            'message_id': 'abcdef',
            'req_url': '/records/',
            'req_ip_address': '1.0.0.0',
            'req_domain': 'hi.com',
            'req_headers': 'abcd',
            'req_method': 'POST',
            'resp_code': 200,
            'resp_headers': 'abcd',
        }

        # Only datetime and request_successful are required.
        minimal_args = {
            'datetime': now,
            'request_successful': True,
        }

        # Both variants should construct normally and round-trip by pk.
        for kwargs in (args, minimal_args):
            audit = Audit.objects.create(**kwargs)
            self.assertEqual(audit, Audit.objects.get(pk=audit.pk))
gpl-3.0
2014c2g4/2015cda0623
static/Brython3.1.0-20150301-090019/Lib/tempfile.py
728
22357
"""Temporary files. This module provides generic, low- and high-level interfaces for creating temporary files and directories. The interfaces listed as "safe" just below can be used without fear of race conditions. Those listed as "unsafe" cannot, and are provided for backward compatibility only. This module also provides some data items to the user: TMP_MAX - maximum number of names that will be tried before giving up. tempdir - If this is set to a string before the first use of any routine from this module, it will be considered as another candidate location to store temporary files. """ __all__ = [ "NamedTemporaryFile", "TemporaryFile", # high level safe interfaces "SpooledTemporaryFile", "TemporaryDirectory", "mkstemp", "mkdtemp", # low level safe interfaces "mktemp", # deprecated unsafe interface "TMP_MAX", "gettempprefix", # constants "tempdir", "gettempdir" ] # Imports. import warnings as _warnings import sys as _sys import io as _io import os as _os import errno as _errno from random import Random as _Random try: import fcntl as _fcntl except ImportError: def _set_cloexec(fd): pass else: def _set_cloexec(fd): try: flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0) except OSError: pass else: # flags read successfully, modify flags |= _fcntl.FD_CLOEXEC _fcntl.fcntl(fd, _fcntl.F_SETFD, flags) try: import _thread except ImportError: import _dummy_thread as _thread _allocate_lock = _thread.allocate_lock _text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL if hasattr(_os, 'O_NOINHERIT'): _text_openflags |= _os.O_NOINHERIT if hasattr(_os, 'O_NOFOLLOW'): _text_openflags |= _os.O_NOFOLLOW _bin_openflags = _text_openflags if hasattr(_os, 'O_BINARY'): _bin_openflags |= _os.O_BINARY if hasattr(_os, 'TMP_MAX'): TMP_MAX = _os.TMP_MAX else: TMP_MAX = 10000 # Although it does not have an underscore for historical reasons, this # variable is an internal implementation detail (see issue 10354). template = "tmp" # Internal routines. 
# Serialises the one-time lazy initialisations below (the shared name
# sequence and the default temporary directory).
_once_lock = _allocate_lock()

if hasattr(_os, "lstat"):
    _stat = _os.lstat
elif hasattr(_os, "stat"):
    _stat = _os.stat
else:
    # Fallback.  All we need is something that raises OSError if the
    # file doesn't exist.
    def _stat(fn):
        f = open(fn)
        f.close()

def _exists(fn):
    # True if `fn` names an existing filesystem entry (lstat-based, so
    # a dangling symlink still counts as existing).
    try:
        _stat(fn)
    except OSError:
        return False
    else:
        return True

class _RandomNameSequence:
    """An instance of _RandomNameSequence generates an endless
    sequence of unpredictable strings which can safely be incorporated
    into file names.  Each string is six characters long.  Multiple
    threads can safely use the same instance at the same time.

    _RandomNameSequence is an iterator."""

    characters = "abcdefghijklmnopqrstuvwxyz0123456789_"

    @property
    def rng(self):
        # Re-seed after fork: each process gets its own Random so
        # parent and child do not produce identical name streams.
        cur_pid = _os.getpid()
        if cur_pid != getattr(self, '_rng_pid', None):
            self._rng = _Random()
            self._rng_pid = cur_pid
        return self._rng

    def __iter__(self):
        return self

    def __next__(self):
        c = self.characters
        choose = self.rng.choice
        # "123456" is used only for its length: six random characters.
        letters = [choose(c) for dummy in "123456"]
        return ''.join(letters)

def _candidate_tempdir_list():
    """Generate a list of candidate temporary directories which
    _get_default_tempdir will try."""

    dirlist = []

    # First, try the environment.
    for envname in 'TMPDIR', 'TEMP', 'TMP':
        dirname = _os.getenv(envname)
        if dirname:
            dirlist.append(dirname)

    # Failing that, try OS-specific locations.
    if _os.name == 'nt':
        dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ])
    else:
        dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ])

    # As a last resort, the current directory.
    try:
        dirlist.append(_os.getcwd())
    except (AttributeError, OSError):
        dirlist.append(_os.curdir)

    return dirlist

def _get_default_tempdir():
    """Calculate the default directory to use for temporary files.
    This routine should be called exactly once.

    We determine whether or not a candidate temp dir is usable by
    trying to create and write to a file in that directory.  If this
    is successful, the test file is deleted.  To prevent denial of
    service, the name of the test file must be randomized."""

    namer = _RandomNameSequence()
    dirlist = _candidate_tempdir_list()

    for dir in dirlist:
        if dir != _os.curdir:
            dir = _os.path.normcase(_os.path.abspath(dir))
        # Try only a few names per directory.
        for seq in range(100):
            name = next(namer)
            filename = _os.path.join(dir, name)
            try:
                # 0o600: probe file readable/writable by owner only.
                fd = _os.open(filename, _bin_openflags, 0o600)
                try:
                    try:
                        # closefd=False: the outer _os.close owns the fd.
                        with _io.open(fd, 'wb', closefd=False) as fp:
                            fp.write(b'blat')
                    finally:
                        _os.close(fd)
                finally:
                    _os.unlink(filename)
                return dir
            except FileExistsError:
                pass
            except OSError:
                break   # no point trying more names in this directory
    raise FileNotFoundError(_errno.ENOENT,
                            "No usable temporary directory found in %s" %
                            dirlist)

_name_sequence = None

def _get_candidate_names():
    """Common setup sequence for all user-callable interfaces."""

    # Double-checked locking: the unlocked fast path is safe because
    # assignment of the fully constructed object is atomic.
    global _name_sequence
    if _name_sequence is None:
        _once_lock.acquire()
        try:
            if _name_sequence is None:
                _name_sequence = _RandomNameSequence()
        finally:
            _once_lock.release()
    return _name_sequence

def _mkstemp_inner(dir, pre, suf, flags):
    """Code common to mkstemp, TemporaryFile, and NamedTemporaryFile."""

    names = _get_candidate_names()

    for seq in range(TMP_MAX):
        name = next(names)
        file = _os.path.join(dir, pre + name + suf)
        try:
            # O_EXCL (in flags) + 0o600 gives race-free, private creation.
            fd = _os.open(file, flags, 0o600)
            _set_cloexec(fd)
            return (fd, _os.path.abspath(file))
        except FileExistsError:
            continue    # try again
        except PermissionError:
            # This exception is thrown when a directory with the chosen name
            # already exists on windows.
            if _os.name == 'nt':
                continue
            else:
                raise

    raise FileExistsError(_errno.EEXIST,
                          "No usable temporary file name found")

# User visible interfaces.
def gettempprefix():
    """Accessor for tempfile.template, the default name prefix."""
    return template

# Overridable module-level default; resolved lazily by gettempdir().
tempdir = None

def gettempdir():
    """Accessor for tempfile.tempdir."""
    # Double-checked locking around the one-time probe of candidates.
    global tempdir
    if tempdir is None:
        _once_lock.acquire()
        try:
            if tempdir is None:
                tempdir = _get_default_tempdir()
        finally:
            _once_lock.release()
    return tempdir

def mkstemp(suffix="", prefix=template, dir=None, text=False):
    """User-callable function to create and return a unique temporary
    file.  The return value is a pair (fd, name) where fd is the
    file descriptor returned by os.open, and name is the filename.

    If 'suffix' is specified, the file name will end with that suffix,
    otherwise there will be no suffix.

    If 'prefix' is specified, the file name will begin with that prefix,
    otherwise a default prefix is used.

    If 'dir' is specified, the file will be created in that directory,
    otherwise a default directory is used.

    If 'text' is specified and true, the file is opened in text
    mode.  Else (the default) the file is opened in binary mode.  On
    some operating systems, this makes no difference.

    The file is readable and writable only by the creating user ID.
    If the operating system uses permission bits to indicate whether a
    file is executable, the file is executable by no one. The file
    descriptor is not inherited by children of this process.

    Caller is responsible for deleting the file when done with it.
    """

    if dir is None:
        dir = gettempdir()

    if text:
        flags = _text_openflags
    else:
        flags = _bin_openflags

    return _mkstemp_inner(dir, prefix, suffix, flags)


def mkdtemp(suffix="", prefix=template, dir=None):
    """User-callable function to create and return a unique temporary
    directory.  The return value is the pathname of the directory.

    Arguments are as for mkstemp, except that the 'text' argument is
    not accepted.

    The directory is readable, writable, and searchable only by the
    creating user.

    Caller is responsible for deleting the directory when done with it.
    """

    if dir is None:
        dir = gettempdir()

    names = _get_candidate_names()

    for seq in range(TMP_MAX):
        name = next(names)
        file = _os.path.join(dir, prefix + name + suffix)
        try:
            # 0o700: private to the creating user; mkdir fails if the
            # name exists, which makes the creation race-free.
            _os.mkdir(file, 0o700)
            return file
        except FileExistsError:
            continue    # try again

    raise FileExistsError(_errno.EEXIST,
                          "No usable temporary directory name found")

def mktemp(suffix="", prefix=template, dir=None):
    """User-callable function to return a unique temporary file name.  The
    file is not created.

    Arguments are as for mkstemp, except that the 'text' argument is
    not accepted.

    This function is unsafe and should not be used.  The file name
    refers to a file that did not exist at some point, but by the time
    you get around to creating it, someone else may have beaten you to
    the punch.
    """

##    from warnings import warn as _warn
##    _warn("mktemp is a potential security risk to your program",
##          RuntimeWarning, stacklevel=2)

    if dir is None:
        dir = gettempdir()

    names = _get_candidate_names()
    for seq in range(TMP_MAX):
        name = next(names)
        file = _os.path.join(dir, prefix + name + suffix)
        if not _exists(file):
            return file

    raise FileExistsError(_errno.EEXIST,
                          "No usable temporary filename found")


class _TemporaryFileWrapper:
    """Temporary file wrapper

    This class provides a wrapper around files opened for
    temporary use.  In particular, it seeks to automatically
    remove the file when it is no longer needed.
    """

    def __init__(self, file, name, delete=True):
        self.file = file
        self.name = name
        self.close_called = False
        self.delete = delete

    def __getattr__(self, name):
        # Attribute lookups are delegated to the underlying file
        # and cached for non-numeric results
        # (i.e. methods are cached, closed and friends are not)
        file = self.__dict__['file']
        a = getattr(file, name)
        if not isinstance(a, int):
            setattr(self, name, a)
        return a

    # The underlying __enter__ method returns the wrong object
    # (self.file) so override it to return the wrapper
    def __enter__(self):
        self.file.__enter__()
        return self

    # iter() doesn't use __getattr__ to find the __iter__ method
    def __iter__(self):
        return iter(self.file)

    # NT provides delete-on-close as a primitive, so we don't need
    # the wrapper to do anything special.  We still use it so that
    # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile.
    if _os.name != 'nt':
        # Cache the unlinker so we don't get spurious errors at
        # shutdown when the module-level "os" is None'd out.  Note
        # that this must be referenced as self.unlink, because the
        # name TemporaryFileWrapper may also get None'd out before
        # __del__ is called.
        unlink = _os.unlink

        def close(self):
            # Idempotent: close the file once and, if requested,
            # unlink it from the filesystem.
            if not self.close_called:
                self.close_called = True
                self.file.close()
                if self.delete:
                    self.unlink(self.name)

        def __del__(self):
            self.close()

        # Need to trap __exit__ as well to ensure the file gets
        # deleted when used in a with statement
        def __exit__(self, exc, value, tb):
            result = self.file.__exit__(exc, value, tb)
            self.close()
            return result
    else:
        def __exit__(self, exc, value, tb):
            self.file.__exit__(exc, value, tb)


def NamedTemporaryFile(mode='w+b', buffering=-1, encoding=None,
                       newline=None, suffix="", prefix=template,
                       dir=None, delete=True):
    """Create and return a temporary file.
    Arguments:
    'prefix', 'suffix', 'dir' -- as for mkstemp.
    'mode' -- the mode argument to io.open (default "w+b").
    'buffering' -- the buffer size argument to io.open (default -1).
    'encoding' -- the encoding argument to io.open (default None)
    'newline' -- the newline argument to io.open (default None)
    'delete' -- whether the file is deleted on close (default True).
    The file is created as mkstemp() would do it.

    Returns an object with a file-like interface; the name of the file
    is accessible as file.name.  The file will be automatically deleted
    when it is closed unless the 'delete' argument is set to False.
    """

    if dir is None:
        dir = gettempdir()

    flags = _bin_openflags

    # Setting O_TEMPORARY in the flags causes the OS to delete
    # the file when it is closed.  This is only supported by Windows.
    if _os.name == 'nt' and delete:
        flags |= _os.O_TEMPORARY

    (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
    file = _io.open(fd, mode, buffering=buffering,
                    newline=newline, encoding=encoding)

    return _TemporaryFileWrapper(file, name, delete)

if _os.name != 'posix' or _os.sys.platform == 'cygwin':
    # On non-POSIX and Cygwin systems, assume that we cannot unlink a file
    # while it is open.
    TemporaryFile = NamedTemporaryFile

else:
    def TemporaryFile(mode='w+b', buffering=-1, encoding=None,
                      newline=None, suffix="", prefix=template,
                      dir=None):
        """Create and return a temporary file.
        Arguments:
        'prefix', 'suffix', 'dir' -- as for mkstemp.
        'mode' -- the mode argument to io.open (default "w+b").
        'buffering' -- the buffer size argument to io.open (default -1).
        'encoding' -- the encoding argument to io.open (default None)
        'newline' -- the newline argument to io.open (default None)
        The file is created as mkstemp() would do it.

        Returns an object with a file-like interface.  The file has no
        name, and will cease to exist when it is closed.
        """

        if dir is None:
            dir = gettempdir()

        flags = _bin_openflags

        (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
        try:
            # Unlink immediately: the open fd keeps the data alive and
            # the file disappears automatically on close.
            _os.unlink(name)
            return _io.open(fd, mode, buffering=buffering,
                            newline=newline, encoding=encoding)
        except:
            _os.close(fd)
            raise

class SpooledTemporaryFile:
    """Temporary file wrapper, specialized to switch from BytesIO
    or StringIO to a real file when it exceeds a certain size or
    when a fileno is needed.
    """
    _rolled = False

    def __init__(self, max_size=0, mode='w+b', buffering=-1,
                 encoding=None, newline=None,
                 suffix="", prefix=template, dir=None):
        if 'b' in mode:
            self._file = _io.BytesIO()
        else:
            # Setting newline="\n" avoids newline translation;
            # this is important because otherwise on Windows we'd
            # get double newline translation upon rollover().
            self._file = _io.StringIO(newline="\n")
        self._max_size = max_size
        self._rolled = False
        # Saved so rollover() can open the real file with the same
        # parameters the caller asked for.
        self._TemporaryFileArgs = {'mode': mode, 'buffering': buffering,
                                   'suffix': suffix, 'prefix': prefix,
                                   'encoding': encoding, 'newline': newline,
                                   'dir': dir}

    def _check(self, file):
        # Roll over to a real file once the spool exceeds max_size.
        if self._rolled: return
        max_size = self._max_size
        if max_size and file.tell() > max_size:
            self.rollover()

    def rollover(self):
        # Replace the in-memory buffer with a real temporary file,
        # preserving both contents and the current position.
        if self._rolled: return
        file = self._file
        newfile = self._file = TemporaryFile(**self._TemporaryFileArgs)
        del self._TemporaryFileArgs

        newfile.write(file.getvalue())
        newfile.seek(file.tell(), 0)

        self._rolled = True

    # The method caching trick from NamedTemporaryFile
    # won't work here, because _file may change from a
    # BytesIO/StringIO instance to a real file. So we list
    # all the methods directly.

    # Context management protocol
    def __enter__(self):
        if self._file.closed:
            raise ValueError("Cannot enter context with closed file")
        return self

    def __exit__(self, exc, value, tb):
        self._file.close()

    # file protocol
    def __iter__(self):
        return self._file.__iter__()

    def close(self):
        self._file.close()

    @property
    def closed(self):
        return self._file.closed

    @property
    def encoding(self):
        try:
            return self._file.encoding
        except AttributeError:
            # BytesIO/StringIO have no encoding; for text mode fall
            # back to the encoding the real file would be opened with.
            if 'b' in self._TemporaryFileArgs['mode']:
                raise
            return self._TemporaryFileArgs['encoding']

    def fileno(self):
        # A real OS-level fd is required, so force the rollover.
        self.rollover()
        return self._file.fileno()

    def flush(self):
        self._file.flush()

    def isatty(self):
        return self._file.isatty()

    @property
    def mode(self):
        try:
            return self._file.mode
        except AttributeError:
            return self._TemporaryFileArgs['mode']

    @property
    def name(self):
        try:
            return self._file.name
        except AttributeError:
            return None

    @property
    def newlines(self):
        try:
            return self._file.newlines
        except AttributeError:
            if 'b' in self._TemporaryFileArgs['mode']:
                raise
            return self._TemporaryFileArgs['newline']

    def read(self, *args):
        return self._file.read(*args)

    def readline(self, *args):
        return self._file.readline(*args)

    def readlines(self, *args):
        return self._file.readlines(*args)

    def seek(self, *args):
        self._file.seek(*args)

    @property
    def softspace(self):
        # Delegates to the underlying file object.
        return self._file.softspace

    def tell(self):
        return self._file.tell()

    def truncate(self, size=None):
        if size is None:
            self._file.truncate()
        else:
            if size > self._max_size:
                self.rollover()
            self._file.truncate(size)

    def write(self, s):
        file = self._file
        rv = file.write(s)
        self._check(file)
        return rv

    def writelines(self, iterable):
        file = self._file
        rv = file.writelines(iterable)
        self._check(file)
        return rv


class TemporaryDirectory(object):
    """Create and return a temporary directory.  This has the same
    behavior as mkdtemp but can be used as a context manager.  For
    example:

        with TemporaryDirectory() as tmpdir:
            ...

    Upon exiting the context, the directory and everything contained
    in it are removed.
    """

    def __init__(self, suffix="", prefix=template, dir=None):
        self._closed = False
        self.name = None # Handle mkdtemp raising an exception
        self.name = mkdtemp(suffix, prefix, dir)

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.name)

    def __enter__(self):
        return self.name

    def cleanup(self, _warn=False):
        # Remove the directory tree; idempotent via _closed.
        if self.name and not self._closed:
            try:
                self._rmtree(self.name)
            except (TypeError, AttributeError) as ex:
                # Issue #10188: Emit a warning on stderr
                # if the directory could not be cleaned
                # up due to missing globals
                if "None" not in str(ex):
                    raise
                print("ERROR: {!r} while cleaning up {!r}".format(ex, self,),
                      file=_sys.stderr)
                return
            self._closed = True
            if _warn:
                self._warn("Implicitly cleaning up {!r}".format(self),
                           ResourceWarning)

    def __exit__(self, exc, value, tb):
        self.cleanup()

    def __del__(self):
        # Issue a ResourceWarning if implicit cleanup needed
        self.cleanup(_warn=True)

    # XXX (ncoghlan): The following code attempts to make
    # this class tolerant of the module nulling out process
    # that happens during CPython interpreter shutdown
    # Alas, it doesn't actually manage it. See issue #10188
    _listdir = staticmethod(_os.listdir)
    _path_join = staticmethod(_os.path.join)
    _isdir = staticmethod(_os.path.isdir)
    _islink = staticmethod(_os.path.islink)
    _remove = staticmethod(_os.remove)
    _rmdir = staticmethod(_os.rmdir)
    _os_error = OSError
    _warn = _warnings.warn

    def _rmtree(self, path):
        # Essentially a stripped down version of shutil.rmtree.  We can't
        # use globals because they may be None'ed out at shutdown.
        for name in self._listdir(path):
            fullname = self._path_join(path, name)
            try:
                isdir = self._isdir(fullname) and not self._islink(fullname)
            except self._os_error:
                isdir = False
            if isdir:
                self._rmtree(fullname)
            else:
                try:
                    self._remove(fullname)
                except self._os_error:
                    pass
        try:
            self._rmdir(path)
        except self._os_error:
            pass
gpl-3.0
faywong/FFPlayer
project/jni/python/src/Lib/plat-mac/aepack.py
31
12210
"""Tools for use in AppleEvent clients and servers: conversion between AE types and python types pack(x) converts a Python object to an AEDesc object unpack(desc) does the reverse coerce(x, wanted_sample) coerces a python object to another python object """ # # This code was originally written by Guido, and modified/extended by Jack # to include the various types that were missing. The reference used is # Apple Event Registry, chapter 9. # from warnings import warnpy3k warnpy3k("In 3.x, the aepack module is removed.", stacklevel=2) import struct import types from types import * from Carbon import AE from Carbon.AppleEvents import * import MacOS import Carbon.File import aetypes from aetypes import mkenum, ObjectSpecifier # These ones seem to be missing from AppleEvents # (they're in AERegistry.h) #typeColorTable = 'clrt' #typeDrawingArea = 'cdrw' #typePixelMap = 'cpix' #typePixelMapMinus = 'tpmm' #typeRotation = 'trot' #typeTextStyles = 'tsty' #typeStyledText = 'STXT' #typeAEText = 'tTXT' #typeEnumeration = 'enum' # # Some AE types are immedeately coerced into something # we like better (and which is equivalent) # unpacker_coercions = { typeComp : typeFloat, typeColorTable : typeAEList, typeDrawingArea : typeAERecord, typeFixed : typeFloat, typeExtended : typeFloat, typePixelMap : typeAERecord, typeRotation : typeAERecord, typeStyledText : typeAERecord, typeTextStyles : typeAERecord, }; # # Some python types we need in the packer: # AEDescType = AE.AEDescType FSSType = Carbon.File.FSSpecType FSRefType = Carbon.File.FSRefType AliasType = Carbon.File.AliasType def packkey(ae, key, value): if hasattr(key, 'which'): keystr = key.which elif hasattr(key, 'want'): keystr = key.want else: keystr = key ae.AEPutParamDesc(keystr, pack(value)) def pack(x, forcetype = None): """Pack a python object into an AE descriptor""" if forcetype: if type(x) is StringType: return AE.AECreateDesc(forcetype, x) else: return pack(x).AECoerceDesc(forcetype) if x is None: return 
AE.AECreateDesc('null', '') if isinstance(x, AEDescType): return x if isinstance(x, FSSType): return AE.AECreateDesc('fss ', x.data) if isinstance(x, FSRefType): return AE.AECreateDesc('fsrf', x.data) if isinstance(x, AliasType): return AE.AECreateDesc('alis', x.data) if isinstance(x, IntType): return AE.AECreateDesc('long', struct.pack('l', x)) if isinstance(x, FloatType): return AE.AECreateDesc('doub', struct.pack('d', x)) if isinstance(x, StringType): return AE.AECreateDesc('TEXT', x) if isinstance(x, UnicodeType): data = x.encode('utf16') if data[:2] == '\xfe\xff': data = data[2:] return AE.AECreateDesc('utxt', data) if isinstance(x, ListType): list = AE.AECreateList('', 0) for item in x: list.AEPutDesc(0, pack(item)) return list if isinstance(x, DictionaryType): record = AE.AECreateList('', 1) for key, value in x.items(): packkey(record, key, value) #record.AEPutParamDesc(key, pack(value)) return record if type(x) == types.ClassType and issubclass(x, ObjectSpecifier): # Note: we are getting a class object here, not an instance return AE.AECreateDesc('type', x.want) if hasattr(x, '__aepack__'): return x.__aepack__() if hasattr(x, 'which'): return AE.AECreateDesc('TEXT', x.which) if hasattr(x, 'want'): return AE.AECreateDesc('TEXT', x.want) return AE.AECreateDesc('TEXT', repr(x)) # Copout def unpack(desc, formodulename=""): """Unpack an AE descriptor to a python object""" t = desc.type if unpacker_coercions.has_key(t): desc = desc.AECoerceDesc(unpacker_coercions[t]) t = desc.type # This is a guess by Jack.... 
if t == typeAEList: l = [] for i in range(desc.AECountItems()): keyword, item = desc.AEGetNthDesc(i+1, '****') l.append(unpack(item, formodulename)) return l if t == typeAERecord: d = {} for i in range(desc.AECountItems()): keyword, item = desc.AEGetNthDesc(i+1, '****') d[keyword] = unpack(item, formodulename) return d if t == typeAEText: record = desc.AECoerceDesc('reco') return mkaetext(unpack(record, formodulename)) if t == typeAlias: return Carbon.File.Alias(rawdata=desc.data) # typeAppleEvent returned as unknown if t == typeBoolean: return struct.unpack('b', desc.data)[0] if t == typeChar: return desc.data if t == typeUnicodeText: return unicode(desc.data, 'utf16') # typeColorTable coerced to typeAEList # typeComp coerced to extended # typeData returned as unknown # typeDrawingArea coerced to typeAERecord if t == typeEnumeration: return mkenum(desc.data) # typeEPS returned as unknown if t == typeFalse: return 0 if t == typeFloat: data = desc.data return struct.unpack('d', data)[0] if t == typeFSS: return Carbon.File.FSSpec(rawdata=desc.data) if t == typeFSRef: return Carbon.File.FSRef(rawdata=desc.data) if t == typeInsertionLoc: record = desc.AECoerceDesc('reco') return mkinsertionloc(unpack(record, formodulename)) # typeInteger equal to typeLongInteger if t == typeIntlText: script, language = struct.unpack('hh', desc.data[:4]) return aetypes.IntlText(script, language, desc.data[4:]) if t == typeIntlWritingCode: script, language = struct.unpack('hh', desc.data) return aetypes.IntlWritingCode(script, language) if t == typeKeyword: return mkkeyword(desc.data) if t == typeLongInteger: return struct.unpack('l', desc.data)[0] if t == typeLongDateTime: a, b = struct.unpack('lL', desc.data) return (long(a) << 32) + b if t == typeNull: return None if t == typeMagnitude: v = struct.unpack('l', desc.data) if v < 0: v = 0x100000000L + v return v if t == typeObjectSpecifier: record = desc.AECoerceDesc('reco') # If we have been told the name of the module we are unpacking 
aedescs for, # we can attempt to create the right type of python object from that module. if formodulename: return mkobjectfrommodule(unpack(record, formodulename), formodulename) return mkobject(unpack(record, formodulename)) # typePict returned as unknown # typePixelMap coerced to typeAERecord # typePixelMapMinus returned as unknown # typeProcessSerialNumber returned as unknown if t == typeQDPoint: v, h = struct.unpack('hh', desc.data) return aetypes.QDPoint(v, h) if t == typeQDRectangle: v0, h0, v1, h1 = struct.unpack('hhhh', desc.data) return aetypes.QDRectangle(v0, h0, v1, h1) if t == typeRGBColor: r, g, b = struct.unpack('hhh', desc.data) return aetypes.RGBColor(r, g, b) # typeRotation coerced to typeAERecord # typeScrapStyles returned as unknown # typeSessionID returned as unknown if t == typeShortFloat: return struct.unpack('f', desc.data)[0] if t == typeShortInteger: return struct.unpack('h', desc.data)[0] # typeSMFloat identical to typeShortFloat # typeSMInt indetical to typeShortInt # typeStyledText coerced to typeAERecord if t == typeTargetID: return mktargetid(desc.data) # typeTextStyles coerced to typeAERecord # typeTIFF returned as unknown if t == typeTrue: return 1 if t == typeType: return mktype(desc.data, formodulename) # # The following are special # if t == 'rang': record = desc.AECoerceDesc('reco') return mkrange(unpack(record, formodulename)) if t == 'cmpd': record = desc.AECoerceDesc('reco') return mkcomparison(unpack(record, formodulename)) if t == 'logi': record = desc.AECoerceDesc('reco') return mklogical(unpack(record, formodulename)) return mkunknown(desc.type, desc.data) def coerce(data, egdata): """Coerce a python object to another type using the AE coercers""" pdata = pack(data) pegdata = pack(egdata) pdata = pdata.AECoerceDesc(pegdata.type) return unpack(pdata) # # Helper routines for unpack # def mktargetid(data): sessionID = getlong(data[:4]) name = mkppcportrec(data[4:4+72]) location = mklocationnamerec(data[76:76+36]) rcvrName = 
mkppcportrec(data[112:112+72]) return sessionID, name, location, rcvrName def mkppcportrec(rec): namescript = getword(rec[:2]) name = getpstr(rec[2:2+33]) portkind = getword(rec[36:38]) if portkind == 1: ctor = rec[38:42] type = rec[42:46] identity = (ctor, type) else: identity = getpstr(rec[38:38+33]) return namescript, name, portkind, identity def mklocationnamerec(rec): kind = getword(rec[:2]) stuff = rec[2:] if kind == 0: stuff = None if kind == 2: stuff = getpstr(stuff) return kind, stuff def mkunknown(type, data): return aetypes.Unknown(type, data) def getpstr(s): return s[1:1+ord(s[0])] def getlong(s): return (ord(s[0])<<24) | (ord(s[1])<<16) | (ord(s[2])<<8) | ord(s[3]) def getword(s): return (ord(s[0])<<8) | (ord(s[1])<<0) def mkkeyword(keyword): return aetypes.Keyword(keyword) def mkrange(dict): return aetypes.Range(dict['star'], dict['stop']) def mkcomparison(dict): return aetypes.Comparison(dict['obj1'], dict['relo'].enum, dict['obj2']) def mklogical(dict): return aetypes.Logical(dict['logc'], dict['term']) def mkstyledtext(dict): return aetypes.StyledText(dict['ksty'], dict['ktxt']) def mkaetext(dict): return aetypes.AEText(dict[keyAEScriptTag], dict[keyAEStyles], dict[keyAEText]) def mkinsertionloc(dict): return aetypes.InsertionLoc(dict[keyAEObject], dict[keyAEPosition]) def mkobject(dict): want = dict['want'].type form = dict['form'].enum seld = dict['seld'] fr = dict['from'] if form in ('name', 'indx', 'rang', 'test'): if want == 'text': return aetypes.Text(seld, fr) if want == 'cha ': return aetypes.Character(seld, fr) if want == 'cwor': return aetypes.Word(seld, fr) if want == 'clin': return aetypes.Line(seld, fr) if want == 'cpar': return aetypes.Paragraph(seld, fr) if want == 'cwin': return aetypes.Window(seld, fr) if want == 'docu': return aetypes.Document(seld, fr) if want == 'file': return aetypes.File(seld, fr) if want == 'cins': return aetypes.InsertionPoint(seld, fr) if want == 'prop' and form == 'prop' and aetypes.IsType(seld): return 
aetypes.Property(seld.type, fr) return aetypes.ObjectSpecifier(want, form, seld, fr) # Note by Jack: I'm not 100% sure of the following code. This was # provided by Donovan Preston, but I wonder whether the assignment # to __class__ is safe. Moreover, shouldn't there be a better # initializer for the classes in the suites? def mkobjectfrommodule(dict, modulename): if type(dict['want']) == types.ClassType and issubclass(dict['want'], ObjectSpecifier): # The type has already been converted to Python. Convert back:-( classtype = dict['want'] dict['want'] = aetypes.mktype(classtype.want) want = dict['want'].type module = __import__(modulename) codenamemapper = module._classdeclarations classtype = codenamemapper.get(want, None) newobj = mkobject(dict) if classtype: assert issubclass(classtype, ObjectSpecifier) newobj.__class__ = classtype return newobj def mktype(typecode, modulename=None): if modulename: module = __import__(modulename) codenamemapper = module._classdeclarations classtype = codenamemapper.get(typecode, None) if classtype: return classtype return aetypes.mktype(typecode)
lgpl-2.1
bennymartinson/Oort
oort/abstract.py
1
9782
import schedule import rtcmix_import as rtcmix #from utilities import * import dynamic_value import errors class OortObject(object): """Abstract base class for all Oort instruments and behaviors.""" def __getattribute__(self, *args, **kwargs): """When a dynamic value object is accessed, return its number value instead.""" attr = object.__getattribute__(self, *args, **kwargs) if isinstance(attr, dynamic_value.DynamicValue): return attr.value() else: return attr total_instrument_plays = 0 class Instrument(OortObject): """Abstract base class for all Oort instruments.""" outsk=0 dur=1 amp=10000 pitch=440 pan=0.5 instrument = None loaded = False pfields = () min_control_rate = 0 _prev_control_rate = 0 def __init__(self, args): import inspect spec = inspect.getargspec(self.__init__) argnames = spec[0] extra_args = spec[1] # the name of the list extra_kwargs = spec[2] # the name of the dict play = False for argname in argnames: key = argname.lower() # we don't want to have to remember which case everything is in! if argname is 'self': continue if args[argname] is not None: if argname not in (extra_args, extra_kwargs): play = True setattr(self, key, args[argname]) elif not hasattr(self, key): setattr(self, key, None) if extra_args is not None: argcount = len(argnames) i = 0 for arg in args[extra_args]: play = True setattr(self, 'p'+str(argcount-1+i), arg) i+=1 if extra_kwargs is not None: for key, arg in args[extra_kwargs].items(): play = True setattr(self, key, arg) if play: # if at least one argument was passed to the constructor self.play() def _passback(self, args): Instrument.__init__(self, args) def set(self, **kwargs): """Set several params at once by key and value.""" for key,val in kwargs.items(): setattr(self, key, val) return self def _get(self, name): """Used to obtain calculated values to be passed to RTcmix instrument. 
Uses method get_{value} if this method exists.For example, outsk is modified by get_outsk to add the current time to outsk.""" if hasattr(self, 'get_'+name): return getattr(self, 'get_'+name)() else: return getattr(self, name) def get_outsk(self): return self.outsk + schedule.now() def apply_behaviors(self, *behaviors): for behavior in behaviors: self.apply_behavior(behavior) def apply_behavior(self, behavior): """Apply a behavior to this instrument. This applies each method in the behavior as a decorator to a corresponding method in the instrument.""" if not self.can_apply_behavior(behavior): raise errors.OortBehaviorError('This behavior requires a parameter to be pfield-enabled which is not pfield-enabled. Please try a different behavior, or check that your instrument has the correct pfields parameter set.') for var in dir(behavior): if (var.startswith('get_') or var in ('_play', '_before_play', '_after_play')): # need to call a separate function for closure to capture vars self._build_behavior_dispatch(behavior, var) def can_apply_behavior(self, behavior): if not hasattr(behavior, 'required_pfields') or not len(behavior.required_pfields): return True if not hasattr(self, 'pfields'): return False for pfield in behavior.required_pfields: if pfield not in self.pfields: return False return True def _build_behavior_dispatch(self, behavior, var): if hasattr(self, var) and hasattr(getattr(self, var), '__call__'): # if self has function with name var, decorate function fn = getattr(self, var) elif var.startswith('get_'): # if var in form get_var, return function getting var from self varname = var[4:] fn = lambda: getattr(self, varname, 0) else: # not interested return new_fn = getattr(behavior, var) def _behavior_dispatch(*args): return new_fn(self, fn, *args) setattr(self, var, _behavior_dispatch) def play(self): """Runs the corresponding RTcmix command. Do not override. 
Instead, override _play()""" self._load() # Set to default in case it was not already set: rtcmix.rtsetparams(44100, 2) args,keys = self.get_instrument_args() if rtcmix.verbose: print "Calling instrument",self.instrument,"with parameters",args self._before_play(()) # Make sure control rate is at least min_control_rate self._prev_control_rate = rtcmix.current_control_rate() if self._prev_control_rate < self.min_control_rate: rtcmix.control_rate(self.min_control_rate) # Play self._play(args,keys) # Return control rate to prev value. if self._prev_control_rate != rtcmix.current_control_rate(): rtcmix.control_rate(self._prev_control_rate) self._after_play(()) global total_instrument_plays total_instrument_plays+=1 return self def get_instrument_arg_names(self): import inspect args = inspect.getargspec(self.__init__) return args[0] def get_instrument_args(self): # Read arguments from instrument's __init__() function argnames = self.get_instrument_arg_names() # Collect list of consecutive non-None parameters args = [] keys = [] for argname in argnames: argname = argname.lower() if argname == 'self': continue arg = self._get(argname) if arg is None: break; # We hit a None parameter, so can't add more to arg list args.append(arg) keys.append(argname) # Look for pN parameters i=1 repeat = True while repeat: name = 'p'+str(i) if hasattr(self, name) and i <= len(args): if i<len(keys): args[i] = getattr(self, name) else: keys.append(name) args.append(getattr(self, name)) elif i > len(args): repeat = False break i+=1 # Look for parameters following kw_pattern if hasattr(self, 'kw_pattern'): i=1 repeat = True while repeat: for prefix in self.kw_pattern: name = prefix+str(i) if hasattr(self, name): keys.append(name) args.append(getattr(self, name)) else: repeat = False break i+=1 return args, keys def _play(self, args, keys): """Call RTcmix function. 
Override for custom functionality.""" getattr(rtcmix, self.instrument)(*args) def _before_play(self, args): """ Called once before playing an instrument. Override for custom functionality.""" pass def _after_play(self, args): """ Called once after playing an instrument. Override for custom functionality.""" pass def _load(self): """Load instruments as necessary. Override for complex instruments.""" load = getattr(self, 'load', self.instrument) if not hasattr(load, '__iter__'): load = load, for l in load: try: rtcmix.load(l) except: raise errors.OortInstrumentError('The package '+str(l)+' does not exist to load in instrument '+str(self.instrument)) def docs(self): import webbrowser webbrowser.open('http://music.columbia.edu/cmc/RTcmix/docs/instruments/'+self.instrument+'.html') class Effect(Instrument): insk = 0 amp = 1 class Behavior(OortObject): """Abstract behavior class. Behaviors let one easily apply new functionality to any instrument(s) using function decorators. Unlike subclassing an instrument, behaviors are intended to be ignorant of the type of instrument to which they are applied, and a single behavior instance can be applied to several instrument instances. See Behavior's subclasses for examples.""" # List of parameters that must be pfield-enabled # in order for this behavior to be set to an instrument. required_pfields = () def __init__(self, **args): for k,v in args.items(): setattr(self, k, v) def _play(self, inst, fn, args, keys): """Override this to execute different actions when playing""" return fn(args, keys) def _get(self, name): if hasattr(self, 'get_'+name): return getattr(self, 'get_'+name)() else: return getattr(self, name)
gpl-3.0
fitnr/censusname
censusname/censusname.py
1
6235
# -*- coding: utf-8 -*- # Copyright 2014-5 Neil Freeman # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from __future__ import print_function from pkg_resources import resource_stream, resource_exists from contextlib import closing import codecs import random import csv from . import formatters # Name files should have at least the following columns: # name (string) # cumul_frequency (float) number from 0 to 100 SURNAME2000 = "data/dist.all.last.2000.csv" SURNAME1990 = "data/dist.all.last.1990.csv" MALEFIRST1990 = "data/dist.male.first.1990.csv" FEMALEFIRST1990 = "data/dist.female.first.1990.csv" # Name files don't contain every name, so hard coding the maximum frequency here. 
# This way we don't over-pick the least common names MAX_FREQUENCIES = { SURNAME2000: 89.75356, SURNAME1990: 90.483, MALEFIRST1990: 90.040, FEMALEFIRST1990: 90.024 } GIVENNAMEFILES = { 'male': MALEFIRST1990, 'female': FEMALEFIRST1990 } # 1990 is commented out because it's (a) out of date (b) not based on a random sample anyway # Feel free to use it by doing something like: # from censusname import censusname # my_surnamefiles = { '1990': censusname.SURNAME1990 } SURNAMEFILES = { '2000': SURNAME2000, # '1990': SURNAME1990 } NAMEFILES = { 'given': GIVENNAMEFILES, 'surname': SURNAMEFILES } FORMATTERS = { 'surname': [formatters.recapitalize_surnames] } class Censusname(object): """Generate a random name from an arbitrary set of files""" def __init__(self, nameformat='{given} {surname}', namefiles=None, max_frequencies=None, **kwargs): self.namefiles = namefiles or NAMEFILES if self.namefiles == NAMEFILES: self.max_frequencies = MAX_FREQUENCIES # If no max frequencies given, assume they go to 100 for each file if max_frequencies is None: max_frequencies = dict((self.namefiles[k][x], 100) for k in list(self.namefiles.keys()) for x in self.namefiles[k]) self.nameformat = nameformat if 'csv_args' in kwargs: self.csv_args = kwargs['csv_args'] else: self.csv_args = {'delimiter': ','} if 'formatters' in kwargs: if type(kwargs['formatters']) is not dict: raise TypeError("Keyword argument 'formatters' for censusname() must be a dict.") self.formatters = kwargs['formatters'] else: self.formatters = FORMATTERS if 'capitalize' in kwargs: self.capitalize = kwargs['capitalize'] else: self.capitalize = True def generate(self, nameformat=None, capitalize=None, formatters=None, **kwargs): '''Pick a random name form a specified list of name parts''' nameformat = nameformat or self.nameformat capitalize = capitalize or self.capitalize formatters = formatters or {} lines = self._get_lines(kwargs) names = dict((k, v['name']) for k, v in list(lines.items())) if capitalize: names = dict((k, 
n.capitalize()) for k, n in list(names.items())) merged_formatters = dict() try: merged_formatters = dict( (k, self.formatters.get(k, []) + formatters.get(k, [])) for k in set(list(self.formatters.keys()) + list(formatters.keys())) ) except AttributeError: raise TypeError("keyword argument 'formatters' for Censusname.generate() must be a dict") if merged_formatters: for key, functions in list(merged_formatters.items()): # 'surname', [func_a, func_b] for func in functions: # names['surname'] = func_a(name['surname']) names[key] = func(names[key]) return nameformat.format(**names) def _get_lines(self, nametypes): datafile, frequency, lines = '', 0.0, {} # The key of each name file is its namepart, e.g. surname or given for namepart in list(self.namefiles.keys()): datafile = self._pick_file(namepart, nametypes.get(namepart, None)) frequency = random.uniform(0, self.max_frequencies[datafile]) lines[namepart] = self.pick_frequency_line(datafile, frequency) return lines def _pick_file(self, namepart, namekeys=None): result = None if type(namekeys) is not list: namekeys = [namekeys] if namekeys: key = random.choice(namekeys) result = self.namefiles[namepart].get(key) if result is None: return random.choice(list(self.namefiles[namepart].values())) else: return result def pick_frequency_line(self, filename, frequency, cumulativefield='cumulative_frequency'): '''Given a numeric frequency, pick a line from a csv with a cumulative frequency field''' if resource_exists('censusname', filename): with closing(resource_stream('censusname', filename)) as b: g = codecs.iterdecode(b, 'ascii') return self._pick_frequency_line(g, frequency, cumulativefield) else: with open(filename, encoding='ascii') as g: return self._pick_frequency_line(g, frequency, cumulativefield) def _pick_frequency_line(self, handle, frequency, cumulativefield): reader = csv.DictReader(handle, **self.csv_args) for line in reader: if float(line[cumulativefield]) >= frequency: return line # helper generate function 
_C = Censusname() def generate(*args, **kwargs): return _C.generate(*args, **kwargs)
gpl-3.0
alshedivat/tensorflow
tensorflow/contrib/tpu/python/tpu/topology.py
5
6403
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================
"""Defines the `Topology` class, that describes a TPU fabric topology."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.contrib.tpu.proto import topology_pb2


class Topology(object):
  """Describes a set of TPU devices.

  Represents both the shape of the physical mesh, and the mapping between
  TensorFlow TPU devices to physical mesh coordinates.
  """

  def __init__(self, serialized=None, mesh_shape=None, device_coordinates=None):
    """Builds a Topology object.

    If `serialized` is not `None`, the topology is parsed from `serialized` and
    the other arguments are ignored. Otherwise, the topology is computed from
    `mesh_shape` and `device_coordinates`.

    Args:
      serialized: A serialized `TopologyProto`, or `None`. If not `None`, the
        serialized proto is parsed to discover the topology.
      mesh_shape: A sequence of 3 positive integers, or `None`. If not `None`,
        the shape of the TPU topology, in number of cores. Ignored if
        `serialized` is not `None`.
      device_coordinates: A rank 3 numpy array that describes the mapping from
        TensorFlow TPU devices to TPU fabric coordinates, or `None`. Ignored
        if `serialized is not `None`.

    Raises:
      ValueError: If `serialized` does not describe a well-formed topology.
      ValueError: If `serialized` is `None` and `mesh_shape` is not a sequence
        of 3 positive integers.
      ValueError: If `serialized` is `None` and `device_coordinates` is not a
        rank 3 numpy int32 array that describes a valid coordinate mapping.
    """

    # Keep the raw proto bytes (if given) so serialized() can return them
    # without re-serializing.
    self._serialized = serialized

    if serialized:
      self._parse_topology(serialized)
    else:
      self._mesh_shape = np.asarray(mesh_shape, dtype=np.int32)
      self._device_coordinates = np.asarray(device_coordinates, np.int32)
      if len(self._mesh_shape) != 3 or any(self._mesh_shape < 1):
        raise ValueError("`mesh_shape` must be a sequence of 3 positive "
                         "entries; got {}".format(self._mesh_shape))

      if (len(self._device_coordinates.shape) != 3 or
          self._device_coordinates.shape[2] != len(self._mesh_shape)):
        raise ValueError("`device_coordinates` must be a rank 3 int32 array "
                         "with minor dimension equal to the mesh shape rank")

  def _parse_topology(self, serialized):
    """Parses a serialized `TopologyProto` into `self`."""
    proto = topology_pb2.TopologyProto()
    proto.ParseFromString(serialized)

    self._mesh_shape = np.array(proto.mesh_shape, dtype=np.int32)
    if len(self._mesh_shape) != 3 or any(self._mesh_shape < 1):
      raise ValueError("`mesh_shape` must be a vector of size 3 with positive "
                       "entries; got {}".format(self._mesh_shape))

    if proto.num_tasks < 0:
      raise ValueError("`num_tasks` must be >= 0; got {}".format(
          proto.num_tasks))
    if proto.num_tpu_devices_per_task < 0:
      raise ValueError("`num_tpu_devices_per_task` must be >= 0; got {}".format(
          proto.num_tpu_devices_per_task))

    # The flat coordinate list must factor exactly into
    # tasks x devices-per-task x mesh rank before reshaping below.
    expected_coordinates_size = (
        proto.num_tasks * proto.num_tpu_devices_per_task * len(
            proto.mesh_shape))
    if len(proto.device_coordinates) != expected_coordinates_size:
      raise ValueError("`device_coordinates` must have shape num_tasks ({}) * "
                       "num_tpu_devices_per_task ({}) * len(mesh_shape) ({}); "
                       "got shape {}".format(proto.num_tasks,
                                             proto.num_tpu_devices_per_task,
                                             proto.mesh_shape,
                                             len(proto.device_coordinates)))

    coords = np.array(proto.device_coordinates, dtype=np.int32)
    if any(coords < 0):
      raise ValueError("`device_coordinates` must be >= 0")
    coords = coords.reshape((proto.num_tasks, proto.num_tpu_devices_per_task,
                             len(proto.mesh_shape)))
    self._device_coordinates = coords

  @property
  def mesh_shape(self):
    """A rank 1 int32 array describing the shape of the TPU topology."""
    return self._mesh_shape

  @property
  def mesh_rank(self):
    """Returns the number of dimensions in the mesh."""
    return len(self._mesh_shape)

  @property
  def device_coordinates(self):
    """Describes the mapping from TPU devices to topology coordinates.

    Returns:
      A rank 3 int32 array with shape `[tasks, devices, axis]`.
      `tasks` is the number of tasks in the TPU cluster, `devices` is the number
      of TPU devices per task, and `axis` is the number of axes in the TPU
      cluster topology. Each entry gives the `axis`-th coordinate in the
      topology of a task/device pair. TPU topologies are 3-dimensional, with
      dimensions `(x, y, core number)`.
    """
    return self._device_coordinates

  @property
  def num_tasks(self):
    """Returns the number of TensorFlow tasks in the TPU slice."""
    return self._device_coordinates.shape[0]

  @property
  def num_tpus_per_task(self):
    """Returns the number of TPU devices per task in the TPU slice."""
    return self._device_coordinates.shape[1]

  def serialized(self):
    """Returns the serialized form of the topology."""
    # Lazily serialize and cache; topologies are immutable after __init__.
    if self._serialized is None:
      proto = topology_pb2.TopologyProto()
      proto.mesh_shape[:] = list(self._mesh_shape)
      proto.num_tasks = self._device_coordinates.shape[0]
      proto.num_tpu_devices_per_task = self._device_coordinates.shape[1]
      proto.device_coordinates.extend(list(self._device_coordinates.flatten()))
      self._serialized = proto.SerializeToString()

    return self._serialized
apache-2.0
GdZ/scriptfile
software/googleAppEngine/lib/django_1_3/tests/modeltests/proxy_model_inheritance/tests.py
50
1253
""" XX. Proxy model inheritance Proxy model inheritance across apps can result in syncdb not creating the table for the proxied model (as described in #12286). This test creates two dummy apps and calls syncdb, then verifies that the table has been created. """ import os import sys from django.conf import settings, Settings from django.core.management import call_command from django.db.models.loading import load_app from django.test import TransactionTestCase class ProxyModelInheritanceTests(TransactionTestCase): def setUp(self): self.old_sys_path = sys.path[:] sys.path.append(os.path.dirname(os.path.abspath(__file__))) self.old_installed_apps = settings.INSTALLED_APPS settings.INSTALLED_APPS = ('app1', 'app2') map(load_app, settings.INSTALLED_APPS) call_command('syncdb', verbosity=0) global ProxyModel, NiceModel from app1.models import ProxyModel from app2.models import NiceModel def tearDown(self): settings.INSTALLED_APPS = self.old_installed_apps sys.path = self.old_sys_path def test_table_exists(self): self.assertEqual(NiceModel.objects.all().count(), 0) self.assertEqual(ProxyModel.objects.all().count(), 0)
mit
sgerhart/ansible
lib/ansible/modules/network/onyx/onyx_magp.py
66
7830
#!/usr/bin/python # # Copyright: Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = """ --- module: onyx_magp version_added: "2.5" author: "Samer Deeb (@samerd)" short_description: Manage MAGP protocol on Mellanox ONYX network devices description: - This module provides declarative management of MAGP protocol on vlan interface of Mellanox ONYX network devices. notes: - Tested on ONYX 3.6.4000 options: magp_id: description: - "MAGP instance number 1-255" required: true interface: description: - VLAN Interface name. required: true state: description: - MAGP state. default: present choices: ['present', 'absent', 'enabled', 'disabled'] router_ip: description: - MAGP router IP address. router_mac: description: - MAGP router MAC address. """ EXAMPLES = """ - name: run add vlan interface with magp onyx_magp: magp_id: 103 router_ip: 192.168.8.2 router_mac: AA:1B:2C:3D:4E:5F interface: Vlan 1002 """ RETURN = """ commands: description: The list of configuration mode commands to send to the device. 
returned: always type: list sample: - interface vlan 234 magp 103 - exit - interface vlan 234 magp 103 ip virtual-router address 1.2.3.4 """ import re from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.onyx.onyx import BaseOnyxModule from ansible.module_utils.network.onyx.onyx import show_cmd class OnyxMagpModule(BaseOnyxModule): IF_VLAN_REGEX = re.compile(r"^Vlan (\d+)$") @classmethod def _get_element_spec(cls): return dict( magp_id=dict(type='int', required=True), state=dict(default='present', choices=['present', 'absent', 'enabled', 'disabled']), interface=dict(required=True), router_ip=dict(), router_mac=dict(), ) def init_module(self): """ Ansible module initialization """ element_spec = self._get_element_spec() argument_spec = dict() argument_spec.update(element_spec) self._module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True) def validate_magp_id(self, value): if value and not 1 <= int(value) <= 255: self._module.fail_json(msg='magp id must be between 1 and 255') def get_required_config(self): module_params = self._module.params interface = module_params['interface'] match = self.IF_VLAN_REGEX.match(interface) vlan_id = 0 if match: vlan_id = int(match.group(1)) else: self._module.fail_json( msg='Invalid interface name: should be "Vlan <vlan_id>"') self._required_config = dict( magp_id=module_params['magp_id'], state=module_params['state'], vlan_id=vlan_id, router_ip=module_params['router_ip'], router_mac=module_params['router_mac']) self.validate_param_values(self._required_config) @classmethod def get_magp_id(cls, item): header = cls.get_config_attr(item, "header") return int(header.split()[1]) def _create_magp_instance_data(self, magp_id, item): vlan_id = int(self.get_config_attr(item, "Interface vlan")) state = self.get_config_attr(item, "Admin state").lower() return dict( magp_id=magp_id, state=state, vlan_id=vlan_id, router_ip=self.get_config_attr(item, "Virtual IP"), 
router_mac=self.get_config_attr(item, "Virtual MAC")) def _update_magp_data(self, magp_data): for magp_item in magp_data: magp_id = self.get_magp_id(magp_item) inst_data = self._create_magp_instance_data(magp_id, magp_item) self._current_config[magp_id] = inst_data def _get_magp_config(self): cmd = "show magp" return show_cmd(self._module, cmd, json_fmt=True, fail_on_error=False) def load_current_config(self): # called in base class in run function self._current_config = dict() magp_data = self._get_magp_config() if magp_data: self._update_magp_data(magp_data) def _generate_no_magp_commands(self): req_vlan_id = self._required_config['vlan_id'] req_magp_id = self._required_config['magp_id'] curr_magp_data = self._current_config.get(req_magp_id) if not curr_magp_data: return curr_vlan_id = curr_magp_data.get(req_vlan_id) if curr_vlan_id == req_vlan_id: cmd = 'interface vlan %s no magp %s' % (req_vlan_id, req_magp_id) self._commands.append(cmd) def _generate_magp_commands(self, req_state): req_vlan_id = self._required_config['vlan_id'] req_magp_id = self._required_config['magp_id'] curr_magp_data = self._current_config.get(req_magp_id, dict()) curr_vlan_id = curr_magp_data.get('vlan_id') magp_prefix = 'interface vlan %s magp %s' % (req_vlan_id, req_magp_id) create_new_magp = False if curr_vlan_id != req_vlan_id: if curr_vlan_id: cmd = 'interface vlan %s no magp %s' % ( curr_vlan_id, req_magp_id) self._commands.append(cmd) create_new_magp = True self._commands.append(magp_prefix) self._commands.append('exit') req_router_ip = self._required_config['router_ip'] curr_router_ip = curr_magp_data.get('router_ip') if req_router_ip: if curr_router_ip != req_router_ip or create_new_magp: cmd = '%s ip virtual-router address %s' % ( magp_prefix, req_router_ip) self._commands.append(cmd) else: if curr_router_ip and curr_router_ip != '0.0.0.0': cmd = '%s no ip virtual-router address' % magp_prefix self._commands.append(cmd) req_router_mac = self._required_config['router_mac'] 
curr_router_mac = curr_magp_data.get('router_mac') if curr_router_mac: curr_router_mac = curr_router_mac.lower() if req_router_mac: req_router_mac = req_router_mac.lower() if curr_router_mac != req_router_mac or create_new_magp: cmd = '%s ip virtual-router mac-address %s' % ( magp_prefix, req_router_mac) self._commands.append(cmd) else: if curr_router_mac and curr_router_mac != '00:00:00:00:00:00': cmd = '%s no ip virtual-router mac-address' % magp_prefix self._commands.append(cmd) if req_state in ('enabled', 'disabled'): curr_state = curr_magp_data.get('state', 'enabled') if curr_state != req_state: if req_state == 'enabled': suffix = 'no shutdown' else: suffix = 'shutdown' cmd = '%s %s' % (magp_prefix, suffix) self._commands.append(cmd) def generate_commands(self): req_state = self._required_config['state'] if req_state == 'absent': return self._generate_no_magp_commands() return self._generate_magp_commands(req_state) def main(): """ main entry point for module execution """ OnyxMagpModule.main() if __name__ == '__main__': main()
mit
eckucukoglu/arm-linux-gnueabihf
arm-linux-gnueabihf/libc/usr/lib/python2.7/test/test_file_eintr.py
95
10480
# Written to test interrupted system calls interfering with our many buffered
# IO implementations. http://bugs.python.org/issue12268
#
# This tests the '_io' module. Similar tests for Python 2.x's older
# default file I/O implementation exist within test_file2k.py.
#
# It was suggested that this code could be merged into test_io and the tests
# made to work using the same method as the existing signal tests in test_io.
# I was unable to get single process tests using alarm or setitimer that way
# to reproduce the EINTR problems. This process based test suite reproduces
# the problems prior to the issue12268 patch reliably on Linux and OSX.
# - gregory.p.smith

import os
import select
import signal
import subprocess
import sys
from test.test_support import run_unittest
import time
import unittest

# Test import all of the things we're about to try testing up front.
from _io import FileIO


@unittest.skipUnless(os.name == 'posix', 'tests requires a posix system.')
class TestFileIOSignalInterrupt(unittest.TestCase):
    """Base harness: spawns a reader child, interrupts its blocking read
    with SIGINT, and verifies no data is lost.  Subclasses override
    _generate_infile_setup_code() to exercise other IO implementations.
    """

    def setUp(self):
        self._process = None

    def tearDown(self):
        # Kill any leftover child so a failed test cannot leak a process.
        if self._process and self._process.poll() is None:
            try:
                self._process.kill()
            except OSError:
                pass

    def _generate_infile_setup_code(self):
        """Returns the infile = ... line of code for the reader process.

        subclasses should override this to test different IO objects.
        """
        return ('import _io ;'
                'infile = _io.FileIO(sys.stdin.fileno(), "rb")')

    def fail_with_process_info(self, why, stdout=b'', stderr=b'',
                               communicate=True):
        """A common way to cleanup and fail with useful debug output.

        Kills the process if it is still running, collects remaining output
        and fails the test with an error message including the output.

        Args:
            why: Text to go after "Error from IO process" in the message.
            stdout, stderr: standard output and error from the process so
                far to include in the error message.
            communicate: bool, when True we call communicate() on the process
                after killing it to gather additional output.
        """
        if self._process.poll() is None:
            time.sleep(0.1)  # give it time to finish printing the error.
            try:
                self._process.terminate()  # Ensure it dies.
            except OSError:
                pass
        if communicate:
            stdout_end, stderr_end = self._process.communicate()
            stdout += stdout_end
            stderr += stderr_end
        self.fail('Error from IO process %s:\nSTDOUT:\n%sSTDERR:\n%s\n' %
                  (why, stdout.decode(), stderr.decode()))

    def _test_reading(self, data_to_write, read_and_verify_code):
        """Generic buffered read method test harness to validate EINTR
        behavior.

        Also validates that Python signal handlers are run during the read.

        Args:
            data_to_write: String to write to the child process for reading
                before sending it a signal, confirming the signal was handled,
                writing a final newline and closing the infile pipe.
            read_and_verify_code: Single "line" of code to read from a file
                object named 'infile' and validate the result.  This will be
                executed as part of a python subprocess fed data_to_write.
        """
        infile_setup_code = self._generate_infile_setup_code()
        # Total pipe IO in this function is smaller than the minimum posix OS
        # pipe buffer size of 512 bytes.  No writer should block.
        assert len(data_to_write) < 512, 'data_to_write must fit in pipe buf.'

        # Start a subprocess to call our read method while handling a signal.
        # -u keeps the child's stdio unbuffered so our pipe reads see output
        # as soon as it is written.
        self._process = subprocess.Popen(
                [sys.executable, '-u', '-c',
                 'import io, signal, sys ;'
                 'signal.signal(signal.SIGINT, '
                 'lambda s, f: sys.stderr.write("$\\n")) ;'
                 + infile_setup_code + ' ;' +
                 'sys.stderr.write("Worm Sign!\\n") ;'
                 + read_and_verify_code + ' ;' +
                 'infile.close()'
                 ],
                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)

        # Wait for the signal handler to be installed.
        worm_sign = self._process.stderr.read(len(b'Worm Sign!\n'))
        if worm_sign != b'Worm Sign!\n':  # See also, Dune by Frank Herbert.
            self.fail_with_process_info('while awaiting a sign',
                                        stderr=worm_sign)
        self._process.stdin.write(data_to_write)

        signals_sent = 0
        rlist = []
        # We don't know when the read_and_verify_code in our child is actually
        # executing within the read system call we want to interrupt.  This
        # loop waits for a bit before sending the first signal to increase
        # the likelihood of that.  Implementations without correct EINTR
        # and signal handling usually fail this test.
        while not rlist:
            rlist, _, _ = select.select([self._process.stderr], (), (), 0.05)
            self._process.send_signal(signal.SIGINT)
            signals_sent += 1
            if signals_sent > 200:
                self._process.kill()
                self.fail('reader process failed to handle our signals.')
        # This assumes anything unexpected that writes to stderr will also
        # write a newline.  That is true of the traceback printing code.
        signal_line = self._process.stderr.readline()
        if signal_line != b'$\n':
            self.fail_with_process_info('while awaiting signal',
                                        stderr=signal_line)

        # We append a newline to our input so that a readline call can
        # end on its own before the EOF is seen and so that we're testing
        # the read call that was interrupted by a signal before the end of
        # the data stream has been reached.
        stdout, stderr = self._process.communicate(input=b'\n')
        if self._process.returncode:
            self.fail_with_process_info(
                    'exited rc=%d' % self._process.returncode,
                    stdout, stderr, communicate=False)
        # PASS!

    # String format for the read_and_verify_code used by read methods.
    _READING_CODE_TEMPLATE = (
            'got = infile.{read_method_name}() ;'
            'expected = {expected!r} ;'
            'assert got == expected, ('
            '"{read_method_name} returned wrong data.\\n"'
            '"got data %r\\nexpected %r" % (got, expected))'
            )

    def test_readline(self):
        """readline() must handle signals and not lose data."""
        self._test_reading(
                data_to_write=b'hello, world!',
                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
                        read_method_name='readline',
                        expected=b'hello, world!\n'))

    def test_readlines(self):
        """readlines() must handle signals and not lose data."""
        self._test_reading(
                data_to_write=b'hello\nworld!',
                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
                        read_method_name='readlines',
                        expected=[b'hello\n', b'world!\n']))

    def test_readall(self):
        """readall() must handle signals and not lose data."""
        self._test_reading(
                data_to_write=b'hello\nworld!',
                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
                        read_method_name='readall',
                        expected=b'hello\nworld!\n'))
        # read() is the same thing as readall().
        self._test_reading(
                data_to_write=b'hello\nworld!',
                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
                        read_method_name='read',
                        expected=b'hello\nworld!\n'))


class TestBufferedIOSignalInterrupt(TestFileIOSignalInterrupt):
    def _generate_infile_setup_code(self):
        """Returns the infile = ... line of code to make a BufferedReader."""
        return ('infile = io.open(sys.stdin.fileno(), "rb") ;'
                'import _io ;assert isinstance(infile, _io.BufferedReader)')

    def test_readall(self):
        """BufferedReader.read() must handle signals and not lose data."""
        self._test_reading(
                data_to_write=b'hello\nworld!',
                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
                        read_method_name='read',
                        expected=b'hello\nworld!\n'))


class TestTextIOSignalInterrupt(TestFileIOSignalInterrupt):
    def _generate_infile_setup_code(self):
        """Returns the infile = ... line of code to make a TextIOWrapper."""
        return ('infile = io.open(sys.stdin.fileno(), "rt", newline=None) ;'
                'import _io ;assert isinstance(infile, _io.TextIOWrapper)')

    def test_readline(self):
        """readline() must handle signals and not lose data."""
        self._test_reading(
                data_to_write=b'hello, world!',
                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
                        read_method_name='readline',
                        expected='hello, world!\n'))

    def test_readlines(self):
        """readlines() must handle signals and not lose data."""
        # \r\n in the input exercises universal-newline translation (the
        # expected values below contain plain \n).
        self._test_reading(
                data_to_write=b'hello\r\nworld!',
                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
                        read_method_name='readlines',
                        expected=['hello\n', 'world!\n']))

    def test_readall(self):
        """read() must handle signals and not lose data."""
        self._test_reading(
                data_to_write=b'hello\nworld!',
                read_and_verify_code=self._READING_CODE_TEMPLATE.format(
                        read_method_name='read',
                        expected="hello\nworld!\n"))


def test_main():
    # Collect every TestCase subclass defined in this module.
    test_cases = [
            tc for tc in globals().values()
            if isinstance(tc, type) and issubclass(tc, unittest.TestCase)]
    run_unittest(*test_cases)


if __name__ == '__main__':
    test_main()
gpl-2.0
SDSG-Invenio/invenio
invenio/modules/submit/fixtures.py
13
35463
# -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2013 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. import datetime from fixture import DataSet class SbmACTIONData(DataSet): class SbmACTION_APP: md = datetime.date(2002, 6, 11) lactname = u'Approve Record' statustext = u'Approve Record' actionbutton = u'' sactname = u'APP' dir = u'approve' cd = datetime.date(2001, 11, 8) class SbmACTION_MBI: md = datetime.date(2001, 11, 7) lactname = u'Modify Record' statustext = u'Modify Record' actionbutton = u'' sactname = u'MBI' dir = u'modify' cd = datetime.date(1998, 8, 17) class SbmACTION_SBI: md = datetime.date(2001, 8, 8) lactname = u'Submit New Record' statustext = u'Submit New Record' actionbutton = u'' sactname = u'SBI' dir = u'running' cd = datetime.date(1998, 8, 17) class SbmACTION_SRV: md = datetime.date(2001, 11, 7) lactname = u'Submit New File' statustext = u'Submit New File' actionbutton = u'' sactname = u'SRV' dir = u'revise' cd = None class SbmALLFUNCDESCRData(DataSet): class SbmALLFUNCDESCR_Ask_For_Record_Details_Confirmation: function = u'Ask_For_Record_Details_Confirmation' description = u'' class SbmALLFUNCDESCR_CaseEDS: function = u'CaseEDS' description = u'' class SbmALLFUNCDESCR_Create_Modify_Interface: function = u'Create_Modify_Interface' description = None class SbmALLFUNCDESCR_Create_Recid: 
function = u'Create_Recid' description = None class SbmALLFUNCDESCR_Create_Upload_Files_Interface: function = u'Create_Upload_Files_Interface' description = u'Display generic interface to add/revise/delete files. To be used before function "Move_Uploaded_Files_to_Storage"' class SbmALLFUNCDESCR_Finish_Submission: function = u'Finish_Submission' description = u'' class SbmALLFUNCDESCR_Get_Info: function = u'Get_Info' description = u'' class SbmALLFUNCDESCR_Get_Recid: function = u'Get_Recid' description = u'This function gets the recid for a document with a given report-number (as stored in the global variable rn).' class SbmALLFUNCDESCR_Get_Report_Number: function = u'Get_Report_Number' description = None class SbmALLFUNCDESCR_Get_Sysno: function = u'Get_Sysno' description = None class SbmALLFUNCDESCR_Insert_Modify_Record: function = u'Insert_Modify_Record' description = u'' class SbmALLFUNCDESCR_Insert_Record: function = u'Insert_Record' description = None class SbmALLFUNCDESCR_Is_Original_Submitter: function = u'Is_Original_Submitter' description = u'' class SbmALLFUNCDESCR_Is_Referee: function = u'Is_Referee' description = u'This function checks whether the logged user is a referee for the current document' class SbmALLFUNCDESCR_Link_Records: function = u'Link_Records' description = u'Link two records toghether via MARC' class SbmALLFUNCDESCR_Mail_Approval_Request_to_Referee: function = u'Mail_Approval_Request_to_Referee' description = None class SbmALLFUNCDESCR_Mail_Approval_Withdrawn_to_Referee: function = u'Mail_Approval_Withdrawn_to_Referee' description = None class SbmALLFUNCDESCR_Mail_Submitter: function = u'Mail_Submitter' description = None class SbmALLFUNCDESCR_Make_Dummy_MARC_XML_Record: function = u'Make_Dummy_MARC_XML_Record' description = u'' class SbmALLFUNCDESCR_Make_Modify_Record: function = u'Make_Modify_Record' description = None class SbmALLFUNCDESCR_Make_Record: function = u'Make_Record' description = u'' class 
SbmALLFUNCDESCR_Move_CKEditor_Files_to_Storage: function = u'Move_CKEditor_Files_to_Storage' description = u'Transfer files attached to the record with the CKEditor' class SbmALLFUNCDESCR_Move_Files_to_Storage: function = u'Move_Files_to_Storage' description = u'Attach files received from chosen file input element(s)' class SbmALLFUNCDESCR_Move_From_Pending: function = u'Move_From_Pending' description = u'' class SbmALLFUNCDESCR_Move_Photos_to_Storage: function = u'Move_Photos_to_Storage' description = u'Attach/edit the pictures uploaded with the "create_photos_manager_interface()" function' class SbmALLFUNCDESCR_Move_Revised_Files_to_Storage: function = u'Move_Revised_Files_to_Storage' description = u'Revise files initially uploaded with "Move_Files_to_Storage"' class SbmALLFUNCDESCR_Move_Uploaded_Files_to_Storage: function = u'Move_Uploaded_Files_to_Storage' description = u'Attach files uploaded with "Create_Upload_Files_Interface"' class SbmALLFUNCDESCR_Move_to_Done: function = u'Move_to_Done' description = None class SbmALLFUNCDESCR_Move_to_Pending: function = u'Move_to_Pending' description = None class SbmALLFUNCDESCR_Notify_URL: function = u'Notify_URL' description = u'Access URL, possibly to post content' class SbmALLFUNCDESCR_Print_Success: function = u'Print_Success' description = u'' class SbmALLFUNCDESCR_Print_Success_APP: function = u'Print_Success_APP' description = u'' class SbmALLFUNCDESCR_Print_Success_Approval_Request: function = u'Print_Success_Approval_Request' description = None class SbmALLFUNCDESCR_Print_Success_DEL: function = u'Print_Success_DEL' description = u'Prepare a message for the user informing them that their record was successfully deleted.' 
class SbmALLFUNCDESCR_Print_Success_MBI: function = u'Print_Success_MBI' description = None class SbmALLFUNCDESCR_Print_Success_SRV: function = u'Print_Success_SRV' description = None class SbmALLFUNCDESCR_Register_Approval_Request: function = u'Register_Approval_Request' description = None class SbmALLFUNCDESCR_Register_Referee_Decision: function = u'Register_Referee_Decision' description = None class SbmALLFUNCDESCR_Report_Number_Generation: function = u'Report_Number_Generation' description = None class SbmALLFUNCDESCR_Second_Report_Number_Generation: function = u'Second_Report_Number_Generation' description = u'Generate a secondary report number for a document.' class SbmALLFUNCDESCR_Send_APP_Mail: function = u'Send_APP_Mail' description = u'' class SbmALLFUNCDESCR_Send_Approval_Request: function = u'Send_Approval_Request' description = None class SbmALLFUNCDESCR_Send_Delete_Mail: function = u'Send_Delete_Mail' description = u'' class SbmALLFUNCDESCR_Send_Modify_Mail: function = u'Send_Modify_Mail' description = None class SbmALLFUNCDESCR_Send_SRV_Mail: function = u'Send_SRV_Mail' description = None class SbmALLFUNCDESCR_Set_Embargo: function = u'Set_Embargo' description = u'Set an embargo on all the documents of a given record.' class SbmALLFUNCDESCR_Set_RN_From_Sysno: function = u'Set_RN_From_Sysno' description = u'Set the value of global rn variable to the report number identified by sysno (recid)' class SbmALLFUNCDESCR_Stamp_Replace_Single_File_Approval: function = u'Stamp_Replace_Single_File_Approval' description = u'Stamp a single file when a document is approved.' class SbmALLFUNCDESCR_Stamp_Uploaded_Files: function = u'Stamp_Uploaded_Files' description = u'Stamp some of the files that were uploaded during a submission.' 
class SbmALLFUNCDESCR_Test_Status: function = u'Test_Status' description = u'' class SbmALLFUNCDESCR_Update_Approval_DB: function = u'Update_Approval_DB' description = None class SbmALLFUNCDESCR_User_is_Record_Owner_or_Curator: function = u'User_is_Record_Owner_or_Curator' description = u'Check if user is owner or special editor of a record' class SbmALLFUNCDESCR_Video_Processing: function = u'Video_Processing' description = None class SbmALLFUNCDESCR_Withdraw_Approval_Request: function = u'Withdraw_Approval_Request' description = None class SbmALLFUNCDESCR_Run_PlotExtractor: function = u'Run_PlotExtractor' description = u'Run PlotExtractor on the current record' class SbmCHECKSData(DataSet): class SbmCHECKS_AUCheck: md = None chefi1 = u'' chefi2 = u'' chdesc = u'function AUCheck(txt) {\r\n var res=1;\r\n tmp=txt.indexOf("\\015");\r\n while (tmp != -1) {\r\n left=txt.substring(0,tmp);\r\n right=txt.substring(tmp+2,txt.length);\r\n txt=left + "\\012" + right;\r\n tmp=txt.indexOf("\\015");\r\n }\r\n tmp=txt.indexOf("\\012");\r\n if (tmp==-1){\r\n line=txt;\r\n txt=\'\';}\r\n else{\r\n line=txt.substring(0,tmp);\r\n txt=txt.substring(tmp+1,txt.length);}\r\n while (line != ""){\r\n coma=line.indexOf(",");\r\n left=line.substring(0,coma);\r\n right=line.substring(coma+1,line.length);\r\n coma2=right.indexOf(",");\r\n space=right.indexOf(" ");\r\n if ((coma==-1)||(left=="")||(right=="")||(space!=0)||(coma2!=-1)){\r\n res=0;\r\n error_log=line;\r\n }\r\n tmp=txt.indexOf("\\012");\r\n if (tmp==-1){\r\n line=txt;\r\n txt=\'\';}\r\n else{\r\n line=txt.substring(0,tmp-1);\r\n txt=txt.substring(tmp+1,txt.length);}\r\n }\r\n if (res == 0){\r\n alert("This author name cannot be managed \\: \\012\\012" + error_log + " \\012\\012It is not in the required format!\\012Put one author per line and a comma (,) between the name and the firstname initial letters. 
\\012The name is going first, followed by the firstname initial letters.\\012Do not forget the whitespace after the comma!!!\\012\\012Example \\: Put\\012\\012Le Meur, J Y \\012Baron, T \\012\\012for\\012\\012Le Meur Jean-Yves & Baron Thomas.");\r\n return 0;\r\n } \r\n return 1; \r\n}' chname = u'AUCheck' cd = datetime.date(1998, 8, 18) class SbmCHECKS_DatCheckNew: md = None chefi1 = u'' chefi2 = u'' chdesc = u'function DatCheckNew(txt) {\r\n var res=1;\r\n if (txt.length != 10){res=0;}\r\n if (txt.indexOf("/") != 2){res=0;}\r\n if (txt.lastIndexOf("/") != 5){res=0;}\r\n tmp=parseInt(txt.substring(0,2),10);\r\n if ((tmp > 31)||(tmp < 1)||(isNaN(tmp))){res=0;}\r\n tmp=parseInt(txt.substring(3,5),10);\r\n if ((tmp > 12)||(tmp < 1)||(isNaN(tmp))){res=0;}\r\n tmp=parseInt(txt.substring(6,10),10);\r\n if ((tmp < 1)||(isNaN(tmp))){res=0;}\r\n if (txt.length == 0){res=1;}\r\n if (res == 0){\r\n alert("Please enter a correct Date \\012Format: dd/mm/yyyy");\r\n return 0;\r\n }\r\n return 1; \r\n}' chname = u'DatCheckNew' cd = None class SbmFORMATEXTENSIONData(DataSet): class SbmFORMATEXTENSION_CompressedPostScript_psgz: FILE_EXTENSION = u'.ps.gz' FILE_FORMAT = u'Compressed PostScript' class SbmFORMATEXTENSION_GIF_gif: FILE_EXTENSION = u'.gif' FILE_FORMAT = u'GIF' class SbmFORMATEXTENSION_HTML_htm: FILE_EXTENSION = u'.htm' FILE_FORMAT = u'HTML' class SbmFORMATEXTENSION_HTML_html: FILE_EXTENSION = u'.html' FILE_FORMAT = u'HTML' class SbmFORMATEXTENSION_JPEG_jpeg: FILE_EXTENSION = u'.jpeg' FILE_FORMAT = u'JPEG' class SbmFORMATEXTENSION_JPEG_jpg: FILE_EXTENSION = u'.jpg' FILE_FORMAT = u'JPEG' class SbmFORMATEXTENSION_Latex_tex: FILE_EXTENSION = u'.tex' FILE_FORMAT = u'Latex' class SbmFORMATEXTENSION_PDF_pdf: FILE_EXTENSION = u'.pdf' FILE_FORMAT = u'PDF' class SbmFORMATEXTENSION_PPT_ppt: FILE_EXTENSION = u'.ppt' FILE_FORMAT = u'PPT' class SbmFORMATEXTENSION_PostScript_ps: FILE_EXTENSION = u'.ps' FILE_FORMAT = u'PostScript' class SbmFORMATEXTENSION_TarredTextar_tar: 
FILE_EXTENSION = u'.tar' FILE_FORMAT = u'Tarred Tex (.tar)' class SbmFORMATEXTENSION_Text_txt: FILE_EXTENSION = u'.txt' FILE_FORMAT = u'Text' class SbmFORMATEXTENSION_WORD_doc: FILE_EXTENSION = u'.doc' FILE_FORMAT = u'WORD' class SbmFUNDESCData(DataSet): class SbmFUNDESC_CaseEDS_casedefault: function = u'CaseEDS' param = u'casedefault' class SbmFUNDESC_CaseEDS_casesteps: function = u'CaseEDS' param = u'casesteps' class SbmFUNDESC_CaseEDS_casevalues: function = u'CaseEDS' param = u'casevalues' class SbmFUNDESC_CaseEDS_casevariable: function = u'CaseEDS' param = u'casevariable' class SbmFUNDESC_CreateModifyInterface_fieldnameMBI: function = u'Create_Modify_Interface' param = u'fieldnameMBI' class SbmFUNDESC_CreateUploadFilesInterface_canAddFormatDoctypes: function = u'Create_Upload_Files_Interface' param = u'canAddFormatDoctypes' class SbmFUNDESC_CreateUploadFilesInterface_canCommentDoctypes: function = u'Create_Upload_Files_Interface' param = u'canCommentDoctypes' class SbmFUNDESC_CreateUploadFilesInterface_canDeleteDoctypes: function = u'Create_Upload_Files_Interface' param = u'canDeleteDoctypes' class SbmFUNDESC_CreateUploadFilesInterface_canDescribeDoctypes: function = u'Create_Upload_Files_Interface' param = u'canDescribeDoctypes' class SbmFUNDESC_CreateUploadFilesInterface_canKeepDoctypes: function = u'Create_Upload_Files_Interface' param = u'canKeepDoctypes' class SbmFUNDESC_CreateUploadFilesInterface_canNameNewFiles: function = u'Create_Upload_Files_Interface' param = u'canNameNewFiles' class SbmFUNDESC_CreateUploadFilesInterface_canRenameDoctypes: function = u'Create_Upload_Files_Interface' param = u'canRenameDoctypes' class SbmFUNDESC_CreateUploadFilesInterface_canRestrictDoctypes: function = u'Create_Upload_Files_Interface' param = u'canRestrictDoctypes' class SbmFUNDESC_CreateUploadFilesInterface_canReviseDoctypes: function = u'Create_Upload_Files_Interface' param = u'canReviseDoctypes' class SbmFUNDESC_CreateUploadFilesInterface_commentLabel: function = 
u'Create_Upload_Files_Interface' param = u'commentLabel' class SbmFUNDESC_CreateUploadFilesInterface_createRelatedFormats: function = u'Create_Upload_Files_Interface' param = u'createRelatedFormats' class SbmFUNDESC_CreateUploadFilesInterface_defaultFilenameDoctypes: function = u'Create_Upload_Files_Interface' param = u'defaultFilenameDoctypes' class SbmFUNDESC_CreateUploadFilesInterface_descriptionLabel: function = u'Create_Upload_Files_Interface' param = u'descriptionLabel' class SbmFUNDESC_CreateUploadFilesInterface_doctypes: function = u'Create_Upload_Files_Interface' param = u'doctypes' class SbmFUNDESC_CreateUploadFilesInterface_endDoc: function = u'Create_Upload_Files_Interface' param = u'endDoc' class SbmFUNDESC_CreateUploadFilesInterface_fileLabel: function = u'Create_Upload_Files_Interface' param = u'fileLabel' class SbmFUNDESC_CreateUploadFilesInterface_filenameLabel: function = u'Create_Upload_Files_Interface' param = u'filenameLabel' class SbmFUNDESC_CreateUploadFilesInterface_keepDefault: function = u'Create_Upload_Files_Interface' param = u'keepDefault' class SbmFUNDESC_CreateUploadFilesInterface_maxFilesDoctypes: function = u'Create_Upload_Files_Interface' param = u'maxFilesDoctypes' class SbmFUNDESC_CreateUploadFilesInterface_maxsize: function = u'Create_Upload_Files_Interface' param = u'maxsize' class SbmFUNDESC_CreateUploadFilesInterface_minsize: function = u'Create_Upload_Files_Interface' param = u'minsize' class SbmFUNDESC_CreateUploadFilesInterface_restrictionLabel: function = u'Create_Upload_Files_Interface' param = u'restrictionLabel' class SbmFUNDESC_CreateUploadFilesInterface_restrictions: function = u'Create_Upload_Files_Interface' param = u'restrictions' class SbmFUNDESC_CreateUploadFilesInterface_showLinks: function = u'Create_Upload_Files_Interface' param = u'showLinks' class SbmFUNDESC_CreateUploadFilesInterface_startDoc: function = u'Create_Upload_Files_Interface' param = u'startDoc' class SbmFUNDESC_GetInfo_authorFile: function = 
u'Get_Info' param = u'authorFile' class SbmFUNDESC_GetInfo_emailFile: function = u'Get_Info' param = u'emailFile' class SbmFUNDESC_GetInfo_titleFile: function = u'Get_Info' param = u'titleFile' class SbmFUNDESC_GetRecid_recordsearchpattern: function = u'Get_Recid' param = u'record_search_pattern' class SbmFUNDESC_GetReportNumber_edsrn: function = u'Get_Report_Number' param = u'edsrn' class SbmFUNDESC_LinkRecords_directRelationship: function = u'Link_Records' param = u'directRelationship' class SbmFUNDESC_LinkRecords_edsrn: function = u'Link_Records' param = u'edsrn' class SbmFUNDESC_LinkRecords_edsrn2: function = u'Link_Records' param = u'edsrn2' class SbmFUNDESC_LinkRecords_keeporiginaledsrn2: function = u'Link_Records' param = u'keep_original_edsrn2' class SbmFUNDESC_LinkRecords_reverseRelationship: function = u'Link_Records' param = u'reverseRelationship' class SbmFUNDESC_MailApprovalRequesttoReferee_categfileappreq: function = u'Mail_Approval_Request_to_Referee' param = u'categ_file_appreq' class SbmFUNDESC_MailApprovalRequesttoReferee_categrnseekappreq: function = u'Mail_Approval_Request_to_Referee' param = u'categ_rnseek_appreq' class SbmFUNDESC_MailApprovalRequesttoReferee_edsrn: function = u'Mail_Approval_Request_to_Referee' param = u'edsrn' class SbmFUNDESC_MailApprovalWithdrawntoReferee_categfilewithd: function = u'Mail_Approval_Withdrawn_to_Referee' param = u'categ_file_withd' class SbmFUNDESC_MailApprovalWithdrawntoReferee_categrnseekwithd: function = u'Mail_Approval_Withdrawn_to_Referee' param = u'categ_rnseek_withd' class SbmFUNDESC_MailSubmitter_authorfile: function = u'Mail_Submitter' param = u'authorfile' class SbmFUNDESC_MailSubmitter_edsrn: function = u'Mail_Submitter' param = u'edsrn' class SbmFUNDESC_MailSubmitter_emailFile: function = u'Mail_Submitter' param = u'emailFile' class SbmFUNDESC_MailSubmitter_newrnin: function = u'Mail_Submitter' param = u'newrnin' class SbmFUNDESC_MailSubmitter_status: function = u'Mail_Submitter' param = u'status' 
class SbmFUNDESC_MailSubmitter_titleFile: function = u'Mail_Submitter' param = u'titleFile' class SbmFUNDESC_MakeDummyMARCXMLRecord_dummyreccreatetpl: function = u'Make_Dummy_MARC_XML_Record' param = u'dummyrec_create_tpl' class SbmFUNDESC_MakeDummyMARCXMLRecord_dummyrecsourcetpl: function = u'Make_Dummy_MARC_XML_Record' param = u'dummyrec_source_tpl' class SbmFUNDESC_MakeModifyRecord_modifyTemplate: function = u'Make_Modify_Record' param = u'modifyTemplate' class SbmFUNDESC_MakeModifyRecord_sourceTemplate: function = u'Make_Modify_Record' param = u'sourceTemplate' class SbmFUNDESC_MakeRecord_createTemplate: function = u'Make_Record' param = u'createTemplate' class SbmFUNDESC_MakeRecord_sourceTemplate: function = u'Make_Record' param = u'sourceTemplate' class SbmFUNDESC_MoveCKEditorFilestoStorage_inputfields: function = u'Move_CKEditor_Files_to_Storage' param = u'input_fields' class SbmFUNDESC_MoveFilestoStorage_documenttype: function = u'Move_Files_to_Storage' param = u'documenttype' class SbmFUNDESC_MoveFilestoStorage_iconsize: function = u'Move_Files_to_Storage' param = u'iconsize' class SbmFUNDESC_MoveFilestoStorage_pathsanddoctypes: function = u'Move_Files_to_Storage' param = u'paths_and_doctypes' class SbmFUNDESC_MoveFilestoStorage_pathsandrestrictions: function = u'Move_Files_to_Storage' param = u'paths_and_restrictions' class SbmFUNDESC_MoveFilestoStorage_pathsandsuffixes: function = u'Move_Files_to_Storage' param = u'paths_and_suffixes' class SbmFUNDESC_MoveFilestoStorage_rename: function = u'Move_Files_to_Storage' param = u'rename' class SbmFUNDESC_MovePhotostoStorage_iconformat: function = u'Move_Photos_to_Storage' param = u'iconformat' class SbmFUNDESC_MovePhotostoStorage_iconsize: function = u'Move_Photos_to_Storage' param = u'iconsize' class SbmFUNDESC_MoveRevisedFilestoStorage_createIconDoctypes: function = u'Move_Revised_Files_to_Storage' param = u'createIconDoctypes' class SbmFUNDESC_MoveRevisedFilestoStorage_createRelatedFormats: function = 
u'Move_Revised_Files_to_Storage' param = u'createRelatedFormats' class SbmFUNDESC_MoveRevisedFilestoStorage_elementNameToDoctype: function = u'Move_Revised_Files_to_Storage' param = u'elementNameToDoctype' class SbmFUNDESC_MoveRevisedFilestoStorage_iconsize: function = u'Move_Revised_Files_to_Storage' param = u'iconsize' class SbmFUNDESC_MoveRevisedFilestoStorage_keepPreviousVersionDoctypes: function = u'Move_Revised_Files_to_Storage' param = u'keepPreviousVersionDoctypes' class SbmFUNDESC_MoveUploadedFilestoStorage_createIconDoctypes: function = u'Move_Uploaded_Files_to_Storage' param = u'createIconDoctypes' class SbmFUNDESC_MoveUploadedFilestoStorage_forceFileRevision: function = u'Move_Uploaded_Files_to_Storage' param = u'forceFileRevision' class SbmFUNDESC_MoveUploadedFilestoStorage_iconsize: function = u'Move_Uploaded_Files_to_Storage' param = u'iconsize' class SbmFUNDESC_NotifyURL_adminemails: function = u'Notify_URL' param = u'admin_emails' class SbmFUNDESC_NotifyURL_attemptsleeptime: function = u'Notify_URL' param = u'attempt_sleeptime' class SbmFUNDESC_NotifyURL_attempttimes: function = u'Notify_URL' param = u'attempt_times' class SbmFUNDESC_NotifyURL_contenttype: function = u'Notify_URL' param = u'content_type' class SbmFUNDESC_NotifyURL_data: function = u'Notify_URL' param = u'data' class SbmFUNDESC_NotifyURL_url: function = u'Notify_URL' param = u'url' class SbmFUNDESC_NotifyURL_user: function = u'Notify_URL' param = u'user' class SbmFUNDESC_PrintSuccessAPP_decisionfile: function = u'Print_Success_APP' param = u'decision_file' class SbmFUNDESC_PrintSuccessAPP_newrnin: function = u'Print_Success_APP' param = u'newrnin' class SbmFUNDESC_PrintSuccess_edsrn: function = u'Print_Success' param = u'edsrn' class SbmFUNDESC_PrintSuccess_newrnin: function = u'Print_Success' param = u'newrnin' class SbmFUNDESC_PrintSuccess_status: function = u'Print_Success' param = u'status' class SbmFUNDESC_RegisterApprovalRequest_categfileappreq: function = 
u'Register_Approval_Request' param = u'categ_file_appreq' class SbmFUNDESC_RegisterApprovalRequest_categrnseekappreq: function = u'Register_Approval_Request' param = u'categ_rnseek_appreq' class SbmFUNDESC_RegisterApprovalRequest_notefileappreq: function = u'Register_Approval_Request' param = u'note_file_appreq' class SbmFUNDESC_RegisterRefereeDecision_decisionfile: function = u'Register_Referee_Decision' param = u'decision_file' class SbmFUNDESC_ReportNumberGeneration_autorngen: function = u'Report_Number_Generation' param = u'autorngen' class SbmFUNDESC_ReportNumberGeneration_counterpath: function = u'Report_Number_Generation' param = u'counterpath' class SbmFUNDESC_ReportNumberGeneration_edsrn: function = u'Report_Number_Generation' param = u'edsrn' class SbmFUNDESC_ReportNumberGeneration_initialvalue: function = u'Report_Number_Generation' param = u'initialvalue' class SbmFUNDESC_ReportNumberGeneration_nblength: function = u'Report_Number_Generation' param = u'nblength' class SbmFUNDESC_ReportNumberGeneration_rnformat: function = u'Report_Number_Generation' param = u'rnformat' class SbmFUNDESC_ReportNumberGeneration_rnin: function = u'Report_Number_Generation' param = u'rnin' class SbmFUNDESC_ReportNumberGeneration_yeargen: function = u'Report_Number_Generation' param = u'yeargen' class SbmFUNDESC_SecondReportNumberGeneration_2ndcounterpath: function = u'Second_Report_Number_Generation' param = u'2nd_counterpath' class SbmFUNDESC_SecondReportNumberGeneration_2ndnblength: function = u'Second_Report_Number_Generation' param = u'2nd_nb_length' class SbmFUNDESC_SecondReportNumberGeneration_2ndrncategfile: function = u'Second_Report_Number_Generation' param = u'2nd_rncateg_file' class SbmFUNDESC_SecondReportNumberGeneration_2ndrnfile: function = u'Second_Report_Number_Generation' param = u'2nd_rn_file' class SbmFUNDESC_SecondReportNumberGeneration_2ndrnformat: function = u'Second_Report_Number_Generation' param = u'2nd_rn_format' class 
SbmFUNDESC_SecondReportNumberGeneration_2ndrnyeargen: function = u'Second_Report_Number_Generation' param = u'2nd_rn_yeargen' class SbmFUNDESC_SendAPPMail_addressesAPP: function = u'Send_APP_Mail' param = u'addressesAPP' class SbmFUNDESC_SendAPPMail_categformatAPP: function = u'Send_APP_Mail' param = u'categformatAPP' class SbmFUNDESC_SendAPPMail_commentsfile: function = u'Send_APP_Mail' param = u'comments_file' class SbmFUNDESC_SendAPPMail_decisionfile: function = u'Send_APP_Mail' param = u'decision_file' class SbmFUNDESC_SendAPPMail_edsrn: function = u'Send_APP_Mail' param = u'edsrn' class SbmFUNDESC_SendAPPMail_newrnin: function = u'Send_APP_Mail' param = u'newrnin' class SbmFUNDESC_SendApprovalRequest_addressesDAM: function = u'Send_Approval_Request' param = u'addressesDAM' class SbmFUNDESC_SendApprovalRequest_authorfile: function = u'Send_Approval_Request' param = u'authorfile' class SbmFUNDESC_SendApprovalRequest_categformatDAM: function = u'Send_Approval_Request' param = u'categformatDAM' class SbmFUNDESC_SendApprovalRequest_directory: function = u'Send_Approval_Request' param = u'directory' class SbmFUNDESC_SendApprovalRequest_titleFile: function = u'Send_Approval_Request' param = u'titleFile' class SbmFUNDESC_SendDeleteMail_edsrn: function = u'Send_Delete_Mail' param = u'edsrn' class SbmFUNDESC_SendDeleteMail_recordmanagers: function = u'Send_Delete_Mail' param = u'record_managers' class SbmFUNDESC_SendModifyMail_addressesMBI: function = u'Send_Modify_Mail' param = u'addressesMBI' class SbmFUNDESC_SendModifyMail_emailFile: function = u'Send_Modify_Mail' param = u'emailFile' class SbmFUNDESC_SendModifyMail_fieldnameMBI: function = u'Send_Modify_Mail' param = u'fieldnameMBI' class SbmFUNDESC_SendModifyMail_sourceDoc: function = u'Send_Modify_Mail' param = u'sourceDoc' class SbmFUNDESC_SendSRVMail_addressesSRV: function = u'Send_SRV_Mail' param = u'addressesSRV' class SbmFUNDESC_SendSRVMail_categformatDAM: function = u'Send_SRV_Mail' param = u'categformatDAM' 
class SbmFUNDESC_SendSRVMail_emailFile: function = u'Send_SRV_Mail' param = u'emailFile' class SbmFUNDESC_SendSRVMail_noteFile: function = u'Send_SRV_Mail' param = u'noteFile' class SbmFUNDESC_SetEmbargo_datefile: function = u'Set_Embargo' param = u'date_file' class SbmFUNDESC_SetEmbargo_dateformat: function = u'Set_Embargo' param = u'date_format' class SbmFUNDESC_SetRNFromSysno_edsrn: function = u'Set_RN_From_Sysno' param = u'edsrn' class SbmFUNDESC_SetRNFromSysno_recordsearchpattern: function = u'Set_RN_From_Sysno' param = u'record_search_pattern' class SbmFUNDESC_SetRNFromSysno_reptags: function = u'Set_RN_From_Sysno' param = u'rep_tags' class SbmFUNDESC_StampReplaceSingleFileApproval_filetobestamped: function = u'Stamp_Replace_Single_File_Approval' param = u'file_to_be_stamped' class SbmFUNDESC_StampReplaceSingleFileApproval_latextemplate: function = u'Stamp_Replace_Single_File_Approval' param = u'latex_template' class SbmFUNDESC_StampReplaceSingleFileApproval_latextemplatevars: function = u'Stamp_Replace_Single_File_Approval' param = u'latex_template_vars' class SbmFUNDESC_StampReplaceSingleFileApproval_layer: function = u'Stamp_Replace_Single_File_Approval' param = u'layer' class SbmFUNDESC_StampReplaceSingleFileApproval_newfilename: function = u'Stamp_Replace_Single_File_Approval' param = u'new_file_name' class SbmFUNDESC_StampReplaceSingleFileApproval_stamp: function = u'Stamp_Replace_Single_File_Approval' param = u'stamp' class SbmFUNDESC_StampReplaceSingleFileApproval_switchfile: function = u'Stamp_Replace_Single_File_Approval' param = u'switch_file' class SbmFUNDESC_StampUploadedFiles_filestobestamped: function = u'Stamp_Uploaded_Files' param = u'files_to_be_stamped' class SbmFUNDESC_StampUploadedFiles_latextemplate: function = u'Stamp_Uploaded_Files' param = u'latex_template' class SbmFUNDESC_StampUploadedFiles_latextemplatevars: function = u'Stamp_Uploaded_Files' param = u'latex_template_vars' class SbmFUNDESC_StampUploadedFiles_layer: function = 
u'Stamp_Uploaded_Files' param = u'layer' class SbmFUNDESC_StampUploadedFiles_stamp: function = u'Stamp_Uploaded_Files' param = u'stamp' class SbmFUNDESC_StampUploadedFiles_switchfile: function = u'Stamp_Uploaded_Files' param = u'switch_file' class SbmFUNDESC_UpdateApprovalDB_categformatDAM: function = u'Update_Approval_DB' param = u'categformatDAM' class SbmFUNDESC_UpdateApprovalDB_decisionfile: function = u'Update_Approval_DB' param = u'decision_file' class SbmFUNDESC_UserisRecordOwnerorCurator_curatorflag: function = u'User_is_Record_Owner_or_Curator' param = u'curator_flag' class SbmFUNDESC_UserisRecordOwnerorCurator_curatorrole: function = u'User_is_Record_Owner_or_Curator' param = u'curator_role' class SbmFUNDESC_VideoProcessing_aspect: function = u'Video_Processing' param = u'aspect' class SbmFUNDESC_VideoProcessing_batchtemplate: function = u'Video_Processing' param = u'batch_template' class SbmFUNDESC_VideoProcessing_title: function = u'Video_Processing' param = u'title' class SbmFUNDESC_WithdrawApprovalRequest_categfilewithd: function = u'Withdraw_Approval_Request' param = u'categ_file_withd' class SbmFUNDESC_WithdrawApprovalRequest_categrnseekwithd: function = u'Withdraw_Approval_Request' param = u'categ_rnseek_withd' class SbmFUNDESC_Run_PlotExtractor_with_docname: function = u'Run_PlotExtractor' param = u'with_docname' class SbmFUNDESC_Run_PlotExtractor_with_doctype: function = u'Run_PlotExtractor' param = u'with_doctype' class SbmFUNDESC_Run_PlotExtractor_with_docformat: function = u'Run_PlotExtractor' param = u'with_docformat' class SbmFUNDESC_Run_PlotExtractor_extract_plots_switch_file: function = u'Run_PlotExtractor' param = u'extract_plots_switch_file' class SbmGFILERESULTData(DataSet): class SbmGFILERESULT_CompressedPostScript_gzipcompresseddata: RESULT = u'gzip compressed data' FORMAT = u'Compressed PostScript' class SbmGFILERESULT_GIF_GIF: RESULT = u'GIF' FORMAT = u'GIF' class SbmGFILERESULT_HTML_HTMLdocument: RESULT = u'HTML document' FORMAT = 
u'HTML' class SbmGFILERESULT_JPEG_JPEGimage: RESULT = u'JPEG image' FORMAT = u'JPEG' class SbmGFILERESULT_PDF_PDFdocument: RESULT = u'PDF document' FORMAT = u'PDF' class SbmGFILERESULT_PostScript_HPPrinterJobLanguagedata: RESULT = u'HP Printer Job Language data' FORMAT = u'PostScript' class SbmGFILERESULT_PostScript_PostScriptdocument: RESULT = u'PostScript document' FORMAT = u'PostScript' class SbmGFILERESULT_PostScript_data: RESULT = u'data ' FORMAT = u'PostScript' class SbmGFILERESULT_TarredTextar_tararchive: RESULT = u'tar archive' FORMAT = u'Tarred Tex (.tar)' class SbmGFILERESULT_WORD_data: RESULT = u'data' FORMAT = u'WORD' class SbmGFILERESULT_jpg_JPEGimage: RESULT = u'JPEG image' FORMAT = u'jpg'
gpl-2.0
gregcowell/BAM
btt/auth/views.py
2
13985
"""Module that handles auth views.""" from flask import ( render_template, url_for, request, redirect, session, flash, Blueprint, current_app) from flask_login import login_required, login_user, logout_user, current_user from werkzeug.urls import url_parse from datetime import datetime from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.exc import IntegrityError from ..database import User, Group, MemberShip from .forms import ( LoginForm, RegistrationForm, ChangeEmailForm, ChangePasswordForm, PasswordResetForm, PasswordResetRequestForm, DeleteUserForm, ChangeGroupForm, ModifyGroupNameForm, DeleteGroupMemberForm, AddGroupMemberForm) from ..database import db from ..email import send_email auth = Blueprint('auth', __name__) @auth.before_app_request def before_request(): """Check user is confirmed before every request.""" if current_user.is_authenticated: # current_user.ping() if not current_user.confirmed \ and request.endpoint \ and request.blueprint != 'auth' \ and request.endpoint != 'static': return redirect(url_for('auth.unconfirmed')) @auth.route('/unconfirmed') def unconfirmed(): """User is unconfirmed.""" if current_user.is_anonymous or current_user.confirmed: return redirect(url_for('web.home_page')) return render_template('auth/unconfirmed.html') @auth.route('/confirm') @login_required def resend_confirmation(): """Resend account confirmation.""" token = current_user.generate_confirmation_token() send_email(current_user.email, 'Confirm Your Account', 'auth/mail/confirm', user=current_user, token=token) flash('A new confirmation email has been sent to you by email.') return redirect(url_for('web.home_page')) @auth.route('/login', methods=['GET', 'POST']) def login(): """Login and return home page.""" if current_user.is_authenticated: return redirect(url_for('web.home_page')) form = LoginForm() app = current_app._get_current_object() if form.validate_on_submit(): user = User.query.filter_by(email=form.email.data).first() if user is not None and 
user.verify_password(form.password.data): login_user(user, form.remember_me.data) app.logger.info('%s logged in successfully', user.email) session['login_time'] = datetime.utcnow() # Need this because login_user/remember_me is ignored # when using server side sessions session.permanent = form.remember_me.data next_page = request.args.get('next') if not next_page or url_parse(next_page).netloc != '': next_page = url_for('web.home_page') return redirect(next_page) app.logger.info('%s failed to log in', form.email.data) flash('Invalid email or password.') return render_template('auth/login.html', form=form) @auth.route('/logout') def logout(): """Log out and return login form.""" app = current_app._get_current_object() app.logger.info('%s logged out', current_user.email) logout_user() flash('You have been logged out.') return redirect(url_for('auth.login')) @auth.route('/register', methods=['GET', 'POST']) def register(): """User registration form.""" if current_user.is_authenticated: return redirect(url_for('web.home_page')) form = RegistrationForm() if form.validate_on_submit(): user = User( email=form.email.data, password=form.password.data) group = Group(name='Group:' + form.email.data) group.add_categories_accounts() membership = MemberShip(user=user, group=group, active=True) db.session.add(user) db.session.add(group) db.session.add(membership) db.session.commit() token = user.generate_confirmation_token() send_email( user.email, 'Confirm Your Account', 'auth/mail/confirm', user=user, token=token) flash('A confirmation email has been sent to you by email.') return redirect(url_for('auth.login')) return render_template('auth/register.html', form=form) @auth.route('/confirm/<token>') @login_required def confirm(token): """Confirm account.""" if current_user.confirmed: return redirect(url_for('web.home_page')) if current_user.confirm(token): db.session.commit() flash('You have confirmed your account. 
Thanks!') else: flash('The confirmation link is invalid or has expired.') return redirect(url_for('web.home_page')) @auth.route('/change-password', methods=['GET', 'POST']) @login_required def change_password(): """Change password.""" form = ChangePasswordForm() if form.validate_on_submit(): if current_user.verify_password(form.old_password.data): current_user.password = form.password.data db.session.add(current_user) db.session.commit() flash('Your password has been updated.') return redirect(url_for('web.home_page')) else: flash('Invalid password.') return render_template("auth/change_password.html", form=form) @auth.route('/reset', methods=['GET', 'POST']) def password_reset_request(): """Password reset request.""" if not current_user.is_anonymous: return redirect(url_for('web.home_page')) form = PasswordResetRequestForm() if form.validate_on_submit(): user = User.query.filter_by(email=form.email.data).first() if user: token = user.generate_reset_token() send_email(user.email, 'Reset Your Password', 'auth/mail/reset_password', user=user, token=token, next=request.args.get('next')) flash('An email with instructions to reset your password has been ' 'sent to you.') return redirect(url_for('auth.login')) return render_template('auth/reset_password.html', form=form) @auth.route('/reset/<token>', methods=['GET', 'POST']) def password_reset(token): """Password reset.""" if not current_user.is_anonymous: return redirect(url_for('web.home_page')) form = PasswordResetForm() if form.validate_on_submit(): if User.reset_password(token, form.password.data): db.session.commit() flash('Your password has been updated.') return redirect(url_for('auth.login')) else: return redirect(url_for('web.home_page')) return render_template('auth/reset_password.html', form=form) @auth.route('/change_email', methods=['GET', 'POST']) @login_required def change_email_request(): """Change email request.""" form = ChangeEmailForm() if form.validate_on_submit(): if 
current_user.verify_password(form.password.data): new_email = form.email.data token = current_user.generate_email_change_token(new_email) send_email(new_email, 'Confirm your email address', 'auth/mail/change_email', user=current_user, token=token) flash('An email with instructions to confirm your new email ' 'address has been sent to you.') return redirect(url_for('web.home_page')) else: flash('Invalid email or password.') return render_template("auth/change_email.html", form=form) @auth.route('/change_email/<token>') @login_required def change_email(token): """Change email.""" if current_user.change_email(token): db.session.commit() flash('Your email address has been updated.') else: flash('Invalid request.') return redirect(url_for('web.home_page')) @auth.route('/delete_user', methods=['GET', 'POST']) @login_required def delete_user(): """Delete user and data.""" form = DeleteUserForm() if form.validate_on_submit(): if form.yes.data: for membership in current_user.memberships: group = membership.group if len(group.memberships) == 1: # Last member of group db.session.delete(group) db.session.delete(current_user) db.session.commit() elif form.no.data: pass return redirect(url_for('web.home_page')) return render_template('auth/delete_user.html', form=form, menu="home") @auth.route('/change_group', methods=['GET', 'POST']) @login_required def change_group(): """Change active group.""" form = ChangeGroupForm() memberships = current_user.memberships form.groups.choices = [] for member in memberships: form.groups.choices.append((str(member.group.group_id), '')) if member.active: active_member = member form.groups.default = str(active_member.group.group_id) print(form.groups.default) if form.validate_on_submit(): if form.submit.data: new_active_group_id = int(form.groups.data) active_member.active = False for member in memberships: if member.group.group_id == new_active_group_id: member.active = True db.session.add(member) db.session.commit() elif form.cancel.data: pass 
return redirect(url_for('web.home_page')) form.process() # Do this after validate_on_submit or breaks CSRF token return render_template( 'auth/change_group.html', memberships=memberships, form=form, menu="myaccount") @auth.route('/modify_group_name/<int:group_id>/', methods=['GET', 'POST']) @login_required def modify_group_name(group_id): """Modify group name.""" form = ModifyGroupNameForm() try: group = ( db.session.query(Group) .filter(Group.group_id == group_id) .filter(MemberShip.group_id == Group.group_id) .filter(MemberShip.id == current_user.id) .one()) except NoResultFound: flash('Invalid group.') return redirect(url_for('auth.change_group')) form.group_name.default = group.name if form.validate_on_submit(): if form.modify.data: group.name = form.group_name.data db.session.add(group) db.session.commit() if form.cancel.data: pass return redirect(url_for('auth.change_group')) form.process() # Do this after validate_on_submit or breaks CSRF token return render_template( 'auth/modify_group_name.html', form=form, menu="myaccount") @auth.route('/delete_group_member/<int:group_id>/', methods=['GET', 'POST']) @login_required def delete_group_member(group_id): """Delete group member.""" form = DeleteGroupMemberForm() try: group = ( db.session.query(Group) .filter(Group.group_id == group_id) .filter(MemberShip.group_id == Group.group_id) .filter(MemberShip.id == current_user.id) .one()) members = ( MemberShip.query.filter(MemberShip.group_id == group.group_id) .all()) except NoResultFound: flash('Invalid group.') return redirect(url_for('auth.change_group')) form.del_email.choices = [] for member in members: if member.user != current_user: email = member.user.email form.del_email.choices.append((email, email)) if form.validate_on_submit(): if form.delete.data: for member in members: if member.user.email in form.del_email.data: other_membership = ( MemberShip.query .filter(MemberShip.id == member.user.id) .filter(MemberShip.group_id != group.group_id) .first()) 
other_membership.active = True db.session.delete(member) db.session.commit() if form.cancel.data: pass return redirect(url_for('auth.change_group')) form.process() # Do this after validate_on_submit or breaks CSRF token return render_template( 'auth/delete_group_member.html', form=form, menu="myaccount") @auth.route('/add_group_member/<int:group_id>/', methods=['GET', 'POST']) @login_required def add_group_member(group_id): """Add group member.""" form = AddGroupMemberForm() try: group = ( db.session.query(Group) .filter(Group.group_id == group_id) .filter(MemberShip.group_id == Group.group_id) .filter(MemberShip.id == current_user.id) .one()) except NoResultFound: flash('Invalid group.') return redirect(url_for('auth.change_group')) form.add_email.default = '[email protected]' if form.validate_on_submit(): if form.add.data: try: new_user = ( User.query.filter_by(email=form.add_email.data).one()) except NoResultFound: flash('Email does not belong to an existing user.') return redirect(url_for('auth.change_group')) try: new_member = MemberShip( user=new_user, group=group, active=False) db.session.add(new_member) db.session.commit() except IntegrityError: db.session.rollback() flash('Email is already a member.') return redirect(url_for('auth.change_group')) if form.cancel.data: pass return redirect(url_for('auth.change_group')) form.process() # Do this after validate_on_submit or breaks CSRF token return render_template( 'auth/add_group_member.html', form=form, menu="myaccount")
apache-2.0
emonty/deb-vhd-util
tools/xm-test/lib/XmTestLib/network_utils.py
42
1828
#!/usr/bin/python # Copyright (C) International Business Machines Corp., 2005 # Author: Murillo F. Bernardes <[email protected]> from XmTestLib import * def count_eth(console): try: run = console.runCmd("ifconfig -a | grep eth") except ConsoleError, e: FAIL(str(e)) return len(run['output'].splitlines()) def get_state(domain_name, number): s, o = traceCommand("xm network-list %s | awk '/^%d/ {print $5}'" % (domain_name, number)) print o if s != 0: FAIL("network-list failed") if o == "": return 0 else: return int(o) def network_attach(domain_name, console, bridge=None): eths_before = count_eth(console) if bridge: status, output = traceCommand("xm network-attach %s bridge=%s" % (domain_name, bridge)) else: status, output = traceCommand("xm network-attach %s" % domain_name) if status != 0: return -1, "xm network-attach returned invalid %i != 0" % status eths_after = count_eth(console) if (eths_after != (eths_before+1)): return -2, "Network device is not actually connected to domU" return 0, None def network_detach(domain_name, console, num=0): eths_before = count_eth(console) status, output = traceCommand("xm network-detach %s %d" % (domain_name, num)) if status != 0: return -1, "xm network-detach returned invalid %i != 0" % status for i in range(10): if get_state(domain_name, num) == 0: break time.sleep(1) else: FAIL("network-detach failed: device did not disappear") eths_after = count_eth(console) if eths_after != (eths_before-1): return -2, "Network device was not actually disconnected from domU" return 0, None
gpl-2.0
twisted/mantissa
xmantissa/test/test_offering.py
1
12563
""" Tests for xmantissa.offering. """ from zope.interface import Interface, implements from zope.interface.verify import verifyClass, verifyObject from twisted.trial import unittest from axiom.store import Store from axiom import item, attributes, userbase from axiom.plugins.mantissacmd import Mantissa from axiom.dependency import installedOn from xmantissa import ixmantissa, offering from xmantissa.web import SiteConfiguration from xmantissa.ampserver import AMPConfiguration from xmantissa.plugins.baseoff import baseOffering, ampOffering from xmantissa.plugins.offerings import peopleOffering class TestSiteRequirement(item.Item): typeName = 'test_site_requirement' schemaVersion = 1 attr = attributes.integer() class TestAppPowerup(item.Item): typeName = 'test_app_powerup' schemaVersion = 1 attr = attributes.integer() class ITestInterface(Interface): """ An interface to which no object can be adapted. Used to ensure failed adaption causes a powerup to be installed. """ class OfferingPluginTest(unittest.TestCase): """ A simple test for getOffering. """ def test_getOfferings(self): """ getOffering should use the Twisted plugin system to load the plugins provided with Mantissa. Since this is dynamic, we can't assert anything about the complete list, but we can at least verify that all the plugins that should be there, are. 
""" foundOfferings = list(offering.getOfferings()) allExpectedOfferings = [baseOffering, ampOffering, peopleOffering] for expected in allExpectedOfferings: self.assertIn(expected, foundOfferings) class OfferingTest(unittest.TestCase): def setUp(self): self.store = Store(filesdir=self.mktemp()) Mantissa().installSite(self.store, u"localhost", u"", False) Mantissa().installAdmin(self.store, u'admin', u'localhost', u'asdf') self.userbase = self.store.findUnique(userbase.LoginSystem) self.adminAccount = self.userbase.accountByAddress( u'admin', u'localhost') off = offering.Offering( name=u'test_offering', description=u'This is an offering which tests the offering ' 'installation mechanism', siteRequirements=[(ITestInterface, TestSiteRequirement)], appPowerups=[TestAppPowerup], installablePowerups=[], loginInterfaces=[], themes=[], ) self.offering = off # Add this somewhere that the plugin system is going to see it. self._originalGetOfferings = offering.getOfferings offering.getOfferings = self.fakeGetOfferings def fakeGetOfferings(self): """ Return standard list of offerings, plus one extra. """ return list(self._originalGetOfferings()) + [self.offering] def tearDown(self): """ Remove the temporary offering. """ offering.getOfferings = self._originalGetOfferings def test_installOffering(self): """ L{OfferingConfiguration.installOffering} should install the given offering on the Mantissa server. 
        """
        conf = self.adminAccount.avatars.open().findUnique(
            offering.OfferingConfiguration)
        io = conf.installOffering(self.offering, None)

        # InstalledOffering should be returned, and installed on the site store
        foundIO = self.store.findUnique(
            offering.InstalledOffering,
            offering.InstalledOffering.offeringName == self.offering.name)
        self.assertIdentical(io, foundIO)

        # Site store requirements should be on the site store
        tsr = self.store.findUnique(TestSiteRequirement)
        # NOTE(review): the second argument to failUnless is a failure
        # *message*, not a value to compare against, so this only asserts
        # that installedOn(tsr) is truthy.  The intent was probably
        # assertIdentical(installedOn(tsr), self.store) -- confirm upstream.
        self.failUnless(installedOn(tsr), self.store)

        # App store should have been created
        appStore = self.userbase.accountByAddress(self.offering.name, None)
        self.assertNotEqual(appStore, None)

        # App store requirements should be on the app store
        ss = appStore.avatars.open()
        tap = ss.findUnique(TestAppPowerup)
        # NOTE(review): same failUnless message-vs-operand issue as above.
        self.failUnless(installedOn(tap), ss)

        # A second install of the same offering must raise loudly rather
        # than silently duplicating state.
        self.assertRaises(offering.OfferingAlreadyInstalled,
                          conf.installOffering, self.offering, None)


    def test_getInstalledOfferingNames(self):
        """
        L{getInstalledOfferingNames} should list the names of offerings
        installed on the given site store.
        """
        # Before anything extra is installed, only the base offering exists.
        self.assertEquals(offering.getInstalledOfferingNames(self.store),
                          ['mantissa-base'])

        # Reuse the install test to get the test offering installed.
        self.test_installOffering()

        installed = offering.getInstalledOfferingNames(self.store)
        installed.sort()
        expected = [u"mantissa-base", u"test_offering"]
        expected.sort()
        self.assertEquals(installed, expected)


    def test_getInstalledOfferings(self):
        """
        getInstalledOfferings should return a mapping of offering name to
        L{Offering} object for each installed offering on a given site
        store.
        """
        self.assertEquals(offering.getInstalledOfferings(self.store),
                          {baseOffering.name: baseOffering})
        self.test_installOffering()
        self.assertEquals(offering.getInstalledOfferings(self.store),
                          {baseOffering.name: baseOffering,
                           self.offering.name: self.offering})


    def test_isAppStore(self):
        """
        isAppStore returns True for stores with offerings installed on them,
        False otherwise.
        """
        conf = self.adminAccount.avatars.open().findUnique(
            offering.OfferingConfiguration)
        conf.installOffering(self.offering, None)
        app = self.userbase.accountByAddress(self.offering.name, None)
        self.failUnless(offering.isAppStore(app.avatars.open()))
        self.failIf(offering.isAppStore(self.adminAccount.avatars.open()))



class FakeOfferingTechnician(object):
    """
    In-memory only implementation of the offering inspection/installation
    API.

    @ivar installedOfferings: A mapping from offering names to corresponding
        L{IOffering} providers which have been passed to C{installOffering}.
    """
    implements(ixmantissa.IOfferingTechnician)

    def __init__(self):
        # Maps offering name -> IOffering provider; see installOffering.
        self.installedOfferings = {}


    def installOffering(self, offering):
        """
        Add the given L{IOffering} provider to the list of installed
        offerings.
        """
        self.installedOfferings[offering.name] = offering


    def getInstalledOfferings(self):
        """
        Return a copy of the internal installed offerings mapping.
        """
        return self.installedOfferings.copy()


    def getInstalledOfferingNames(self):
        """
        Return the names from the internal installed offerings mapping.
        """
        return self.installedOfferings.keys()



class OfferingTechnicianTestMixin:
    """
    L{unittest.TestCase} mixin which defines unit tests for classes which
    implement L{IOfferingTechnician}.

    Subclasses must override L{createTechnician}.

    @ivar offerings: A C{list} of L{Offering} instances which will be
        installed by the tests this mixin defines.
    """
    offerings = [
        offering.Offering(u'an offering', None, [], [], [], [], []),
        offering.Offering(u'another offering', None, [], [], [], [], [])]

    def createTechnician(self):
        """
        @return: An L{IOfferingTechnician} provider which will be tested.
        """
        raise NotImplementedError(
            "%r did not implement createTechnician" % (self.__class__,))


    def test_interface(self):
        """
        L{createTechnician} returns an instance of a type which declares that
        it implements L{IOfferingTechnician} and has all of the methods and
        attributes defined by the interface.
        """
        technician = self.createTechnician()
        technicianType = type(technician)
        self.assertTrue(
            ixmantissa.IOfferingTechnician.implementedBy(technicianType))
        self.assertTrue(
            verifyClass(ixmantissa.IOfferingTechnician, technicianType))
        self.assertTrue(
            verifyObject(ixmantissa.IOfferingTechnician, technician))


    def test_getInstalledOfferingNames(self):
        """
        The L{ixmantissa.IOfferingTechnician.getInstalledOfferingNames}
        implementation returns a C{list} of C{unicode} strings, each element
        giving the name of an offering which has been installed.
        """
        offer = self.createTechnician()
        self.assertEqual(offer.getInstalledOfferingNames(), [])

        # Install each dummy offering in turn, verifying the reported name
        # list after every installation.
        expected = []
        for dummyOffering in self.offerings:
            offer.installOffering(dummyOffering)
            expected.append(dummyOffering.name)
            expected.sort()
            installed = offer.getInstalledOfferingNames()
            installed.sort()
            self.assertEqual(installed, expected)


    def test_getInstalledOfferings(self):
        """
        The L{ixmantissa.IOfferingTechnician.getInstalledOfferings}
        implementation returns a C{dict} mapping C{unicode} offering names to
        the corresponding L{IOffering} providers.
        """
        offer = self.createTechnician()
        self.assertEqual(offer.getInstalledOfferings(), {})

        expected = {}
        for dummyOffering in self.offerings:
            offer.installOffering(dummyOffering)
            expected[dummyOffering.name] = dummyOffering
            self.assertEqual(offer.getInstalledOfferings(), expected)



class OfferingAdapterTests(unittest.TestCase, OfferingTechnicianTestMixin):
    """
    Tests for L{offering.OfferingAdapter}.
    """
    def setUp(self):
        """
        Hook offering plugin discovery so that only the fake offerings the
        test wants exist.
        """
        self.origGetOfferings = offering.getOfferings
        offering.getOfferings = self.getOfferings


    def tearDown(self):
        """
        Restore the original L{getOfferings} function.
        """
        offering.getOfferings = self.origGetOfferings


    def getOfferings(self):
        """
        Return some dummy offerings, as defined by C{self.offerings}.
        """
        return self.offerings


    def createTechnician(self):
        """
        Create an L{offering.OfferingAdapter}.
        """
        store = Store()
        technician = offering.OfferingAdapter(store)
        return technician



class FakeOfferingTechnicianTests(unittest.TestCase,
                                  OfferingTechnicianTestMixin):
    """
    Tests (ie, verification) for L{FakeOfferingTechnician}.
    """
    def createTechnician(self):
        """
        Create a L{FakeOfferingTechnician}.
        """
        return FakeOfferingTechnician()



class BaseOfferingTests(unittest.TestCase):
    """
    Tests for the base Mantissa offering,
    L{xmantissa.plugins.baseoff.baseOffering}.
    """
    def test_interface(self):
        """
        C{baseOffering} provides L{IOffering}.
        """
        self.assertTrue(verifyObject(ixmantissa.IOffering, baseOffering))


    def test_staticContentPath(self):
        """
        C{baseOffering.staticContentPath} gives the location of a directory
        which has I{mantissa.css} in it.
        """
        self.assertTrue(
            baseOffering.staticContentPath.child('mantissa.css').exists())


    def _siteRequirementTest(self, offering, cls):
        """
        Verify that installing C{offering} results in an instance of the
        given Item subclass being installed as a powerup for
        IProtocolFactoryFactory.
        """
        store = Store()
        ixmantissa.IOfferingTechnician(store).installOffering(offering)
        factories = list(store.powerupsFor(ixmantissa.IProtocolFactoryFactory))
        for factory in factories:
            if isinstance(factory, cls):
                break
        else:
            # for/else: reached only when no factory matched cls.
            self.fail("No instance of %r in %r" % (cls, factories))


    def test_siteConfiguration(self):
        """
        L{SiteConfiguration} powers up a store for L{IProtocolFactoryFactory}
        when L{baseOffering} is installed on that store.
        """
        self._siteRequirementTest(baseOffering, SiteConfiguration)


    def test_ampConfiguration(self):
        """
        L{AMPConfiguration} powers up a store for L{IProtocolFactoryFactory}
        when L{ampOffering} is installed on that store.
        """
        self._siteRequirementTest(ampOffering, AMPConfiguration)
mit
Hikari-Kubota/sandbox-todo-ts-angular
node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
1825
17014
# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """GYP backend that generates Eclipse CDT settings files. This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML files that can be imported into an Eclipse CDT project. The XML file contains a list of include paths and symbols (i.e. defines). Because a full .cproject definition is not created by this generator, it's not possible to properly define the include dirs and symbols for each file individually. Instead, one set of includes/symbols is generated for the entire project. This works fairly well (and is a vast improvement in general), but may still result in a few indexer issues here and there. This generator has no automated tests, so expect it to be broken. """ from xml.sax.saxutils import escape import os.path import subprocess import gyp import gyp.common import gyp.msvs_emulation import shlex import xml.etree.cElementTree as ET generator_wants_static_library_dependencies_adjusted = False generator_default_variables = { } for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']: # Some gyp steps fail if these are empty(!), so we convert them to variables generator_default_variables[dirname] = '$' + dirname for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', 'CONFIGURATION_NAME']: generator_default_variables[unused] = '' # Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as # part of the path when dealing with generated headers. This value will be # replaced dynamically for each configuration. 
generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
    '$SHARED_INTERMEDIATE_DIR'


def CalculateVariables(default_variables, params):
  """Seed default_variables from generator flags and the detected OS flavor."""
  generator_flags = params.get('generator_flags', {})
  for key, val in generator_flags.items():
    default_variables.setdefault(key, val)
  flavor = gyp.common.GetFlavor(params)
  default_variables.setdefault('OS', flavor)
  if flavor == 'win':
    # Copy additional generator configuration data from VS, which is shared
    # by the Eclipse generator.
    import gyp.generator.msvs as msvs_generator
    # NOTE(review): the two assignments below bind function-local names, not
    # the module-level attributes gyp reads from generator modules; as
    # written the fetched values are discarded.  Verify whether a `global`
    # declaration was intended upstream.
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)


def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  generator_flags = params.get('generator_flags', {})
  if generator_flags.get('adjust_static_libraries', False):
    global generator_wants_static_library_dependencies_adjusted
    generator_wants_static_library_dependencies_adjusted = True


def GetAllIncludeDirectories(target_list, target_dicts,
                             shared_intermediate_dirs, config_name, params,
                             compiler_path):
  """Calculate the set of include directories to be used.

  Returns:
    A list including all the include_dir's specified for every target followed
    by any include directories that were added as cflag compiler options.
  """

  gyp_includes_set = set()
  compiler_includes_list = []

  # Find compiler's default include dirs.
  if compiler_path:
    command = shlex.split(compiler_path)
    # -E -v makes the compiler print its search path on stderr without
    # needing real input; stdin is '-' (empty).
    command.extend(['-E', '-xc++', '-v', '-'])
    proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # communicate()[1] is stderr, where the search path is printed.
    output = proc.communicate()[1]
    # Extract the list of include dirs from the output, which has this format:
    #   ...
    #   #include "..." search starts here:
    #   #include <...> search starts here:
    #    /usr/include/c++/4.6
    #    /usr/local/include
    #   End of search list.
    #   ...
    in_include_list = False
    for line in output.splitlines():
      if line.startswith('#include'):
        in_include_list = True
        continue
      if line.startswith('End of search list.'):
        break
      if in_include_list:
        include_dir = line.strip()
        if include_dir not in compiler_includes_list:
          compiler_includes_list.append(include_dir)

  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    generator_flags = params.get('generator_flags', {})
  for target_name in target_list:
    target = target_dicts[target_name]
    if config_name in target['configurations']:
      config = target['configurations'][config_name]

      # Look for any include dirs that were explicitly added via cflags. This
      # may be done in gyp files to force certain includes to come at the end.
      # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
      # remove this.
      if flavor == 'win':
        msvs_settings = gyp.msvs_emulation.MsvsSettings(target,
                                                        generator_flags)
        cflags = msvs_settings.GetCflags(config_name)
      else:
        cflags = config['cflags']
      for cflag in cflags:
        if cflag.startswith('-I'):
          include_dir = cflag[2:]
          if include_dir not in compiler_includes_list:
            compiler_includes_list.append(include_dir)

      # Find standard gyp include dirs.
      # NOTE(review): has_key is Python 2 only; this file predates py3 gyp.
      if config.has_key('include_dirs'):
        include_dirs = config['include_dirs']
        for shared_intermediate_dir in shared_intermediate_dirs:
          for include_dir in include_dirs:
            include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
                                              shared_intermediate_dir)
            if not os.path.isabs(include_dir):
              # Relative include dirs are resolved against the directory of
              # the .gyp file that defined the target.
              base_dir = os.path.dirname(target_name)

              include_dir = base_dir + '/' + include_dir
              include_dir = os.path.abspath(include_dir)

            gyp_includes_set.add(include_dir)

  # Generate a list that has all the include dirs.
  all_includes_list = list(gyp_includes_set)
  all_includes_list.sort()
  for compiler_include in compiler_includes_list:
    if not compiler_include in gyp_includes_set:
      all_includes_list.append(compiler_include)

  # All done.
  return all_includes_list


def GetCompilerPath(target_list, data, options):
  """Determine a command that can be used to invoke the compiler.

  Returns:
    If this is a gyp project that has explicit make settings, try to determine
    the compiler from that. Otherwise, see if a compiler was specified via the
    CC_target environment variable.
  """
  # First, see if the compiler is configured in make's settings.
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings_dict = data[build_file].get('make_global_settings', {})
  # NOTE(review): despite the name, make_global_settings is iterated as
  # (key, value) pairs, i.e. a list of two-element entries; the {} default
  # simply yields nothing.  Confirm against gyp's input format.
  for key, value in make_global_settings_dict:
    if key in ['CC', 'CXX']:
      return os.path.join(options.toplevel_dir, value)

  # Check to see if the compiler was specified as an environment variable.
  for key in ['CC_target', 'CC', 'CXX']:
    compiler = os.environ.get(key)
    if compiler:
      return compiler

  # Fall back to plain gcc on PATH.
  return 'gcc'


def GetAllDefines(target_list, target_dicts, data, config_name, params,
                  compiler_path):
  """Calculate the defines for a project.

  Returns:
    A dict that includes explict defines declared in gyp files along with all
    of the default defines that the compiler uses.
  """
  # Get defines declared in the gyp files.
  all_defines = {}
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    generator_flags = params.get('generator_flags', {})
  for target_name in target_list:
    target = target_dicts[target_name]

    if flavor == 'win':
      msvs_settings = gyp.msvs_emulation.MsvsSettings(target,
                                                      generator_flags)
      extra_defines = msvs_settings.GetComputedDefines(config_name)
    else:
      extra_defines = []
    if config_name in target['configurations']:
      config = target['configurations'][config_name]
      target_defines = config['defines']
    else:
      target_defines = []
    for define in target_defines + extra_defines:
      # Normalize "NAME" to "NAME=1"; first writer of a name wins.
      split_define = define.split('=', 1)
      if len(split_define) == 1:
        split_define.append('1')
      if split_define[0].strip() in all_defines:
        # Already defined
        continue
      all_defines[split_define[0].strip()] = split_define[1].strip()

  # Get default compiler defines (if possible).
  if flavor == 'win':
    return all_defines  # Default defines already processed in the loop above.
  if compiler_path:
    command = shlex.split(compiler_path)
    # -E -dM dumps every predefined macro as "#define KEY VAL" lines.
    command.extend(['-E', '-dM', '-'])
    cpp_proc = subprocess.Popen(args=command, cwd='.',
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
    cpp_output = cpp_proc.communicate()[0]
    cpp_lines = cpp_output.split('\n')
    for cpp_line in cpp_lines:
      if not cpp_line.strip():
        continue
      # Split "#define KEY VAL" into at most 3 parts; a bare "#define KEY"
      # becomes KEY=1.
      cpp_line_parts = cpp_line.split(' ', 2)
      key = cpp_line_parts[1]
      if len(cpp_line_parts) >= 3:
        val = cpp_line_parts[2]
      else:
        val = '1'
      all_defines[key] = val

  return all_defines


def WriteIncludePaths(out, eclipse_langs, include_dirs):
  """Write the includes section of a CDT settings export file."""
  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
            'settingswizards.IncludePaths">\n')
  out.write('    <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write('    <language name="%s">\n' % lang)
    for include_dir in include_dirs:
      out.write('      <includepath workspace_path="false">%s</includepath>\n' %
                include_dir)
    out.write('    </language>\n')
  out.write('  </section>\n')


def WriteMacros(out, eclipse_langs, defines):
  """Write the macros section of a CDT settings export file."""
  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
            'settingswizards.Macros">\n')
  out.write('    <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write('    <language name="%s">\n' % lang)
    # iterkeys() is Python 2 only, consistent with the rest of this file.
    for key in sorted(defines.iterkeys()):
      out.write('      <macro><name>%s</name><value>%s</value></macro>\n' %
                (escape(key), escape(defines[key])))
    out.write('    </language>\n')
  out.write('  </section>\n')


def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  """Emit the CDT settings file and classpath file for one configuration."""
  options = params['options']
  generator_flags = params.get('generator_flags', {})

  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
                           config_name)

  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
  # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
  # SHARED_INTERMEDIATE_DIR. Include both possible locations.
  shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
                              os.path.join(toplevel_build, 'gen')]

  GenerateCdtSettingsFile(target_list,
                          target_dicts,
                          data,
                          params,
                          config_name,
                          os.path.join(toplevel_build,
                                       'eclipse-cdt-settings.xml'),
                          options,
                          shared_intermediate_dirs)
  GenerateClasspathFile(target_list,
                        target_dicts,
                        options.toplevel_dir,
                        toplevel_build,
                        os.path.join(toplevel_build,
                                     'eclipse-classpath.xml'))


def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
                            config_name, out_name, options,
                            shared_intermediate_dirs):
  """Write the eclipse-cdt-settings.xml file for one configuration."""
  gyp.common.EnsureDirExists(out_name)
  with open(out_name, 'w') as out:
    out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
    out.write('<cdtprojectproperties>\n')

    eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
                     'GNU C++', 'GNU C', 'Assembly']
    compiler_path = GetCompilerPath(target_list, data, options)
    include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
                                            shared_intermediate_dirs,
                                            config_name, params, compiler_path)
    WriteIncludePaths(out, eclipse_langs, include_dirs)
    defines = GetAllDefines(target_list, target_dicts, data, config_name,
                            params, compiler_path)
    WriteMacros(out, eclipse_langs, defines)

    out.write('</cdtprojectproperties>\n')


def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
                          toplevel_build, out_name):
  '''Generates a classpath file suitable for symbol navigation and code
  completion of Java code (such as in Android projects) by finding all
  .java and .jar files used as action inputs.'''
  gyp.common.EnsureDirExists(out_name)
  result = ET.Element('classpath')

  def AddElements(kind, paths):
    # First, we need to normalize the paths so they are all relative to the
    # toplevel dir.
    rel_paths = set()
    for path in paths:
      if os.path.isabs(path):
        rel_paths.add(os.path.relpath(path, toplevel_dir))
      else:
        rel_paths.add(path)

    for path in sorted(rel_paths):
      entry_element = ET.SubElement(result, 'classpathentry')
      entry_element.set('kind', kind)
      entry_element.set('path', path)

  AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
  AddElements('src', GetJavaSourceDirs(target_list, target_dicts,
                                       toplevel_dir))
  # Include the standard JRE container and a dummy out folder
  AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
  # Include a dummy out folder so that Eclipse doesn't use the default /bin
  # folder in the root of the project.
  AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])

  ET.ElementTree(result).write(out_name)


def GetJavaJars(target_list, target_dicts, toplevel_dir):
  '''Generates a sequence of all .jars used as inputs.'''
  # NOTE(review): toplevel_dir is accepted but never read here; relative
  # paths are resolved against the target's .gyp directory instead.
  for target_name in target_list:
    target = target_dicts[target_name]
    for action in target.get('actions', []):
      for input_ in action['inputs']:
        if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
          if os.path.isabs(input_):
            yield input_
          else:
            yield os.path.join(os.path.dirname(target_name), input_)


def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
  '''Generates a sequence of all likely java package root directories.'''
  for target_name in target_list:
    target = target_dicts[target_name]
    for action in target.get('actions', []):
      for input_ in action['inputs']:
        if (os.path.splitext(input_)[1] == '.java' and
            not input_.startswith('$')):
          dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
                                              input_))
          # If there is a parent 'src' or 'java' folder, navigate up to it -
          # these are canonical package root names in Chromium. This will
          # break if 'src' or 'java' exists in the package structure. This
          # could be further improved by inspecting the java file for the
          # package name if this proves to be too fragile in practice.
          parent_search = dir_
          while os.path.basename(parent_search) not in ['src', 'java']:
            parent_search, _ = os.path.split(parent_search)
            if not parent_search or parent_search == toplevel_dir:
              # Didn't find a known root, just return the original path
              yield dir_
              break
          else:
            # while/else: reached only when a 'src'/'java' root was found.
            yield parent_search


def GenerateOutput(target_list, target_dicts, data, params):
  """Generate an XML settings file that can be imported into a CDT project."""

  if params['options'].generator_output:
    raise NotImplementedError("--generator_output not implemented for eclipse")

  user_config = params.get('generator_flags', {}).get('config', None)
  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    # No explicit config requested: emit output for every configuration of
    # the first target.
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    for config_name in config_names:
      GenerateOutputForConfig(target_list, target_dicts, data, params,
                              config_name)
mit
danieljaouen/ansible
lib/ansible/modules/cloud/vmware/vmware_vmkernel_ip_config.py
41
3638
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright: (c) 2015, Joseph Callen <jcallen () csc.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: vmware_vmkernel_ip_config
short_description: Configure the VMkernel IP Address
description:
    - Configure the VMkernel IP Address
version_added: 2.0
author:
- Joseph Callen (@jcpowermac)
- Russell Teague (@mtnbikenc)
notes:
    - Tested on vSphere 5.5
requirements:
    - "python >= 2.6"
    - PyVmomi
options:
    vmk_name:
        description:
            - VMkernel interface name
        required: True
    ip_address:
        description:
            - IP address to assign to VMkernel interface
        required: True
    subnet_mask:
        description:
            - Subnet Mask to assign to VMkernel interface
        required: True
extends_documentation_fragment: vmware.documentation
'''

EXAMPLES = '''
# Example command from Ansible Playbook

- name: Configure IP address on ESX host
  vmware_vmkernel_ip_config:
    hostname: '{{ esxi_hostname }}'
    username: '{{ esxi_username }}'
    password: '{{ esxi_password }}'
    vmk_name: vmk0
    ip_address: 10.0.0.10
    subnet_mask: 255.255.255.0
  delegate_to: localhost
'''

try:
    from pyVmomi import vim, vmodl
    HAS_PYVMOMI = True
except ImportError:
    HAS_PYVMOMI = False

from ansible.module_utils.basic import AnsibleModule
# NOTE(review): this import re-binds HAS_PYVMOMI, shadowing the flag set by
# the try/except above -- verify both sources agree before relying on it.
from ansible.module_utils.vmware import HAS_PYVMOMI, connect_to_api, get_all_objs, vmware_argument_spec


def configure_vmkernel_ip_address(host_system, vmk_name, ip_address, subnet_mask):
    # Apply the requested static IP/subnet to the named vmkernel NIC.
    # Returns True when a change was pushed to the host, False otherwise.
    host_config_manager = host_system.configManager
    host_network_system = host_config_manager.networkSystem

    for vnic in host_network_system.networkConfig.vnic:
        if vnic.device == vmk_name:
            spec = vnic.spec
            # Only reconfigure when the address actually differs; note the
            # subnet mask alone changing does not trigger an update.
            if spec.ip.ipAddress != ip_address:
                spec.ip.dhcp = False
                spec.ip.ipAddress = ip_address
                spec.ip.subnetMask = subnet_mask
                host_network_system.UpdateVirtualNic(vmk_name, spec)
                return True
    return False


def main():
    # Module entry point: parse arguments, connect to vSphere, and apply the
    # IP configuration to the first discovered host system.
    argument_spec = vmware_argument_spec()
    argument_spec.update(dict(vmk_name=dict(required=True, type='str'),
                              ip_address=dict(required=True, type='str'),
                              subnet_mask=dict(required=True, type='str')))

    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)

    if not HAS_PYVMOMI:
        module.fail_json(msg='pyvmomi is required for this module')

    vmk_name = module.params['vmk_name']
    ip_address = module.params['ip_address']
    subnet_mask = module.params['subnet_mask']

    try:
        content = connect_to_api(module, False)
        host = get_all_objs(content, [vim.HostSystem])
        if not host:
            module.fail_json(msg="Unable to locate Physical Host.")
        # NOTE(review): dict.keys()[0] is Python 2 only (py3 returns a view);
        # also this silently picks an arbitrary host when several match.
        host_system = host.keys()[0]
        changed = configure_vmkernel_ip_address(host_system, vmk_name, ip_address, subnet_mask)
        module.exit_json(changed=changed)
    except vmodl.RuntimeFault as runtime_fault:
        module.fail_json(msg=runtime_fault.msg)
    except vmodl.MethodFault as method_fault:
        module.fail_json(msg=method_fault.msg)
    except Exception as e:
        module.fail_json(msg=str(e))


if __name__ == '__main__':
    main()
gpl-3.0
iver333/phantomjs
src/qt/qtwebkit/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py
117
39195
# Copyright (c) 2011 Google Inc. All rights reserved. # Copyright (c) 2009 Apple Inc. All rights reserved. # Copyright (c) 2010 Research In Motion Limited. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# # WebKit's Python module for interacting with Bugzilla import logging import mimetypes import re import StringIO import socket import urllib from datetime import datetime # used in timestamp() from .attachment import Attachment from .bug import Bug from webkitpy.common.config import committers import webkitpy.common.config.urls as config_urls from webkitpy.common.net.credentials import Credentials from webkitpy.common.system.user import User from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup, BeautifulStoneSoup, SoupStrainer _log = logging.getLogger(__name__) class EditUsersParser(object): def __init__(self): self._group_name_to_group_string_cache = {} def _login_and_uid_from_row(self, row): first_cell = row.find("td") # The first row is just headers, we skip it. if not first_cell: return None # When there were no results, we have a fake "<none>" entry in the table. if first_cell.find(text="<none>"): return None # Otherwise the <td> contains a single <a> which contains the login name or a single <i> with the string "<none>". anchor_tag = first_cell.find("a") login = unicode(anchor_tag.string).strip() user_id = int(re.search(r"userid=(\d+)", str(anchor_tag['href'])).group(1)) return (login, user_id) def login_userid_pairs_from_edit_user_results(self, results_page): soup = BeautifulSoup(results_page, convertEntities=BeautifulStoneSoup.HTML_ENTITIES) results_table = soup.find(id="admin_table") login_userid_pairs = [self._login_and_uid_from_row(row) for row in results_table('tr')] # Filter out None from the logins. 
return filter(lambda pair: bool(pair), login_userid_pairs) def _group_name_and_string_from_row(self, row): label_element = row.find('label') group_string = unicode(label_element['for']) group_name = unicode(label_element.find('strong').string).rstrip(':') return (group_name, group_string) def user_dict_from_edit_user_page(self, page): soup = BeautifulSoup(page, convertEntities=BeautifulStoneSoup.HTML_ENTITIES) user_table = soup.find("table", {'class': 'main'}) user_dict = {} for row in user_table('tr'): label_element = row.find('label') if not label_element: continue # This must not be a row we know how to parse. if row.find('table'): continue # Skip the <tr> holding the groups table. key = label_element['for'] if "group" in key: key = "groups" value = user_dict.get('groups', set()) # We must be parsing a "tr" inside the inner group table. (group_name, _) = self._group_name_and_string_from_row(row) if row.find('input', {'type': 'checkbox', 'checked': 'checked'}): value.add(group_name) else: value = unicode(row.find('td').string).strip() user_dict[key] = value return user_dict def _group_rows_from_edit_user_page(self, edit_user_page): soup = BeautifulSoup(edit_user_page, convertEntities=BeautifulSoup.HTML_ENTITIES) return soup('td', {'class': 'groupname'}) def group_string_from_name(self, edit_user_page, group_name): # Bugzilla uses "group_NUMBER" strings, which may be different per install # so we just look them up once and cache them. if not self._group_name_to_group_string_cache: rows = self._group_rows_from_edit_user_page(edit_user_page) name_string_pairs = map(self._group_name_and_string_from_row, rows) self._group_name_to_group_string_cache = dict(name_string_pairs) return self._group_name_to_group_string_cache[group_name] def timestamp(): return datetime.now().strftime("%Y%m%d%H%M%S") # A container for all of the logic for making and parsing bugzilla queries. 
class BugzillaQueries(object):
    """Builds bugs.webkit.org buglist/request query URLs and parses the
    resulting HTML/XML pages into bug ids, Bug objects and attachment ids.

    All network access goes through the owning Bugzilla instance's
    mechanize browser (see _load_query).
    """

    def __init__(self, bugzilla):
        self._bugzilla = bugzilla

    def _is_xml_bugs_form(self, form):
        # ClientForm.HTMLForm.find_control throws if the control is not found,
        # so we do a manual search instead:
        return "xml" in [control.id for control in form.controls]

    # This is kinda a hack.  There is probably a better way to get this information from bugzilla.
    def _parse_result_count(self, results_page):
        """Return the number of results reported by a buglist results page.

        Bugzilla spells zero as "Zarro" and one as "One" in its result-count
        banner, hence the special cases.
        """
        result_count_text = BeautifulSoup(results_page).find(attrs={'class': 'bz_result_count'}).string
        result_count_parts = result_count_text.strip().split(" ")
        if result_count_parts[0] == "Zarro":
            return 0
        if result_count_parts[0] == "One":
            return 1
        return int(result_count_parts[0])

    # Note: _load_query, _fetch_bug and _fetch_bugs_from_advanced_query
    # are the only methods which access self._bugzilla.
    def _load_query(self, query):
        # Authenticates first so private bugs are visible in results.
        self._bugzilla.authenticate()
        full_url = "%s%s" % (config_urls.bug_server_url, query)
        return self._bugzilla.browser.open(full_url)

    def _fetch_bugs_from_advanced_query(self, query):
        """Run an advanced buglist.cgi query and return a list of Bug objects."""
        results_page = self._load_query(query)
        # Some simple searches can return a single result: Bugzilla redirects
        # straight to show_bug.cgi, which we detect from the final URL.
        results_url = results_page.geturl()
        if results_url.find("/show_bug.cgi?id=") != -1:
            bug_id = int(results_url.split("=")[-1])
            return [self._fetch_bug(bug_id)]
        if not self._parse_result_count(results_page):
            return []
        # Bugzilla results pages have an "XML" submit button at the bottom
        # which can be used to get an XML page containing all of the <bug> elements.
        # This is slighty lame that this assumes that _load_query used
        # self._bugzilla.browser and that it's in an acceptable state.
        self._bugzilla.browser.select_form(predicate=self._is_xml_bugs_form)
        bugs_xml = self._bugzilla.browser.submit()
        return self._bugzilla._parse_bugs_from_xml(bugs_xml)

    def _fetch_bug(self, bug_id):
        return self._bugzilla.fetch_bug(bug_id)

    def _fetch_bug_ids_advanced_query(self, query):
        """Run a buglist.cgi query and return just the bug ids (ints)."""
        soup = BeautifulSoup(self._load_query(query))
        # The contents of the <a> inside the cells in the first column happen
        # to be the bug id.
        return [int(bug_link_cell.find("a").string)
                for bug_link_cell in soup('td', "first-child")]

    def _parse_attachment_ids_request_query(self, page):
        # Pulls attachment ids out of request.cgi review-queue links of the
        # form attachment.cgi?id=NNN&action=review.
        digits = re.compile("\d+")
        attachment_href = re.compile("attachment.cgi\?id=\d+&action=review")
        attachment_links = SoupStrainer("a", href=attachment_href)
        return [int(digits.search(tag["href"]).group(0))
                for tag in BeautifulSoup(page, parseOnlyThese=attachment_links)]

    def _fetch_attachment_ids_request_query(self, query):
        return self._parse_attachment_ids_request_query(self._load_query(query))

    def _parse_quips(self, page):
        soup = BeautifulSoup(page, convertEntities=BeautifulSoup.HTML_ENTITIES)
        quips = soup.find(text=re.compile(r"Existing quips:")).findNext("ul").findAll("li")
        return [unicode(quip_entry.string) for quip_entry in quips]

    def fetch_quips(self):
        """Return the list of quips configured on the Bugzilla server."""
        return self._parse_quips(self._load_query("/quips.cgi?action=show"))

    # List of all r+'d bugs.
    def fetch_bug_ids_from_pending_commit_list(self):
        needs_commit_query_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review%2B"
        return self._fetch_bug_ids_advanced_query(needs_commit_query_url)

    def fetch_bugs_matching_quicksearch(self, search_string):
        # We may want to use a more explicit query than "quicksearch".
        # If quicksearch changes we should probably change to use
        # a normal buglist.cgi?query_format=advanced query.
        quicksearch_url = "buglist.cgi?quicksearch=%s" % urllib.quote(search_string)
        return self._fetch_bugs_from_advanced_query(quicksearch_url)

    # Currently this returns all bugs across all components.
    # In the future we may wish to extend this API to construct more restricted searches.
    def fetch_bugs_matching_search(self, search_string):
        query = "buglist.cgi?query_format=advanced"
        if search_string:
            query += "&short_desc_type=allwordssubstr&short_desc=%s" % urllib.quote(search_string)
        return self._fetch_bugs_from_advanced_query(query)

    def fetch_patches_from_pending_commit_list(self):
        # Flattens the per-bug patch lists into one list (sum with [] seed).
        return sum([self._fetch_bug(bug_id).reviewed_patches()
                    for bug_id in self.fetch_bug_ids_from_pending_commit_list()], [])

    def fetch_bugs_from_review_queue(self, cc_email=None):
        query = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review?"
        if cc_email:
            query += "&emailcc1=1&emailtype1=substring&email1=%s" % urllib.quote(cc_email)
        return self._fetch_bugs_from_advanced_query(query)

    def fetch_bug_ids_from_commit_queue(self):
        commit_queue_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=commit-queue%2B&order=Last+Changed"
        return self._fetch_bug_ids_advanced_query(commit_queue_url)

    def fetch_patches_from_commit_queue(self):
        # This function will only return patches which have valid committers
        # set.  It won't reject patches with invalid committers/reviewers.
        return sum([self._fetch_bug(bug_id).commit_queued_patches()
                    for bug_id in self.fetch_bug_ids_from_commit_queue()], [])

    def fetch_bug_ids_from_review_queue(self):
        review_queue_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review?"
        return self._fetch_bug_ids_advanced_query(review_queue_url)

    # This method will make several requests to bugzilla.
    def fetch_patches_from_review_queue(self, limit=None):
        # [:None] returns the whole array.
        return sum([self._fetch_bug(bug_id).unreviewed_patches()
                    for bug_id in self.fetch_bug_ids_from_review_queue()[:limit]], [])

    # NOTE: This is the only client of _fetch_attachment_ids_request_query
    # This method only makes one request to bugzilla.
    def fetch_attachment_ids_from_review_queue(self):
        review_queue_url = "request.cgi?action=queue&type=review&group=type"
        return self._fetch_attachment_ids_request_query(review_queue_url)

    # This only works if your account has edituser privileges.
    # We could easily parse https://bugs.webkit.org/userprefs.cgi?tab=permissions to
    # check permissions, but bugzilla will just return an error if we don't have them.
    def fetch_login_userid_pairs_matching_substring(self, search_string):
        review_queue_url = "editusers.cgi?action=list&matchvalue=login_name&matchstr=%s&matchtype=substr" % urllib.quote(search_string)
        results_page = self._load_query(review_queue_url)
        # We could pull the EditUsersParser off Bugzilla if needed.
        return EditUsersParser().login_userid_pairs_from_edit_user_results(results_page)

    # FIXME: We should consider adding a BugzillaUser class.
    def fetch_logins_matching_substring(self, search_string):
        pairs = self.fetch_login_userid_pairs_matching_substring(search_string)
        return map(lambda pair: pair[0], pairs)


class Bugzilla(object):
    """A mechanize-driven client for bugs.webkit.org.

    All mutating operations drive Bugzilla's HTML forms through
    self.browser; the exact open/select_form/submit ordering within each
    method matters, as the browser object carries the form state.
    """

    def __init__(self, committers=committers.CommitterList()):
        # NOTE: the mutable default CommitterList is shared across instances
        # that don't pass their own; it is only read here.
        self.authenticated = False
        self.queries = BugzillaQueries(self)
        self.committers = committers
        self.cached_quips = []
        self.edit_user_parser = EditUsersParser()
        self._browser = None

    def _get_browser(self):
        # Lazily constructs the mechanize Browser so that importing this
        # module does not trigger the autoinstaller.
        if not self._browser:
            self.setdefaulttimeout(600)
            from webkitpy.thirdparty.autoinstalled.mechanize import Browser
            self._browser = Browser()
            # Ignore bugs.webkit.org/robots.txt until we fix it to allow this script.
            self._browser.set_handle_robots(False)
        return self._browser

    def _set_browser(self, value):
        self._browser = value

    browser = property(_get_browser, _set_browser)

    def setdefaulttimeout(self, value):
        # Process-wide socket timeout; affects all sockets, not just ours.
        socket.setdefaulttimeout(value)

    def fetch_user(self, user_id):
        """Return the editusers.cgi field dict for user_id (needs edituser privileges)."""
        self.authenticate()
        edit_user_page = self.browser.open(self.edit_user_url_for_id(user_id))
        return self.edit_user_parser.user_dict_from_edit_user_page(edit_user_page)

    def add_user_to_groups(self, user_id, group_names):
        """Check the named group checkboxes on the user's edit form and submit."""
        self.authenticate()
        user_edit_page = self.browser.open(self.edit_user_url_for_id(user_id))
        self.browser.select_form(nr=1)
        for group_name in group_names:
            group_string = self.edit_user_parser.group_string_from_name(user_edit_page, group_name)
            self.browser.find_control(group_string).items[0].selected = True
        self.browser.submit()

    def quips(self):
        # We only fetch and parse the list of quips once per instantiation
        # so that we do not burden bugs.webkit.org.
        if not self.cached_quips:
            self.cached_quips = self.queries.fetch_quips()
        return self.cached_quips

    def bug_url_for_bug_id(self, bug_id, xml=False):
        """Return the show_bug.cgi URL for bug_id (XML variant if xml=True), or None."""
        if not bug_id:
            return None
        content_type = "&ctype=xml&excludefield=attachmentdata" if xml else ""
        return "%sshow_bug.cgi?id=%s%s" % (config_urls.bug_server_url, bug_id, content_type)

    def short_bug_url_for_bug_id(self, bug_id):
        if not bug_id:
            return None
        return "http://webkit.org/b/%s" % bug_id

    def add_attachment_url(self, bug_id):
        return "%sattachment.cgi?action=enter&bugid=%s" % (config_urls.bug_server_url, bug_id)

    def attachment_url_for_id(self, attachment_id, action="view"):
        if not attachment_id:
            return None
        action_param = ""
        if action and action != "view":
            action_param = "&action=%s" % action
        return "%sattachment.cgi?id=%s%s" % (config_urls.bug_server_url, attachment_id, action_param)

    def edit_user_url_for_id(self, user_id):
        return "%seditusers.cgi?action=edit&userid=%s" % (config_urls.bug_server_url, user_id)

    def _parse_attachment_flag(self, element, flag_name, attachment, result_key):
        # Records the flag status (e.g. '+', '-', '?') under flag_name, and
        # the flag setter's email under result_key when the flag is '+'.
        flag = element.find('flag', attrs={'name': flag_name})
        if flag:
            attachment[flag_name] = flag['status']
            if flag['status'] == '+':
                attachment[result_key] = flag['setter']
        # Sadly show_bug.cgi?ctype=xml does not expose the flag modification date.

    def _string_contents(self, soup):
        # WebKit's bugzilla instance uses UTF-8.
        # BeautifulStoneSoup always returns Unicode strings, however
        # the .string method returns a (unicode) NavigableString.
        # NavigableString can confuse other parts of the code, so we
        # convert from NavigableString to a real unicode() object using unicode().
        return unicode(soup.string)

    # Example: 2010-01-20 14:31 PST
    # FIXME: Some bugzilla dates seem to have seconds in them?
    # Python does not support timezones out of the box.
    # Assume that bugzilla always uses PST (which is true for bugs.webkit.org)
    _bugzilla_date_format = "%Y-%m-%d %H:%M:%S"

    @classmethod
    def _parse_date(cls, date_string):
        """Parse a Bugzilla timestamp ("YYYY-MM-DD HH:MM[:SS] TZ") into a
        naive datetime; the timezone token is deliberately discarded."""
        (date, time, time_zone) = date_string.split(" ")
        if time.count(':') == 1:
            # Add seconds into the time.
            time += ':0'
        # Ignore the timezone because python doesn't understand timezones out of the box.
        date_string = "%s %s" % (date, time)
        return datetime.strptime(date_string, cls._bugzilla_date_format)

    def _date_contents(self, soup):
        return self._parse_date(self._string_contents(soup))

    def _parse_attachment_element(self, element, bug_id):
        """Convert one <attachment> XML element into the attachment dict
        consumed by Attachment/Bug."""
        attachment = {}
        attachment['bug_id'] = bug_id
        attachment['is_obsolete'] = (element.has_key('isobsolete') and element['isobsolete'] == "1")
        attachment['is_patch'] = (element.has_key('ispatch') and element['ispatch'] == "1")
        attachment['id'] = int(element.find('attachid').string)
        # FIXME: No need to parse out the url here.
        attachment['url'] = self.attachment_url_for_id(attachment['id'])
        attachment["attach_date"] = self._date_contents(element.find("date"))
        attachment['name'] = self._string_contents(element.find('desc'))
        attachment['attacher_email'] = self._string_contents(element.find('attacher'))
        attachment['type'] = self._string_contents(element.find('type'))
        self._parse_attachment_flag(
                element, 'review', attachment, 'reviewer_email')
        self._parse_attachment_flag(
                element, 'commit-queue', attachment, 'committer_email')
        return attachment

    def _parse_log_descr_element(self, element):
        # One <long_desc> element == one bug comment.
        comment = {}
        comment['comment_email'] = self._string_contents(element.find('who'))
        comment['comment_date'] = self._date_contents(element.find('bug_when'))
        comment['text'] = self._string_contents(element.find('thetext'))
        return comment

    def _parse_bugs_from_xml(self, page):
        soup = BeautifulSoup(page)
        # Without the unicode() call, BeautifulSoup occasionally complains of being
        # passed None for no apparent reason.
        return [Bug(self._parse_bug_dictionary_from_xml(unicode(bug_xml)), self) for bug_xml in soup('bug')]

    def _parse_bug_dictionary_from_xml(self, page):
        """Parse a single show_bug.cgi?ctype=xml page into a bug dict."""
        soup = BeautifulStoneSoup(page, convertEntities=BeautifulStoneSoup.XML_ENTITIES)
        bug = {}
        bug["id"] = int(soup.find("bug_id").string)
        bug["title"] = self._string_contents(soup.find("short_desc"))
        bug["bug_status"] = self._string_contents(soup.find("bug_status"))
        dup_id = soup.find("dup_id")
        if dup_id:
            bug["dup_id"] = self._string_contents(dup_id)
        bug["reporter_email"] = self._string_contents(soup.find("reporter"))
        bug["assigned_to_email"] = self._string_contents(soup.find("assigned_to"))
        bug["cc_emails"] = [self._string_contents(element) for element in soup.findAll('cc')]
        bug["attachments"] = [self._parse_attachment_element(element, bug["id"]) for element in soup.findAll('attachment')]
        bug["comments"] = [self._parse_log_descr_element(element) for element in soup.findAll('long_desc')]
        return bug

    # Makes testing fetch_*_from_bug() possible until we have a better
    # BugzillaNetwork abstraction.
    def _fetch_bug_page(self, bug_id):
        bug_url = self.bug_url_for_bug_id(bug_id, xml=True)
        _log.info("Fetching: %s" % bug_url)
        return self.browser.open(bug_url)

    def fetch_bug_dictionary(self, bug_id):
        try:
            return self._parse_bug_dictionary_from_xml(self._fetch_bug_page(bug_id))
        except KeyboardInterrupt:
            raise
        except:
            # Presumably the fetch failed because the bug is private;
            # authenticate and retry once.  NOTE(review): the bare except also
            # swallows genuine parse errors on the first attempt.
            self.authenticate()
            return self._parse_bug_dictionary_from_xml(self._fetch_bug_page(bug_id))

    # FIXME: A BugzillaCache object should provide all these fetch_ methods.
    def fetch_bug(self, bug_id):
        return Bug(self.fetch_bug_dictionary(bug_id), self)

    def fetch_attachment_contents(self, attachment_id):
        attachment_url = self.attachment_url_for_id(attachment_id)
        # We need to authenticate to download patches from security bugs.
        self.authenticate()
        return self.browser.open(attachment_url).read()

    def _parse_bug_id_from_attachment_page(self, page):
        # The "Up" relation happens to point to the bug.
        up_link = BeautifulSoup(page).find('link', rel='Up')
        if not up_link:
            # This attachment does not exist (or you don't have permissions to
            # view it).
            return None
        match = re.search("show_bug.cgi\?id=(?P<bug_id>\d+)", up_link['href'])
        return int(match.group('bug_id'))

    def bug_id_for_attachment_id(self, attachment_id):
        """Return the id of the bug that owns attachment_id, or None."""
        self.authenticate()
        attachment_url = self.attachment_url_for_id(attachment_id, 'edit')
        _log.info("Fetching: %s" % attachment_url)
        page = self.browser.open(attachment_url)
        return self._parse_bug_id_from_attachment_page(page)

    # FIXME: This should just return Attachment(id), which should be able to
    # lazily fetch needed data.
    def fetch_attachment(self, attachment_id):
        # We could grab all the attachment details off of the attachment edit
        # page but we already have working code to do so off of the bugs page,
        # so re-use that.
        bug_id = self.bug_id_for_attachment_id(attachment_id)
        if not bug_id:
            return None
        attachments = self.fetch_bug(bug_id).attachments(include_obsolete=True)
        for attachment in attachments:
            if attachment.id() == int(attachment_id):
                return attachment
        return None  # This should never be hit.

    def authenticate(self):
        """Log in to Bugzilla (at most once per instance); retries bad
        credentials up to 5 times before raising."""
        if self.authenticated:
            return
        credentials = Credentials(config_urls.bug_server_host, git_prefix="bugzilla")

        attempts = 0
        while not self.authenticated:
            attempts += 1
            username, password = credentials.read_credentials()

            _log.info("Logging in as %s..." % username)
            self.browser.open(config_urls.bug_server_url + "index.cgi?GoAheadAndLogIn=1")
            self.browser.select_form(name="login")
            self.browser['Bugzilla_login'] = username
            self.browser['Bugzilla_password'] = password
            self.browser.find_control("Bugzilla_restrictlogin").items[0].selected = False
            response = self.browser.submit()

            match = re.search("<title>(.+?)</title>", response.read())
            # If the resulting page has a title, and it contains the word
            # "invalid" assume it's the login failure page.
            if match and re.search("Invalid", match.group(1), re.IGNORECASE):
                errorMessage = "Bugzilla login failed: %s" % match.group(1)
                # raise an exception only if this was the last attempt
                if attempts < 5:
                    _log.error(errorMessage)
                else:
                    raise Exception(errorMessage)
            else:
                self.authenticated = True
                self.username = username

    # FIXME: Use enum instead of two booleans
    def _commit_queue_flag(self, mark_for_landing, mark_for_commit_queue):
        """Return the commit-queue flag value to set: '+', '?' or 'X'.

        Downgrades cq+ to cq? when the logged-in user is not a known
        committer in committers.py.
        """
        if mark_for_landing:
            user = self.committers.contributor_by_email(self.username)
            mark_for_commit_queue = True
            if not user:
                _log.warning("Your Bugzilla login is not listed in committers.py. Uploading with cq? instead of cq+")
                mark_for_landing = False
            elif not user.can_commit:
                _log.warning("You're not a committer yet or haven't updated committers.py yet. Uploading with cq? instead of cq+")
                mark_for_landing = False

        if mark_for_landing:
            return '+'
        if mark_for_commit_queue:
            return '?'
        return 'X'

    # FIXME: mark_for_commit_queue and mark_for_landing should be joined into a single commit_flag argument.
    def _fill_attachment_form(self,
                              description,
                              file_object,
                              mark_for_review=False,
                              mark_for_commit_queue=False,
                              mark_for_landing=False,
                              is_patch=False,
                              filename=None,
                              mimetype=None):
        """Fill the currently-selected attachment entry form; the caller
        must have already select_form()'d it and will submit() afterwards."""
        self.browser['description'] = description
        if is_patch:
            self.browser['ispatch'] = ("1",)
        # FIXME: Should this use self._find_select_element_for_flag?
        self.browser['flag_type-1'] = ('?',) if mark_for_review else ('X',)
        self.browser['flag_type-3'] = (self._commit_queue_flag(mark_for_landing, mark_for_commit_queue),)

        filename = filename or "%s.patch" % timestamp()
        if not mimetype:
            mimetypes.add_type('text/plain', '.patch')  # Make sure mimetypes knows about .patch
            mimetype, _ = mimetypes.guess_type(filename)
        if not mimetype:
            mimetype = "text/plain"  # Bugzilla might auto-guess for us and we might not need this?
        self.browser.add_file(file_object, mimetype, filename, 'data')

    def _file_object_for_upload(self, file_or_string):
        # Accepts either a file-like object (returned as-is) or a string,
        # which is wrapped in a StringIO after UTF-8 encoding if needed.
        if hasattr(file_or_string, 'read'):
            return file_or_string
        # Only if file_or_string is not already encoded do we want to encode it.
        if isinstance(file_or_string, unicode):
            file_or_string = file_or_string.encode('utf-8')
        return StringIO.StringIO(file_or_string)

    # timestamp argument is just for unittests.
    def _filename_for_upload(self, file_object, bug_id, extension="txt", timestamp=timestamp):
        if hasattr(file_object, "name"):
            return file_object.name
        return "bug-%s-%s.%s" % (bug_id, timestamp(), extension)

    def add_attachment_to_bug(self, bug_id, file_or_string, description, filename=None, comment_text=None, mimetype=None):
        """Upload a plain (non-patch) attachment to bug_id."""
        self.authenticate()
        _log.info('Adding attachment "%s" to %s' % (description, self.bug_url_for_bug_id(bug_id)))
        self.browser.open(self.add_attachment_url(bug_id))
        self.browser.select_form(name="entryform")
        file_object = self._file_object_for_upload(file_or_string)
        filename = filename or self._filename_for_upload(file_object, bug_id)
        self._fill_attachment_form(description, file_object, filename=filename, mimetype=mimetype)
        if comment_text:
            _log.info(comment_text)
            self.browser['comment'] = comment_text
        self.browser.submit()

    # FIXME: The arguments to this function should be simplified and then
    # this should be merged into add_attachment_to_bug
    def add_patch_to_bug(self, bug_id, file_or_string, description, comment_text=None, mark_for_review=False, mark_for_commit_queue=False, mark_for_landing=False):
        """Upload a patch attachment to bug_id, optionally setting r?/cq flags."""
        self.authenticate()
        _log.info('Adding patch "%s" to %s' % (description, self.bug_url_for_bug_id(bug_id)))

        self.browser.open(self.add_attachment_url(bug_id))
        self.browser.select_form(name="entryform")
        file_object = self._file_object_for_upload(file_or_string)
        filename = self._filename_for_upload(file_object, bug_id, extension="patch")
        self._fill_attachment_form(description,
                                   file_object,
                                   mark_for_review=mark_for_review,
                                   mark_for_commit_queue=mark_for_commit_queue,
                                   mark_for_landing=mark_for_landing,
                                   is_patch=True,
                                   filename=filename)
        if comment_text:
            _log.info(comment_text)
            self.browser['comment'] = comment_text
        self.browser.submit()

    # FIXME: There has to be a more concise way to write this method.
    def _check_create_bug_response(self, response_html):
        """Return the new bug id from the post-submit page, or raise with
        the error text scraped from the page body."""
        match = re.search("<title>Bug (?P<bug_id>\d+) Submitted[^<]*</title>", response_html)
        if match:
            return match.group('bug_id')

        match = re.search(
            '<div id="bugzilla-body">(?P<error_message>.+)<div id="footer">',
            response_html,
            re.DOTALL)
        error_message = "FAIL"
        if match:
            text_lines = BeautifulSoup(
                    match.group('error_message')).findAll(text=True)
            error_message = "\n" + '\n'.join(
                    ["  " + line.strip()
                     for line in text_lines if line.strip()])
        raise Exception("Bug not created: %s" % error_message)

    def create_bug(self, bug_title, bug_description, component=None, diff=None, patch_description=None, cc=None, blocked=None, assignee=None, mark_for_review=False, mark_for_commit_queue=False):
        """File a new WebKit bug, optionally attaching a patch; returns the
        new bug id (string). May prompt the user for a component."""
        self.authenticate()
        _log.info('Creating bug with title "%s"' % bug_title)
        self.browser.open(config_urls.bug_server_url + "enter_bug.cgi?product=WebKit")
        self.browser.select_form(name="Create")
        component_items = self.browser.find_control('component').items
        component_names = map(lambda item: item.name, component_items)
        if not component:
            component = "New Bugs"
        if component not in component_names:
            component = User.prompt_with_list("Please pick a component:", component_names)
        self.browser["component"] = [component]
        if cc:
            self.browser["cc"] = cc
        if blocked:
            self.browser["blocked"] = unicode(blocked)
        if not assignee:
            assignee = self.username
        if assignee and not self.browser.find_control("assigned_to").disabled:
            self.browser["assigned_to"] = assignee
        self.browser["short_desc"] = bug_title
        self.browser["comment"] = bug_description

        if diff:
            # _fill_attachment_form expects a file-like object
            # Patch files are already binary, so no encoding needed.
            assert(isinstance(diff, str))
            patch_file_object = StringIO.StringIO(diff)
            self._fill_attachment_form(
                    patch_description,
                    patch_file_object,
                    mark_for_review=mark_for_review,
                    mark_for_commit_queue=mark_for_commit_queue,
                    is_patch=True)

        response = self.browser.submit()

        bug_id = self._check_create_bug_response(response.read())
        _log.info("Bug %s created." % bug_id)
        _log.info("%sshow_bug.cgi?id=%s" % (config_urls.bug_server_url, bug_id))
        return bug_id

    def _find_select_element_for_flag(self, flag_name):
        # FIXME: This will break if we ever re-order attachment flags
        if flag_name == "review":
            return self.browser.find_control(type='select', nr=0)
        elif flag_name == "commit-queue":
            return self.browser.find_control(type='select', nr=1)
        raise Exception("Don't know how to find flag named \"%s\"" % flag_name)

    def clear_attachment_flags(self, attachment_id, additional_comment_text=None):
        """Reset both review and commit-queue flags on an attachment to X."""
        self.authenticate()

        comment_text = "Clearing flags on attachment: %s" % attachment_id
        if additional_comment_text:
            comment_text += "\n\n%s" % additional_comment_text
        _log.info(comment_text)

        self.browser.open(self.attachment_url_for_id(attachment_id, 'edit'))
        self.browser.select_form(nr=1)
        self.browser.set_value(comment_text, name='comment', nr=0)
        self._find_select_element_for_flag('review').value = ("X",)
        self._find_select_element_for_flag('commit-queue').value = ("X",)
        self.browser.submit()

    def set_flag_on_attachment(self, attachment_id, flag_name, flag_value, comment_text=None):
        # FIXME: We need a way to test this function on a live bugzilla
        # instance.
        self.authenticate()
        _log.info(comment_text)
        self.browser.open(self.attachment_url_for_id(attachment_id, 'edit'))
        self.browser.select_form(nr=1)
        if comment_text:
            self.browser.set_value(comment_text, name='comment', nr=0)
        self._find_select_element_for_flag(flag_name).value = (flag_value,)
        self.browser.submit()

    # FIXME: All of these bug editing methods have a ridiculous amount of
    # copy/paste code.
    def obsolete_attachment(self, attachment_id, comment_text=None):
        """Mark an attachment obsolete and clear its review/cq flags."""
        self.authenticate()
        _log.info("Obsoleting attachment: %s" % attachment_id)
        self.browser.open(self.attachment_url_for_id(attachment_id, 'edit'))
        self.browser.select_form(nr=1)
        self.browser.find_control('isobsolete').items[0].selected = True
        # Also clear any review flag (to remove it from review/commit queues)
        self._find_select_element_for_flag('review').value = ("X",)
        self._find_select_element_for_flag('commit-queue').value = ("X",)
        if comment_text:
            _log.info(comment_text)
            # Bugzilla has two textareas named 'comment', one is somehow
            # hidden.  We want the first.
            self.browser.set_value(comment_text, name='comment', nr=0)
        self.browser.submit()

    def add_cc_to_bug(self, bug_id, email_address_list):
        self.authenticate()
        _log.info("Adding %s to the CC list for bug %s" % (email_address_list, bug_id))
        self.browser.open(self.bug_url_for_bug_id(bug_id))
        self.browser.select_form(name="changeform")
        self.browser["newcc"] = ", ".join(email_address_list)
        self.browser.submit()

    def post_comment_to_bug(self, bug_id, comment_text, cc=None):
        self.authenticate()
        _log.info("Adding comment to bug %s" % bug_id)
        self.browser.open(self.bug_url_for_bug_id(bug_id))
        self.browser.select_form(name="changeform")
        self.browser["comment"] = comment_text
        if cc:
            self.browser["newcc"] = ", ".join(cc)
        self.browser.submit()

    def close_bug_as_fixed(self, bug_id, comment_text=None):
        self.authenticate()
        _log.info("Closing bug %s as fixed" % bug_id)
        self.browser.open(self.bug_url_for_bug_id(bug_id))
        self.browser.select_form(name="changeform")
        if comment_text:
            self.browser['comment'] = comment_text
        self.browser['bug_status'] = ['RESOLVED']
        self.browser['resolution'] = ['FIXED']
        self.browser.submit()

    def _has_control(self, form, id):
        return id in [control.id for control in form.controls]

    def reassign_bug(self, bug_id, assignee=None, comment_text=None):
        """Reassign bug_id to assignee (defaults to the logged-in user).

        Silently bails out (with a warning) when the account lacks
        EditBugs privileges and the assigned_to control is absent.
        """
        self.authenticate()
        if not assignee:
            assignee = self.username
        _log.info("Assigning bug %s to %s" % (bug_id, assignee))
        self.browser.open(self.bug_url_for_bug_id(bug_id))
        self.browser.select_form(name="changeform")

        if not self._has_control(self.browser, "assigned_to"):
            _log.warning("""Failed to assign bug to you (can't find assigned_to) control.
Ignore this message if you don't have EditBugs privileges (https://bugs.webkit.org/userprefs.cgi?tab=permissions)""")
            return

        if comment_text:
            _log.info(comment_text)
            self.browser["comment"] = comment_text
        self.browser["assigned_to"] = assignee
        self.browser.submit()

    def reopen_bug(self, bug_id, comment_text):
        """Reopen bug_id with the (required) comment_text."""
        self.authenticate()
        _log.info("Re-opening bug %s" % bug_id)
        # Bugzilla requires a comment when re-opening a bug, so we know it will
        # never be None.
        _log.info(comment_text)
        self.browser.open(self.bug_url_for_bug_id(bug_id))
        self.browser.select_form(name="changeform")
        bug_status = self.browser.find_control("bug_status", type="select")
        # This is a hack around the fact that ClientForm.ListControl seems to
        # have no simpler way to ask if a control has an item named "REOPENED"
        # without using exceptions for control flow.
        possible_bug_statuses = map(lambda item: item.name, bug_status.items)
        if "REOPENED" in possible_bug_statuses:
            bug_status.value = ["REOPENED"]
        # If the bug was never confirmed it will not have a "REOPENED"
        # state, but only an "UNCONFIRMED" state.
        elif "UNCONFIRMED" in possible_bug_statuses:
            bug_status.value = ["UNCONFIRMED"]
        else:
            # FIXME: This logic is slightly backwards.  We won't print this
            # message if the bug is already open with state "UNCONFIRMED".
            _log.info("Did not reopen bug %s, it appears to already be open with status %s." % (bug_id, bug_status.value))
        self.browser['comment'] = comment_text
        self.browser.submit()
bsd-3-clause
x111ong/odoo
addons/account_analytic_plans/report/__init__.py
445
1084
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import crossovered_analytic # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
mbohlool/client-python
kubernetes/client/models/v1beta1_daemon_set_list.py
1
6557
# coding: utf-8

"""
    Kubernetes

    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)

    OpenAPI spec version: v1.8.2

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""


from pprint import pformat
from six import iteritems
import re


class V1beta1DaemonSetList(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Declared attribute types (used by to_dict() to drive serialization).
    swagger_types = {
        'api_version': 'str',
        'items': 'list[V1beta1DaemonSet]',
        'kind': 'str',
        'metadata': 'V1ListMeta'
    }

    # Python attribute name -> wire (JSON) key.
    attribute_map = {
        'api_version': 'apiVersion',
        'items': 'items',
        'kind': 'kind',
        'metadata': 'metadata'
    }

    def __init__(self, api_version=None, items=None, kind=None, metadata=None):
        """
        V1beta1DaemonSetList - a model defined in Swagger
        """

        self._api_version = None
        self._items = None
        self._kind = None
        self._metadata = None
        self.discriminator = None

        if api_version is not None:
          self.api_version = api_version
        # 'items' is required; assigning unconditionally routes through the
        # setter, which raises ValueError on None.
        self.items = items
        if kind is not None:
          self.kind = kind
        if metadata is not None:
          self.metadata = metadata

    @property
    def api_version(self):
        """
        Gets the api_version of this V1beta1DaemonSetList.
        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources

        :return: The api_version of this V1beta1DaemonSetList.
        :rtype: str
        """
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        """
        Sets the api_version of this V1beta1DaemonSetList.
        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources

        :param api_version: The api_version of this V1beta1DaemonSetList.
        :type: str
        """

        self._api_version = api_version

    @property
    def items(self):
        """
        Gets the items of this V1beta1DaemonSetList.
        A list of daemon sets.

        :return: The items of this V1beta1DaemonSetList.
        :rtype: list[V1beta1DaemonSet]
        """
        return self._items

    @items.setter
    def items(self, items):
        """
        Sets the items of this V1beta1DaemonSetList.
        A list of daemon sets.

        :param items: The items of this V1beta1DaemonSetList.
        :type: list[V1beta1DaemonSet]
        """
        if items is None:
            raise ValueError("Invalid value for `items`, must not be `None`")

        self._items = items

    @property
    def kind(self):
        """
        Gets the kind of this V1beta1DaemonSetList.
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds

        :return: The kind of this V1beta1DaemonSetList.
        :rtype: str
        """
        return self._kind

    @kind.setter
    def kind(self, kind):
        """
        Sets the kind of this V1beta1DaemonSetList.
        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds

        :param kind: The kind of this V1beta1DaemonSetList.
        :type: str
        """

        self._kind = kind

    @property
    def metadata(self):
        """
        Gets the metadata of this V1beta1DaemonSetList.
        Standard list metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata

        :return: The metadata of this V1beta1DaemonSetList.
        :rtype: V1ListMeta
        """
        return self._metadata

    @metadata.setter
    def metadata(self, metadata):
        """
        Sets the metadata of this V1beta1DaemonSetList.
        Standard list metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata

        :param metadata: The metadata of this V1beta1DaemonSetList.
        :type: V1ListMeta
        """

        self._metadata = metadata

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        # Recursively converts nested models (anything with to_dict) in
        # lists, dicts and plain attributes.
        result = {}

        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, V1beta1DaemonSetList):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
apache-2.0
deniszgonjanin/ckanext-showcase
ckanext/showcase/tests/test_helpers.py
1
2522
from nose import tools as nosetools
from nose import SkipTest

from ckan.plugins import toolkit as tk
import ckan.new_tests.helpers as helpers
import ckan.new_tests.factories as factories

import ckanext.showcase.logic.helpers as showcase_helpers


class TestGetSiteStatistics(helpers.FunctionalTestBase):
    """Check the dataset/showcase counters returned by
    showcase_helpers.get_site_statistics()."""

    def _skip_unless_ckan_25(self):
        # get_site_statistics cannot be called without a user before CKAN 2.5,
        # so every test in this class bails out early on older versions.
        if not tk.check_ckan_version(min_version='2.5'):
            raise SkipTest('get_site_statistics without user broken in CKAN 2.4')

    def _assert_site_counts(self, datasets, showcases):
        # Fetch the statistics once and compare both counters.
        stats = showcase_helpers.get_site_statistics()
        nosetools.assert_equal(stats['dataset_count'], datasets)
        nosetools.assert_equal(stats['showcase_count'], showcases)

    def test_dataset_count_no_datasets(self):
        '''
        Dataset and showcase count is 0 when no datasets, and no showcases.
        '''
        self._skip_unless_ckan_25()
        self._assert_site_counts(datasets=0, showcases=0)

    def test_dataset_count_no_datasets_some_showcases(self):
        '''
        Dataset and showcase count is 0 when no datasets, but some showcases.
        '''
        self._skip_unless_ckan_25()
        for _ in xrange(10):
            factories.Dataset(type='showcase')
        self._assert_site_counts(datasets=0, showcases=10)

    def test_dataset_count_some_datasets_no_showcases(self):
        '''
        Dataset and showcase count is correct when there are datasets, but no
        showcases.
        '''
        self._skip_unless_ckan_25()
        for _ in xrange(10):
            factories.Dataset()
        self._assert_site_counts(datasets=10, showcases=0)

    def test_dataset_count_some_datasets_some_showcases(self):
        '''
        Dataset and showcase count is correct when there are datasets and some
        showcases.
        '''
        self._skip_unless_ckan_25()
        for _ in xrange(10):
            factories.Dataset()
        for _ in xrange(5):
            factories.Dataset(type='showcase')
        self._assert_site_counts(datasets=10, showcases=5)
agpl-3.0
ChristopheVuillot/qiskit-sdk-py
qiskit/unroll/_unrollererror.py
1
1078
# -*- coding: utf-8 -*-

# Copyright 2017 IBM RESEARCH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================

"""
Exception for errors raised by unroller.
"""
from qiskit import QISKitError


class UnrollerError(QISKitError):
    """Base class for errors raised by unroller."""

    def __init__(self, *message):
        """Store the error message, joining all fragments with spaces."""
        joined = ' '.join(message)
        self.message = joined

    def __str__(self):
        """Return the repr of the stored message."""
        return repr(self.message)
apache-2.0
mattuuh7/incubator-airflow
tests/contrib/operators/test_emr_add_steps_operator.py
56
1722
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from mock import MagicMock, patch

from airflow import configuration
from airflow.contrib.operators.emr_add_steps_operator import EmrAddStepsOperator

# Canned boto3 add_job_flow_steps response returned by the stubbed EMR client.
ADD_STEPS_SUCCESS_RETURN = {
    'ResponseMetadata': {
        'HTTPStatusCode': 200
    },
    'StepIds': ['s-2LH3R5GW3A53T']
}


class TestEmrAddStepsOperator(unittest.TestCase):
    """Unit tests for EmrAddStepsOperator.execute with a mocked EMR client."""

    def setUp(self):
        configuration.load_test_config()

        # Stub EMR client that always reports a successful add_job_flow_steps
        # call (moto's canned response for this API is incorrect).
        emr_client_stub = MagicMock()
        emr_client_stub.add_job_flow_steps.return_value = ADD_STEPS_SUCCESS_RETURN

        # Stand-in for boto3.client that hands back the stub above.
        self.boto3_client_mock = MagicMock(return_value=emr_client_stub)

    def test_execute_adds_steps_to_the_job_flow_and_returns_step_ids(self):
        with patch('boto3.client', self.boto3_client_mock):
            operator = EmrAddStepsOperator(
                task_id='test_task',
                job_flow_id='j-8989898989',
                aws_conn_id='aws_default')

            # execute() should surface the StepIds from the boto3 response.
            self.assertEqual(operator.execute(None), ['s-2LH3R5GW3A53T'])


if __name__ == '__main__':
    unittest.main()
apache-2.0
brandonium21/snowflake
snowflakeEnv/lib/python2.7/site-packages/jinja2/testsuite/core_tags.py
412
11858
# -*- coding: utf-8 -*-
"""
    jinja2.testsuite.core_tags
    ~~~~~~~~~~~~~~~~~~~~~~~~~~

    Test the core tags like for and if.

    :copyright: (c) 2010 by the Jinja Team.
    :license: BSD, see LICENSE for more details.
"""
import unittest

from jinja2.testsuite import JinjaTestCase

from jinja2 import Environment, TemplateSyntaxError, UndefinedError, \
     DictLoader

# Shared environment with default settings; MacrosTestCase defines its own
# trim_blocks environment instead.
env = Environment()


class ForLoopTestCase(JinjaTestCase):
    """Tests for the {% for %} tag: loop variables, recursion, filtering
    and scoping behaviour."""

    def test_simple(self):
        tmpl = env.from_string('{% for item in seq %}{{ item }}{% endfor %}')
        assert tmpl.render(seq=list(range(10))) == '0123456789'

    def test_else(self):
        # The else block runs when the iterable is undefined or empty.
        tmpl = env.from_string('{% for item in seq %}XXX{% else %}...{% endfor %}')
        assert tmpl.render() == '...'

    def test_empty_blocks(self):
        tmpl = env.from_string('<{% for item in seq %}{% else %}{% endfor %}>')
        assert tmpl.render() == '<>'

    def test_context_vars(self):
        # Exercise every special "loop" context variable over a two-element
        # sequence and check them pairwise.
        tmpl = env.from_string('''{% for item in seq -%} {{ loop.index }}|{{ loop.index0 }}|{{ loop.revindex }}|{{ loop.revindex0 }}|{{ loop.first }}|{{ loop.last }}|{{ loop.length }}###{% endfor %}''')
        one, two, _ = tmpl.render(seq=[0, 1]).split('###')
        (one_index, one_index0, one_revindex, one_revindex0, one_first,
         one_last, one_length) = one.split('|')
        (two_index, two_index0, two_revindex, two_revindex0, two_first,
         two_last, two_length) = two.split('|')
        assert int(one_index) == 1 and int(two_index) == 2
        assert int(one_index0) == 0 and int(two_index0) == 1
        assert int(one_revindex) == 2 and int(two_revindex) == 1
        assert int(one_revindex0) == 1 and int(two_revindex0) == 0
        assert one_first == 'True' and two_first == 'False'
        assert one_last == 'False' and two_last == 'True'
        assert one_length == two_length == '2'

    def test_cycling(self):
        # loop.cycle with literal arguments and with an unpacked variable.
        tmpl = env.from_string('''{% for item in seq %}{{ loop.cycle('<1>', '<2>') }}{% endfor %}{% for item in seq %}{{ loop.cycle(*through) }}{% endfor %}''')
        output = tmpl.render(seq=list(range(4)), through=('<1>', '<2>'))
        assert output == '<1><2>' * 4

    def test_scope(self):
        # The loop target must not leak out of the for block.
        tmpl = env.from_string('{% for item in seq %}{% endfor %}{{ item }}')
        output = tmpl.render(seq=list(range(10)))
        assert not output

    def test_varlen(self):
        # Iterables without a known length (generators) still work.
        def inner():
            for item in range(5):
                yield item
        tmpl = env.from_string('{% for item in iter %}{{ item }}{% endfor %}')
        output = tmpl.render(iter=inner())
        assert output == '01234'

    def test_noniter(self):
        # Looping over a non-iterable raises a TypeError at render time.
        tmpl = env.from_string('{% for item in none %}...{% endfor %}')
        self.assert_raises(TypeError, tmpl.render)

    def test_recursive(self):
        tmpl = env.from_string('''{% for item in seq recursive -%} [{{ item.a }}{% if item.b %}<{{ loop(item.b) }}>{% endif %}] {%- endfor %}''')
        assert tmpl.render(seq=[
            dict(a=1, b=[dict(a=1), dict(a=2)]),
            dict(a=2, b=[dict(a=1), dict(a=2)]),
            dict(a=3, b=[dict(a='a')])
        ]) == '[1<[1][2]>][2<[1][2]>][3<[a]>]'

    def test_recursive_depth0(self):
        # loop.depth0 starts at 0 on the outermost level.
        tmpl = env.from_string('''{% for item in seq recursive -%} [{{ loop.depth0 }}:{{ item.a }}{% if item.b %}<{{ loop(item.b) }}>{% endif %}] {%- endfor %}''')
        self.assertEqual(tmpl.render(seq=[
            dict(a=1, b=[dict(a=1), dict(a=2)]),
            dict(a=2, b=[dict(a=1), dict(a=2)]),
            dict(a=3, b=[dict(a='a')])
        ]), '[0:1<[1:1][1:2]>][0:2<[1:1][1:2]>][0:3<[1:a]>]')

    def test_recursive_depth(self):
        # loop.depth starts at 1 on the outermost level.
        tmpl = env.from_string('''{% for item in seq recursive -%} [{{ loop.depth }}:{{ item.a }}{% if item.b %}<{{ loop(item.b) }}>{% endif %}] {%- endfor %}''')
        self.assertEqual(tmpl.render(seq=[
            dict(a=1, b=[dict(a=1), dict(a=2)]),
            dict(a=2, b=[dict(a=1), dict(a=2)]),
            dict(a=3, b=[dict(a='a')])
        ]), '[1:1<[2:1][2:2]>][1:2<[2:1][2:2]>][1:3<[2:a]>]')

    def test_looploop(self):
        # The outer loop object can be aliased with {% set %} so the inner
        # loop body can see both loop objects.
        tmpl = env.from_string('''{% for row in table %} {%- set rowloop = loop -%} {% for cell in row -%} [{{ rowloop.index }}|{{ loop.index }}] {%- endfor %} {%- endfor %}''')
        assert tmpl.render(table=['ab', 'cd']) == '[1|1][1|2][2|1][2|2]'

    def test_reversed_bug(self):
        # Regression test: loop.last must work on reverse iterators too.
        tmpl = env.from_string('{% for i in items %}{{ i }}'
                               '{% if not loop.last %}'
                               ',{% endif %}{% endfor %}')
        assert tmpl.render(items=reversed([3, 2, 1])) == '1,2,3'

    def test_loop_errors(self):
        # "loop" is undefined inside the loop condition itself...
        tmpl = env.from_string('''{% for item in [1] if loop.index == 0 %}...{% endfor %}''')
        self.assert_raises(UndefinedError, tmpl.render)
        # ...and renders as empty in the else block.
        tmpl = env.from_string('''{% for item in [] %}...{% else %}{{ loop }}{% endfor %}''')
        assert tmpl.render() == ''

    def test_loop_filter(self):
        tmpl = env.from_string('{% for item in range(10) if item '
                               'is even %}[{{ item }}]{% endfor %}')
        assert tmpl.render() == '[0][2][4][6][8]'
        # loop.index counts only the iterations that passed the filter.
        tmpl = env.from_string(''' {%- for item in range(10) if item is even %}[{{ loop.index }}:{{ item }}]{% endfor %}''')
        assert tmpl.render() == '[1:0][2:2][3:4][4:6][5:8]'

    def test_loop_unassignable(self):
        # "loop" is reserved and may not be used as the loop target.
        self.assert_raises(TemplateSyntaxError, env.from_string,
                           '{% for loop in seq %}...{% endfor %}')

    def test_scoped_special_var(self):
        # Each nesting level sees its own "loop" object.
        t = env.from_string('{% for s in seq %}[{{ loop.first }}{% for c in s %}'
                            '|{{ loop.first }}{% endfor %}]{% endfor %}')
        assert t.render(seq=('ab', 'cd')) == '[True|True|False][False|True|False]'

    def test_scoped_loop_var(self):
        t = env.from_string('{% for x in seq %}{{ loop.first }}'
                            '{% for y in seq %}{% endfor %}{% endfor %}')
        assert t.render(seq='ab') == 'TrueFalse'
        t = env.from_string('{% for x in seq %}{% for y in seq %}'
                            '{{ loop.first }}{% endfor %}{% endfor %}')
        assert t.render(seq='ab') == 'TrueFalseTrueFalse'

    def test_recursive_empty_loop_iter(self):
        # A recursive loop over an empty iterable renders nothing.
        t = env.from_string(''' {%- for item in foo recursive -%}{%- endfor -%} ''')
        assert t.render(dict(foo=[])) == ''

    def test_call_in_loop(self):
        # {% call %} blocks must work inside a for loop body.
        t = env.from_string(''' {%- macro do_something() -%} [{{ caller() }}] {%- endmacro %} {%- for i in [1, 2, 3] %} {%- call do_something() -%} {{ i }} {%- endcall %} {%- endfor -%} ''')
        assert t.render() == '[1][2][3]'

    def test_scoping_bug(self):
        # Regression test: a macro named like the loop target must not
        # shadow the loop variable inside the loop body.
        t = env.from_string(''' {%- for item in foo %}...{{ item }}...{% endfor %} {%- macro item(a) %}...{{ a }}...{% endmacro %} {{- item(2) -}} ''')
        assert t.render(foo=(1,)) == '...1......2...'

    def test_unpacking(self):
        # Tuple unpacking of loop targets.
        tmpl = env.from_string('{% for a, b, c in [[1, 2, 3]] %}'
                               '{{ a }}|{{ b }}|{{ c }}{% endfor %}')
        assert tmpl.render() == '1|2|3'


class IfConditionTestCase(JinjaTestCase):
    """Tests for the {% if %}/{% elif %}/{% else %} tags."""

    def test_simple(self):
        tmpl = env.from_string('''{% if true %}...{% endif %}''')
        assert tmpl.render() == '...'

    def test_elif(self):
        tmpl = env.from_string('''{% if false %}XXX{% elif true %}...{% else %}XXX{% endif %}''')
        assert tmpl.render() == '...'

    def test_else(self):
        tmpl = env.from_string('{% if false %}XXX{% else %}...{% endif %}')
        assert tmpl.render() == '...'

    def test_empty(self):
        tmpl = env.from_string('[{% if true %}{% else %}{% endif %}]')
        assert tmpl.render() == '[]'

    def test_complete(self):
        tmpl = env.from_string('{% if a %}A{% elif b %}B{% elif c == d %}'
                               'C{% else %}D{% endif %}')
        assert tmpl.render(a=0, b=False, c=42, d=42.0) == 'C'

    def test_no_scope(self):
        # {% set %} inside an if block stays visible after the block.
        tmpl = env.from_string('{% if a %}{% set foo = 1 %}{% endif %}{{ foo }}')
        assert tmpl.render(a=True) == '1'
        tmpl = env.from_string('{% if true %}{% set foo = 1 %}{% endif %}{{ foo }}')
        assert tmpl.render() == '1'


class MacrosTestCase(JinjaTestCase):
    """Tests for {% macro %} definitions and {% call %} blocks."""

    # trim_blocks removes the newline after a block tag, keeping the
    # expected outputs below free of stray newlines.
    env = Environment(trim_blocks=True)

    def test_simple(self):
        tmpl = self.env.from_string('''\
{% macro say_hello(name) %}Hello {{ name }}!{% endmacro %}
{{ say_hello('Peter') }}''')
        assert tmpl.render() == 'Hello Peter!'

    def test_scoping(self):
        # An inner macro can close over the enclosing macro's arguments.
        tmpl = self.env.from_string('''\
{% macro level1(data1) %}
{% macro level2(data2) %}{{ data1 }}|{{ data2 }}{% endmacro %}
{{ level2('bar') }}{% endmacro %}
{{ level1('foo') }}''')
        assert tmpl.render() == 'foo|bar'

    def test_arguments(self):
        # Positional arguments, defaults, and missing values.
        tmpl = self.env.from_string('''\
{% macro m(a, b, c='c', d='d') %}{{ a }}|{{ b }}|{{ c }}|{{ d }}{% endmacro %}
{{ m() }}|{{ m('a') }}|{{ m('a', 'b') }}|{{ m(1, 2, 3) }}''')
        assert tmpl.render() == '||c|d|a||c|d|a|b|c|d|1|2|3|d'

    def test_varargs(self):
        # Extra positional arguments are collected in "varargs".
        tmpl = self.env.from_string('''\
{% macro test() %}{{ varargs|join('|') }}{% endmacro %}\
{{ test(1, 2, 3) }}''')
        assert tmpl.render() == '1|2|3'

    def test_simple_call(self):
        tmpl = self.env.from_string('''\
{% macro test() %}[[{{ caller() }}]]{% endmacro %}\
{% call test() %}data{% endcall %}''')
        assert tmpl.render() == '[[data]]'

    def test_complex_call(self):
        # {% call(...) %} passes arguments back into the call block.
        tmpl = self.env.from_string('''\
{% macro test() %}[[{{ caller('data') }}]]{% endmacro %}\
{% call(data) test() %}{{ data }}{% endcall %}''')
        assert tmpl.render() == '[[data]]'

    def test_caller_undefined(self):
        # "caller" is undefined inside a macro invoked without {% call %},
        # even when a template variable of that name exists.
        tmpl = self.env.from_string('''\
{% set caller = 42 %}\
{% macro test() %}{{ caller is not defined }}{% endmacro %}\
{{ test() }}''')
        assert tmpl.render() == 'True'

    def test_include(self):
        # Macros can be imported from another template.
        self.env = Environment(loader=DictLoader({'include':
            '{% macro test(foo) %}[{{ foo }}]{% endmacro %}'}))
        tmpl = self.env.from_string('{% from "include" import test %}{{ test("foo") }}')
        assert tmpl.render() == '[foo]'

    def test_macro_api(self):
        # Introspection attributes exposed on macro objects.
        tmpl = self.env.from_string('{% macro foo(a, b) %}{% endmacro %}'
            '{% macro bar() %}{{ varargs }}{{ kwargs }}{% endmacro %}'
            '{% macro baz() %}{{ caller() }}{% endmacro %}')
        assert tmpl.module.foo.arguments == ('a', 'b')
        assert tmpl.module.foo.defaults == ()
        assert tmpl.module.foo.name == 'foo'
        assert not tmpl.module.foo.caller
        assert not tmpl.module.foo.catch_kwargs
        assert not tmpl.module.foo.catch_varargs
        assert tmpl.module.bar.arguments == ()
        assert tmpl.module.bar.defaults == ()
        assert not tmpl.module.bar.caller
        assert tmpl.module.bar.catch_kwargs
        assert tmpl.module.bar.catch_varargs
        assert tmpl.module.baz.caller

    def test_callself(self):
        # Macros can call themselves recursively.
        tmpl = self.env.from_string('{% macro foo(x) %}{{ x }}{% if x > 1 %}|'
                                    '{{ foo(x - 1) }}{% endif %}{% endmacro %}'
                                    '{{ foo(5) }}')
        assert tmpl.render() == '5|4|3|2|1'


def suite():
    """Collect the test cases in this module into a single TestSuite."""
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(ForLoopTestCase))
    suite.addTest(unittest.makeSuite(IfConditionTestCase))
    suite.addTest(unittest.makeSuite(MacrosTestCase))
    return suite
bsd-2-clause
invisiblek/python-for-android
python3-alpha/python3-src/Lib/tkinter/test/test_tkinter/test_text.py
54
1136
import unittest
import tkinter
from test.support import requires, run_unittest
from tkinter.ttk import setup_master

requires('gui')


class TextTest(unittest.TestCase):
    """Exercise tkinter.Text.search against a live Text widget."""

    def setUp(self):
        # Fresh Text widget attached to the shared test root.
        self.root = setup_master()
        self.text = tkinter.Text(self.root)

    def tearDown(self):
        self.text.destroy()

    def test_search(self):
        widget = self.text

        # pattern and index are obligatory arguments; Tcl rejects None.
        for pattern, index in ((None, '1.0'), ('a', None), (None, None)):
            self.assertRaises(tkinter.TclError, widget.search, pattern, index)

        # An invalid text index is rejected as well.
        self.assertRaises(tkinter.TclError, widget.search, '', 0)

        # Check if we are getting the indices as strings -- you are likely
        # to get Tcl_Obj under Tk 8.5 if Tkinter doesn't convert it.
        widget.insert('1.0', 'hi-test')
        self.assertEqual(widget.search('-test', '1.0', 'end'), '1.2')
        self.assertEqual(widget.search('test', '1.0', 'end'), '1.3')


tests_gui = (TextTest, )

if __name__ == "__main__":
    run_unittest(*tests_gui)
apache-2.0
PedroTrujilloV/nest-simulator
topology/doc/old_doc/plotting_tools/plot_connections.py
6
3755
# -*- coding: utf-8 -*-
#
# plot_connections.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST.  If not, see <http://www.gnu.org/licenses/>.

## Python script that creates a set of Mayavi2 graphs that gives
## on overview of the connection profile of a layer.
## Mayavi2 is required to run this script!

# The histogram2d function must be loaded before calling the
# functions in this file.
#execfile(plotting_folder+'histogram2d.py')

import numpy as np
import enthought.mayavi.mlab as mlab  # Load Mayavi2


## Function that checks if a node satisfies certain criterias.
## Returns true if that is the case.
##
## Input:
##   gid    - node
##   params - dictionary with specification of layer and model type
##
## NOTE(review): relies on a module-level "nest" binding that is not
## imported in this file -- presumably provided by the calling script
## (like histogram2d above); confirm before running standalone.
def check_node(gid, params):
    if 'layer' in params:
        if nest.GetLayer(gid) != params['layer']:
            return False
    if 'model' in params:
        if nest.GetStatus(gid)[0]['model'] != params['model']:
            return False
    return True


##
## Creates a Mayavi2 plot of connection data.
##
## Input:
##   data_file - data file created with the PrintLayerConnections command
##   min/max   - lower left and upper right corner - [x, y]
##   bins      - number of histogram bins - [x_number, y_number]
##               should in most cases be quite alot smaller than the number
##               of rows and columns in the layer
##   params    - restriction on connection type (see check_node(..) above)
##   output    - output directory
##
## Example: plot_connections('out.txt', [-1.0, -1.0], [1.0, 1.0], [9, 9],
##                           {'model': 'iaf_neuron'}, output='folder/')
##
def plot_connections(data_file, min, max, bins, params=None, output=''):
    # NOTE: "min"/"max" shadow the builtins; the names are kept because
    # they are part of the public signature (callers may pass by keyword).
    print("Creating connection profile graphs.")

    # Read data points from file.  The original left the file handle open
    # forever (and rebound its name to the Mayavi figure below); "with"
    # guarantees it is closed even if a malformed line raises.
    data = []
    with open(data_file, 'r') as conn_file:
        # Ignore first line (header written by PrintLayerConnections).
        conn_file.readline()
        for line in conn_file:
            fields = line.split(' ')
            # Keep the target position (columns 4 and 5) unless a filter
            # is given and the target node (column 1) fails it.
            # "is None" replaces the fragile "params != None" comparison.
            if params is None or check_node([int(fields[1])], params):
                data.append([float(fields[4]), float(fields[5])])

    # Create histogram data based on the retrieved data.
    # (histogram2d is expected to have been loaded beforehand, see top.)
    histogram_data = histogram2d(data, min, max, bins)

    # Open a new Mayavi2 figure (named "fig" so it no longer shadows the
    # file handle, as the original "f" did).
    fig = mlab.figure()

    # Convert histogram bin count to relative densities.
    m = np.max(histogram_data[2].max(axis=0))
    histogram_data[2] = histogram_data[2] / float(m)

    # Plot histogram data.
    mlab.mesh(histogram_data[0], histogram_data[1], histogram_data[2])
    #surf(histogram_data[0], histogram_data[1], histogram_data[2])

    # Create and save various viewpoints of histogram figure.
    mlab.axes(z_axis_visibility=False)

    mlab.view(azimuth=0, elevation=90)  # X
    mlab.savefig(output + "xaxis.eps", size=[600, 400])

    mlab.view(azimuth=90, elevation=270)  # Y
    mlab.savefig(output + "yaxis.eps", size=[600, 400])

    mlab.view(azimuth=45, elevation=45)  # Perspective
    mlab.savefig(output + "perspective.eps", size=[600, 400])

    mlab.colorbar(orientation="vertical")
    mlab.view(azimuth=0, elevation=0)  # Z
    mlab.savefig(output + "above.eps", size=[600, 400])
gpl-2.0
AnishShah/tensorflow
tensorflow/contrib/timeseries/python/timeseries/state_space_models/periodic.py
92
24154
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """State space components for modeling seasonality.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy from tensorflow.contrib.timeseries.python.timeseries.state_space_models import state_space_model from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import gen_math_ops from tensorflow.python.ops import math_ops class CycleStateSpaceModel(state_space_model.StateSpaceModel): """A state space model component which cycles between values. Stores N values using N - 1 latent values, the Nth being the negative sum of those explicitly stored. At any given timestep one of these values is observed. Noise is assumed to affect only one of the transitions. """ def __init__( self, periodicity, configuration=state_space_model.StateSpaceModelConfiguration()): self._periodicity = periodicity super(CycleStateSpaceModel, self).__init__(configuration=configuration) def get_state_transition(self): return self.transition_to_powers(array_ops.ones([], dtype=dtypes.int32)) def get_noise_transform(self): # transition_power_noise_accumulator makes assumptions about this # transformation. 
If the noise transform is modified or overridden, # transition_power_noise_accumulator must be modified as well (or discarded, # as it is simply an optimization). return array_ops.pad( array_ops.ones([1], dtype=self.dtype), paddings=[(0, self._periodicity - 2)])[..., None] def transition_to_powers(self, powers): """Computes powers of the cycle transition matrix efficiently. Args: powers: An integer Tensor, shape [...], with powers to raise the transition matrix to. Returns: A floating point Tensor with shape [..., self._periodicity - 1, self._periodicity - 1] containing: (transition^power)_{i, j} = { 1 if (i - j) % self._periodicity == power % self._periodicity -1 if (i + 1) % self._periodicity == power % self._periodicity 0 otherwise} """ powers %= self._periodicity range_shape_padded = array_ops.reshape( math_ops.range(self._periodicity - 1, dtype=powers.dtype), array_ops.concat( [ array_ops.ones([array_ops.rank(powers)], dtype=dtypes.int32), [self._periodicity - 1] ], axis=0)) is_row_negative = math_ops.equal(range_shape_padded + 1, powers[..., None]) row_indicator_shape = array_ops.shape(is_row_negative) negative_row_indicator = array_ops.where(is_row_negative, -array_ops.ones( shape=row_indicator_shape, dtype=self.dtype), array_ops.zeros( row_indicator_shape, dtype=self.dtype)) coord_diff = (range_shape_padded[..., None] - range_shape_padded[..., None, :]) is_one = math_ops.equal(coord_diff % self._periodicity, powers[..., None, None]) positive_ones = array_ops.where(is_one, array_ops.ones( array_ops.shape(is_one), dtype=self.dtype), array_ops.zeros( array_ops.shape(is_one), dtype=self.dtype)) return math_ops.cast(positive_ones + negative_row_indicator[..., None], self.dtype) def transition_power_noise_accumulator( self, num_steps, noise_addition_coefficient=1): r"""Sum the transitioned covariance matrix over a number of steps. Assumes that state_transition_noise_covariance is a matrix with a single non-zero value in the upper left. Args: num_steps: A [...] 
shape integer Tensor with numbers of steps to compute power sums for. noise_addition_coefficient: A multiplier for the state transition noise covariance (used in ResolutionCycleModel to compute multiples of full period sums). Returns: The computed power sum, with shape [..., state dimension, state dimension] containing: [\sum_{p=0}^{num_steps - 1} ( state_transition^p * state_transition_noise_covariance * (state_transition^p)^T)]_{i, j} = { -contribution_{j + 1} if j == i - 1 contribution_{j + 1} + contribution{j} if j == i -contribution_{j} if j == i + 1 0 otherwise } contribution_k = noise_scalar * ((num_steps + self._periodicity - 1 - (k % self._periodicity)) // self._periodicity) Where contribution_k is the sum of noise_scalar additions to component k of the periodicity. """ noise_addition_scalar = array_ops.squeeze( self.state_transition_noise_covariance, axis=[-1, -2]) period_range_reshaped = array_ops.reshape( math_ops.range(self._periodicity, dtype=num_steps.dtype), array_ops.concat( [ array_ops.ones([array_ops.rank(num_steps)], dtype=dtypes.int32), [self._periodicity] ], axis=0)) reversed_remaining_steps = ((period_range_reshaped - (num_steps[..., None] - 1)) % self._periodicity) period_additions_reversed = (ops.convert_to_tensor( noise_addition_coefficient, self.dtype)[..., None] * noise_addition_scalar * math_ops.cast( (num_steps[..., None] + reversed_remaining_steps) // self._periodicity, dtype=self.dtype)) period_additions_diag = array_ops.matrix_diag(period_additions_reversed) upper_band = array_ops.concat( [ array_ops.zeros_like(period_additions_diag[..., :-1, 0:1]), -period_additions_diag[..., :-1, 0:-2] ], axis=-1) lower_band = array_ops.concat( [ array_ops.zeros_like(period_additions_diag[..., 0:1, :-1]), -period_additions_diag[..., 0:-2, :-1] ], axis=-2) period_additions_rotated = array_ops.concat( [ period_additions_reversed[..., -1:], period_additions_reversed[..., :-2] ], axis=-1) diagonal = array_ops.matrix_diag(period_additions_reversed[..., 
:-1] + period_additions_rotated) return diagonal + lower_band + upper_band def get_observation_model(self, times): """Observe only the first of the rotating latent values. See StateSpaceModel.get_observation_model. Args: times: Unused. See the parent class for details. Returns: A static, univariate observation model for later broadcasting. """ del times # Does not rely on times. Uses broadcasting from the parent. return array_ops.concat( values=[ array_ops.ones([1], dtype=self.dtype), array_ops.zeros( [self._periodicity - 2], dtype=self.dtype) ], axis=0) class ResolutionCycleModel(CycleStateSpaceModel): """A version of CycleStateSpaceModel with variable resolution. Cycles between "num_latent_values" latent values over a period of "periodicity", smoothly interpolating. Simply raises the transition matrix from CycleStateSpaceModel to the power (num_latent_values / periodicity). Specifically, ResolutionCycleModel uses the following eigendecomposition of the CycleStateSpaceModel matrix (there are several parameterizations, others leading to roots of the matrix with complex values): eigenvectors_{i, j} = root_of_unity(floor(j / 2) + 1, i * (-1)^(j + 1)) - root_of_unity(floor(j / 2) + 1, (i + 1) * (-1)^(j + 1)) eigenvalues_j = root_of_unity(floor(j / 2) + 1, (-1)^j) root_of_unity(root_number, to_power) = exp(to_power * 2 * pi * sqrt(-1) * root_number / num_latent_values) The transition matrix for ResolutionCycleModel is then: eigenvectors * diag(eigenvalues^(num_latent_values / periodicity)) * eigenvectors^-1 Since the eigenvalues are paired with their conjugates (conj(e^(sqrt(-1)*x)) = e^(-sqrt(-1)*x)), the resulting matrix has real components (this is why only odd numbers of latent values are supported, since the size of the matrix is one less than the number of latent values and there must be an even number of eigenvalues to pair them off). See ./g3doc/periodic_multires_derivation.md for details. 
""" def __init__( self, num_latent_values, periodicity, near_integer_threshold=1e-8, configuration=state_space_model.StateSpaceModelConfiguration()): """Initialize the ResolutionCycleModel. Args: num_latent_values: Controls the representational power and memory usage of the model. The transition matrix has shape [num_latent_values - 1, num_latent_values - 1]. Must be an odd integer (see class docstring for why). periodicity: The number of steps for cyclic behavior. May be a Tensor, and need not be an integer (although integer values greater than num_latent_values have more efficient special cases). near_integer_threshold: When avoiding singularities, controls how close a number should be to that singularity before the special case takes over. configuration: A StateSpaceModelConfiguration object. Raises: ValueError: If num_latent_values is not odd. """ if num_latent_values % 2 != 1: raise ValueError("Only odd numbers of latent values are supported.") self._num_latent_values = num_latent_values self._true_periodicity = periodicity self._near_integer_threshold = near_integer_threshold super(ResolutionCycleModel, self).__init__( periodicity=num_latent_values, configuration=configuration) def _close_to_integer(self, value): value = math_ops.cast(value, self.dtype) return math_ops.less( math_ops.abs(value - gen_math_ops.round(value)), self._near_integer_threshold) def transition_to_powers(self, powers): """Computes TransitionMatrix^power efficiently. For an n x n transition matrix we have: (TransitionMatrix**power)_{i, j) = (-1) ** i * sin(pi * power) / (n + 1) * ((-1) ** j / sin(pi / (n + 1) * (power - i + j)) + 1 / sin(pi / (n + 1) * (power - i - 1))) The sin(pi * power) term is zero whenever "power" is an integer. However, the 1 / sin(x) terms (cosecants) occasionally (when their arguments are multiples of pi) cancel out this value. The limit as the argument approaches an integer value gives the "correct" result, but computing these separately gives 0 * inf = NaN. 
Instead, there is a special case for near-integer values. Args: powers: A floating point Tensor of powers to raise the transition matrix to. Returns: A [..., self._num_latent_values - 1, self._num_latent_values - 1] floating point Tensor with the transition matrix raised to each power in `powers`. """ num_latent_values_float = math_ops.cast(self._num_latent_values, self.dtype) latent_values_per_period = (num_latent_values_float / math_ops.cast( self._true_periodicity, dtype=self.dtype)) original_matrix_powers = (math_ops.cast(powers, self.dtype) * latent_values_per_period) global_coeff = (math_ops.sin(original_matrix_powers * numpy.pi) / num_latent_values_float)[..., None, None] matrix_dimension_range = array_ops.reshape( math_ops.range(self._num_latent_values - 1), array_ops.concat( [ array_ops.ones( [array_ops.rank(original_matrix_powers)], dtype=dtypes.int32), [self._num_latent_values - 1] ], axis=0)) matrix_dimension_range_float = math_ops.cast(matrix_dimension_range, self.dtype) alternating = math_ops.cast(1 - 2 * (matrix_dimension_range % 2), self.dtype) row_addend = 1. / math_ops.sin(numpy.pi / num_latent_values_float * ( original_matrix_powers[..., None] - matrix_dimension_range_float - 1)) column_minus_row = (matrix_dimension_range_float[..., None, :] - matrix_dimension_range_float[..., None]) full_matrix_addend = (alternating[..., None, :] / math_ops.sin( numpy.pi / num_latent_values_float * (original_matrix_powers[..., None, None] + column_minus_row))) continuous_construction = global_coeff * alternating[..., None] * ( row_addend[..., None] + full_matrix_addend) # For integer powers, the above formula is only correct in the limit, # yielding NaNs as written. We defer to the super-class in such cases, which # computes integer powers exactly. 
return array_ops.where( self._close_to_integer(original_matrix_powers), super(ResolutionCycleModel, self).transition_to_powers( math_ops.cast( gen_math_ops.round(original_matrix_powers), dtypes.int64)), continuous_construction) def transition_power_noise_accumulator(self, num_steps): """Sum the transitioned covariance matrix over a number of steps. Args: num_steps: An integer Tensor of any shape [...] indicating the number of steps to compute for each part of the batch. Returns: A [..., self._num_latent_values - 1, self._num_latent_values - 1] floating point Tensor corresponding to each requested number of steps, containing: sum_{i=1}^{steps} transition^i * noise_covariance * (transition^i)^T """ def _whole_periods_folded(): """A more efficient special casing for integer periods. We knock off full periods, leaving at most self._true_periodicity steps to compute. Returns: A tuple of (remaining_whole_steps, current_accumulation): remaining_whole_steps: An integer Tensor with the same shape as the `num_steps` argument to `transition_power_noise_accumulator`, indicating the reduced number of steps which must be computed sequentially and added to `current_accumulation`. current_accumulation: A [..., self._num_latent_values - 1, self._num_latent_values - 1] floating point Tensor corresponding to the accumulations for steps which were computed in this function. 
""" original_transition_noise_addition_coefficient = (math_ops.cast( self._true_periodicity, self.dtype) / math_ops.cast( self._num_latent_values, self.dtype)) full_period_accumulation = super( ResolutionCycleModel, self).transition_power_noise_accumulator( noise_addition_coefficient= original_transition_noise_addition_coefficient, num_steps=ops.convert_to_tensor( self._num_latent_values, dtype=num_steps.dtype)) periodicity_integer = math_ops.cast(self._true_periodicity, num_steps.dtype) full_periods = math_ops.cast(num_steps // periodicity_integer, self.dtype) current_accumulation = full_periods[..., None, None] * array_ops.reshape( full_period_accumulation, array_ops.concat( [ array_ops.ones( [array_ops.rank(full_periods)], dtype=dtypes.int32), array_ops.shape(full_period_accumulation) ], axis=0)) remaining_whole_steps = num_steps % periodicity_integer return remaining_whole_steps, current_accumulation def _no_whole_period_computation(): """A less efficient special casing for real valued periods. This special casing is still preferable to computing using sequential matrix multiplies (parallelizable, more numerically stable), but is linear in the number of steps. Returns: Same shapes and types as `_whole_periods_folded`, but no folding is done in this function. """ current_accumulation = array_ops.zeros( array_ops.concat( [ array_ops.shape(num_steps), [self._num_latent_values - 1, self._num_latent_values - 1] ], axis=0), dtype=self.dtype) remaining_whole_steps = num_steps return remaining_whole_steps, current_accumulation # Decide whether it's feasible to compute whole periods in closed form, # taking advantage of the fact that a sum over self._true_periodicity steps # in our transition matrix is proportional to a sum over # self._num_latent_values steps in the unmodified matrix (because each # latent value gets the same treatment). This is possible for integer # self._true_periodicity, since we stay aligned to integer steps. 
For real # valued self._true_periodicity, or when the cyclic behavior is a higher # resolution than 1 per step, taking whole periods leads to misalignment # with integer steps, which would be difficult to recover from. remaining_whole_steps, current_accumulation = control_flow_ops.cond( self._whole_period_folding(), _whole_periods_folded, _no_whole_period_computation) steps_to_compute = math_ops.reduce_max(remaining_whole_steps) remaining_step_noise_additions = self._power_sum_array(steps_to_compute) noise_addition_scalar = array_ops.squeeze( self.state_transition_noise_covariance, axis=[-1, -2]) return current_accumulation + noise_addition_scalar * array_ops.gather( remaining_step_noise_additions, indices=remaining_whole_steps) def _whole_period_folding(self): """Decides whether computing a whole period maintains alignment.""" return math_ops.logical_and( self._close_to_integer(self._true_periodicity), math_ops.greater_equal(self._true_periodicity, self._num_latent_values)) def _power_sum_array(self, max_remaining_steps): r"""Computes \sum_{i=0}^{N-1} A^i B (A^i)^T for N=0..max_remaining_steps. A is the transition matrix and B is the noise covariance. This is more efficient in practice than math_utils.power_sums_tensor, since each A^i B (A^i)^T term has a closed-form expression not depending on i - 1. Thus vectorization can replace explicit looping. Uses a cumulative sum on the following expression: (transition^p * transition_covariance * (transition^p)^T)_{i, j} = (-1)^(i + j) * sin^2(pi * p) / num_latent_values^2 * (1/sin(pi / num_latent_values * (p - i)) + 1/sin(pi / num_latent_values * (p - i - 1))) * (1/sin(pi / num_latent_values * (p - j)) + 1/sin(pi / num_latent_values * (p - j - 1))) The expression being derived from the eigenvectors and eigenvalues given in the class docstring (and as with CycleStateSpaceModel taking advantage of the sparsity of the transition covariance). 
Args: max_remaining_steps: A scalar integer Tensor indicating the number of non-trivial values to compute. Returns: A [max_remaining_steps + 1, self._num_latent_values - 1, self._num_latent_values - 1] floating point Tensor S with cumulative power sums. S[N] = \sum_{i=0}^{N-1} A^i B (A^i)^T S[0] is the zero matrix S[1] is B S[2] is A B A^T + B """ num_latent_values_float = math_ops.cast(self._num_latent_values, self.dtype) latent_values_per_period = (num_latent_values_float / math_ops.cast( self._true_periodicity, dtype=self.dtype)) original_matrix_powers = (math_ops.cast( math_ops.range(max_remaining_steps), self.dtype) * latent_values_per_period) matrix_dimension_range = math_ops.range( self._num_latent_values - 1)[None, ...] matrix_dimension_range_float = math_ops.cast(matrix_dimension_range, self.dtype) def _cosecant_with_freq(coefficient): return 1. / math_ops.sin(numpy.pi / num_latent_values_float * coefficient) power_minus_index = (original_matrix_powers[..., None] - matrix_dimension_range_float) mesh_values = (_cosecant_with_freq(power_minus_index) + _cosecant_with_freq(power_minus_index - 1.)) meshed = mesh_values[..., None, :] * mesh_values[..., None] full_matrix_alternating = math_ops.cast(1 - 2 * ( (matrix_dimension_range[..., None, :] + matrix_dimension_range[..., None]) % 2), self.dtype) def _sine_discontinuity(value): """A special case for dealing with discontinuities. Decides whether `value` is close to an integer, and if so computes: lim x->n |sin(x * pi)| / sin(x * pi) = sign(sin(n * pi)) = cos(n * pi) Args: value: The floating point Tensor value which may lead to a discontinuity. Returns: A tuple of (is_discontinuous, sign): is_discontinuous: A boolean Tensor of the same shape as `value`, indicating whether it is near an integer. sign: A floating point Tensor indicating the sign of the discontinuity (being near 1 or -1 when `is_discontinuous` is True), of the same shape and type as `value`. 
""" normalized = value / num_latent_values_float is_discontinuous = self._close_to_integer(normalized) sign = math_ops.cos(normalized * numpy.pi) return is_discontinuous, sign index_discontinuous, index_sign = _sine_discontinuity( original_matrix_powers[..., None] - matrix_dimension_range_float) index_minus_discontinuous, index_minus_sign = _sine_discontinuity( original_matrix_powers[..., None] - matrix_dimension_range_float - 1) ones_mask_vector = math_ops.logical_or(index_discontinuous, index_minus_discontinuous) ones_sign_vector = array_ops.where(index_discontinuous, index_sign, index_minus_sign) ones_mask = math_ops.logical_and(ones_mask_vector[..., None], ones_mask_vector[..., None, :]) zeros_mask = self._close_to_integer(original_matrix_powers) zeroed = array_ops.where(zeros_mask, array_ops.zeros_like(meshed), meshed) global_coefficient = (math_ops.sin(numpy.pi * original_matrix_powers) / num_latent_values_float) masked_meshed = array_ops.where( ones_mask, ones_sign_vector[..., None] * ones_sign_vector[..., None, :], zeroed * global_coefficient[..., None, None]**2) powers_above_zero = full_matrix_alternating * masked_meshed return array_ops.pad( math_ops.cumsum(powers_above_zero), [(1, 0), (0, 0), (0, 0)])
apache-2.0
tectronics/syncless
examples/demo_orig_webapp.py
5
1089
#! /usr/local/bin/stackless2.6 # Example invocation: PYTHONPATH="$HOME/prg/google_appengine/google/appengine/ext:$HOME/prg/google_appengine/lib/webob" ./examples/demo_orig_webapp.py try: from google.appengine.ext import webapp except ImportError: import webapp class MainPage(webapp.RequestHandler): def get(self): self.response.out.write( '<html><body><form action="/hello" method="post">' 'Name: <input name="name" type="text" size="20"> ' '<input type="submit" value="Say Hello"></form></body></html>') class HelloPage(webapp.RequestHandler): def post(self): self.response.headers['Content-Type'] = 'text/plain' self.response.out.write('Hello, %s' % self.request.get('name')) application = webapp.WSGIApplication([ ('/', MainPage), ('/hello', HelloPage) ], debug=True) if __name__ == '__main__': import wsgiref.simple_server server_host = '' server_port = 8080 server = wsgiref.simple_server.make_server( server_host, server_port, application) print 'Serving on %s:%s' % (server_host, server_port) server.serve_forever()
apache-2.0
spunkmars/ProFTPD-Admin
src/proftpd/ftpadmin/templatetags/FtpTags.py
1
6554
#coding=utf-8

"""Template tags for the ProFTPD admin UI.

Builds sort-order strings, per-user/per-group admin URLs, and HTML link
lists of group members for the FTP management templates.
"""

from django import template
from proftpd.ftpadmin.lib.common import initlog
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response, get_object_or_404
from proftpd.ftpadmin.models.ftpusers import Ftpuser
from proftpd.ftpadmin.models.ftpgroups import Ftpgroup

#logger2 = initlog()

register = template.Library()


def do_get_sort_by_url(parser, token):
    """Compile ``{% get_sort_by_url current_sort_by target_sort_by %}``."""
    try:
        tag_name, current_sort_by, target_sort_by = token.split_contents()
    except:
        # BUG FIX: the original indexed the bound method itself
        # (``token.split_contents[0]``), which raised TypeError inside the
        # handler and masked the intended TemplateSyntaxError.  Also use
        # call syntax for ``raise`` (valid in both Python 2 and 3).
        raise template.TemplateSyntaxError("%r tags error" % token.contents.split()[0])
    # Alternative way to resolve template variables, step 1:
    #current_sort_by = parser.compile_filter(current_sort_by)
    #target_sort_by = parser.compile_filter(target_sort_by)
    return FtpXferNode(current_sort_by, target_sort_by)


class FtpXferNode(template.Node):
    """Renders the next sort key: '-' prefixed when the column is already
    the current sort column (i.e. toggles ascending/descending)."""

    def __init__(self, current_sort_by, target_sort_by):
        # Resolve both tag arguments as template variables at render time.
        self.current_sort_by = template.Variable(current_sort_by)
        self.target_sort_by = template.Variable(target_sort_by)

    def render(self, context):
        sort_by = self.current_sort_by.resolve(context)
        target_sort_by = self.target_sort_by.resolve(context)
        if (sort_by == target_sort_by):
            # Same column clicked again: flip to descending order.
            output_sort_by = '-' + target_sort_by
        else:
            output_sort_by = target_sort_by
        return output_sort_by

register.tag('get_sort_by_url', do_get_sort_by_url)

#----------------------------------------------------------------------------

def do_get_user_url_by_username(parser, token):
    """Compile ``{% get_user_url_by_username action username %}``."""
    try:
        tag_name, do_action, user_name = token.split_contents()
    except:
        # BUG FIX: see do_get_sort_by_url — was ``token.split_contents[0]``.
        raise template.TemplateSyntaxError("%r tags error" % token.contents.split()[0])
    return FtpUserNode1(do_action, user_name)


class FtpUserNode1(template.Node):
    """Renders the edit/del/detail admin URL for an FTP user."""

    def __init__(self, do_action, user_name):
        self.user_name = template.Variable(user_name)
        self.do_action = template.Variable(do_action)

    def render(self, context):
        user_name = self.user_name.resolve(context)
        do_action = self.do_action.resolve(context)
        user_detail_url = ''
        # Map the requested action to a named URL pattern; unknown actions
        # fall back to the detail view.
        url_type = 'ftpuser_user_detail'
        if do_action == 'edit':
            url_type = 'ftpuser_edit_user'
        elif do_action == 'del':
            url_type = 'ftpuser_del_user'
        elif do_action == 'detail':
            url_type = 'ftpuser_user_detail'
        ftpuser = get_object_or_404(Ftpuser, username=user_name)
        if ftpuser:
            user_detail_url = reverse(url_type, args=[ftpuser.id])
        return user_detail_url

register.tag('get_user_url_by_username', do_get_user_url_by_username)

#----------------------------------------------------------------------------

def do_get_user_group_url_by_username(parser, token):
    """Compile ``{% get_user_group_url_by_username username %}``."""
    try:
        tag_name, user_name = token.split_contents()
    except:
        # BUG FIX: see do_get_sort_by_url — was ``token.split_contents[0]``.
        raise template.TemplateSyntaxError("%r tags error" % token.contents.split()[0])
    return FtpUserGroupNode1(user_name)


class FtpUserGroupNode1(template.Node):
    """Renders the group-edit URL for the group belonging to a user."""

    def __init__(self, user_name):
        self.user_name = template.Variable(user_name)

    def render(self, context):
        user_name = self.user_name.resolve(context)
        group_edit_url = ''
        ftpuser = get_object_or_404(Ftpuser, username=user_name)
        if ftpuser:
            # NOTE(review): looks up the group with ``pk=ftpuser.id`` — this
            # assumes user ids and group ids coincide; verify against the
            # schema before relying on it.
            ftpgroup = get_object_or_404(Ftpgroup, pk=ftpuser.id)
            if ftpgroup:
                group_edit_url = reverse('ftpgroup_edit_group', args=[ftpgroup.id])
        return group_edit_url

register.tag('get_user_group_url_by_username', do_get_user_group_url_by_username)


def do_get_user_group_url_by_groupname(parser, token):
    """Compile ``{% get_user_group_url_by_groupname action groupname %}``."""
    try:
        tag_name, do_action, group_name = token.split_contents()
    except:
        # BUG FIX: see do_get_sort_by_url — was ``token.split_contents[0]``.
        raise template.TemplateSyntaxError("%r tags error" % token.contents.split()[0])
    return FtpUserGroupNode2(do_action, group_name)


class FtpUserGroupNode2(template.Node):
    """Renders the edit/del/detail admin URL for an FTP group."""

    def __init__(self, do_action, group_name):
        self.group_name = template.Variable(group_name)
        self.do_action = template.Variable(do_action)

    def render(self, context):
        group_name = self.group_name.resolve(context)
        do_action = self.do_action.resolve(context)
        group_edit_url = ''
        # Map the requested action to a named URL pattern; unknown actions
        # fall back to the detail view.
        url_type = 'ftpgroup_group_detail'
        if do_action == 'edit':
            url_type = 'ftpgroup_edit_group'
        elif do_action == 'del':
            url_type = 'ftpgroup_del_group'
        elif do_action == 'detail':
            url_type = 'ftpgroup_group_detail'
        ftpgroup = get_object_or_404(Ftpgroup, groupname=group_name)
        if ftpgroup:
            group_edit_url = reverse(url_type, args=[ftpgroup.id])
        return group_edit_url

register.tag('get_user_group_url_by_groupname', do_get_user_group_url_by_groupname)

#----------------------------------------------------------------------------

def do_get_group_member_html_context(parser, token):
    """Compile ``{% get_group_member_html_context member_string %}``."""
    try:
        tag_name, mem_str = token.split_contents()
    except:
        # BUG FIX: see do_get_sort_by_url — was ``token.split_contents[0]``.
        raise template.TemplateSyntaxError("%r tags error" % token.contents.split()[0])
    return FtpUserGroupNode3(mem_str)


class FtpUserGroupNode3(template.Node):
    """Turns a comma-separated member string into a list of search links."""

    def __init__(self, mem_str):
        self.mem_str = template.Variable(mem_str)

    def render(self, context):
        mem_str = self.mem_str.resolve(context)
        mem_html_context = ''
        url_array = []
        mem_array = []
        if mem_str:
            mem_array = mem_str.split(',')
            list_url = reverse('ftpuser_list_user')
            for member in mem_array:
                # NOTE(review): ``member`` is inserted into HTML and a query
                # string without escaping — potential XSS/encoding issue if
                # member names can contain markup; confirm input constraints.
                search_url = list_url + '?q=' + member
                url_array.append('<a href="' + search_url + '">' + member + '</a>')
            mem_html_context = ',&nbsp;'.join(url_array)
        return mem_html_context

register.tag('get_group_member_html_context', do_get_group_member_html_context)
bsd-3-clause
mims2707/bite-project
deps/gdata-python-client/samples/apps/marketplace_sample/atom/mock_service.py
277
10350
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


"""MockService provides CRUD ops. for mocking calls to AtomPub services.

  MockService: Exposes the publicly used methods of AtomService to provide
      a mock interface which can be used in unit tests.
"""

import atom.service
import pickle


__author__ = 'api.jscudder (Jeffrey Scudder)'


# Recordings contains pairings of HTTP MockRequest objects with
# MockHttpResponse objects.
recordings = []
# If set, the mock service HttpRequest are actually made through this object.
real_request_handler = None


def ConcealValueWithSha(source):
  # Hash all but the last five characters so recordings can be stored
  # without leaking the full secret.  NOTE(review): the ``sha`` module is
  # Python 2 only (use hashlib on Python 3).
  import sha
  return sha.new(source[:-5]).hexdigest()


def DumpRecordings(conceal_func=ConcealValueWithSha):
  """Serializes the recordings list, scrubbing secrets from each request."""
  if conceal_func:
    for recording_pair in recordings:
      recording_pair[0].ConcealSecrets(conceal_func)
  return pickle.dumps(recordings)


def LoadRecordings(recordings_file_or_string):
  """Replaces the module-level recordings from a pickle string or file."""
  if isinstance(recordings_file_or_string, str):
    atom.mock_service.recordings = pickle.loads(recordings_file_or_string)
  elif hasattr(recordings_file_or_string, 'read'):
    atom.mock_service.recordings = pickle.loads(
        recordings_file_or_string.read())


def HttpRequest(service, operation, data, uri, extra_headers=None,
    url_params=None, escape_params=True, content_type='application/atom+xml'):
  """Simulates an HTTP call to the server, makes an actual HTTP request if
  real_request_handler is set.

  This function operates in two different modes depending on if
  real_request_handler is set or not. If real_request_handler is not set,
  HttpRequest will look in this module's recordings list to find a response
  which matches the parameters in the function call. If real_request_handler
  is set, this function will call real_request_handler.HttpRequest, add the
  response to the recordings list, and respond with the actual response.

  Args:
    service: atom.AtomService object which contains some of the parameters
        needed to make the request. The following members are used to
        construct the HTTP call: server (str), additional_headers (dict),
        port (int), and ssl (bool).
    operation: str The HTTP operation to be performed. This is usually one of
        'GET', 'POST', 'PUT', or 'DELETE'
    data: ElementTree, filestream, list of parts, or other object which can be
        converted to a string. Should be set to None when performing a GET or
        PUT. If data is a file-like object which can be read, this method will
        read a chunk of 100K bytes at a time and send them. If the data is a
        list of parts to be sent, each part will be evaluated and sent.
    uri: The beginning of the URL to which the request should be sent.
        Examples: '/', '/base/feeds/snippets',
        '/m8/feeds/contacts/default/base'
    extra_headers: dict of strings. HTTP headers which should be sent in the
        request. These headers are in addition to those stored in
        service.additional_headers.
    url_params: dict of strings. Key value pairs to be added to the URL as
        URL parameters. For example {'foo':'bar', 'test':'param'} will
        become ?foo=bar&test=param.
    escape_params: bool default True. If true, the keys and values in
        url_params will be URL escaped when the form is constructed
        (Special characters converted to %XX form.)
    content_type: str The MIME type for the data being sent. Defaults to
        'application/atom+xml', this is only used if data is set.
  """
  full_uri = atom.service.BuildUri(uri, url_params, escape_params)
  (server, port, ssl, uri) = atom.service.ProcessUrl(service, uri)
  current_request = MockRequest(operation, full_uri, host=server, ssl=ssl,
      data=data, extra_headers=extra_headers, url_params=url_params,
      escape_params=escape_params, content_type=content_type)
  # If the request handler is set, we should actually make the request using
  # the request handler and record the response to replay later.
  if real_request_handler:
    response = real_request_handler.HttpRequest(service, operation, data, uri,
        extra_headers=extra_headers, url_params=url_params,
        escape_params=escape_params, content_type=content_type)
    # TODO: need to copy the HTTP headers from the real response into the
    # recorded_response.
    recorded_response = MockHttpResponse(body=response.read(),
        status=response.status, reason=response.reason)
    # Insert a tuple which maps the request to the response object returned
    # when making an HTTP call using the real_request_handler.
    recordings.append((current_request, recorded_response))
    return recorded_response
  else:
    # Look through available recordings to see if one matches the current
    # request.
    for request_response_pair in recordings:
      if request_response_pair[0].IsMatch(current_request):
        return request_response_pair[1]
  # No recording matched: callers receive None rather than an exception.
  return None


class MockRequest(object):
  """Represents a request made to an AtomPub server.

  These objects are used to determine if a client request matches a recorded
  HTTP request to determine what the mock server's response will be.
  """

  def __init__(self, operation, uri, host=None, ssl=False, port=None,
      data=None, extra_headers=None, url_params=None, escape_params=True,
      content_type='application/atom+xml'):
    """Constructor for a MockRequest

    Args:
      operation: str One of 'GET', 'POST', 'PUT', or 'DELETE' this is the
          HTTP operation requested on the resource.
      uri: str The URL describing the resource to be modified or feed to be
          retrieved. This should include the protocol (http/https) and the
          host (aka domain). For example, these are some valid full_uris:
          'http://example.com', 'https://www.google.com/accounts/ClientLogin'
      host: str (optional) The server name which will be placed at the
          beginning of the URL if the uri parameter does not begin with
          'http'. Examples include 'example.com', 'www.google.com',
          'www.blogger.com'.
      ssl: boolean (optional) If true, the request URL will begin with https
          instead of http.
      data: ElementTree, filestream, list of parts, or other object which can
          be converted to a string. (optional) Should be set to None when
          performing a GET or PUT. If data is a file-like object which can be
          read, the constructor will read the entire file into memory. If the
          data is a list of parts to be sent, each part will be evaluated and
          stored.
      extra_headers: dict (optional) HTTP headers included in the request.
      url_params: dict (optional) Key value pairs which should be added to
          the URL as URL parameters in the request. For example
          uri='/', url_parameters={'foo':'1','bar':'2'} could become
          '/?foo=1&bar=2'.
      escape_params: boolean (optional) Perform URL escaping on the keys and
          values specified in url_params. Defaults to True.
      content_type: str (optional) Provides the MIME type of the data being
          sent.
    """
    self.operation = operation
    # Canonicalize the URI so later IsMatch comparisons are host-aware.
    self.uri = _ConstructFullUrlBase(uri, host=host, ssl=ssl)
    self.data = data
    self.extra_headers = extra_headers
    self.url_params = url_params or {}
    self.escape_params = escape_params
    self.content_type = content_type

  def ConcealSecrets(self, conceal_func):
    """Conceal secret data in this request."""
    # NOTE(review): dict.has_key is Python 2 only; this module predates
    # Python 3 ('in' would be the portable spelling).
    if self.extra_headers.has_key('Authorization'):
      self.extra_headers['Authorization'] = conceal_func(
          self.extra_headers['Authorization'])

  def IsMatch(self, other_request):
    """Check to see if the other_request is equivalent to this request.

    Used to determine if a recording matches an incoming request so that a
    recorded response should be sent to the client.

    The matching is not exact, only the operation and URL are examined
    currently.

    Args:
      other_request: MockRequest The request which we want to check this
          (self) MockRequest against to see if they are equivalent.
    """
    # More accurate matching logic will likely be required.
    return (self.operation == other_request.operation
            and self.uri == other_request.uri)


def _ConstructFullUrlBase(uri, host=None, ssl=False):
  """Puts URL components into the form http(s)://full.host.string/uri/path

  Used to construct a roughly canonical URL so that URLs which begin with
  'http://example.com/' can be compared to a uri of '/' when the host is
  set to 'example.com'

  If the uri contains 'http://host' already, the host and ssl parameters
  are ignored.

  Args:
    uri: str The path component of the URL, examples include '/'
    host: str (optional) The host name which should prepend the URL. Example:
        'example.com'
    ssl: boolean (optional) If true, the returned URL will begin with https
        instead of http.

  Returns:
    String which has the form http(s)://example.com/uri/string/contents
  """
  if uri.startswith('http'):
    return uri
  if ssl:
    return 'https://%s%s' % (host, uri)
  else:
    return 'http://%s%s' % (host, uri)


class MockHttpResponse(object):
  """Returned from MockService crud methods as the server's response."""

  def __init__(self, body=None, status=None, reason=None, headers=None):
    """Construct a mock HTTPResponse and set members.

    Args:
      body: str (optional) The HTTP body of the server's response.
      status: int (optional)
      reason: str (optional)
      headers: dict (optional)
    """
    self.body = body
    self.status = status
    self.reason = reason
    self.headers = headers or {}

  def read(self):
    # Mirrors httplib's response interface so callers can treat this like a
    # real response object.
    return self.body

  def getheader(self, header_name):
    # NOTE(review): raises KeyError for absent headers, unlike
    # httplib.HTTPResponse.getheader which returns a default.
    return self.headers[header_name]
apache-2.0
RichardLitt/wyrd-django-dev
tests/regressiontests/queryset_pickle/models.py
65
1056
from __future__ import absolute_import

import datetime

from django.db import models
from django.utils.translation import ugettext_lazy as _


def standalone_number(self):
    # Module-level callable used as a field default below; part of the
    # queryset-pickling regression fixtures.
    # NOTE(review): takes ``self`` although Django calls field defaults with
    # no arguments — presumably the default is never evaluated in these
    # tests; confirm before reusing elsewhere.
    return 1


class Numbers(object):
    # Exposes defaults as static/class/instance methods so the tests can
    # check that each kind of callable default survives queryset pickling.
    @staticmethod
    def get_static_number(self):
        return 2

    @classmethod
    def get_class_number(self):
        return 3

    def get_member_number(self):
        return 4

# Instance whose bound method is used as a default on Happening.number4.
nn = Numbers()


class Group(models.Model):
    name = models.CharField(_('name'), max_length=100)


class Event(models.Model):
    group = models.ForeignKey(Group)


class Happening(models.Model):
    # Each field uses a different flavor of callable default (function,
    # lambda, staticmethod, classmethod, bound method) for pickling tests.
    when = models.DateTimeField(blank=True, default=datetime.datetime.now)
    name = models.CharField(blank=True, max_length=100, default=lambda:"test")
    number1 = models.IntegerField(blank=True, default=standalone_number)
    number2 = models.IntegerField(blank=True, default=Numbers.get_static_number)
    number3 = models.IntegerField(blank=True, default=Numbers.get_class_number)
    number4 = models.IntegerField(blank=True, default=nn.get_member_number)
bsd-3-clause
sklnet/openatv-enigma2
lib/python/Components/ConditionalWidget.py
84
1700
from GUIComponent import GUIComponent
from enigma import eTimer

class ConditionalWidget(GUIComponent):
	"""Widget that shows or hides itself based on a polled condition function."""

	def __init__(self, withTimer = True):
		GUIComponent.__init__(self)
		self.setConnect(None)
		if withTimer:
			# Re-evaluate the condition once per second.
			self.conditionCheckTimer = eTimer()
			self.conditionCheckTimer.callback.append(self.update)
			self.conditionCheckTimer.start(1000)

	def postWidgetCreate(self, instance):
		# Start hidden until the condition first evaluates to true.
		self.visible = 0

	def setConnect(self, conditionalFunction):
		# conditionalFunction: callable returning a truth value, or None to disable.
		self.conditionalFunction = conditionalFunction

	def activateCondition(self, condition):
		if condition:
			self.visible = 1
		else:
			self.visible = 0

	def update(self):
		if self.conditionalFunction is not None:
			try:
				self.activateCondition(self.conditionalFunction())
			except:
				# Condition raised: disable it permanently and hide the widget.
				# NOTE(review): the bare except also swallows programming errors
				# in the condition function — consider logging before disabling.
				self.conditionalFunction = None
				self.activateCondition(False)

class BlinkingWidget(GUIComponent):
	"""Widget that toggles its visibility on a fixed interval while blinking."""

	def __init__(self):
		GUIComponent.__init__(self)
		self.blinking = False
		self.setBlinkTime(500)
		self.timer = eTimer()
		self.timer.callback.append(self.blink)

	def setBlinkTime(self, time):
		# time: blink half-period in milliseconds.
		self.blinktime = time

	def blink(self):
		# Timer callback: only toggle while blinking is active.
		if self.blinking:
			self.visible = not self.visible

	def startBlinking(self):
		self.blinking = True
		self.timer.start(self.blinktime)

	def stopBlinking(self):
		self.blinking = False
		# Ensure the widget does not stay stuck in the visible phase.
		if self.visible:
			self.hide()
		self.timer.stop()

class BlinkingWidgetConditional(BlinkingWidget, ConditionalWidget):
	"""Blinking widget whose blinking is started/stopped by a condition."""

	def __init__(self):
		BlinkingWidget.__init__(self)
		ConditionalWidget.__init__(self)

	def activateCondition(self, condition):
		# (Comments below fixed: the originals described the opposite state.)
		if condition:
			if not self.blinking: # not blinking yet -> start
				self.startBlinking()
		else:
			if self.blinking: # currently blinking -> stop
				self.stopBlinking()
gpl-2.0
myFengo2015/volatility
volatility/plugins/mac/mac_yarascan.py
44
3999
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#

import volatility.plugins.malware.malfind as malfind
import volatility.plugins.mac.pstasks as pstasks
import volatility.plugins.mac.common as common
import volatility.utils as utils
import volatility.debug as debug
import volatility.obj as obj

# yara is an optional dependency; the plugin checks ``has_yara`` at runtime.
try:
    import yara
    has_yara = True
except ImportError:
    has_yara = False

class MapYaraScanner(malfind.BaseYaraScanner):
    """A scanner over all memory regions of a process."""

    def __init__(self, task = None, **kwargs):
        """Scan the process address space through the VMAs.

        Args:
          task: The task_struct object for this task.
        """
        self.task = task
        malfind.BaseYaraScanner.__init__(self, address_space = task.get_process_address_space(), **kwargs)

    def scan(self, offset = 0, maxlen = None):
        # Walk every memory mapping of the task and scan each range with the
        # compiled rules; yields whatever the base scanner yields.
        for map in self.task.get_proc_maps():
            for match in malfind.BaseYaraScanner.scan(self, map.links.start, map.links.end - map.links.start):
                yield match

class mac_yarascan(malfind.YaraScan):
    """Scan memory for yara signatures"""

    @staticmethod
    def is_valid_profile(profile):
        # Plugin only applies to Mac memory images.
        return profile.metadata.get('os', 'Unknown').lower() == 'mac'

    def calculate(self):
        ## we need this module imported
        if not has_yara:
            debug.error("Please install Yara from code.google.com/p/yara-project")

        ## leveraged from the windows yarascan plugin
        rules = self._compile_rules()

        ## set the linux plugin address spaces
        common.set_plugin_members(self)

        if self._config.KERNEL:
            # Pick the kernel base address by memory model; constants taken
            # from the XNU source (see URL below).
            ## http://fxr.watson.org/fxr/source/osfmk/mach/i386/vm_param.h?v=xnu-2050.18.24
            if self.addr_space.profile.metadata.get('memory_model', '32bit') == "32bit":
                if not common.is_64bit_capable(self.addr_space):
                    kernel_start = 0
                else:
                    kernel_start = 0xc0000000
            else:
                kernel_start = 0xffffff8000000000

            scanner = malfind.DiscontigYaraScanner(rules = rules, address_space = self.addr_space)
            for hit, address in scanner.scan(start_offset = kernel_start):
                # 64-byte zread gives a context preview around each hit.
                yield (None, address, hit, scanner.address_space.zread(address, 64))
        else:
            # Scan each process memory block
            for task in pstasks.mac_tasks(self._config).calculate():
                scanner = MapYaraScanner(task = task, rules = rules)
                for hit, address in scanner.scan():
                    yield (task, address, hit, scanner.address_space.zread(address, 64))

    def render_text(self, outfd, data):
        # Each result is (task-or-None, hit address, rule match, 64-byte buffer).
        for task, address, hit, buf in data:
            if task:
                outfd.write("Task: {0} pid {1} rule {2} addr {3:#x}\n".format(
                    task.p_comm, task.p_pid, hit.rule, address))
            else:
                outfd.write("[kernel] rule {0} addr {1:#x}\n".format(hit.rule, address))

            # Hexdump the preview buffer, offsetting each row by the hit address.
            outfd.write("".join(["{0:#018x} {1:<48} {2}\n".format(
                address + o, h, ''.join(c)) for o, h, c in utils.Hexdump(buf)]))
gpl-2.0
mvtuong/Yelp-Challenge
LIBSVM/tools/subset.py
124
3202
#!/usr/bin/env python

"""Randomly select a subset of a LIBSVM-format dataset.

Supports stratified selection (default; preserves the class distribution of
the original data) and plain random selection.
"""

import os, sys, math, random
from collections import defaultdict

if sys.version_info[0] >= 3:
    xrange = range


def exit_with_help(argv):
    """Print usage to stdout and exit with status 1."""
    print("""\
Usage: {0} [options] dataset subset_size [output1] [output2]

This script randomly selects a subset of the dataset.

options:
-s method : method of selection (default 0)
     0 -- stratified selection (classification only)
     1 -- random selection

output1 : the subset (optional)
output2 : rest of the data (optional)
If output1 is omitted, the subset will be printed on the screen.""".format(argv[0]))
    exit(1)


def process_options(argv):
    """Parse command-line arguments.

    Returns:
      (dataset_path, subset_size, method, subset_file, rest_file) where
      subset_file defaults to sys.stdout and rest_file may be None.
    """
    argc = len(argv)
    if argc < 3:
        exit_with_help(argv)

    # default method is stratified selection
    method = 0
    subset_file = sys.stdout
    rest_file = None

    i = 1
    while i < argc:
        if argv[i][0] != "-":
            break
        if argv[i] == "-s":
            i = i + 1
            method = int(argv[i])
            if method not in [0, 1]:
                print("Unknown selection method {0}".format(method))
                exit_with_help(argv)
        i = i + 1

    dataset = argv[i]
    subset_size = int(argv[i + 1])
    if i + 2 < argc:
        subset_file = open(argv[i + 2], 'w')
    if i + 3 < argc:
        rest_file = open(argv[i + 3], 'w')

    return dataset, subset_size, method, subset_file, rest_file


def random_selection(dataset, subset_size):
    """Return `subset_size` distinct line numbers of `dataset`, sorted."""
    l = sum(1 for line in open(dataset, 'r'))
    return sorted(random.sample(xrange(l), subset_size))


def stratified_selection(dataset, subset_size):
    """Return sorted line numbers sampled proportionally per class label.

    Guarantees at least one instance per class; exits with an error message
    when that is impossible (e.g. regression data or too-small classes).
    """
    labels = [line.split(None, 1)[0] for line in open(dataset)]
    label_linenums = defaultdict(list)
    for i, label in enumerate(labels):
        label_linenums[label] += [i]

    l = len(labels)
    remaining = subset_size
    ret = []

    # classes with fewer data are sampled first; otherwise
    # some rare classes may not be selected
    for label in sorted(label_linenums, key=lambda x: len(label_linenums[x])):
        linenums = label_linenums[label]
        label_size = len(linenums)
        # at least one instance per class
        s = int(min(remaining, max(1, math.ceil(label_size * (float(subset_size) / l)))))
        if s == 0:
            sys.stderr.write('''\
Error: failed to have at least one instance per class
1. You may have regression data.
2. Your classification data is unbalanced or too small.
Please use -s 1.
''')
            sys.exit(-1)
        remaining -= s
        ret += [linenums[i] for i in random.sample(xrange(label_size), s)]
    return sorted(ret)


def main(argv=sys.argv):
    dataset, subset_size, method, subset_file, rest_file = process_options(argv)
    # uncomment the following line to fix the random seed
    # random.seed(0)
    selected_lines = []

    if method == 0:
        selected_lines = stratified_selection(dataset, subset_size)
    elif method == 1:
        selected_lines = random_selection(dataset, subset_size)

    # select instances based on selected_lines
    dataset = open(dataset, 'r')
    prev_selected_linenum = -1
    for i in xrange(len(selected_lines)):
        # Stream non-selected lines into rest_file (if any) until the next
        # selected line, then emit the selected line into subset_file.
        for cnt in xrange(selected_lines[i] - prev_selected_linenum - 1):
            line = dataset.readline()
            if rest_file:
                rest_file.write(line)
        subset_file.write(dataset.readline())
        prev_selected_linenum = selected_lines[i]
    # BUG FIX: only close subset_file when it is a real file. The original
    # closed it unconditionally, which closed sys.stdout whenever output1
    # was omitted (the documented print-to-screen mode).
    if subset_file is not sys.stdout:
        subset_file.close()

    if rest_file:
        for line in dataset:
            rest_file.write(line)
        rest_file.close()
    dataset.close()


if __name__ == '__main__':
    main(sys.argv)
apache-2.0
Avinash-Raj/appengine-django-skeleton
lib/django/templatetags/i18n.py
219
19311
"""Template tags and filters for internationalization (i18n).

Provides ``{% trans %}``, ``{% blocktrans %}``, ``{% language %}`` and the
``get_*_language*`` tags, plus ``language_name``-style filters, backed by
``django.utils.translation``.
"""
from __future__ import unicode_literals

import sys

from django.conf import settings
from django.template import Library, Node, TemplateSyntaxError, Variable
from django.template.base import TOKEN_TEXT, TOKEN_VAR, render_value_in_context
from django.template.defaulttags import token_kwargs
from django.utils import six, translation
from django.utils.safestring import SafeData, mark_safe

register = Library()


class GetAvailableLanguagesNode(Node):
    # Renders nothing; stores [(code, translated_name), ...] from
    # settings.LANGUAGES into the named context variable.
    def __init__(self, variable):
        self.variable = variable

    def render(self, context):
        context[self.variable] = [(k, translation.ugettext(v)) for k, v in settings.LANGUAGES]
        return ''


class GetLanguageInfoNode(Node):
    # Stores the language-info dict for one (resolved) language code.
    def __init__(self, lang_code, variable):
        self.lang_code = lang_code
        self.variable = variable

    def render(self, context):
        lang_code = self.lang_code.resolve(context)
        context[self.variable] = translation.get_language_info(lang_code)
        return ''


class GetLanguageInfoListNode(Node):
    # Stores a list of language-info dicts for a sequence of language
    # codes (or of (code, name) pairs, as in settings.LANGUAGES).
    def __init__(self, languages, variable):
        self.languages = languages
        self.variable = variable

    def get_language_info(self, language):
        # ``language`` is either a language code string or a sequence
        # with the language code as its first item
        if len(language[0]) > 1:
            return translation.get_language_info(language[0])
        else:
            return translation.get_language_info(str(language))

    def render(self, context):
        langs = self.languages.resolve(context)
        context[self.variable] = [self.get_language_info(lang) for lang in langs]
        return ''


class GetCurrentLanguageNode(Node):
    # Stores the currently active language code.
    def __init__(self, variable):
        self.variable = variable

    def render(self, context):
        context[self.variable] = translation.get_language()
        return ''


class GetCurrentLanguageBidiNode(Node):
    # Stores True when the active language is right-to-left.
    def __init__(self, variable):
        self.variable = variable

    def render(self, context):
        context[self.variable] = translation.get_language_bidi()
        return ''


class TranslateNode(Node):
    # Node behind {% trans %}: translates a single string (or variable),
    # optionally with a message context, optionally storing into `asvar`.
    def __init__(self, filter_expression, noop, asvar=None,
                 message_context=None):
        self.noop = noop
        self.asvar = asvar
        self.message_context = message_context
        self.filter_expression = filter_expression
        # A bare string literal is wrapped as a quoted Variable so the
        # normal filter-expression machinery can translate it.
        if isinstance(self.filter_expression.var, six.string_types):
            self.filter_expression.var = Variable("'%s'" %
                                                  self.filter_expression.var)

    def render(self, context):
        self.filter_expression.var.translate = not self.noop
        if self.message_context:
            self.filter_expression.var.message_context = (
                self.message_context.resolve(context))
        output = self.filter_expression.resolve(context)
        value = render_value_in_context(output, context)
        # Restore percent signs. Percent signs in template text are doubled
        # so they are not interpreted as string format flags.
        is_safe = isinstance(value, SafeData)
        value = value.replace('%%', '%')
        value = mark_safe(value) if is_safe else value
        if self.asvar:
            context[self.asvar] = value
            return ''
        else:
            return value


class BlockTranslateNode(Node):
    # Node behind {% blocktrans %}: builds a gettext message from the
    # block's text/variable tokens, translates it (optionally pluralized
    # via `countervar`/`counter`), then %-formats the variables back in.
    def __init__(self, extra_context, singular, plural=None, countervar=None,
                 counter=None, message_context=None, trimmed=False, asvar=None):
        self.extra_context = extra_context
        self.singular = singular
        self.plural = plural
        self.countervar = countervar
        self.counter = counter
        self.message_context = message_context
        self.trimmed = trimmed
        self.asvar = asvar

    def render_token_list(self, tokens):
        # Turn the token stream into a %-format msgid: literal text has its
        # '%' escaped, {{ var }} tokens become '%(var)s' placeholders.
        result = []
        vars = []
        for token in tokens:
            if token.token_type == TOKEN_TEXT:
                result.append(token.contents.replace('%', '%%'))
            elif token.token_type == TOKEN_VAR:
                result.append('%%(%s)s' % token.contents)
                vars.append(token.contents)
        msg = ''.join(result)
        if self.trimmed:
            msg = translation.trim_whitespace(msg)
        return msg, vars

    def render(self, context, nested=False):
        if self.message_context:
            message_context = self.message_context.resolve(context)
        else:
            message_context = None
        tmp_context = {}
        for var, val in self.extra_context.items():
            tmp_context[var] = val.resolve(context)
        # Update() works like a push(), so corresponding context.pop() is at
        # the end of function
        context.update(tmp_context)
        singular, vars = self.render_token_list(self.singular)
        if self.plural and self.countervar and self.counter:
            count = self.counter.resolve(context)
            context[self.countervar] = count
            plural, plural_vars = self.render_token_list(self.plural)
            if message_context:
                result = translation.npgettext(message_context, singular,
                                               plural, count)
            else:
                result = translation.ungettext(singular, plural, count)
            vars.extend(plural_vars)
        else:
            if message_context:
                result = translation.pgettext(message_context, singular)
            else:
                result = translation.ugettext(singular)
        default_value = context.template.engine.string_if_invalid

        def render_value(key):
            # Missing variables fall back to the engine's string_if_invalid
            # (which may itself contain a '%s' placeholder for the name).
            if key in context:
                val = context[key]
            else:
                val = default_value % key if '%s' in default_value else default_value
            return render_value_in_context(val, context)

        data = {v: render_value(v) for v in vars}
        context.pop()
        try:
            result = result % data
        except (KeyError, ValueError):
            if nested:
                # Either string is malformed, or it's a bug
                raise TemplateSyntaxError("'blocktrans' is unable to format "
                    "string returned by gettext: %r using %r" % (result, data))
            # A bad translation broke %-formatting: retry once with
            # translations disabled so the untranslated msgid is used.
            with translation.override(None):
                result = self.render(context, nested=True)
        if self.asvar:
            context[self.asvar] = result
            return ''
        else:
            return result


class LanguageNode(Node):
    # Renders the enclosed nodelist with the given language activated.
    def __init__(self, nodelist, language):
        self.nodelist = nodelist
        self.language = language

    def render(self, context):
        with translation.override(self.language.resolve(context)):
            output = self.nodelist.render(context)
        return output


@register.tag("get_available_languages")
def do_get_available_languages(parser, token):
    """
    This will store a list of available languages
    in the context.

    Usage::

        {% get_available_languages as languages %}
        {% for language in languages %}
        ...
        {% endfor %}

    This will just pull the LANGUAGES setting from
    your setting file (or the default settings) and
    put it into the named variable.
    """
    # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
    args = token.contents.split()
    if len(args) != 3 or args[1] != 'as':
        raise TemplateSyntaxError("'get_available_languages' requires 'as variable' (got %r)" % args)
    return GetAvailableLanguagesNode(args[2])


@register.tag("get_language_info")
def do_get_language_info(parser, token):
    """
    This will store the language information dictionary for the given language
    code in a context variable.

    Usage::

        {% get_language_info for LANGUAGE_CODE as l %}
        {{ l.code }}
        {{ l.name }}
        {{ l.name_translated }}
        {{ l.name_local }}
        {{ l.bidi|yesno:"bi-directional,uni-directional" }}
    """
    args = token.split_contents()
    if len(args) != 5 or args[1] != 'for' or args[3] != 'as':
        raise TemplateSyntaxError("'%s' requires 'for string as variable' (got %r)" % (args[0], args[1:]))
    return GetLanguageInfoNode(parser.compile_filter(args[2]), args[4])


@register.tag("get_language_info_list")
def do_get_language_info_list(parser, token):
    """
    This will store a list of language information dictionaries for the given
    language codes in a context variable. The language codes can be specified
    either as a list of strings or a settings.LANGUAGES style list (or any
    sequence of sequences whose first items are language codes).

    Usage::

        {% get_language_info_list for LANGUAGES as langs %}
        {% for l in langs %}
          {{ l.code }}
          {{ l.name }}
          {{ l.name_translated }}
          {{ l.name_local }}
          {{ l.bidi|yesno:"bi-directional,uni-directional" }}
        {% endfor %}
    """
    args = token.split_contents()
    if len(args) != 5 or args[1] != 'for' or args[3] != 'as':
        raise TemplateSyntaxError("'%s' requires 'for sequence as variable' (got %r)" % (args[0], args[1:]))
    return GetLanguageInfoListNode(parser.compile_filter(args[2]), args[4])


@register.filter
def language_name(lang_code):
    # English name of the language, e.g. 'German'.
    return translation.get_language_info(lang_code)['name']


@register.filter
def language_name_translated(lang_code):
    # Name of the language translated into the currently active language.
    english_name = translation.get_language_info(lang_code)['name']
    return translation.ugettext(english_name)


@register.filter
def language_name_local(lang_code):
    # Name of the language in that language itself, e.g. 'Deutsch'.
    return translation.get_language_info(lang_code)['name_local']


@register.filter
def language_bidi(lang_code):
    # True when the language is written right-to-left.
    return translation.get_language_info(lang_code)['bidi']


@register.tag("get_current_language")
def do_get_current_language(parser, token):
    """
    This will store the current language in the context.

    Usage::

        {% get_current_language as language %}

    This will fetch the currently active language and
    put it's value into the ``language`` context
    variable.
    """
    # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
    args = token.contents.split()
    if len(args) != 3 or args[1] != 'as':
        raise TemplateSyntaxError("'get_current_language' requires 'as variable' (got %r)" % args)
    return GetCurrentLanguageNode(args[2])


@register.tag("get_current_language_bidi")
def do_get_current_language_bidi(parser, token):
    """
    This will store the current language layout in the context.

    Usage::

        {% get_current_language_bidi as bidi %}

    This will fetch the currently active language's layout and
    put it's value into the ``bidi`` context variable.
    True indicates right-to-left layout, otherwise left-to-right
    """
    # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
    args = token.contents.split()
    if len(args) != 3 or args[1] != 'as':
        raise TemplateSyntaxError("'get_current_language_bidi' requires 'as variable' (got %r)" % args)
    return GetCurrentLanguageBidiNode(args[2])


@register.tag("trans")
def do_translate(parser, token):
    """
    This will mark a string for translation and will
    translate the string for the current language.

    Usage::

        {% trans "this is a test" %}

    This will mark the string for translation so it will
    be pulled out by mark-messages.py into the .po files
    and will run the string through the translation engine.

    There is a second form::

        {% trans "this is a test" noop %}

    This will only mark for translation, but will return
    the string unchanged. Use it when you need to store
    values into forms that should be translated later on.

    You can use variables instead of constant strings
    to translate stuff you marked somewhere else::

        {% trans variable %}

    This will just try to translate the contents of
    the variable ``variable``. Make sure that the string
    in there is something that is in the .po file.

    It is possible to store the translated string into a variable::

        {% trans "this is a test" as var %}
        {{ var }}

    Contextual translations are also supported::

        {% trans "this is a test" context "greeting" %}

    This is equivalent to calling pgettext instead of (u)gettext.
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
    message_string = parser.compile_filter(bits[1])
    remaining = bits[2:]

    noop = False
    asvar = None
    message_context = None
    seen = set()
    # 'as' / 'noop' following 'context' would be the next option keyword,
    # not a context value — reject them.
    invalid_context = {'as', 'noop'}

    while remaining:
        option = remaining.pop(0)
        if option in seen:
            raise TemplateSyntaxError(
                "The '%s' option was specified more than once." % option,
            )
        elif option == 'noop':
            noop = True
        elif option == 'context':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the context option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            if value in invalid_context:
                raise TemplateSyntaxError(
                    "Invalid argument '%s' provided to the '%s' tag for the context option" % (value, bits[0]),
                )
            message_context = parser.compile_filter(value)
        elif option == 'as':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the as option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            asvar = value
        else:
            raise TemplateSyntaxError(
                "Unknown argument for '%s' tag: '%s'. The only options "
                "available are 'noop', 'context' \"xxx\", and 'as VAR'." % (
                    bits[0], option,
                )
            )
        seen.add(option)

    return TranslateNode(message_string, noop, asvar, message_context)


@register.tag("blocktrans")
def do_block_translate(parser, token):
    """
    This will translate a block of text with parameters.

    Usage::

        {% blocktrans with bar=foo|filter boo=baz|filter %}
        This is {{ bar }} and {{ boo }}.
        {% endblocktrans %}

    Additionally, this supports pluralization::

        {% blocktrans count count=var|length %}
        There is {{ count }} object.
        {% plural %}
        There are {{ count }} objects.
        {% endblocktrans %}

    This is much like ngettext, only in template syntax.

    The "var as value" legacy format is still supported::

        {% blocktrans with foo|filter as bar and baz|filter as boo %}
        {% blocktrans count var|length as count %}

    The translated string can be stored in a variable using `asvar`::

        {% blocktrans with bar=foo|filter boo=baz|filter asvar var %}
        This is {{ bar }} and {{ boo }}.
        {% endblocktrans %}
        {{ var }}

    Contextual translations are supported::

        {% blocktrans with bar=foo|filter context "greeting" %}
            This is {{ bar }}.
        {% endblocktrans %}

    This is equivalent to calling pgettext/npgettext instead of
    (u)gettext/(u)ngettext.
    """
    bits = token.split_contents()

    options = {}
    remaining_bits = bits[1:]
    asvar = None
    while remaining_bits:
        option = remaining_bits.pop(0)
        if option in options:
            raise TemplateSyntaxError('The %r option was specified more '
                                      'than once.' % option)
        if option == 'with':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if not value:
                raise TemplateSyntaxError('"with" in %r tag needs at least '
                                          'one keyword argument.' % bits[0])
        elif option == 'count':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if len(value) != 1:
                raise TemplateSyntaxError('"count" in %r tag expected exactly '
                                          'one keyword argument.' % bits[0])
        elif option == "context":
            try:
                value = remaining_bits.pop(0)
                value = parser.compile_filter(value)
            except Exception:
                msg = (
                    '"context" in %r tag expected '
                    'exactly one argument.') % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
        elif option == "trimmed":
            value = True
        elif option == "asvar":
            try:
                value = remaining_bits.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the asvar option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            asvar = value
        else:
            raise TemplateSyntaxError('Unknown argument for %r tag: %r.' %
                                      (bits[0], option))
        options[option] = value

    if 'count' in options:
        countervar, counter = list(options['count'].items())[0]
    else:
        countervar, counter = None, None
    if 'context' in options:
        message_context = options['context']
    else:
        message_context = None
    extra_context = options.get('with', {})

    trimmed = options.get("trimmed", False)

    # Consume the block's tokens directly from the parser: text/var tokens
    # up to {% plural %} form the singular message, the rest (when counting)
    # form the plural, terminated by {% endblocktrans %}.
    singular = []
    plural = []
    while parser.tokens:
        token = parser.next_token()
        if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
            singular.append(token)
        else:
            break
    if countervar and counter:
        if token.contents.strip() != 'plural':
            raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags inside it")
        while parser.tokens:
            token = parser.next_token()
            if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
                plural.append(token)
            else:
                break
    if token.contents.strip() != 'endblocktrans':
        raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents)

    return BlockTranslateNode(extra_context, singular, plural, countervar,
                              counter, message_context, trimmed=trimmed,
                              asvar=asvar)


@register.tag
def language(parser, token):
    """
    This will enable the given language just for this block.

    Usage::

        {% language "de" %}
            This is {{ bar }} and {{ boo }}.
        {% endlanguage %}
    """
    bits = token.split_contents()
    if len(bits) != 2:
        raise TemplateSyntaxError("'%s' takes one argument (language)" % bits[0])
    language = parser.compile_filter(bits[1])
    nodelist = parser.parse(('endlanguage',))
    parser.delete_first_token()
    return LanguageNode(nodelist, language)
bsd-3-clause
MrReN/django-oscar
sites/demo/apps/order/migrations/0001_initial.py
16
48092
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): depends_on = ( ('catalogue', '0001_initial'), ('customer', '0001_initial'), ('partner', '0001_initial'), ('address', '0001_initial'), ) def forwards(self, orm): # Adding model 'PaymentEventQuantity' db.create_table('order_paymenteventquantity', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('event', self.gf('django.db.models.fields.related.ForeignKey')(related_name='line_quantities', to=orm['order.PaymentEvent'])), ('line', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.Line'])), ('quantity', self.gf('django.db.models.fields.PositiveIntegerField')()), )) db.send_create_signal('order', ['PaymentEventQuantity']) # Adding model 'ShippingEventQuantity' db.create_table('order_shippingeventquantity', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('event', self.gf('django.db.models.fields.related.ForeignKey')(related_name='line_quantities', to=orm['order.ShippingEvent'])), ('line', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.Line'])), ('quantity', self.gf('django.db.models.fields.PositiveIntegerField')()), )) db.send_create_signal('order', ['ShippingEventQuantity']) # Adding model 'Order' db.create_table('order_order', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('number', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)), ('site', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['sites.Site'])), ('basket_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)), ('user', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='orders', null=True, to=orm['auth.User'])), ('billing_address', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.BillingAddress'], null=True, blank=True)), 
('total_incl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)), ('total_excl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)), ('shipping_incl_tax', self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=12, decimal_places=2)), ('shipping_excl_tax', self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=12, decimal_places=2)), ('shipping_address', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.ShippingAddress'], null=True, blank=True)), ('shipping_method', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)), ('status', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True)), ('date_placed', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)), )) db.send_create_signal('order', ['Order']) # Adding model 'OrderNote' db.create_table('order_ordernote', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='notes', to=orm['order.Order'])), ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)), ('note_type', self.gf('django.db.models.fields.CharField')(max_length=128, null=True)), ('message', self.gf('django.db.models.fields.TextField')()), ('date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), )) db.send_create_signal('order', ['OrderNote']) # Adding model 'CommunicationEvent' db.create_table('order_communicationevent', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='communication_events', to=orm['order.Order'])), ('event_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['customer.CommunicationEventType'])), ('date', 
self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), )) db.send_create_signal('order', ['CommunicationEvent']) # Adding model 'ShippingAddress' db.create_table('order_shippingaddress', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('title', self.gf('django.db.models.fields.CharField')(max_length=64, null=True, blank=True)), ('first_name', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('last_name', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)), ('line1', self.gf('django.db.models.fields.CharField')(max_length=255)), ('line2', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('line3', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('line4', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('state', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('postcode', self.gf('django.db.models.fields.CharField')(max_length=64)), ('country', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['address.Country'])), ('search_text', self.gf('django.db.models.fields.CharField')(max_length=1000)), ('phone_number', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)), ('notes', self.gf('django.db.models.fields.TextField')(null=True, blank=True)), )) db.send_create_signal('order', ['ShippingAddress']) # Adding model 'BillingAddress' db.create_table('order_billingaddress', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('title', self.gf('django.db.models.fields.CharField')(max_length=64, null=True, blank=True)), ('first_name', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('last_name', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)), ('line1', 
self.gf('django.db.models.fields.CharField')(max_length=255)), ('line2', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('line3', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('line4', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('state', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('postcode', self.gf('django.db.models.fields.CharField')(max_length=64)), ('country', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['address.Country'])), ('search_text', self.gf('django.db.models.fields.CharField')(max_length=1000)), )) db.send_create_signal('order', ['BillingAddress']) # Adding model 'Line' db.create_table('order_line', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='lines', to=orm['order.Order'])), ('partner', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='order_lines', null=True, to=orm['partner.Partner'])), ('partner_name', self.gf('django.db.models.fields.CharField')(max_length=128)), ('partner_sku', self.gf('django.db.models.fields.CharField')(max_length=128)), ('title', self.gf('django.db.models.fields.CharField')(max_length=255)), ('product', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['catalogue.Product'], null=True, blank=True)), ('quantity', self.gf('django.db.models.fields.PositiveIntegerField')(default=1)), ('line_price_incl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)), ('line_price_excl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)), ('line_price_before_discounts_incl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)), ('line_price_before_discounts_excl_tax', 
self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)), ('unit_cost_price', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=12, decimal_places=2, blank=True)), ('unit_price_incl_tax', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=12, decimal_places=2, blank=True)), ('unit_price_excl_tax', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=12, decimal_places=2, blank=True)), ('unit_retail_price', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=12, decimal_places=2, blank=True)), ('partner_line_reference', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)), ('partner_line_notes', self.gf('django.db.models.fields.TextField')(null=True, blank=True)), ('status', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), ('est_dispatch_date', self.gf('django.db.models.fields.DateField')(null=True, blank=True)), )) db.send_create_signal('order', ['Line']) # Adding model 'LinePrice' db.create_table('order_lineprice', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='line_prices', to=orm['order.Order'])), ('line', self.gf('django.db.models.fields.related.ForeignKey')(related_name='prices', to=orm['order.Line'])), ('quantity', self.gf('django.db.models.fields.PositiveIntegerField')(default=1)), ('price_incl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)), ('price_excl_tax', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)), ('shipping_incl_tax', self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=12, decimal_places=2)), ('shipping_excl_tax', self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=12, decimal_places=2)), )) db.send_create_signal('order', ['LinePrice']) # Adding model 'LineAttribute' 
db.create_table('order_lineattribute', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('line', self.gf('django.db.models.fields.related.ForeignKey')(related_name='attributes', to=orm['order.Line'])), ('option', self.gf('django.db.models.fields.related.ForeignKey')(related_name='line_attributes', null=True, to=orm['catalogue.Option'])), ('type', self.gf('django.db.models.fields.CharField')(max_length=128)), ('value', self.gf('django.db.models.fields.CharField')(max_length=255)), )) db.send_create_signal('order', ['LineAttribute']) # Adding model 'ShippingEvent' db.create_table('order_shippingevent', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='shipping_events', to=orm['order.Order'])), ('event_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.ShippingEventType'])), ('notes', self.gf('django.db.models.fields.TextField')(null=True, blank=True)), ('date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), )) db.send_create_signal('order', ['ShippingEvent']) # Adding model 'ShippingEventType' db.create_table('order_shippingeventtype', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)), ('code', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=128, db_index=True)), ('is_required', self.gf('django.db.models.fields.BooleanField')(default=True)), ('sequence_number', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)), )) db.send_create_signal('order', ['ShippingEventType']) # Adding model 'PaymentEvent' db.create_table('order_paymentevent', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='payment_events', to=orm['order.Order'])), ('amount', 
self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)), ('event_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.PaymentEventType'])), ('date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), )) db.send_create_signal('order', ['PaymentEvent']) # Adding model 'PaymentEventType' db.create_table('order_paymenteventtype', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128)), ('code', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=128, db_index=True)), ('sequence_number', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)), )) db.send_create_signal('order', ['PaymentEventType']) # Adding model 'OrderDiscount' db.create_table('order_orderdiscount', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('order', self.gf('django.db.models.fields.related.ForeignKey')(related_name='discounts', to=orm['order.Order'])), ('offer_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)), ('voucher_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)), ('voucher_code', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, db_index=True)), ('amount', self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=12, decimal_places=2)), )) db.send_create_signal('order', ['OrderDiscount']) def backwards(self, orm): # Deleting model 'PaymentEventQuantity' db.delete_table('order_paymenteventquantity') # Deleting model 'ShippingEventQuantity' db.delete_table('order_shippingeventquantity') # Deleting model 'Order' db.delete_table('order_order') # Deleting model 'OrderNote' db.delete_table('order_ordernote') # Deleting model 'CommunicationEvent' db.delete_table('order_communicationevent') # Deleting model 'ShippingAddress' 
db.delete_table('order_shippingaddress') # Deleting model 'BillingAddress' db.delete_table('order_billingaddress') # Deleting model 'Line' db.delete_table('order_line') # Deleting model 'LinePrice' db.delete_table('order_lineprice') # Deleting model 'LineAttribute' db.delete_table('order_lineattribute') # Deleting model 'ShippingEvent' db.delete_table('order_shippingevent') # Deleting model 'ShippingEventType' db.delete_table('order_shippingeventtype') # Deleting model 'PaymentEvent' db.delete_table('order_paymentevent') # Deleting model 'PaymentEventType' db.delete_table('order_paymenteventtype') # Deleting model 'OrderDiscount' db.delete_table('order_orderdiscount') models = { 'address.country': { 'Meta': {'ordering': "('-is_highlighted', 'name')", 'object_name': 'Country'}, 'is_highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}), 'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}), 'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}) }, 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': 
"(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'catalogue.attributeentity': { 'Meta': {'object_name': 'AttributeEntity'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'slug': ('django.db.models.fields.SlugField', [], 
{'db_index': 'True', 'max_length': '255', 'blank': 'True'}), 'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"}) }, 'catalogue.attributeentitytype': { 'Meta': {'object_name': 'AttributeEntityType'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}) }, 'catalogue.attributeoption': { 'Meta': {'object_name': 'AttributeOption'}, 'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'option': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'catalogue.attributeoptiongroup': { 'Meta': {'object_name': 'AttributeOptionGroup'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}) }, 'catalogue.category': { 'Meta': {'ordering': "['name']", 'object_name': 'Category'}, 'depth': ('django.db.models.fields.PositiveIntegerField', [], {}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), 'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'slug': ('django.db.models.fields.SlugField', [], 
{'max_length': '1024', 'db_index': 'True'}) }, 'catalogue.option': { 'Meta': {'object_name': 'Option'}, 'code': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'}) }, 'catalogue.product': { 'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'}, 'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}), 'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}), 'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}), 'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}), 'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}), 'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}), 'related_products': 
('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}), 'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}), 'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}) }, 'catalogue.productattribute': { 'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'}, 'code': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}), 'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}), 'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}), 'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'}) }, 'catalogue.productattributevalue': { 'Meta': {'object_name': 'ProductAttributeValue'}, 'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'product': 
('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}), 'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}), 'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}), 'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}), 'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}) }, 'catalogue.productcategory': { 'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'}, 'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"}) }, 'catalogue.productclass': { 'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}) }, 
'catalogue.productrecommendation': { 'Meta': {'object_name': 'ProductRecommendation'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}), 'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}), 'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'customer.communicationeventtype': { 'Meta': {'object_name': 'CommunicationEventType'}, 'category': ('django.db.models.fields.CharField', [], {'default': "'Order related'", 'max_length': '255'}), 'code': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}), 'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'email_body_html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'email_body_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'email_subject_template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'sms_template': 
('django.db.models.fields.CharField', [], {'max_length': '170', 'blank': 'True'}) }, 'order.billingaddress': { 'Meta': {'object_name': 'BillingAddress'}, 'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}), 'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}) }, 'order.communicationevent': { 'Meta': {'object_name': 'CommunicationEvent'}, 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['customer.CommunicationEventType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'communication_events'", 'to': "orm['order.Order']"}) }, 'order.line': { 'Meta': {'object_name': 'Line'}, 'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': "orm['order.Order']"}), 'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'to': "orm['partner.Partner']"}), 'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}), 'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']", 'null': 'True', 'blank': 'True'}), 'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}), 'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}), 'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 
'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}), 'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}) }, 'order.lineattribute': { 'Meta': {'object_name': 'LineAttribute'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': "orm['order.Line']"}), 'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_attributes'", 'null': 'True', 'to': "orm['catalogue.Option']"}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'order.lineprice': { 'Meta': {'object_name': 'LinePrice'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'prices'", 'to': "orm['order.Line']"}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_prices'", 'to': "orm['order.Order']"}), 'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}), 'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}), 'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}) }, 'order.order': { 'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'}, 'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': 
"orm['order.BillingAddress']", 'null': 'True', 'blank': 'True'}), 'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}), 'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingAddress']", 'null': 'True', 'blank': 'True'}), 'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}), 'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}), 'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}), 'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'to': "orm['auth.User']"}) }, 'order.orderdiscount': { 'Meta': {'object_name': 'OrderDiscount'}, 'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'offer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'discounts'", 'to': "orm['order.Order']"}), 'voucher_code': ('django.db.models.fields.CharField', [], 
{'max_length': '128', 'null': 'True', 'db_index': 'True'}), 'voucher_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}) }, 'order.ordernote': { 'Meta': {'object_name': 'OrderNote'}, 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'message': ('django.db.models.fields.TextField', [], {}), 'note_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': "orm['order.Order']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}) }, 'order.paymentevent': { 'Meta': {'object_name': 'PaymentEvent'}, 'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.PaymentEventType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['order.Line']", 'through': "orm['order.PaymentEventQuantity']", 'symmetrical': 'False'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'to': "orm['order.Order']"}) }, 'order.paymenteventquantity': { 'Meta': {'object_name': 'PaymentEventQuantity'}, 'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.PaymentEvent']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.Line']"}), 'quantity': ('django.db.models.fields.PositiveIntegerField', [], {}) }, 'order.paymenteventtype': { 
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'PaymentEventType'}, 'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}), 'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) }, 'order.shippingaddress': { 'Meta': {'object_name': 'ShippingAddress'}, 'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}), 'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}), 'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}) }, 'order.shippingevent': { 'Meta': {'ordering': "['-date']", 'object_name': 'ShippingEvent'}, 'date': 
('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingEventType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['order.Line']", 'through': "orm['order.ShippingEventQuantity']", 'symmetrical': 'False'}), 'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': "orm['order.Order']"}) }, 'order.shippingeventquantity': { 'Meta': {'object_name': 'ShippingEventQuantity'}, 'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.ShippingEvent']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.Line']"}), 'quantity': ('django.db.models.fields.PositiveIntegerField', [], {}) }, 'order.shippingeventtype': { 'Meta': {'ordering': "('sequence_number',)", 'object_name': 'ShippingEventType'}, 'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) }, 'partner.partner': { 'Meta': {'object_name': 'Partner'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}), 'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': 
"'partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"}) }, 'sites.site': { 'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"}, 'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) } } complete_apps = ['order']
bsd-3-clause
bood/htc-magic-kernel
tools/perf/scripts/python/syscall-counts.py
944
1429
# system call counts # (c) 2010, Tom Zanussi <[email protected]> # Licensed under the terms of the GNU GPL License version 2 # # Displays system-wide system call totals, broken down by syscall. # If a [comm] arg is specified, only syscalls called by [comm] are displayed. import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * usage = "perf trace -s syscall-counts.py [comm]\n"; for_comm = None if len(sys.argv) > 2: sys.exit(usage) if len(sys.argv) > 1: for_comm = sys.argv[1] syscalls = autodict() def trace_begin(): pass def trace_end(): print_syscall_totals() def raw_syscalls__sys_enter(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, id, args): if for_comm is not None: if common_comm != for_comm: return try: syscalls[id] += 1 except TypeError: syscalls[id] = 1 def print_syscall_totals(): if for_comm is not None: print "\nsyscall events for %s:\n\n" % (for_comm), else: print "\nsyscall events:\n\n", print "%-40s %10s\n" % ("event", "count"), print "%-40s %10s\n" % ("----------------------------------------", \ "-----------"), for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \ reverse = True): print "%-40d %10d\n" % (id, val),
gpl-2.0
wangdkchina/vnpy
vn.strategy/strategyEngine.py
46
23417
# encoding: UTF-8 from datetime import datetime from pymongo import Connection from pymongo.errors import * from eventEngine import * # 常量定义 OFFSET_OPEN = '0' # 开仓 OFFSET_CLOSE = '1' # 平仓 DIRECTION_BUY = '0' # 买入 DIRECTION_SELL = '1' # 卖出 PRICETYPE_LIMIT = '2' # 限价 ######################################################################## class Tick: """Tick数据对象""" #---------------------------------------------------------------------- def __init__(self, symbol): """Constructor""" self.symbol = symbol # 合约代码 self.openPrice = 0 # OHLC self.highPrice = 0 self.lowPrice = 0 self.lastPrice = 0 self.volume = 0 # 成交量 self.openInterest = 0 # 持仓量 self.upperLimit = 0 # 涨停价 self.lowerLimit = 0 # 跌停价 self.time = '' # 更新时间和毫秒 self.ms= 0 self.bidPrice1 = 0 # 深度行情 self.bidPrice2 = 0 self.bidPrice3 = 0 self.bidPrice4 = 0 self.bidPrice5 = 0 self.askPrice1 = 0 self.askPrice2 = 0 self.askPrice3 = 0 self.askPrice4 = 0 self.askPrice5 = 0 self.bidVolume1 = 0 self.bidVolume2 = 0 self.bidVolume3 = 0 self.bidVolume4 = 0 self.bidVolume5 = 0 self.askVolume1 = 0 self.askVolume2 = 0 self.askVolume3 = 0 self.askVolume4 = 0 self.askVolume5 = 0 ######################################################################## class Trade: """成交数据对象""" #---------------------------------------------------------------------- def __init__(self, symbol): """Constructor""" self.symbol = symbol # 合约代码 self.orderRef = '' # 报单号 self.tradeID = '' # 成交编号 self.direction = None # 方向 self.offset = None # 开平 self.price = 0 # 成交价 self.volume = 0 # 成交量 ######################################################################## class Order: """报单数据对象""" #---------------------------------------------------------------------- def __init__(self, symbol): """Constructor""" self.symbol = symbol # 合约代码 self.orderRef = '' # 报单编号 self.direction = None # 方向 self.offset = None # 开平 self.price = 0 # 委托价 self.volumeOriginal = 0 # 报单量 self.volumeTraded = 0 # 已成交数量 self.insertTime = '' # 报单时间 self.cancelTime = '' # 撤单时间 self.frontID = 0 # 
前置机编号 self.sessionID = 0 # 会话编号 self.status = '' # 报单状态代码 ######################################################################## class StopOrder: """ 停止单对象 用于实现价格突破某一水平后自动追入 即通常的条件单和止损单 """ #---------------------------------------------------------------------- def __init__(self, symbol, direction, offset, price, volume, strategy): """Constructor""" self.symbol = symbol self.direction = direction self.offset = offset self.price = price self.volume = volume self.strategy = strategy ######################################################################## class StrategyEngine(object): """策略引擎""" #---------------------------------------------------------------------- def __init__(self, eventEngine, mainEngine): """Constructor""" self.__eventEngine = eventEngine self.mainEngine = mainEngine # 获取代表今日的datetime t = datetime.today() self.today = t.replace(hour=0, minute=0, second=0, microsecond=0) # 保存所有报单数据的字典 self.__dictOrder = {} # 保存策略对象的字典 # key为策略名称 # value为策略对象 self.dictStrategy = {} # 保存合约代码和策略对象映射关系的字典 # key为合约代码 # value为交易该合约的策略列表 self.__dictSymbolStrategy = {} # 保存报单编号和策略对象映射关系的字典 # key为报单编号 # value为策略对象 self.__dictOrderRefStrategy = {} # 保存合约代码和相关停止单的字典 # key为合约代码 # value为该合约相关的停止单列表 self.__dictStopOrder = {} # MongoDB数据库相关 self.__mongoConnected = False self.__mongoConnection = None self.__mongoTickDB = None # 调用函数 self.__connectMongo() self.__registerEvent() #---------------------------------------------------------------------- def createStrategy(self, strategyName, strategySymbol, strategyClass, strategySetting): """创建策略""" strategy = strategyClass(strategyName, strategySymbol, self) self.dictStrategy[strategyName] = strategy strategy.loadSetting(strategySetting) # 订阅合约行情,注意这里因为是CTP,所以ExchangeID可以忽略 self.mainEngine.subscribe(strategySymbol, None) # 注册策略监听 self.registerStrategy(strategySymbol, strategy) #---------------------------------------------------------------------- def __connectMongo(self): """连接MongoDB数据库""" try: self.__mongoConnection = 
Connection() self.__mongoConnected = True self.__mongoTickDB = self.__mongoConnection['TickDB'] self.writeLog(u'策略引擎连接MongoDB成功') except ConnectionFailure: self.writeLog(u'策略引擎连接MongoDB失败') #---------------------------------------------------------------------- def __recordTick(self, data): """将Tick数据插入到MongoDB中""" if self.__mongoConnected: symbol = data['InstrumentID'] data['date'] = self.today self.__mongoTickDB[symbol].insert(data) #---------------------------------------------------------------------- def loadTick(self, symbol, dt): """从MongoDB中读取Tick数据""" if self.__mongoConnected: collection = self.__mongoTickDB[symbol] cx = collection.find({'date':{'$gte':dt}}) return cx else: return None #---------------------------------------------------------------------- def __updateMarketData(self, event): """行情更新""" data = event.dict_['data'] symbol = data['InstrumentID'] # 检查是否存在交易该合约的策略 if symbol in self.__dictSymbolStrategy: # 创建TICK数据对象并更新数据 tick = Tick(symbol) tick.openPrice = data['OpenPrice'] tick.highPrice = data['HighestPrice'] tick.lowPrice = data['LowestPrice'] tick.lastPrice = data['LastPrice'] tick.volume = data['Volume'] tick.openInterest = data['OpenInterest'] tick.upperLimit = data['UpperLimitPrice'] tick.lowerLimit = data['LowerLimitPrice'] tick.time = data['UpdateTime'] tick.ms = data['UpdateMillisec'] tick.bidPrice1 = data['BidPrice1'] tick.bidPrice2 = data['BidPrice2'] tick.bidPrice3 = data['BidPrice3'] tick.bidPrice4 = data['BidPrice4'] tick.bidPrice5 = data['BidPrice5'] tick.askPrice1 = data['AskPrice1'] tick.askPrice2 = data['AskPrice2'] tick.askPrice3 = data['AskPrice3'] tick.askPrice4 = data['AskPrice4'] tick.askPrice5 = data['AskPrice5'] tick.bidVolume1 = data['BidVolume1'] tick.bidVolume2 = data['BidVolume2'] tick.bidVolume3 = data['BidVolume3'] tick.bidVolume4 = data['BidVolume4'] tick.bidVolume5 = data['BidVolume5'] tick.askVolume1 = data['AskVolume1'] tick.askVolume2 = data['AskVolume2'] tick.askVolume3 = data['AskVolume3'] tick.askVolume4 
= data['AskVolume4'] tick.askVolume5 = data['AskVolume5'] # 首先检查停止单是否需要发出 self.__processStopOrder(tick) # 将该TICK数据推送给每个策略 for strategy in self.__dictSymbolStrategy[symbol]: strategy.onTick(tick) # 将数据插入MongoDB数据库,实盘建议另开程序记录TICK数据 self.__recordTick(data) #---------------------------------------------------------------------- def __processStopOrder(self, tick): """处理停止单""" symbol = tick.symbol lastPrice = tick.lastPrice upperLimit = tick.upperLimit lowerLimit = tick.lowerLimit # 如果当前有该合约上的止损单 if symbol in self.__dictStopOrder: # 获取止损单列表 listSO = self.__dictStopOrder[symbol] # SO:stop order # 准备一个空的已发止损单列表 listSent = [] for so in listSO: # 如果是买入停止单,且最新成交价大于停止触发价 if so.direction == DIRECTION_BUY and lastPrice >= so.price: # 以当日涨停价发出限价单买入 ref = self.sendOrder(symbol, DIRECTION_BUY, so.offset, upperLimit, so.volume, strategy) # 触发策略的止损单发出更新 so.strategy.onStopOrder(ref) # 将该止损单对象保存到已发送列表中 listSent.append(so) # 如果是卖出停止单,且最新成交价小于停止触发价 elif so.direction == DIRECTION_SELL and lastPrice <= so.price: ref = self.sendOrder(symbol, DIRECTION_SELL, so.offset, lowerLimit, so.volume, strategy) so.strategy.onStopOrder(ref) listSent.append(so) # 从停止单列表中移除已经发单的停止单对象 if listSent: for so in listSent: listSO.remove(so) # 检查停止单列表是否为空,若为空,则从停止单字典中移除该合约代码 if not listSO: del self.__dictStopOrder[symbol] #---------------------------------------------------------------------- def __updateOrder(self, event): """报单更新""" data = event.dict_['data'] orderRef = data['OrderRef'] # 检查是否存在监听该报单的策略 if orderRef in self.__dictOrderRefStrategy: # 创建Order数据对象 order = Order(data['InstrumentID']) order.orderRef = data['OrderRef'] order.direction = data['Direction'] order.offset = data['CombOffsetFlag'] order.price = data['LimitPrice'] order.volumeOriginal = data['VolumeTotalOriginal'] order.volumeTraded = data['VolumeTraded'] order.insertTime = data['InsertTime'] order.cancelTime = data['CancelTime'] order.frontID = data['FrontID'] order.sessionID = data['SessionID'] order.status = data['OrderStatus'] # 推送给策略 
strategy = self.__dictOrderRefStrategy[orderRef] strategy.onOrder(order) # 记录该Order的数据 self.__dictOrder[orderRef] = data #---------------------------------------------------------------------- def __updateTrade(self, event): """成交更新""" print 'updateTrade' data = event.dict_['data'] orderRef = data['OrderRef'] print 'trade:', orderRef if orderRef in self.__dictOrderRefStrategy: # 创建Trade数据对象 trade = Trade(data['InstrumentID']) trade.orderRef = orderRef trade.tradeID = data['TradeID'] trade.direction = data['Direction'] trade.offset = data['OffsetFlag'] trade.price = data['Price'] trade.volume = data['Volume'] # 推送给策略 strategy = self.__dictOrderRefStrategy[orderRef] strategy.onTrade(trade) #---------------------------------------------------------------------- def sendOrder(self, symbol, direction, offset, price, volume, strategy): """ 发单(仅允许限价单) symbol:合约代码 direction:方向,DIRECTION_BUY/DIRECTION_SELL offset:开平,OFFSET_OPEN/OFFSET_CLOSE price:下单价格 volume:下单手数 strategy:策略对象 """ contract = self.mainEngine.selectInstrument(symbol) if contract: ref = self.mainEngine.sendOrder(symbol, contract['ExchangeID'], price, PRICETYPE_LIMIT, volume, direction, offset) self.__dictOrderRefStrategy[ref] = strategy print 'ref:', ref print 'strategy:', strategy.name return ref #---------------------------------------------------------------------- def cancelOrder(self, orderRef): """ 撤单 """ order = self.__dictOrder[orderRef] symbol = order['InstrumentID'] contract = self.mainEngine.selectInstrument(symbol) if contract: self.mainEngine.cancelOrder(symbol, contract['ExchangeID'], orderRef, order['FrontID'], order['SessionID']) #---------------------------------------------------------------------- def __registerEvent(self): """注册事件监听""" self.__eventEngine.register(EVENT_MARKETDATA, self.__updateMarketData) self.__eventEngine.register(EVENT_ORDER, self.__updateOrder) self.__eventEngine.register(EVENT_TRADE ,self.__updateTrade) 
#---------------------------------------------------------------------- def writeLog(self, log): """写日志""" event = Event(type_=EVENT_LOG) event.dict_['log'] = log self.__eventEngine.put(event) #---------------------------------------------------------------------- def registerStrategy(self, symbol, strategy): """注册策略对合约TICK数据的监听""" # 尝试获取监听该合约代码的策略的列表,若无则创建 try: listStrategy = self.__dictSymbolStrategy[symbol] except KeyError: listStrategy = [] self.__dictSymbolStrategy[symbol] = listStrategy # 防止重复注册 if strategy not in listStrategy: listStrategy.append(strategy) #---------------------------------------------------------------------- def placeStopOrder(self, symbol, direction, offset, price, volume, strategy): """ 下停止单(运行于本地引擎中) 注意这里的price是停止单的触发价 """ # 创建止损单对象 so = StopOrder(symbol, direction, offset, price, volume, strategy) # 获取该合约相关的止损单列表 try: listSO = self.__dictStopOrder[symbol] except KeyError: listSO = [] self.__dictStopOrder[symbol] = listSO # 将该止损单插入列表中 listSO.append(so) return so #---------------------------------------------------------------------- def cancelStopOrder(self, so): """撤销停止单""" symbol = so.symbol try: listSO = self.__dictStopOrder[symbol] if so in listSO: listSO.remove(so) if not listSO: del self.__dictStopOrder[symbol] except KeyError: pass #---------------------------------------------------------------------- def startAll(self): """启动所有策略""" for strategy in self.dictStrategy.values(): strategy.start() #---------------------------------------------------------------------- def stopAll(self): """停止所有策略""" for strategy in self.dictStrategy.values(): strategy.stop() ######################################################################## class StrategyTemplate(object): """策略模板""" #---------------------------------------------------------------------- def __init__(self, name, symbol, engine): """Constructor""" self.name = name # 策略名称(注意唯一性) self.symbol = symbol # 策略交易的合约 self.engine = engine # 策略引擎对象 self.trading = False # 策略是否启动交易 
#---------------------------------------------------------------------- def onTick(self, tick): """行情更新""" raise NotImplementedError #---------------------------------------------------------------------- def onTrade(self, trade): """交易更新""" raise NotImplementedError #---------------------------------------------------------------------- def onOrder(self, order): """报单更新""" raise NotImplementedError #---------------------------------------------------------------------- def onStopOrder(self, orderRef): """停止单更新""" raise NotImplementedError #---------------------------------------------------------------------- def onBar(self, o, h, l, c, volume, time): """K线数据更新""" raise NotImplementedError #---------------------------------------------------------------------- def start(self): """ 启动交易 这里是最简单的改变self.trading 有需要可以重新实现更复杂的操作 """ self.trading = True self.engine.writeLog(self.name + u'开始运行') #---------------------------------------------------------------------- def stop(self): """ 停止交易 同上 """ self.trading = False self.engine.writeLog(self.name + u'停止运行') #---------------------------------------------------------------------- def loadSetting(self, setting): """ 载入设置 setting通常是一个包含了参数设置的字典 """ raise NotImplementedError #---------------------------------------------------------------------- def buy(self, price, volume, stopOrder=False): """买入开仓""" if self.trading: if stopOrder: so = self.engine.placeStopOrder(self.symbol, DIRECTION_BUY, OFFSET_OPEN, price, volume, self) return so else: ref = self.engine.sendOrder(self.symbol, DIRECTION_BUY, OFFSET_OPEN, price, volume, self) return ref else: return None #---------------------------------------------------------------------- def cover(self, price, volume, StopOrder=False): """买入平仓""" if self.trading: if stopOrder: so = self.engine.placeStopOrder(self.symbol, DIRECTION_BUY, OFFSET_CLOSE, price, volume, self) return so else: ref = self.engine.sendOrder(self.symbol, DIRECTION_BUY, OFFSET_CLOSE, price, volume, self) return 
ref else: return None #---------------------------------------------------------------------- def sell(self, price, volume, stopOrder=False): """卖出平仓""" if self.trading: if stopOrder: so = self.engine.placeStopOrder(self.symbol, DIRECTION_SELL, OFFSET_CLOSE, price, volume, self) return so else: ref = self.engine.sendOrder(self.symbol, DIRECTION_SELL, OFFSET_CLOSE, price, volume, self) return ref else: return None #---------------------------------------------------------------------- def short(self, price, volume, stopOrder=False): """卖出开仓""" if self.trading: if stopOrder: so = self.engine.placeStopOrder(self.symbol, DIRECTION_SELL, OFFSET_OPEN, price, volume, self) return so else: ref = self.engine.sendOrder(self.symbol, DIRECTION_SELL, OFFSET_OPEN, price, volume, self) return ref else: return None #---------------------------------------------------------------------- def cancelOrder(self, orderRef): """撤单""" self.engine.cancelOrder(orderRef) #---------------------------------------------------------------------- def cancelStopOrder(self, so): """撤销停止单""" self.engine.cancelStopOrder(so)
mit
Kazade/NeHe-Website
google_appengine/lib/django-1.3/tests/regressiontests/middleware_exceptions/tests.py
48
39506
import sys

from django.conf import settings
from django.core.signals import got_request_exception
from django.http import HttpResponse
from django.template.response import TemplateResponse
from django.template import Template
from django.test import TestCase


class TestException(Exception):
    pass


# A middleware base class that tracks which of its hook methods have been
# called; each flag starts False and flips to True on invocation.
class TestMiddleware(object):

    def __init__(self):
        self.process_request_called = False
        self.process_view_called = False
        self.process_response_called = False
        self.process_template_response_called = False
        self.process_exception_called = False

    def process_request(self, request):
        self.process_request_called = True

    def process_view(self, request, view_func, view_args, view_kwargs):
        self.process_view_called = True

    def process_template_response(self, request, response):
        self.process_template_response_called = True
        return response

    def process_response(self, request, response):
        self.process_response_called = True
        return response

    def process_exception(self, request, exception):
        self.process_exception_called = True


# Middleware examples that do the right thing: record the call via the base
# class, then short-circuit their phase with a response of their own.
class RequestMiddleware(TestMiddleware):

    def process_request(self, request):
        super(RequestMiddleware, self).process_request(request)
        return HttpResponse('Request Middleware')


class ViewMiddleware(TestMiddleware):

    def process_view(self, request, view_func, view_args, view_kwargs):
        super(ViewMiddleware, self).process_view(request, view_func, view_args, view_kwargs)
        return HttpResponse('View Middleware')


class ResponseMiddleware(TestMiddleware):

    def process_response(self, request, response):
        super(ResponseMiddleware, self).process_response(request, response)
        return HttpResponse('Response Middleware')


class TemplateResponseMiddleware(TestMiddleware):

    def process_template_response(self, request, response):
        super(TemplateResponseMiddleware, self).process_template_response(request, response)
        return TemplateResponse(request, Template('Template Response Middleware'))
class ExceptionMiddleware(TestMiddleware): def process_exception(self, request, exception): super(ExceptionMiddleware, self).process_exception(request, exception) return HttpResponse('Exception Middleware') # Sample middlewares that raise exceptions class BadRequestMiddleware(TestMiddleware): def process_request(self, request): super(BadRequestMiddleware, self).process_request(request) raise TestException('Test Request Exception') class BadViewMiddleware(TestMiddleware): def process_view(self, request, view_func, view_args, view_kwargs): super(BadViewMiddleware, self).process_view(request, view_func, view_args, view_kwargs) raise TestException('Test View Exception') class BadTemplateResponseMiddleware(TestMiddleware): def process_template_response(self, request, response): super(BadTemplateResponseMiddleware, self).process_template_response(request, response) raise TestException('Test Template Response Exception') class BadResponseMiddleware(TestMiddleware): def process_response(self, request, response): super(BadResponseMiddleware, self).process_response(request, response) raise TestException('Test Response Exception') class BadExceptionMiddleware(TestMiddleware): def process_exception(self, request, exception): super(BadExceptionMiddleware, self).process_exception(request, exception) raise TestException('Test Exception Exception') class BaseMiddlewareExceptionTest(TestCase): def setUp(self): self.exceptions = [] got_request_exception.connect(self._on_request_exception) self.client.handler.load_middleware() def tearDown(self): got_request_exception.disconnect(self._on_request_exception) self.exceptions = [] def _on_request_exception(self, sender, request, **kwargs): self.exceptions.append(sys.exc_info()) def _add_middleware(self, middleware): self.client.handler._request_middleware.insert(0, middleware.process_request) self.client.handler._view_middleware.insert(0, middleware.process_view) 
self.client.handler._template_response_middleware.append(middleware.process_template_response) self.client.handler._response_middleware.append(middleware.process_response) self.client.handler._exception_middleware.append(middleware.process_exception) def assert_exceptions_handled(self, url, errors, extra_error=None): try: response = self.client.get(url) except TestException, e: # Test client intentionally re-raises any exceptions being raised # during request handling. Hence actual testing that exception was # properly handled is done by relying on got_request_exception # signal being sent. pass except Exception, e: if type(extra_error) != type(e): self.fail("Unexpected exception: %s" % e) self.assertEqual(len(self.exceptions), len(errors)) for i, error in enumerate(errors): exception, value, tb = self.exceptions[i] self.assertEqual(value.args, (error, )) def assert_middleware_usage(self, middleware, request, view, template_response, response, exception): self.assertEqual(middleware.process_request_called, request) self.assertEqual(middleware.process_view_called, view) self.assertEqual(middleware.process_template_response_called, template_response) self.assertEqual(middleware.process_response_called, response) self.assertEqual(middleware.process_exception_called, exception) class MiddlewareTests(BaseMiddlewareExceptionTest): def test_process_request_middleware(self): pre_middleware = TestMiddleware() middleware = RequestMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/view/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, False, False, True, False) self.assert_middleware_usage(middleware, True, False, False, True, False) self.assert_middleware_usage(post_middleware, False, False, False, True, False) def test_process_view_middleware(self): 
pre_middleware = TestMiddleware() middleware = ViewMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/view/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, False, False, True, False) def test_process_response_middleware(self): pre_middleware = TestMiddleware() middleware = ResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/view/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, True, False, True, False) def test_process_template_response_middleware(self): pre_middleware = TestMiddleware() middleware = TemplateResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/template_response/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, True, True, False) self.assert_middleware_usage(middleware, True, True, True, True, False) self.assert_middleware_usage(post_middleware, True, True, True, True, False) def test_process_exception_middleware(self): pre_middleware = TestMiddleware() middleware = ExceptionMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) 
self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/view/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, True, False, True, False) def test_process_request_middleware_not_found(self): pre_middleware = TestMiddleware() middleware = RequestMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/not_found/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, False, False, True, False) self.assert_middleware_usage(middleware, True, False, False, True, False) self.assert_middleware_usage(post_middleware, False, False, False, True, False) def test_process_view_middleware_not_found(self): pre_middleware = TestMiddleware() middleware = ViewMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/not_found/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, False, False, True, False) def test_process_template_response_middleware_not_found(self): pre_middleware = TestMiddleware() middleware = TemplateResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) 
self.assert_exceptions_handled('/middleware_exceptions/not_found/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, True) self.assert_middleware_usage(middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_response_middleware_not_found(self): pre_middleware = TestMiddleware() middleware = ResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/not_found/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, True) self.assert_middleware_usage(middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_exception_middleware_not_found(self): pre_middleware = TestMiddleware() middleware = ExceptionMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/not_found/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_request_middleware_exception(self): pre_middleware = TestMiddleware() middleware = RequestMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/error/', []) # Check that the right middleware methods have been invoked 
self.assert_middleware_usage(pre_middleware, True, False, False, True, False) self.assert_middleware_usage(middleware, True, False, False, True, False) self.assert_middleware_usage(post_middleware, False, False, False, True, False) def test_process_view_middleware_exception(self): pre_middleware = TestMiddleware() middleware = ViewMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/error/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, False, False, True, False) def test_process_response_middleware_exception(self): pre_middleware = TestMiddleware() middleware = ResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/error/', ['Error in view'], Exception()) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, True) self.assert_middleware_usage(middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_exception_middleware_exception(self): pre_middleware = TestMiddleware() middleware = ExceptionMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/error/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) 
self.assert_middleware_usage(middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_request_middleware_null_view(self): pre_middleware = TestMiddleware() middleware = RequestMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/null_view/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, False, False, True, False) self.assert_middleware_usage(middleware, True, False, False, True, False) self.assert_middleware_usage(post_middleware, False, False, False, True, False) def test_process_view_middleware_null_view(self): pre_middleware = TestMiddleware() middleware = ViewMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/null_view/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, False, False, True, False) def test_process_response_middleware_null_view(self): pre_middleware = TestMiddleware() middleware = ResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/null_view/', [ "The view regressiontests.middleware_exceptions.views.null_view didn't return an HttpResponse object.", ], ValueError()) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) 
self.assert_middleware_usage(middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, True, False, True, False) def test_process_exception_middleware_null_view(self): pre_middleware = TestMiddleware() middleware = ExceptionMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/null_view/', [ "The view regressiontests.middleware_exceptions.views.null_view didn't return an HttpResponse object." ], ValueError()) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, True, False, True, False) def test_process_request_middleware_permission_denied(self): pre_middleware = TestMiddleware() middleware = RequestMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, False, False, True, False) self.assert_middleware_usage(middleware, True, False, False, True, False) self.assert_middleware_usage(post_middleware, False, False, False, True, False) def test_process_view_middleware_permission_denied(self): pre_middleware = TestMiddleware() middleware = ViewMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, 
True, False, True, False) self.assert_middleware_usage(middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, False, False, True, False) def test_process_response_middleware_permission_denied(self): pre_middleware = TestMiddleware() middleware = ResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, True) self.assert_middleware_usage(middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_exception_middleware_permission_denied(self): pre_middleware = TestMiddleware() middleware = ExceptionMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_template_response_error(self): middleware = TestMiddleware() self._add_middleware(middleware) self.assert_exceptions_handled('/middleware_exceptions/template_response_error/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(middleware, True, True, True, True, False) class BadMiddlewareTests(BaseMiddlewareExceptionTest): def test_process_request_bad_middleware(self): pre_middleware = TestMiddleware() bad_middleware = BadRequestMiddleware() post_middleware = TestMiddleware() 
self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test Request Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, False, False, True, False) self.assert_middleware_usage(bad_middleware, True, False, False, True, False) self.assert_middleware_usage(post_middleware, False, False, False, True, False) def test_process_view_bad_middleware(self): pre_middleware = TestMiddleware() bad_middleware = BadViewMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test View Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, False, False, True, False) def test_process_template_response_bad_middleware(self): pre_middleware = TestMiddleware() bad_middleware = BadTemplateResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/template_response/', ['Test Template Response Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, True, True, False) self.assert_middleware_usage(post_middleware, True, True, True, True, False) def test_process_response_bad_middleware(self): pre_middleware = TestMiddleware() bad_middleware = BadResponseMiddleware() post_middleware = TestMiddleware() 
self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test Response Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, False, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, True, False, True, False) def test_process_exception_bad_middleware(self): pre_middleware = TestMiddleware() bad_middleware = BadExceptionMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/view/', []) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, True, False, True, False) def test_process_request_bad_middleware_not_found(self): pre_middleware = TestMiddleware() bad_middleware = BadRequestMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Request Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, False, False, True, False) self.assert_middleware_usage(bad_middleware, True, False, False, True, False) self.assert_middleware_usage(post_middleware, False, False, False, True, False) def test_process_view_bad_middleware_not_found(self): pre_middleware = TestMiddleware() bad_middleware = BadViewMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) 
self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test View Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, False, False, True, False) def test_process_response_bad_middleware_not_found(self): pre_middleware = TestMiddleware() bad_middleware = BadResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Response Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, False, True) self.assert_middleware_usage(bad_middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_exception_bad_middleware_not_found(self): pre_middleware = TestMiddleware() bad_middleware = BadExceptionMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Exception Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_request_bad_middleware_exception(self): pre_middleware = TestMiddleware() bad_middleware = BadRequestMiddleware() post_middleware = TestMiddleware() 
self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test Request Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, False, False, True, False) self.assert_middleware_usage(bad_middleware, True, False, False, True, False) self.assert_middleware_usage(post_middleware, False, False, False, True, False) def test_process_view_bad_middleware_exception(self): pre_middleware = TestMiddleware() bad_middleware = BadViewMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test View Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, False, False, True, False) def test_process_response_bad_middleware_exception(self): pre_middleware = TestMiddleware() bad_middleware = BadResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/error/', ['Error in view', 'Test Response Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, False, True) self.assert_middleware_usage(bad_middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_exception_bad_middleware_exception(self): pre_middleware = TestMiddleware() bad_middleware = BadExceptionMiddleware() post_middleware = 
TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test Exception Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_request_bad_middleware_null_view(self): pre_middleware = TestMiddleware() bad_middleware = BadRequestMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/null_view/', ['Test Request Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, False, False, True, False) self.assert_middleware_usage(bad_middleware, True, False, False, True, False) self.assert_middleware_usage(post_middleware, False, False, False, True, False) def test_process_view_bad_middleware_null_view(self): pre_middleware = TestMiddleware() bad_middleware = BadViewMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/null_view/', ['Test View Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, False, False, True, False) def test_process_response_bad_middleware_null_view(self): pre_middleware = TestMiddleware() bad_middleware = BadResponseMiddleware() post_middleware = 
TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/null_view/', [ "The view regressiontests.middleware_exceptions.views.null_view didn't return an HttpResponse object.", 'Test Response Exception' ]) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, False, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, True, False, True, False) def test_process_exception_bad_middleware_null_view(self): pre_middleware = TestMiddleware() bad_middleware = BadExceptionMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/null_view/', [ "The view regressiontests.middleware_exceptions.views.null_view didn't return an HttpResponse object." 
], ValueError()) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, True, False, True, False) def test_process_request_bad_middleware_permission_denied(self): pre_middleware = TestMiddleware() bad_middleware = BadRequestMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Request Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, False, False, True, False) self.assert_middleware_usage(bad_middleware, True, False, False, True, False) self.assert_middleware_usage(post_middleware, False, False, False, True, False) def test_process_view_bad_middleware_permission_denied(self): pre_middleware = TestMiddleware() bad_middleware = BadViewMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test View Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, False) self.assert_middleware_usage(post_middleware, True, False, False, True, False) def test_process_response_bad_middleware_permission_denied(self): pre_middleware = TestMiddleware() bad_middleware = BadResponseMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) 
self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Response Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, False, True) self.assert_middleware_usage(bad_middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) def test_process_exception_bad_middleware_permission_denied(self): pre_middleware = TestMiddleware() bad_middleware = BadExceptionMiddleware() post_middleware = TestMiddleware() self._add_middleware(post_middleware) self._add_middleware(bad_middleware) self._add_middleware(pre_middleware) self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Exception Exception']) # Check that the right middleware methods have been invoked self.assert_middleware_usage(pre_middleware, True, True, False, True, False) self.assert_middleware_usage(bad_middleware, True, True, False, True, True) self.assert_middleware_usage(post_middleware, True, True, False, True, True) _missing = object() class RootUrlconfTests(TestCase): def test_missing_root_urlconf(self): try: original_ROOT_URLCONF = settings.ROOT_URLCONF del settings.ROOT_URLCONF except AttributeError: original_ROOT_URLCONF = _missing self.assertRaises(AttributeError, self.client.get, "/middleware_exceptions/view/" ) if original_ROOT_URLCONF is not _missing: settings.ROOT_URLCONF = original_ROOT_URLCONF
bsd-3-clause
nhaney90/developer-support
python/general-python/export-users-csv/accountHelper.py
9
5538
#-------------------------------------------------------------------------------
# Name:        Account Helper with user and role dictionaries
# Purpose:     Queries critical information from ArcGIS Online organization which
#              can be used in other scripts
#
# Author:      Kelly Gerrow [email protected]
#
# Created:     09/11/2014
# Copyright:   (c) Kelly 2014
# Licence:     <your licence>
#-------------------------------------------------------------------------------
import requests
import json, time, datetime
import string, smtplib, os


class agolAdmin(object):
    # Signs in to an ArcGIS Online organization and caches commonly needed
    # account data: token, org metadata, the full user list, and role ids.
    # NOTE(review): Python 2 code (print statement, dict.iteritems below).
    # NOTE(review): every HTTP call passes verify=False, disabling TLS
    # certificate verification -- security risk; confirm this is intentional.

    #Initializes script reporting on needed values
    def __init__(self, username, password):
        self.username = username
        self.password = password
        # The token request also reports whether the org enforces SSL.
        self.__token, self.__ssl= self.__getToken(username, password)
        if self.__ssl == False:
            self.__pref='http://'
        else:
            self.__pref='https://'
        self.__urlKey, self.__id, self.__Name, self.__FullName, self.__Email, self.__maxUsers = self.__GetAccount()
        self.__portalUrl = self.__pref+self.__urlKey
        self.__userDict = self.__userDictMethod()
        self.__roleDict = self.__roleDictMethod()

    #assigns Variables to names (read-only views of the cached values)
    @property
    def token(self):
        return self.__token
    @property
    def portalUrl(self):
        return self.__portalUrl
    @property
    def orgID(self):
        return self.__id
    @property
    def orgName(self):
        return self.__Name
    @property
    def fullName(self):
        return self.__FullName
    @property
    def adminEmail(self):
        return self.__Email
    @property
    def maxUser(self):
        return self.__maxUsers
    @property
    def userDict(self):
        return self.__userDict
    @property
    def roleDict(self):
        return self.__roleDict

    #----------------------------------------------------Account Information -----------------------------------------------
    #generates token
    def __getToken(self,adminUser, pw):
        # Requests a long-lived token (432000 minutes per the API parameter;
        # units defined by the generateToken endpoint -- TODO confirm).
        data = {'username': adminUser,
            'password': pw,
            'referer' : 'https://www.arcgis.com',
            'expiration': '432000',
            'f': 'json'}
        url = 'https://arcgis.com/sharing/rest/generateToken'
        # No error handling: a bad login raises KeyError on 'token' below.
        jres = requests.post(url, data=data, verify=False).json()
        return jres['token'],jres['ssl']

    #generates account information
    def __GetAccount(self):
        # Reads the org's "self" portal record and unpacks the fields the
        # rest of the class needs.
        URL= self.__pref+'www.arcgis.com/sharing/rest/portals/self?f=json&token=' + self.token
        response = requests.get(URL, verify=False)
        jres = json.loads(response.text)
        return jres['urlKey'], jres['id'], jres['name'], jres['user']['fullName'], jres['user']['email'], jres['subscriptionInfo']['maxUsers']

    #creates dictionary of role names and corresponding IDs
    def __roleDictMethod(self):
        # Built-in roles are seeded first; custom roles are then paged in 50
        # at a time until the service reports nextStart == -1.
        roleVal = {'administrator':'org_admin', 'publisher':'org_publisher', 'user': 'org_user'}
        start = 1
        number = 50
        while start != -1:
            roleUrl= self.__pref+'www.arcgis.com/sharing/rest/portals/self/roles?f=json&start='+str(start)+'&num='+str(number)+'&token=' + self.token
            response = requests.get(roleUrl, verify = False)
            jres = json.loads(response.text)
            for item in jres['roles']:
                roleVal[str(item['name'])] = str(item['id'])
            start =jres['nextStart']
        return roleVal

    #creates a dictionary of Usernames and related information
    def __userDictMethod(self):
        start = 1
        number = 200
        #retrieve information of all users in organization
        userDict = []  # despite the name, this is a list of user records
        while start != -1:
            # portalUrl already carries the scheme prefix, so this yields
            # e.g. "https://<urlKey>.maps.arcgis.com/...".
            listURL ='{}.maps.arcgis.com/sharing/rest/portals/self/users'.format(self.portalUrl)
            request = listURL +"?start="+str(start)+"&num="+str(number)+"&f=json&token="+self.token
            response = requests.get(request, verify = False)
            jres = json.loads(response.text)
            for row in jres['users']:
                userDict.append(row)
            start =jres['nextStart']
        return userDict

    #updates username properties depending on the input
    def updateUser(self,userName,myEsri=None,fullName = None,description=None, access=None,tags=None,email=None, password=None):
        # Only the parameters actually supplied are sent to the update call.
        userURL ='https://{}.maps.arcgis.com/sharing/rest/community/users/{}/update'.format(self.__urlKey, userName)
        data = {'f':'json','token':self.token}
        if access:
            data['access'] = access
        if fullName :
            data['fullName']= fullName
        if description:
            data['description'] = description
        if myEsri:
            data['usertype'] = myEsri
        if tags:
            data['tags']= tags
        if email:
            data['email'] = email
        if password:
            data['password'] = password
        # NOTE(review): debug print of the payload -- leaks the password and
        # token to stdout when set; confirm this should ship.
        print data
        response = requests.post(userURL, data=data, verify=False).json()

    #Assign a name or ID for a user role
    def roleAssign(self,roleInput):
        # Accepts either a role name or a role id (case-insensitive) and
        # returns the other form; falls through to None when no match.
        for key,val in self.roleDict.iteritems():
            if key.lower() == roleInput.lower():
                return val
            if val.lower() == roleInput.lower():
                return key

    def myEsriAssign(self, myEsriInput):
        # Maps between the "My Esri" display labels and their API values,
        # in either direction; returns None when no match.
        myEsriVal={'my esri': 'both', 'arcgis online':'arcgisonly'}
        for key,val in myEsriVal.iteritems():
            if key.lower() == myEsriInput.lower():
                return val
            if val.lower() == myEsriInput.lower():
                return key
apache-2.0
ryfeus/lambda-packs
Keras_tensorflow_nightly/source2.7/tensorflow/contrib/distributions/python/ops/test_util.py
44
16499
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Utilities for testing distributions and/or bijectors.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import histogram_ops from tensorflow.python.ops import linalg_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import variables as variables_ops __all__ = [ "DiscreteScalarDistributionTestHelpers", "VectorDistributionTestHelpers", ] class DiscreteScalarDistributionTestHelpers(object): """DiscreteScalarDistributionTestHelpers.""" def run_test_sample_consistent_log_prob( self, sess_run_fn, dist, num_samples=int(1e5), num_threshold=int(1e3), seed=42, batch_size=None, rtol=1e-2, atol=0.): """Tests that sample/log_prob are consistent with each other. "Consistency" means that `sample` and `log_prob` correspond to the same distribution. Note: this test only verifies a necessary condition for consistency--it does does not verify sufficiency hence does not prove `sample`, `log_prob` truly are consistent. 
Args: sess_run_fn: Python `callable` taking `list`-like of `Tensor`s and returning a list of results after running one "step" of TensorFlow computation, typically set to `sess.run`. dist: Distribution instance or object which implements `sample`, `log_prob`, `event_shape_tensor` and `batch_shape_tensor`. num_samples: Python `int` scalar indicating the number of Monte-Carlo samples to draw from `dist`. num_threshold: Python `int` scalar indicating the number of samples a bucket must contain before being compared to the probability. Default value: 1e3; must be at least 1. Warning, set too high will cause test to falsely pass but setting too low will cause the test to falsely fail. seed: Python `int` indicating the seed to use when sampling from `dist`. In general it is not recommended to use `None` during a test as this increases the likelihood of spurious test failure. batch_size: Hint for unpacking result of samples. Default: `None` means batch_size is inferred. rtol: Python `float`-type indicating the admissible relative error between analytical and sample statistics. atol: Python `float`-type indicating the admissible absolute error between analytical and sample statistics. Raises: ValueError: if `num_threshold < 1`. """ if num_threshold < 1: raise ValueError("num_threshold({}) must be at least 1.".format( num_threshold)) # Histogram only supports vectors so we call it once per batch coordinate. 
y = dist.sample(num_samples, seed=seed) y = array_ops.reshape(y, shape=[num_samples, -1]) if batch_size is None: batch_size = math_ops.reduce_prod(dist.batch_shape_tensor()) batch_dims = array_ops.shape(dist.batch_shape_tensor())[0] edges_expanded_shape = 1 + array_ops.pad([-2], paddings=[[0, batch_dims]]) for b, x in enumerate(array_ops.unstack(y, num=batch_size, axis=1)): counts, edges = self.histogram(x) edges = array_ops.reshape(edges, edges_expanded_shape) probs = math_ops.exp(dist.log_prob(edges)) probs = array_ops.reshape(probs, shape=[-1, batch_size])[:, b] [counts_, probs_] = sess_run_fn([counts, probs]) valid = counts_ > num_threshold probs_ = probs_[valid] counts_ = counts_[valid] self.assertAllClose(probs_, counts_ / num_samples, rtol=rtol, atol=atol) def run_test_sample_consistent_mean_variance( self, sess_run_fn, dist, num_samples=int(1e5), seed=24, rtol=1e-2, atol=0.): """Tests that sample/mean/variance are consistent with each other. "Consistency" means that `sample`, `mean`, `variance`, etc all correspond to the same distribution. Args: sess_run_fn: Python `callable` taking `list`-like of `Tensor`s and returning a list of results after running one "step" of TensorFlow computation, typically set to `sess.run`. dist: Distribution instance or object which implements `sample`, `log_prob`, `event_shape_tensor` and `batch_shape_tensor`. num_samples: Python `int` scalar indicating the number of Monte-Carlo samples to draw from `dist`. seed: Python `int` indicating the seed to use when sampling from `dist`. In general it is not recommended to use `None` during a test as this increases the likelihood of spurious test failure. rtol: Python `float`-type indicating the admissible relative error between analytical and sample statistics. atol: Python `float`-type indicating the admissible absolute error between analytical and sample statistics. 
""" x = math_ops.to_float(dist.sample(num_samples, seed=seed)) sample_mean = math_ops.reduce_mean(x, axis=0) sample_variance = math_ops.reduce_mean( math_ops.square(x - sample_mean), axis=0) sample_stddev = math_ops.sqrt(sample_variance) [ sample_mean_, sample_variance_, sample_stddev_, mean_, variance_, stddev_ ] = sess_run_fn([ sample_mean, sample_variance, sample_stddev, dist.mean(), dist.variance(), dist.stddev(), ]) self.assertAllClose(mean_, sample_mean_, rtol=rtol, atol=atol) self.assertAllClose(variance_, sample_variance_, rtol=rtol, atol=atol) self.assertAllClose(stddev_, sample_stddev_, rtol=rtol, atol=atol) def histogram(self, x, value_range=None, nbins=None, name=None): """Return histogram of values. Given the tensor `values`, this operation returns a rank 1 histogram counting the number of entries in `values` that fell into every bin. The bins are equal width and determined by the arguments `value_range` and `nbins`. Args: x: 1D numeric `Tensor` of items to count. value_range: Shape [2] `Tensor`. `new_values <= value_range[0]` will be mapped to `hist[0]`, `values >= value_range[1]` will be mapped to `hist[-1]`. Must be same dtype as `x`. nbins: Scalar `int32 Tensor`. Number of histogram bins. name: Python `str` name prefixed to Ops created by this class. Returns: counts: 1D `Tensor` of counts, i.e., `counts[i] = sum{ edges[i-1] <= values[j] < edges[i] : j }`. edges: 1D `Tensor` characterizing intervals used for counting. 
""" with ops.name_scope(name, "histogram", [x]): x = ops.convert_to_tensor(x, name="x") if value_range is None: value_range = [math_ops.reduce_min(x), 1 + math_ops.reduce_max(x)] value_range = ops.convert_to_tensor(value_range, name="value_range") lo = value_range[0] hi = value_range[1] if nbins is None: nbins = math_ops.to_int32(hi - lo) delta = (hi - lo) / math_ops.cast( nbins, dtype=value_range.dtype.base_dtype) edges = math_ops.range( start=lo, limit=hi, delta=delta, dtype=x.dtype.base_dtype) counts = histogram_ops.histogram_fixed_width( x, value_range=value_range, nbins=nbins) return counts, edges class VectorDistributionTestHelpers(object): """VectorDistributionTestHelpers helps test vector-event distributions.""" def run_test_sample_consistent_log_prob( self, sess_run_fn, dist, num_samples=int(1e5), radius=1., center=0., seed=42, rtol=1e-2, atol=0.): """Tests that sample/log_prob are mutually consistent. "Consistency" means that `sample` and `log_prob` correspond to the same distribution. The idea of this test is to compute the Monte-Carlo estimate of the volume enclosed by a hypersphere, i.e., the volume of an `n`-ball. While we could choose an arbitrary function to integrate, the hypersphere's volume is nice because it is intuitive, has an easy analytical expression, and works for `dimensions > 1`. Technical Details: Observe that: ```none int_{R**d} dx [x in Ball(radius=r, center=c)] = E_{p(X)}[ [X in Ball(r, c)] / p(X) ] = lim_{m->infty} m**-1 sum_j^m [x[j] in Ball(r, c)] / p(x[j]), where x[j] ~iid p(X) ``` Thus, for fixed `m`, the above is approximately true when `sample` and `log_prob` are mutually consistent. Furthermore, the above calculation has the analytical result: `pi**(d/2) r**d / Gamma(1 + d/2)`. Note: this test only verifies a necessary condition for consistency--it does does not verify sufficiency hence does not prove `sample`, `log_prob` truly are consistent. 
For this reason we recommend testing several different hyperspheres (assuming the hypersphere is supported by the distribution). Furthermore, we gain additional trust in this test when also tested `sample` against the first, second moments (`run_test_sample_consistent_mean_covariance`); it is probably unlikely that a "best-effort" implementation of `log_prob` would incorrectly pass both tests and for different hyperspheres. For a discussion on the analytical result (second-line) see: https://en.wikipedia.org/wiki/Volume_of_an_n-ball. For a discussion of importance sampling (fourth-line) see: https://en.wikipedia.org/wiki/Importance_sampling. Args: sess_run_fn: Python `callable` taking `list`-like of `Tensor`s and returning a list of results after running one "step" of TensorFlow computation, typically set to `sess.run`. dist: Distribution instance or object which implements `sample`, `log_prob`, `event_shape_tensor` and `batch_shape_tensor`. The distribution must have non-zero probability of sampling every point enclosed by the hypersphere. num_samples: Python `int` scalar indicating the number of Monte-Carlo samples to draw from `dist`. radius: Python `float`-type indicating the radius of the `n`-ball which we're computing the volume. center: Python floating-type vector (or scalar) indicating the center of the `n`-ball which we're computing the volume. When scalar, the value is broadcast to all event dims. seed: Python `int` indicating the seed to use when sampling from `dist`. In general it is not recommended to use `None` during a test as this increases the likelihood of spurious test failure. rtol: Python `float`-type indicating the admissible relative error between actual- and approximate-volumes. atol: Python `float`-type indicating the admissible absolute error between actual- and approximate-volumes. In general this should be zero since a typical radius implies a non-zero volume. 
""" def actual_hypersphere_volume(dims, radius): # https://en.wikipedia.org/wiki/Volume_of_an_n-ball # Using tf.lgamma because we'd have to otherwise use SciPy which is not # a required dependency of core. radius = np.asarray(radius) dims = math_ops.cast(dims, dtype=radius.dtype) return math_ops.exp( (dims / 2.) * np.log(np.pi) - math_ops.lgamma(1. + dims / 2.) + dims * math_ops.log(radius)) def is_in_ball(x, radius, center): return math_ops.cast(linalg_ops.norm(x - center, axis=-1) <= radius, dtype=x.dtype) def monte_carlo_hypersphere_volume(dist, num_samples, radius, center): # https://en.wikipedia.org/wiki/Importance_sampling x = dist.sample(num_samples, seed=seed) x = array_ops.identity(x) # Invalidate bijector cacheing. return math_ops.reduce_mean( math_ops.exp(-dist.log_prob(x)) * is_in_ball(x, radius, center), axis=0) # Build graph. with ops.name_scope( "run_test_sample_consistent_log_prob", values=[num_samples, radius, center] + dist._graph_parents): # pylint: disable=protected-access batch_shape = dist.batch_shape_tensor() actual_volume = actual_hypersphere_volume( dims=dist.event_shape_tensor()[0], radius=radius) sample_volume = monte_carlo_hypersphere_volume( dist, num_samples=num_samples, radius=radius, center=center) init_op = variables_ops.global_variables_initializer() # Execute graph. sess_run_fn(init_op) [batch_shape_, actual_volume_, sample_volume_] = sess_run_fn([ batch_shape, actual_volume, sample_volume]) # Check results. self.assertAllClose(np.tile(actual_volume_, reps=batch_shape_), sample_volume_, rtol=rtol, atol=atol) def run_test_sample_consistent_mean_covariance( self, sess_run_fn, dist, num_samples=int(1e5), seed=24, rtol=1e-2, atol=0.1, cov_rtol=None, cov_atol=None): """Tests that sample/mean/covariance are consistent with each other. "Consistency" means that `sample`, `mean`, `covariance`, etc all correspond to the same distribution. 
Args: sess_run_fn: Python `callable` taking `list`-like of `Tensor`s and returning a list of results after running one "step" of TensorFlow computation, typically set to `sess.run`. dist: Distribution instance or object which implements `sample`, `log_prob`, `event_shape_tensor` and `batch_shape_tensor`. num_samples: Python `int` scalar indicating the number of Monte-Carlo samples to draw from `dist`. seed: Python `int` indicating the seed to use when sampling from `dist`. In general it is not recommended to use `None` during a test as this increases the likelihood of spurious test failure. rtol: Python `float`-type indicating the admissible relative error between analytical and sample statistics. atol: Python `float`-type indicating the admissible absolute error between analytical and sample statistics. cov_rtol: Python `float`-type indicating the admissible relative error between analytical and sample covariance. Default: rtol. cov_atol: Python `float`-type indicating the admissible absolute error between analytical and sample covariance. Default: atol. 
    """
    x = dist.sample(num_samples, seed=seed)
    sample_mean = math_ops.reduce_mean(x, axis=0)
    # Sample covariance: mean of the outer products of the centered draws.
    sample_covariance = math_ops.reduce_mean(
        _vec_outer_square(x - sample_mean), axis=0)
    # Variance is the diagonal of the covariance; stddev its square root.
    sample_variance = array_ops.matrix_diag_part(sample_covariance)
    sample_stddev = math_ops.sqrt(sample_variance)
    [
        sample_mean_,
        sample_covariance_,
        sample_variance_,
        sample_stddev_,
        mean_,
        covariance_,
        variance_,
        stddev_
    ] = sess_run_fn([
        sample_mean,
        sample_covariance,
        sample_variance,
        sample_stddev,
        dist.mean(),
        dist.covariance(),
        dist.variance(),
        dist.stddev(),
    ])
    self.assertAllClose(mean_, sample_mean_, rtol=rtol, atol=atol)
    # cov_rtol / cov_atol fall back to the scalar tolerances when not given.
    self.assertAllClose(covariance_, sample_covariance_,
                        rtol=cov_rtol or rtol,
                        atol=cov_atol or atol)
    self.assertAllClose(variance_, sample_variance_, rtol=rtol, atol=atol)
    self.assertAllClose(stddev_, sample_stddev_, rtol=rtol, atol=atol)


def _vec_outer_square(x, name=None):
  """Computes the outer-product square of a vector, i.e., x x^T.

  Broadcasts over all leading (batch) dimensions; only the rightmost
  dimension is treated as the vector.
  """
  with ops.name_scope(name, "vec_osquare", [x]):
    return x[..., :, array_ops.newaxis] * x[..., array_ops.newaxis, :]
mit
frank10704/DF_GCS_W
MissionPlanner-master/packages/IronPython.StdLib.2.7.5-beta1/content/Lib/pstats.py
50
27927
"""Class for printing reports on profiled python code.""" # Class for printing reports on profiled python code. rev 1.0 4/1/94 # # Based on prior profile module by Sjoerd Mullender... # which was hacked somewhat by: Guido van Rossum # # see profile.py for more info. # Copyright 1994, by InfoSeek Corporation, all rights reserved. # Written by James Roskind # # Permission to use, copy, modify, and distribute this Python software # and its associated documentation for any purpose (subject to the # restriction in the following sentence) without fee is hereby granted, # provided that the above copyright notice appears in all copies, and # that both that copyright notice and this permission notice appear in # supporting documentation, and that the name of InfoSeek not be used in # advertising or publicity pertaining to distribution of the software # without specific, written prior permission. This permission is # explicitly restricted to the copying and modification of the software # to remain in Python, compiled Python, or other languages (such as C) # wherein the modified or derived code is exclusively imported into a # Python module. # # INFOSEEK CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS # SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND # FITNESS. IN NO EVENT SHALL INFOSEEK CORPORATION BE LIABLE FOR ANY # SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import sys import os import time import marshal import re from functools import cmp_to_key __all__ = ["Stats"] class Stats: """This class is used for creating reports from data generated by the Profile class. 
It is a "friend" of that class, and imports data either by direct access to members of Profile class, or by reading in a dictionary that was emitted (via marshal) from the Profile class. The big change from the previous Profiler (in terms of raw functionality) is that an "add()" method has been provided to combine Stats from several distinct profile runs. Both the constructor and the add() method now take arbitrarily many file names as arguments. All the print methods now take an argument that indicates how many lines to print. If the arg is a floating point number between 0 and 1.0, then it is taken as a decimal percentage of the available lines to be printed (e.g., .1 means print 10% of all available lines). If it is an integer, it is taken to mean the number of lines of data that you wish to have printed. The sort_stats() method now processes some additional options (i.e., in addition to the old -1, 0, 1, or 2). It takes an arbitrary number of quoted strings to select the sort order. For example sort_stats('time', 'name') sorts on the major key of 'internal function time', and on the minor key of 'the name of the function'. Look at the two tables in sort_stats() and get_sort_arg_defs(self) for more examples. All methods return self, so you can string together commands like: Stats('foo', 'goo').strip_dirs().sort_stats('calls').\ print_stats(5).print_callers(5) """ def __init__(self, *args, **kwds): # I can't figure out how to explictly specify a stream keyword arg # with *args: # def __init__(self, *args, stream=sys.stdout): ... # so I use **kwds and sqauwk if something unexpected is passed in. 
self.stream = sys.stdout if "stream" in kwds: self.stream = kwds["stream"] del kwds["stream"] if kwds: keys = kwds.keys() keys.sort() extras = ", ".join(["%s=%s" % (k, kwds[k]) for k in keys]) raise ValueError, "unrecognized keyword args: %s" % extras if not len(args): arg = None else: arg = args[0] args = args[1:] self.init(arg) self.add(*args) def init(self, arg): self.all_callees = None # calc only if needed self.files = [] self.fcn_list = None self.total_tt = 0 self.total_calls = 0 self.prim_calls = 0 self.max_name_len = 0 self.top_level = {} self.stats = {} self.sort_arg_dict = {} self.load_stats(arg) trouble = 1 try: self.get_top_level_stats() trouble = 0 finally: if trouble: print >> self.stream, "Invalid timing data", if self.files: print >> self.stream, self.files[-1], print >> self.stream def load_stats(self, arg): if not arg: self.stats = {} elif isinstance(arg, basestring): f = open(arg, 'rb') self.stats = marshal.load(f) f.close() try: file_stats = os.stat(arg) arg = time.ctime(file_stats.st_mtime) + " " + arg except: # in case this is not unix pass self.files = [ arg ] elif hasattr(arg, 'create_stats'): arg.create_stats() self.stats = arg.stats arg.stats = {} if not self.stats: raise TypeError, "Cannot create or construct a %r object from '%r''" % ( self.__class__, arg) return def get_top_level_stats(self): for func, (cc, nc, tt, ct, callers) in self.stats.items(): self.total_calls += nc self.prim_calls += cc self.total_tt += tt if ("jprofile", 0, "profiler") in callers: self.top_level[func] = None if len(func_std_string(func)) > self.max_name_len: self.max_name_len = len(func_std_string(func)) def add(self, *arg_list): if not arg_list: return self if len(arg_list) > 1: self.add(*arg_list[1:]) other = arg_list[0] if type(self) != type(other) or self.__class__ != other.__class__: other = Stats(other) self.files += other.files self.total_calls += other.total_calls self.prim_calls += other.prim_calls self.total_tt += other.total_tt for func in 
other.top_level: self.top_level[func] = None if self.max_name_len < other.max_name_len: self.max_name_len = other.max_name_len self.fcn_list = None for func, stat in other.stats.iteritems(): if func in self.stats: old_func_stat = self.stats[func] else: old_func_stat = (0, 0, 0, 0, {},) self.stats[func] = add_func_stats(old_func_stat, stat) return self def dump_stats(self, filename): """Write the profile data to a file we know how to load back.""" f = file(filename, 'wb') try: marshal.dump(self.stats, f) finally: f.close() # list the tuple indices and directions for sorting, # along with some printable description sort_arg_dict_default = { "calls" : (((1,-1), ), "call count"), "cumulative": (((3,-1), ), "cumulative time"), "file" : (((4, 1), ), "file name"), "line" : (((5, 1), ), "line number"), "module" : (((4, 1), ), "file name"), "name" : (((6, 1), ), "function name"), "nfl" : (((6, 1),(4, 1),(5, 1),), "name/file/line"), "pcalls" : (((0,-1), ), "call count"), "stdname" : (((7, 1), ), "standard name"), "time" : (((2,-1), ), "internal time"), } def get_sort_arg_defs(self): """Expand all abbreviations that are unique.""" if not self.sort_arg_dict: self.sort_arg_dict = dict = {} bad_list = {} for word, tup in self.sort_arg_dict_default.iteritems(): fragment = word while fragment: if not fragment: break if fragment in dict: bad_list[fragment] = 0 break dict[fragment] = tup fragment = fragment[:-1] for word in bad_list: del dict[word] return self.sort_arg_dict def sort_stats(self, *field): if not field: self.fcn_list = 0 return self if len(field) == 1 and isinstance(field[0], (int, long)): # Be compatible with old profiler field = [ {-1: "stdname", 0: "calls", 1: "time", 2: "cumulative"}[field[0]] ] sort_arg_defs = self.get_sort_arg_defs() sort_tuple = () self.sort_type = "" connector = "" for word in field: sort_tuple = sort_tuple + sort_arg_defs[word][0] self.sort_type += connector + sort_arg_defs[word][1] connector = ", " stats_list = [] for func, (cc, nc, tt, ct, 
callers) in self.stats.iteritems(): stats_list.append((cc, nc, tt, ct) + func + (func_std_string(func), func)) stats_list.sort(key=cmp_to_key(TupleComp(sort_tuple).compare)) self.fcn_list = fcn_list = [] for tuple in stats_list: fcn_list.append(tuple[-1]) return self def reverse_order(self): if self.fcn_list: self.fcn_list.reverse() return self def strip_dirs(self): oldstats = self.stats self.stats = newstats = {} max_name_len = 0 for func, (cc, nc, tt, ct, callers) in oldstats.iteritems(): newfunc = func_strip_path(func) if len(func_std_string(newfunc)) > max_name_len: max_name_len = len(func_std_string(newfunc)) newcallers = {} for func2, caller in callers.iteritems(): newcallers[func_strip_path(func2)] = caller if newfunc in newstats: newstats[newfunc] = add_func_stats( newstats[newfunc], (cc, nc, tt, ct, newcallers)) else: newstats[newfunc] = (cc, nc, tt, ct, newcallers) old_top = self.top_level self.top_level = new_top = {} for func in old_top: new_top[func_strip_path(func)] = None self.max_name_len = max_name_len self.fcn_list = None self.all_callees = None return self def calc_callees(self): if self.all_callees: return self.all_callees = all_callees = {} for func, (cc, nc, tt, ct, callers) in self.stats.iteritems(): if not func in all_callees: all_callees[func] = {} for func2, caller in callers.iteritems(): if not func2 in all_callees: all_callees[func2] = {} all_callees[func2][func] = caller return #****************************************************************** # The following functions support actual printing of reports #****************************************************************** # Optional "amount" is either a line count, or a percentage of lines. 
def eval_print_amount(self, sel, list, msg): new_list = list if isinstance(sel, basestring): try: rex = re.compile(sel) except re.error: msg += " <Invalid regular expression %r>\n" % sel return new_list, msg new_list = [] for func in list: if rex.search(func_std_string(func)): new_list.append(func) else: count = len(list) if isinstance(sel, float) and 0.0 <= sel < 1.0: count = int(count * sel + .5) new_list = list[:count] elif isinstance(sel, (int, long)) and 0 <= sel < count: count = sel new_list = list[:count] if len(list) != len(new_list): msg += " List reduced from %r to %r due to restriction <%r>\n" % ( len(list), len(new_list), sel) return new_list, msg def get_print_list(self, sel_list): width = self.max_name_len if self.fcn_list: stat_list = self.fcn_list[:] msg = " Ordered by: " + self.sort_type + '\n' else: stat_list = self.stats.keys() msg = " Random listing order was used\n" for selection in sel_list: stat_list, msg = self.eval_print_amount(selection, stat_list, msg) count = len(stat_list) if not stat_list: return 0, stat_list print >> self.stream, msg if count < len(self.stats): width = 0 for func in stat_list: if len(func_std_string(func)) > width: width = len(func_std_string(func)) return width+2, stat_list def print_stats(self, *amount): for filename in self.files: print >> self.stream, filename if self.files: print >> self.stream indent = ' ' * 8 for func in self.top_level: print >> self.stream, indent, func_get_function_name(func) print >> self.stream, indent, self.total_calls, "function calls", if self.total_calls != self.prim_calls: print >> self.stream, "(%d primitive calls)" % self.prim_calls, print >> self.stream, "in %.3f seconds" % self.total_tt print >> self.stream width, list = self.get_print_list(amount) if list: self.print_title() for func in list: self.print_line(func) print >> self.stream print >> self.stream return self def print_callees(self, *amount): width, list = self.get_print_list(amount) if list: self.calc_callees() 
self.print_call_heading(width, "called...") for func in list: if func in self.all_callees: self.print_call_line(width, func, self.all_callees[func]) else: self.print_call_line(width, func, {}) print >> self.stream print >> self.stream return self def print_callers(self, *amount): width, list = self.get_print_list(amount) if list: self.print_call_heading(width, "was called by...") for func in list: cc, nc, tt, ct, callers = self.stats[func] self.print_call_line(width, func, callers, "<-") print >> self.stream print >> self.stream return self def print_call_heading(self, name_size, column_title): print >> self.stream, "Function ".ljust(name_size) + column_title # print sub-header only if we have new-style callers subheader = False for cc, nc, tt, ct, callers in self.stats.itervalues(): if callers: value = callers.itervalues().next() subheader = isinstance(value, tuple) break if subheader: print >> self.stream, " "*name_size + " ncalls tottime cumtime" def print_call_line(self, name_size, source, call_dict, arrow="->"): print >> self.stream, func_std_string(source).ljust(name_size) + arrow, if not call_dict: print >> self.stream return clist = call_dict.keys() clist.sort() indent = "" for func in clist: name = func_std_string(func) value = call_dict[func] if isinstance(value, tuple): nc, cc, tt, ct = value if nc != cc: substats = '%d/%d' % (nc, cc) else: substats = '%d' % (nc,) substats = '%s %s %s %s' % (substats.rjust(7+2*len(indent)), f8(tt), f8(ct), name) left_width = name_size + 1 else: substats = '%s(%r) %s' % (name, value, f8(self.stats[func][3])) left_width = name_size + 3 print >> self.stream, indent*left_width + substats indent = " " def print_title(self): print >> self.stream, ' ncalls tottime percall cumtime percall', print >> self.stream, 'filename:lineno(function)' def print_line(self, func): # hack : should print percentages cc, nc, tt, ct, callers = self.stats[func] c = str(nc) if nc != cc: c = c + '/' + str(cc) print >> self.stream, c.rjust(9), print 
>> self.stream, f8(tt), if nc == 0: print >> self.stream, ' '*8, else: print >> self.stream, f8(float(tt)/nc), print >> self.stream, f8(ct), if cc == 0: print >> self.stream, ' '*8, else: print >> self.stream, f8(float(ct)/cc), print >> self.stream, func_std_string(func) class TupleComp: """This class provides a generic function for comparing any two tuples. Each instance records a list of tuple-indices (from most significant to least significant), and sort direction (ascending or decending) for each tuple-index. The compare functions can then be used as the function argument to the system sort() function when a list of tuples need to be sorted in the instances order.""" def __init__(self, comp_select_list): self.comp_select_list = comp_select_list def compare (self, left, right): for index, direction in self.comp_select_list: l = left[index] r = right[index] if l < r: return -direction if l > r: return direction return 0 #************************************************************************** # func_name is a triple (file:string, line:int, name:string) def func_strip_path(func_name): filename, line, name = func_name return os.path.basename(filename), line, name def func_get_function_name(func): return func[2] def func_std_string(func_name): # match what old profile produced if func_name[:2] == ('~', 0): # special case for built-in functions name = func_name[2] if name.startswith('<') and name.endswith('>'): return '{%s}' % name[1:-1] else: return name else: return "%s:%d(%s)" % func_name #************************************************************************** # The following functions combine statists for pairs functions. # The bulk of the processing involves correctly handling "call" lists, # such as callers and callees. 
#************************************************************************** def add_func_stats(target, source): """Add together all the stats for two profile entries.""" cc, nc, tt, ct, callers = source t_cc, t_nc, t_tt, t_ct, t_callers = target return (cc+t_cc, nc+t_nc, tt+t_tt, ct+t_ct, add_callers(t_callers, callers)) def add_callers(target, source): """Combine two caller lists in a single list.""" new_callers = {} for func, caller in target.iteritems(): new_callers[func] = caller for func, caller in source.iteritems(): if func in new_callers: if isinstance(caller, tuple): # format used by cProfile new_callers[func] = tuple([i[0] + i[1] for i in zip(caller, new_callers[func])]) else: # format used by profile new_callers[func] += caller else: new_callers[func] = caller return new_callers def count_calls(callers): """Sum the caller statistics to get total number of calls received.""" nc = 0 for calls in callers.itervalues(): nc += calls return nc #************************************************************************** # The following functions support printing of reports #************************************************************************** def f8(x): return "%8.3f" % x #************************************************************************** # Statistics browser added by ESR, April 2001 #************************************************************************** if __name__ == '__main__': import cmd try: import readline except ImportError: pass class ProfileBrowser(cmd.Cmd): def __init__(self, profile=None): cmd.Cmd.__init__(self) self.prompt = "% " self.stats = None self.stream = sys.stdout if profile is not None: self.do_read(profile) def generic(self, fn, line): args = line.split() processed = [] for term in args: try: processed.append(int(term)) continue except ValueError: pass try: frac = float(term) if frac > 1 or frac < 0: print >> self.stream, "Fraction argument must be in [0, 1]" continue processed.append(frac) continue except ValueError: pass 
processed.append(term) if self.stats: getattr(self.stats, fn)(*processed) else: print >> self.stream, "No statistics object is loaded." return 0 def generic_help(self): print >> self.stream, "Arguments may be:" print >> self.stream, "* An integer maximum number of entries to print." print >> self.stream, "* A decimal fractional number between 0 and 1, controlling" print >> self.stream, " what fraction of selected entries to print." print >> self.stream, "* A regular expression; only entries with function names" print >> self.stream, " that match it are printed." def do_add(self, line): if self.stats: self.stats.add(line) else: print >> self.stream, "No statistics object is loaded." return 0 def help_add(self): print >> self.stream, "Add profile info from given file to current statistics object." def do_callees(self, line): return self.generic('print_callees', line) def help_callees(self): print >> self.stream, "Print callees statistics from the current stat object." self.generic_help() def do_callers(self, line): return self.generic('print_callers', line) def help_callers(self): print >> self.stream, "Print callers statistics from the current stat object." self.generic_help() def do_EOF(self, line): print >> self.stream, "" return 1 def help_EOF(self): print >> self.stream, "Leave the profile brower." def do_quit(self, line): return 1 def help_quit(self): print >> self.stream, "Leave the profile brower." def do_read(self, line): if line: try: self.stats = Stats(line) except IOError, args: print >> self.stream, args[1] return except Exception as err: print >> self.stream, err.__class__.__name__ + ':', err return self.prompt = line + "% " elif len(self.prompt) > 2: line = self.prompt[:-2] self.do_read(line) else: print >> self.stream, "No statistics object is current -- cannot reload." return 0 def help_read(self): print >> self.stream, "Read in profile data from a specified file." print >> self.stream, "Without argument, reload the current file." 
def do_reverse(self, line): if self.stats: self.stats.reverse_order() else: print >> self.stream, "No statistics object is loaded." return 0 def help_reverse(self): print >> self.stream, "Reverse the sort order of the profiling report." def do_sort(self, line): if not self.stats: print >> self.stream, "No statistics object is loaded." return abbrevs = self.stats.get_sort_arg_defs() if line and all((x in abbrevs) for x in line.split()): self.stats.sort_stats(*line.split()) else: print >> self.stream, "Valid sort keys (unique prefixes are accepted):" for (key, value) in Stats.sort_arg_dict_default.iteritems(): print >> self.stream, "%s -- %s" % (key, value[1]) return 0 def help_sort(self): print >> self.stream, "Sort profile data according to specified keys." print >> self.stream, "(Typing `sort' without arguments lists valid keys.)" def complete_sort(self, text, *args): return [a for a in Stats.sort_arg_dict_default if a.startswith(text)] def do_stats(self, line): return self.generic('print_stats', line) def help_stats(self): print >> self.stream, "Print statistics from the current stat object." self.generic_help() def do_strip(self, line): if self.stats: self.stats.strip_dirs() else: print >> self.stream, "No statistics object is loaded." def help_strip(self): print >> self.stream, "Strip leading path information from filenames in the report." def help_help(self): print >> self.stream, "Show help for a given command." def postcmd(self, stop, line): if stop: return stop return None import sys if len(sys.argv) > 1: initprofile = sys.argv[1] else: initprofile = None try: browser = ProfileBrowser(initprofile) print >> browser.stream, "Welcome to the profile statistics browser." browser.cmdloop() print >> browser.stream, "Goodbye." except KeyboardInterrupt: pass # That's all, folks.
gpl-3.0
kunesj/io3d
tests/sample_data_test.py
1
1948
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the io3d.datasets sample-data download helpers.

These tests download real archives over the network and therefore need
connectivity; the slow full-download test is tagged with @attr("slow").
"""

import os
import os.path
import shutil
import unittest
import logging

from nose.plugins.attrib import attr
import numpy as np

import io3d.datasets as sd

path_to_script = os.path.dirname(os.path.abspath(__file__))
logger = logging.getLogger(__name__)


class SampleDataTest(unittest.TestCase):
    # Toggle for interactive test runs (name kept for compatibility).
    interactivetTest = False

    def sample_data_test(self):
        """Download the 'head' dataset and verify one expected DICOM file."""
        sd.download("head", "delete_head")
        self.assertTrue(os.path.exists(
            "./delete_head/matlab/examples/sample_data/DICOM/digest_article/brain_001.dcm"))
        shutil.rmtree("delete_head")

    @attr("slow")
    def sample_data_get_all_test(self):
        """Download every registered dataset (network-heavy, marked slow)."""
        keys = sd.data_urls.keys()
        sd.download(keys, "delete_all")
        self.assertTrue(os.path.exists(
            "./delete_all/matlab/examples/sample_data/DICOM/digest_article/brain_001.dcm"))
        shutil.rmtree("delete_all")

    def sample_data_batch_test(self):
        """Download several datasets into a single directory in one call."""
        tmp_sample_data_path = "delete_sample_data"
        if os.path.exists(tmp_sample_data_path):
            shutil.rmtree(tmp_sample_data_path)
        sd.download(["head", "exp_small"], tmp_sample_data_path)
        self.assertTrue(os.path.exists(
            "./delete_sample_data/exp_small/seeds/org-liver-orig003-seeds.pklz"))
        self.assertTrue(os.path.exists(
            "./delete_sample_data/matlab/examples/sample_data/DICOM/digest_article/brain_001.dcm"))
        shutil.rmtree(tmp_sample_data_path)

    def generate_liver_test(self):
        """Smoke-test the synthetic abdominal data generator."""
        liver = sd.generate_abdominal()


if __name__ == "__main__":
    unittest.main()
mit
audihsu-qci/ONL
components/all/platform-config/powerpc-quanta-lb9a-r0/src/python/onlpc.py
9
2801
#!/usr/bin/python ############################################################ # <bsn.cl fy=2013 v=onl> # # Copyright 2013, 2014 Big Switch Networks, Inc. # # Licensed under the Eclipse Public License, Version 1.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.eclipse.org/legal/epl-v10.html # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the License for the specific # language governing permissions and limitations under the # License. # # </bsn.cl> ############################################################ ############################################################ # # Platform Driver for the Quanta LB9A # ############################################################ import os import struct import time import subprocess from onl.platform.base import * from onl.vendor.quanta import * class OpenNetworkPlatformImplementation(OpenNetworkPlatformQuanta): def _eeprom_file(self): return "/sys/devices/e0000000.soc8541/e0003000.i2c/i2c-0/0-0054/eeprom" def model(self): return "LB9A" def platform(self): return "powerpc-quanta-lb9a-r0" def _plat_info_dict(self): return { platinfo.LAG_COMPONENT_MAX : 8, platinfo.PORT_COUNT : 52 } def _plat_oid_table(self): return { oids.TEMP_SENSORS : { 'ctemp1' : '.1.3.6.1.4.1.2021.13.16.2.1.3.1', 'ctemp2' : '.1.3.6.1.4.1.2021.13.16.2.1.3.5', 'ctemp3' : '.1.3.6.1.4.1.2021.13.16.2.1.3.9', 'ctemp4' : '.1.3.6.1.4.1.2021.13.16.2.1.3.13', 'ctemp5' : '.1.3.6.1.4.1.2021.13.16.2.1.3.17', 'pwr-temp1' : '.1.3.6.1.4.1.2021.13.16.2.1.3.41', 'pwr-temp2' : '.1.3.6.1.4.1.2021.13.16.2.1.3.44', 'pwr-temp3' : '.1.3.6.1.4.1.2021.13.16.2.1.3.46', }, oids.CHASSIS_FAN_SENSORS : { 'cfan1' : '.1.3.6.1.4.1.2021.13.16.3.1.3.1', 'cfan2' : '.1.3.6.1.4.1.2021.13.16.3.1.3.5', 'cfan3' : 
'.1.3.6.1.4.1.2021.13.16.3.1.3.9', 'cfan4' : '.1.3.6.1.4.1.2021.13.16.3.1.3.13', }, oids.POWER_FAN_SENSORS : { 'pwr-fan' : '.1.3.6.1.4.1.2021.13.16.3.1.3.33', }, oids.POWER_SENSORS : { 'power' : '.1.3.6.1.4.1.2021.13.16.5.1.3.8' }, } def sys_init(self): pass if __name__ == "__main__": print OpenNetworkPlatformImplementation()
epl-1.0
josejpalacios/knowledge-management
Server/controllers/file_controller.py
2
3328
from Server import verboseFunc
# Merged the duplicated ``import os`` lines and the two split
# ``from . import db, ...`` lines into one import block (same names bound).
from . import db, SOCKET_EOF, SUCCESS, FAILURE
import os
import pickle


@verboseFunc
def upload_file(connection, upload_info):
    """
    Upload a user's file to the database.

    :param connection: client connection
    :type connection: socket.socket
    :param upload_info: file info (i.e. name, tags, gid, notes, mod_time)
    :type upload_info: dict
    """
    filename = upload_info['fname']
    tags = [tag.strip() for tag in upload_info['tags'].split(',')]
    group_id = int(upload_info['gid'])
    notes = upload_info['notes']
    mod_time = float(upload_info['mod_time'])
    # group_id 0 denotes the shared repository with a placeholder owner.
    if group_id != 0:
        owner = db.get_username(group_id)
    else:
        owner = 'DUMMY_SHARED_USER'
    # NOTE(review): the record is inserted *before* the duplicate check
    # below, and the byte transfer proceeds even after a FAILURE response
    # is sent. This mirrors the original behaviour -- confirm against the
    # client protocol before reordering.
    db.upload(filename, tags, owner, group_id, notes, mod_time)
    if db.__contains__(filename, owner):
        connection.send(FAILURE + "ERROR: file already exists".encode())
    else:
        connection.send(SUCCESS)
    prefix = 'FILE_REPO'
    repo_name = db.repo_name(group_id)
    if not repo_name:
        print('REPO NAME ERROR')
        return
    file = open(os.path.normpath(
        os.path.join(os.getcwd(), prefix, repo_name, filename)), 'wb')
    print("\tOpened file: " + filename)
    # Receive the file contents until the EOF sentinel arrives.
    while True:
        line = connection.recv(1024)
        print(line)
        if line == SOCKET_EOF:
            break
        else:
            file.write(line)
    file.close()
    print("Closed File")


@verboseFunc
def retrieve_file(connection, filename):
    """Stream the named file back to the client, line by line.

    :param connection: client connection
    :param filename: file path as raw bytes received from the client
    """
    print("Inside RetrieveHandler")
    print(filename.decode())
    file = open(filename, 'rb')
    for line in file:
        connection.send(line)
    print("\tOpened file: " + filename.decode())
    file.close()
    print("Leaving RetrieveHandler")


@verboseFunc
def retrieve_repo(connection, query):
    """Send the pickled contents of one or more group repositories.

    ``query['group_ids']`` must be a comma-separated string of group ids;
    a missing key is answered with FAILURE.
    """
    if 'group_ids' not in query:
        connection.send(FAILURE)
        return
    group_ids = query['group_ids'].split(',')
    connection.send(SUCCESS)
    result = []
    for group_id in group_ids:
        result.extend(db.retrieve_repo(int(group_id)))
    connection.send(pickle.dumps(result))
    connection.send(SOCKET_EOF)


@verboseFunc
def retrieve_personal_repo(connection, uname):
    """Send the given user's personal repository.

    BUGFIX: retrieve_repo() reads the 'group_ids' key (a comma-separated
    string); the old ``{'group_id': repo_id}`` dict never matched, so this
    handler always answered FAILURE.
    """
    repo_id = db.get_personal_repo_id(uname)
    retrieve_repo(connection, {'group_ids': str(repo_id)})


@verboseFunc
def delete_file(connection, query):
    """Delete a file record identified by filename + group id."""
    print("Inside DeleteHandler")
    if 'filename' not in query or 'group_id' not in query:
        # BUGFIX: was ``connection.send(connection.send(...))`` -- the
        # outer call tried to send the integer return value of the inner
        # send. Also return early instead of falling through to a
        # KeyError on the missing parameters.
        connection.send(FAILURE + " ERROR: missing parameters".encode())
        return
    filename = query['filename']
    group_id = query['group_id']
    if db.delete(filename, group_id):
        connection.send(SUCCESS)
    else:
        connection.send(FAILURE + " ERROR: deletion failed".encode())
    print("Leaving DeleteHandler")
mit
Kamik423/uni_plan
plan/plan/lib/python3.4/site-packages/pip/_vendor/__init__.py
329
4670
""" pip._vendor is for vendoring dependencies of pip to prevent needing pip to depend on something external. Files inside of pip._vendor should be considered immutable and should only be updated to versions from upstream. """ from __future__ import absolute_import import glob import os.path import sys # Downstream redistributors which have debundled our dependencies should also # patch this value to be true. This will trigger the additional patching # to cause things like "six" to be available as pip. DEBUNDLED = False # By default, look in this directory for a bunch of .whl files which we will # add to the beginning of sys.path before attempting to import anything. This # is done to support downstream re-distributors like Debian and Fedora who # wish to create their own Wheels for our dependencies to aid in debundling. WHEEL_DIR = os.path.abspath(os.path.dirname(__file__)) # Define a small helper function to alias our vendored modules to the real ones # if the vendored ones do not exist. This idea of this was taken from # https://github.com/kennethreitz/requests/pull/2567. def vendored(modulename): vendored_name = "{0}.{1}".format(__name__, modulename) try: __import__(vendored_name, globals(), locals(), level=0) except ImportError: try: __import__(modulename, globals(), locals(), level=0) except ImportError: # We can just silently allow import failures to pass here. If we # got to this point it means that ``import pip._vendor.whatever`` # failed and so did ``import whatever``. Since we're importing this # upfront in an attempt to alias imports, not erroring here will # just mean we get a regular import error whenever pip *actually* # tries to import one of these modules to use it, which actually # gives us a better error message than we would have otherwise # gotten. 
pass else: sys.modules[vendored_name] = sys.modules[modulename] base, head = vendored_name.rsplit(".", 1) setattr(sys.modules[base], head, sys.modules[modulename]) # If we're operating in a debundled setup, then we want to go ahead and trigger # the aliasing of our vendored libraries as well as looking for wheels to add # to our sys.path. This will cause all of this code to be a no-op typically # however downstream redistributors can enable it in a consistent way across # all platforms. if DEBUNDLED: # Actually look inside of WHEEL_DIR to find .whl files and add them to the # front of our sys.path. sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path # Actually alias all of our vendored dependencies. vendored("cachecontrol") vendored("colorama") vendored("distlib") vendored("distro") vendored("html5lib") vendored("lockfile") vendored("six") vendored("six.moves") vendored("six.moves.urllib") vendored("packaging") vendored("packaging.version") vendored("packaging.specifiers") vendored("pkg_resources") vendored("progress") vendored("retrying") vendored("requests") vendored("requests.packages") vendored("requests.packages.urllib3") vendored("requests.packages.urllib3._collections") vendored("requests.packages.urllib3.connection") vendored("requests.packages.urllib3.connectionpool") vendored("requests.packages.urllib3.contrib") vendored("requests.packages.urllib3.contrib.ntlmpool") vendored("requests.packages.urllib3.contrib.pyopenssl") vendored("requests.packages.urllib3.exceptions") vendored("requests.packages.urllib3.fields") vendored("requests.packages.urllib3.filepost") vendored("requests.packages.urllib3.packages") vendored("requests.packages.urllib3.packages.ordered_dict") vendored("requests.packages.urllib3.packages.six") vendored("requests.packages.urllib3.packages.ssl_match_hostname") vendored("requests.packages.urllib3.packages.ssl_match_hostname." 
"_implementation") vendored("requests.packages.urllib3.poolmanager") vendored("requests.packages.urllib3.request") vendored("requests.packages.urllib3.response") vendored("requests.packages.urllib3.util") vendored("requests.packages.urllib3.util.connection") vendored("requests.packages.urllib3.util.request") vendored("requests.packages.urllib3.util.response") vendored("requests.packages.urllib3.util.retry") vendored("requests.packages.urllib3.util.ssl_") vendored("requests.packages.urllib3.util.timeout") vendored("requests.packages.urllib3.util.url")
apache-2.0
bx5974/sikuli
sikuli-script/src/main/python/sikuli/Screen.py
1
2876
# Copyright 2010-2011, Sikuli.org # Released under the MIT License. from org.sikuli.script import Screen as JScreen import inspect import __main__ import __builtin__ import sys from Region import * from java.awt import Rectangle DEBUG=False class Screen(Region): def __init__(self, id=None): if id != None: r = JScreen.getBounds(id) else: r = JScreen().getBounds() (x, y, w, h) = (int(r.getX()), int(r.getY()), \ int(r.getWidth()), int(r.getHeight())) Region.__init__(self, x, y, w, h) @classmethod def getNumberScreens(cls): return JScreen.getNumberScreens() def getBounds(self): return self.getScreen().getBounds() def selectRegion(self, msg=None): if msg: r = self.getScreen().selectRegion(msg) else: r = self.getScreen().selectRegion() if r: return Region(r) else: return None def showRegion(self, region): self.getScreen().showRegion(region) ## # Enters the screen-capture mode asking the user to capture a region of # the screen if no arguments are given. # If any arguments are specified, capture() automatically captures the given # region of the screen. # @param *args The args can be 4 integers: x, y, w, and h, a <a href="org/sikuli/script/Match.html">Match</a> object or a {@link #Region} object. # @return The path to the captured image. 
# def capture(self, *args): scr = self.getScreen() if len(args) == 0: simg = scr.userCapture() if simg: return simg.getFilename() else: return None elif len(args) == 1: if __builtin__.type(args[0]) is types.StringType or __builtin__.type(args[0]) is types.UnicodeType: simg = scr.userCapture(args[0]) if simg: return simg.getFilename() else: return None else: return scr.capture(args[0]).getFilename() elif len(args) == 4: return scr.capture(args[0], args[1], args[2], args[3]).getFilename() else: return None def toString(self): return self.getScreen().toString() def _exposeAllMethods(self, mod): exclude_list = [ 'class', 'classDictInit', 'clone', 'equals', 'finalize', 'getClass', 'hashCode', 'notify', 'notifyAll', 'toGlobalCoord', 'toString', 'capture', 'selectRegion'] dict = sys.modules[mod].__dict__ for name in dir(self): if inspect.ismethod(getattr(self,name)) \ and name[0] != '_' and name[:7] != 'super__' and \ not name in exclude_list: if DEBUG: print "expose " + name dict[name] = eval("self."+name) #__main__.__dict__[name] = eval("self."+name)
mit
harterj/moose
python/chigger/tests/utils/test_animate.py
8
1087
#!/usr/bin/env python3
#pylint: disable=missing-docstring
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html

import unittest
import os.path

import mock

import chigger
import subprocess


class Test(unittest.TestCase):
    @mock.patch('subprocess.call')
    def testAnimate(self, mock_call):
        """chigger.utils.animate should invoke ImageMagick 'convert'
        with per-frame delays and a looping GIF output."""
        gold = lambda name: os.path.join('..', 'adapt', 'gold', name)
        chigger.utils.animate(gold('adapt_*.png'), 'out.gif')
        mock_call.assert_called_with(['convert',
                                      '-delay', '20', gold('adapt_0.png'),
                                      '-delay', '20', gold('adapt_4.png'),
                                      '-delay', '500', gold('adapt_9.png'),
                                      '-loop', '0', 'out.gif'])


if __name__ == '__main__':
    unittest.main(module=__name__, verbosity=2)
lgpl-2.1
asmuelle/heekscnc
pycnc/wxOutputWindow.py
25
1840
import wx
import HeeksCNC


class OutputTextCtrl(wx.TextCtrl):
    """Rich multi-line text control displaying generated NC code."""

    def __init__(self, parent):
        wx.TextCtrl.__init__(self, parent,
                             style=wx.TE_MULTILINE + wx.TE_DONTWRAP + wx.TE_RICH + wx.TE_RICH2)
        # Re-entrancy guard for OnPaint while formatting is in progress.
        self.painting = False
        #self.Bind(wx.EVT_MOUSE_EVENTS, self.OnMouse)
        #self.Bind(wx.EVT_PAINT, self.OnPaint)

    def OnMouse(self, event):
        # On left-button release, highlight the NC block under the caret
        # and refresh the CAD view.
        if event.LeftUp():
            caret = self.GetInsertionPoint()
            HeeksCNC.program.nccode.HighlightBlock(caret)
            HeeksCNC.cad.repaint()
        event.Skip()

    def OnPaint(self, event):
        dc = wx.PaintDC(self)
        if not self.painting:
            self.painting = True
            size = self.GetClientSize()
            scrollpos = self.GetScrollPos(wx.VERTICAL)
            # Determine the range of character positions currently visible
            # so only those blocks get (re)formatted.
            result0, col0, row0 = self.HitTest(wx.Point(0, 0))
            result1, col1, row1 = self.HitTest(wx.Point(size.x, size.y))
            first = self.XYToPosition(0, row0)
            last = self.XYToPosition(1, row1)
            HeeksCNC.program.nccode.FormatBlocks(self, first, last)
            self.SetScrollPos(wx.VERTICAL, scrollpos)
            self.painting = False
        event.Skip()


class OutputWindow(wx.ScrolledWindow):
    """Scrolled container window hosting the output text control."""

    def __init__(self, parent):
        wx.ScrolledWindow.__init__(self, parent, name='Output',
                                   style=wx.HSCROLL + wx.VSCROLL + wx.NO_FULL_REPAINT_ON_RESIZE)
        self.textCtrl = OutputTextCtrl(self)
        self.textCtrl.SetMaxLength(0)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.Resize()

    def Resize(self):
        # Keep the text control filling the entire client area.
        self.textCtrl.SetSize(self.GetClientSize())

    def Clear(self):
        self.textCtrl.Clear()

    def OnSize(self, event):
        self.Resize()
        event.Skip()
bsd-3-clause
sjlehtin/django
django/core/files/locks.py
725
3516
""" Portable file locking utilities. Based partially on an example by Jonathan Feignberg in the Python Cookbook [1] (licensed under the Python Software License) and a ctypes port by Anatoly Techtonik for Roundup [2] (license [3]). [1] http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65203 [2] http://sourceforge.net/p/roundup/code/ci/default/tree/roundup/backends/portalocker.py [3] http://sourceforge.net/p/roundup/code/ci/default/tree/COPYING.txt Example Usage:: >>> from django.core.files import locks >>> with open('./file', 'wb') as f: ... locks.lock(f, locks.LOCK_EX) ... f.write('Django') """ import os __all__ = ('LOCK_EX', 'LOCK_SH', 'LOCK_NB', 'lock', 'unlock') def _fd(f): """Get a filedescriptor from something which could be a file or an fd.""" return f.fileno() if hasattr(f, 'fileno') else f if os.name == 'nt': import msvcrt from ctypes import (sizeof, c_ulong, c_void_p, c_int64, Structure, Union, POINTER, windll, byref) from ctypes.wintypes import BOOL, DWORD, HANDLE LOCK_SH = 0 # the default LOCK_NB = 0x1 # LOCKFILE_FAIL_IMMEDIATELY LOCK_EX = 0x2 # LOCKFILE_EXCLUSIVE_LOCK # --- Adapted from the pyserial project --- # detect size of ULONG_PTR if sizeof(c_ulong) != sizeof(c_void_p): ULONG_PTR = c_int64 else: ULONG_PTR = c_ulong PVOID = c_void_p # --- Union inside Structure by stackoverflow:3480240 --- class _OFFSET(Structure): _fields_ = [ ('Offset', DWORD), ('OffsetHigh', DWORD)] class _OFFSET_UNION(Union): _anonymous_ = ['_offset'] _fields_ = [ ('_offset', _OFFSET), ('Pointer', PVOID)] class OVERLAPPED(Structure): _anonymous_ = ['_offset_union'] _fields_ = [ ('Internal', ULONG_PTR), ('InternalHigh', ULONG_PTR), ('_offset_union', _OFFSET_UNION), ('hEvent', HANDLE)] LPOVERLAPPED = POINTER(OVERLAPPED) # --- Define function prototypes for extra safety --- LockFileEx = windll.kernel32.LockFileEx LockFileEx.restype = BOOL LockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, DWORD, LPOVERLAPPED] UnlockFileEx = windll.kernel32.UnlockFileEx UnlockFileEx.restype 
= BOOL UnlockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, LPOVERLAPPED] def lock(f, flags): hfile = msvcrt.get_osfhandle(_fd(f)) overlapped = OVERLAPPED() ret = LockFileEx(hfile, flags, 0, 0, 0xFFFF0000, byref(overlapped)) return bool(ret) def unlock(f): hfile = msvcrt.get_osfhandle(_fd(f)) overlapped = OVERLAPPED() ret = UnlockFileEx(hfile, 0, 0, 0xFFFF0000, byref(overlapped)) return bool(ret) else: try: import fcntl LOCK_SH = fcntl.LOCK_SH # shared lock LOCK_NB = fcntl.LOCK_NB # non-blocking LOCK_EX = fcntl.LOCK_EX except (ImportError, AttributeError): # File locking is not supported. LOCK_EX = LOCK_SH = LOCK_NB = 0 # Dummy functions that don't do anything. def lock(f, flags): # File is not locked return False def unlock(f): # File is unlocked return True else: def lock(f, flags): ret = fcntl.flock(_fd(f), flags) return (ret == 0) def unlock(f): ret = fcntl.flock(_fd(f), fcntl.LOCK_UN) return (ret == 0)
bsd-3-clause
hkawasaki/kawasaki-aio8-2
cms/djangoapps/contentstore/views/tests/test_access.py
16
2238
""" Tests access.py """ from django.test import TestCase from django.contrib.auth.models import User from xmodule.modulestore import Location from xmodule.modulestore.locator import CourseLocator from student.roles import CourseInstructorRole, CourseStaffRole from student.tests.factories import AdminFactory from student.auth import add_users from contentstore.views.access import get_user_role class RolesTest(TestCase): """ Tests for user roles. """ def setUp(self): """ Test case setup """ self.global_admin = AdminFactory() self.instructor = User.objects.create_user('testinstructor', '[email protected]', 'foo') self.staff = User.objects.create_user('teststaff', '[email protected]', 'foo') self.location = Location('i4x', 'mitX', '101', 'course', 'test') self.locator = CourseLocator(url='edx://mitX.101.test') def test_get_user_role_instructor(self): """ Verifies if user is instructor. """ add_users(self.global_admin, CourseInstructorRole(self.location), self.instructor) self.assertEqual( 'instructor', get_user_role(self.instructor, self.location, self.location.course_id) ) def test_get_user_role_instructor_locator(self): """ Verifies if user is instructor, using a CourseLocator. """ add_users(self.global_admin, CourseInstructorRole(self.locator), self.instructor) self.assertEqual( 'instructor', get_user_role(self.instructor, self.locator) ) def test_get_user_role_staff(self): """ Verifies if user is staff. """ add_users(self.global_admin, CourseStaffRole(self.location), self.staff) self.assertEqual( 'staff', get_user_role(self.staff, self.location, self.location.course_id) ) def test_get_user_role_staff_locator(self): """ Verifies if user is staff, using a CourseLocator. """ add_users(self.global_admin, CourseStaffRole(self.locator), self.staff) self.assertEqual( 'staff', get_user_role(self.staff, self.locator) )
agpl-3.0
nvoron23/scikit-learn
sklearn/dummy.py
208
17370
# Author: Mathieu Blondel <[email protected]>
#         Arnaud Joly <[email protected]>
#         Maheshakya Wijewardena <[email protected]>
# License: BSD 3 clause

from __future__ import division

import warnings
import numpy as np
import scipy.sparse as sp

from .base import BaseEstimator, ClassifierMixin, RegressorMixin
from .utils import check_random_state
from .utils.validation import check_array
from .utils.validation import check_consistent_length
from .utils.random import random_choice_csc
from .utils.stats import _weighted_percentile
from .utils.multiclass import class_distribution


class DummyClassifier(BaseEstimator, ClassifierMixin):
    """
    DummyClassifier is a classifier that makes predictions using simple rules.

    This classifier is useful as a simple baseline to compare with other
    (real) classifiers. Do not use it for real problems.

    Read more in the :ref:`User Guide <dummy_estimators>`.

    Parameters
    ----------
    strategy : str
        Strategy to use to generate predictions.

        * "stratified": generates predictions by respecting the training
          set's class distribution.
        * "most_frequent": always predicts the most frequent label in the
          training set.
        * "prior": always predicts the class that maximizes the class prior
          (like "most_frequent") and ``predict_proba`` returns the class prior.
        * "uniform": generates predictions uniformly at random.
        * "constant": always predicts a constant label that is provided by
          the user. This is useful for metrics that evaluate a non-majority
          class

    random_state : int seed, RandomState instance, or None (default)
        The seed of the pseudo random number generator to use.

    constant : int or str or array of shape = [n_outputs]
        The explicit constant as predicted by the "constant" strategy. This
        parameter is useful only for the "constant" strategy.

    Attributes
    ----------
    classes_ : array or list of array of shape = [n_classes]
        Class labels for each output.

    n_classes_ : array or list of array of shape = [n_classes]
        Number of label for each output.

    class_prior_ : array or list of array of shape = [n_classes]
        Probability of each class for each output.

    n_outputs_ : int,
        Number of outputs.

    output_2d_ : bool,
        True if the output at fit is 2d, else false.

    sparse_output_ : bool,
        True if the array returned from predict is to be in sparse CSC format.
        Is automatically set to True if the input y is passed in sparse format.
    """

    def __init__(self, strategy="stratified", random_state=None,
                 constant=None):
        self.strategy = strategy
        self.random_state = random_state
        self.constant = constant

    def fit(self, X, y, sample_weight=None):
        """Fit the random classifier.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples
            and n_features is the number of features.

        y : array-like, shape = [n_samples] or [n_samples, n_outputs]
            Target values.

        sample_weight : array-like of shape = [n_samples], optional
            Sample weights.

        Returns
        -------
        self : object
            Returns self.
        """
        if self.strategy not in ("most_frequent", "stratified", "uniform",
                                 "constant", "prior"):
            raise ValueError("Unknown strategy type.")

        if self.strategy == "uniform" and sp.issparse(y):
            # "uniform" needs dense targets anyway, so densify up front and
            # tell the user the sparse format bought them nothing.
            y = y.toarray()
            warnings.warn('A local copy of the target data has been converted '
                          'to a numpy array. Predicting on sparse target data '
                          'with the uniform strategy would not save memory '
                          'and would be slower.',
                          UserWarning)

        self.sparse_output_ = sp.issparse(y)

        if not self.sparse_output_:
            y = np.atleast_1d(y)

        self.output_2d_ = y.ndim == 2

        # Internally y is always 2d: [n_samples, n_outputs].
        if y.ndim == 1:
            y = np.reshape(y, (-1, 1))

        self.n_outputs_ = y.shape[1]

        if self.strategy == "constant":
            if self.constant is None:
                raise ValueError("Constant target value has to be specified "
                                 "when the constant strategy is used.")
            else:
                constant = np.reshape(np.atleast_1d(self.constant), (-1, 1))
                if constant.shape[0] != self.n_outputs_:
                    raise ValueError("Constant target value should have "
                                     "shape (%d, 1)." % self.n_outputs_)

        (self.classes_,
         self.n_classes_,
         self.class_prior_) = class_distribution(y, sample_weight)

        if (self.strategy == "constant" and
                any(constant[k] not in self.classes_[k]
                    for k in range(self.n_outputs_))):
            # Checking in case of constant strategy if the constant
            # provided by the user is in y.
            raise ValueError("The constant target value must be "
                             "present in training data")

        # For a single 1d output, unwrap the per-output lists to plain values
        # (predict/predict_proba re-wrap them symmetrically).
        if self.n_outputs_ == 1 and not self.output_2d_:
            self.n_classes_ = self.n_classes_[0]
            self.classes_ = self.classes_[0]
            self.class_prior_ = self.class_prior_[0]

        return self

    def predict(self, X):
        """Perform classification on test vectors X.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Input vectors, where n_samples is the number of samples
            and n_features is the number of features.

        Returns
        -------
        y : array, shape = [n_samples] or [n_samples, n_outputs]
            Predicted target values for X.
        """
        if not hasattr(self, "classes_"):
            raise ValueError("DummyClassifier not fitted.")

        X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
        # numpy random_state expects Python int and not long as size argument
        # under Windows
        n_samples = int(X.shape[0])
        rs = check_random_state(self.random_state)

        n_classes_ = self.n_classes_
        classes_ = self.classes_
        class_prior_ = self.class_prior_
        constant = self.constant
        if self.n_outputs_ == 1:
            # Get same type even for self.n_outputs_ == 1
            n_classes_ = [n_classes_]
            classes_ = [classes_]
            class_prior_ = [class_prior_]
            constant = [constant]
        # Compute probability only once
        if self.strategy == "stratified":
            proba = self.predict_proba(X)
            if self.n_outputs_ == 1:
                proba = [proba]

        if self.sparse_output_:
            class_prob = None
            if self.strategy in ("most_frequent", "prior"):
                classes_ = [np.array([cp.argmax()]) for cp in class_prior_]

            elif self.strategy == "stratified":
                class_prob = class_prior_

            elif self.strategy == "uniform":
                raise ValueError("Sparse target prediction is not "
                                 "supported with the uniform strategy")

            elif self.strategy == "constant":
                classes_ = [np.array([c]) for c in constant]

            # NOTE(review): passes the raw ``self.random_state`` here while
            # the dense path below uses the checked ``rs`` — presumably
            # random_choice_csc validates it internally; confirm.
            y = random_choice_csc(n_samples, classes_, class_prob,
                                  self.random_state)
        else:
            if self.strategy in ("most_frequent", "prior"):
                y = np.tile([classes_[k][class_prior_[k].argmax()] for
                             k in range(self.n_outputs_)], [n_samples, 1])

            elif self.strategy == "stratified":
                y = np.vstack(classes_[k][proba[k].argmax(axis=1)] for
                              k in range(self.n_outputs_)).T

            elif self.strategy == "uniform":
                ret = [classes_[k][rs.randint(n_classes_[k], size=n_samples)]
                       for k in range(self.n_outputs_)]
                y = np.vstack(ret).T

            elif self.strategy == "constant":
                y = np.tile(self.constant, (n_samples, 1))

            if self.n_outputs_ == 1 and not self.output_2d_:
                y = np.ravel(y)

        return y

    def predict_proba(self, X):
        """
        Return probability estimates for the test vectors X.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Input vectors, where n_samples is the number of samples
            and n_features is the number of features.

        Returns
        -------
        P : array-like or list of array-like of shape = [n_samples, n_classes]
            Returns the probability of the sample for each class in
            the model, where classes are ordered arithmetically, for each
            output.
        """
        if not hasattr(self, "classes_"):
            raise ValueError("DummyClassifier not fitted.")

        X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
        # numpy random_state expects Python int and not long as size argument
        # under Windows
        n_samples = int(X.shape[0])
        rs = check_random_state(self.random_state)

        n_classes_ = self.n_classes_
        classes_ = self.classes_
        class_prior_ = self.class_prior_
        constant = self.constant
        if self.n_outputs_ == 1 and not self.output_2d_:
            # Get same type even for self.n_outputs_ == 1
            n_classes_ = [n_classes_]
            classes_ = [classes_]
            class_prior_ = [class_prior_]
            constant = [constant]

        P = []
        for k in range(self.n_outputs_):
            if self.strategy == "most_frequent":
                # One-hot on the most frequent class for every sample.
                ind = np.ones(n_samples, dtype=int) * class_prior_[k].argmax()
                out = np.zeros((n_samples, n_classes_[k]), dtype=np.float64)
                out[:, ind] = 1.0
            elif self.strategy == "prior":
                out = np.ones((n_samples, 1)) * class_prior_[k]

            elif self.strategy == "stratified":
                out = rs.multinomial(1, class_prior_[k], size=n_samples)

            elif self.strategy == "uniform":
                out = np.ones((n_samples, n_classes_[k]), dtype=np.float64)
                out /= n_classes_[k]

            elif self.strategy == "constant":
                # One-hot on the user-provided constant class.
                ind = np.where(classes_[k] == constant[k])
                out = np.zeros((n_samples, n_classes_[k]), dtype=np.float64)
                out[:, ind] = 1.0

            P.append(out)

        if self.n_outputs_ == 1 and not self.output_2d_:
            P = P[0]

        return P

    def predict_log_proba(self, X):
        """
        Return log probability estimates for the test vectors X.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Input vectors, where n_samples is the number of samples
            and n_features is the number of features.

        Returns
        -------
        P : array-like or list of array-like of shape = [n_samples, n_classes]
            Returns the log probability of the sample for each class in
            the model, where classes are ordered arithmetically for each
            output.
        """
        proba = self.predict_proba(X)
        if self.n_outputs_ == 1:
            return np.log(proba)
        else:
            return [np.log(p) for p in proba]


class DummyRegressor(BaseEstimator, RegressorMixin):
    """
    DummyRegressor is a regressor that makes predictions using
    simple rules.

    This regressor is useful as a simple baseline to compare with other
    (real) regressors. Do not use it for real problems.

    Read more in the :ref:`User Guide <dummy_estimators>`.

    Parameters
    ----------
    strategy : str
        Strategy to use to generate predictions.

        * "mean": always predicts the mean of the training set
        * "median": always predicts the median of the training set
        * "quantile": always predicts a specified quantile of the training set,
          provided with the quantile parameter.
        * "constant": always predicts a constant value that is provided by
          the user.

    constant : int or float or array of shape = [n_outputs]
        The explicit constant as predicted by the "constant" strategy. This
        parameter is useful only for the "constant" strategy.

    quantile : float in [0.0, 1.0]
        The quantile to predict using the "quantile" strategy. A quantile of
        0.5 corresponds to the median, while 0.0 to the minimum and 1.0 to the
        maximum.

    Attributes
    ----------
    constant_ : float or array of shape [n_outputs]
        Mean or median or quantile of the training targets or constant value
        given by the user.

    n_outputs_ : int,
        Number of outputs.

    output_2d_ : bool,
        True if the output at fit is 2d, else false.
    """

    def __init__(self, strategy="mean", constant=None, quantile=None):
        self.strategy = strategy
        self.constant = constant
        self.quantile = quantile

    def fit(self, X, y, sample_weight=None):
        """Fit the random regressor.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples
            and n_features is the number of features.

        y : array-like, shape = [n_samples] or [n_samples, n_outputs]
            Target values.

        sample_weight : array-like of shape = [n_samples], optional
            Sample weights.

        Returns
        -------
        self : object
            Returns self.
        """
        if self.strategy not in ("mean", "median", "quantile", "constant"):
            raise ValueError("Unknown strategy type: %s, expected "
                             "'mean', 'median', 'quantile' or 'constant'"
                             % self.strategy)

        y = check_array(y, ensure_2d=False)
        if len(y) == 0:
            raise ValueError("y must not be empty.")

        self.output_2d_ = y.ndim == 2
        if y.ndim == 1:
            y = np.reshape(y, (-1, 1))
        self.n_outputs_ = y.shape[1]

        check_consistent_length(X, y, sample_weight)

        if self.strategy == "mean":
            self.constant_ = np.average(y, axis=0, weights=sample_weight)

        elif self.strategy == "median":
            if sample_weight is None:
                self.constant_ = np.median(y, axis=0)
            else:
                self.constant_ = [_weighted_percentile(y[:, k], sample_weight,
                                                       percentile=50.)
                                  for k in range(self.n_outputs_)]

        elif self.strategy == "quantile":
            if self.quantile is None or not np.isscalar(self.quantile):
                raise ValueError("Quantile must be a scalar in the range "
                                 "[0.0, 1.0], but got %s." % self.quantile)

            percentile = self.quantile * 100.0
            if sample_weight is None:
                self.constant_ = np.percentile(y, axis=0, q=percentile)
            else:
                self.constant_ = [_weighted_percentile(y[:, k], sample_weight,
                                                       percentile=percentile)
                                  for k in range(self.n_outputs_)]

        elif self.strategy == "constant":
            if self.constant is None:
                raise TypeError("Constant target value has to be specified "
                                "when the constant strategy is used.")

            # NOTE(review): overwrites the ``constant`` hyperparameter with
            # its validated array form — side effect on estimator params.
            self.constant = check_array(self.constant,
                                        accept_sparse=['csr', 'csc', 'coo'],
                                        ensure_2d=False,
                                        ensure_min_samples=0)

            if self.output_2d_ and self.constant.shape[0] != y.shape[1]:
                raise ValueError(
                    "Constant target value should have "
                    "shape (%d, 1)." % y.shape[1])

            self.constant_ = self.constant

        # Stored as a row vector so predict() can broadcast over samples.
        self.constant_ = np.reshape(self.constant_, (1, -1))
        return self

    def predict(self, X):
        """
        Perform regression on test vectors X.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Input vectors, where n_samples is the number of samples
            and n_features is the number of features.

        Returns
        -------
        y : array, shape = [n_samples] or [n_samples, n_outputs]
            Predicted target values for X.
        """
        if not hasattr(self, "constant_"):
            raise ValueError("DummyRegressor not fitted.")

        X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
        n_samples = X.shape[0]

        # Broadcast the fitted constant over all samples.
        y = np.ones((n_samples, 1)) * self.constant_

        if self.n_outputs_ == 1 and not self.output_2d_:
            y = np.ravel(y)

        return y
bsd-3-clause
danlurie/C-PAC
CPAC/qc/blue.py
5
2048
#007ffe #007ffd #007efc #007efb #007dfa #007df9 #007cf8 #007cf7 #007bf6 #007bf5 #007af4 #007af3 #0079f2 #0079f1 #0078f0 #0078ef #0077ee #0077ed #0076ec #0076eb #0075ea #0075e9 #0074e8 #0074e7 #0073e6 #0073e5 #0072e4 #0072e3 #0071e2 #0071e1 #0070e0 #0070df #006fde #006fdd #006edc #006edb #006dda #006dd9 #006cd8 #006cd7 #006bd6 #006bd5 #006ad4 #006ad3 #0069d2 #0069d1 #0068d0 #0068cf #0067ce #0067cd #0066cc #0066cb #0065ca #0065c9 #0064c8 #0064c7 #0063c6 #0063c5 #0062c4 #0062c3 #0061c2 #0061c1 #0060c0 #0060bf #005fbf #005fbe #005ebd #005ebc #005dbb #005dba #005cb9 #005cb8 #005bb7 #005bb6 #005ab5 #005ab4 #0059b3 #0059b2 #0058b1 #0058b0 #0057af #0057ae #0056ad #0056ac #0055ab #0055aa #0054a9 #0054a8 #0053a7 #0053a6 #0052a5 #0052a4 #0051a3 #0051a2 #0050a1 #0050a0 #004f9f #004f9e #004e9d #004e9c #004d9b #004d9a #004c99 #004c98 #004b97 #004b96 #004a95 #004a94 #004993 #004992 #004891 #004890 #00478f #00478e #00468d #00468c #00458b #00458a #004489 #004488 #004387 #004386 #004285 #004284 #004183 #004182 #004081 #004080 #003f7f #003f7e #003e7d #003e7c #003d7b #003d7a #003c79 #003c78 #003b77 #003b76 #003a75 #003a74 #003973 #003972 #003871 #003870 #00376f #00376e #00366d #00366c #00356b #00356a #003469 #003468 #003367 #003366 #003265 #003264 #003163 #003162 #003061 #003060 #002f5f #002f5e #002e5d #002e5c #002d5b #002d5a #002c59 #002c58 #002b57 #002b56 #002a55 #002a54 #002953 #002952 #002851 #002850 #00274f #00274e #00264d #00264c #00254b #00254a #002449 #002448 #002347 #002346 #002245 #002244 #002143 #002142 #002041 #002040 #001f40 #001f3f #001e3e #001e3d #001d3c #001d3b #001c3a #001c39 #001b38 #001b37 #001a36 #001a35 #001934 #001933 #001832 #001831 #001730 #00172f #00162e #00162d #00152c #00152b #00142a #001429 #001328 #001327 #001226 #001225 #001124 #001123 #001022 #001021 #000f20 #000f1f #000e1e #000e1d #000d1c #000d1b #000c1a #000c19 #000b18 #000b17 #000a16 #000a15 #000914 #000913 #000812 #000811 #000710 #00070f #00060e #00060d #00050c #00050b #00040a #000409 #000308 #000307 
#000206 #000205 #000104 #000103 #000002 #000001
bsd-3-clause
BigFatNoob-NCSU/x9115george2
hw/code/6/models/kursawe.py
1
1196
from __future__ import print_function, division __author__ = 'panzer' from model import * from math import sqrt, sin class Kursawe(Model): def __init__(self): Model.__init__(self) self.__name__ = Kursawe.__name__ self.decisions=[] self.decisions.append(Decision("x1", -5, 5)) self.decisions.append(Decision("x2", -5, 5)) self.decisions.append(Decision("x3", -5, 5)) self.objectives = [] self.objectives.append(Objective("f1", -20, -4.86, to_minimize=True)) self.objectives.append(Objective("f2", -9.63, 22.90, to_minimize=True)) def evaluate(self, one): return [Kursawe.f1(one), Kursawe.f2(one)] @staticmethod def f1(ds): total = 0 for i in range(len(ds)-1): e = -0.2 * sqrt(ds[i]**2 + ds[i+1]**2) total+= -10*exp(e) return total @staticmethod def f2(ds): total = 0 for i in range(len(ds)): total+= abs(ds[i])**0.8 + 5*sin(ds[i]**3) return total @staticmethod def get_extreme_objectives(): o = Kursawe() f1s = [] f2s = [] for one in o.all_inputs(): f1s.append(Kursawe.f1(one)) f2s.append(Kursawe.f2(one)) print(min(f1s), max(f1s)) print(min(f2s), max(f2s))
mit
chrwu/PyGithub
github/PullRequestPart.py
74
3771
# -*- coding: utf-8 -*- # ########################## Copyrights and license ############################ # # # Copyright 2012 Vincent Jacques <[email protected]> # # Copyright 2012 Zearin <[email protected]> # # Copyright 2013 AKFish <[email protected]> # # Copyright 2013 Vincent Jacques <[email protected]> # # # # This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ # # # # PyGithub is free software: you can redistribute it and/or modify it under # # the terms of the GNU Lesser General Public License as published by the Free # # Software Foundation, either version 3 of the License, or (at your option) # # any later version. # # # # PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY # # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # # details. # # # # You should have received a copy of the GNU Lesser General Public License # # along with PyGithub. If not, see <http://www.gnu.org/licenses/>. 
# # # # ############################################################################## import github.GithubObject import github.Repository import github.NamedUser class PullRequestPart(github.GithubObject.NonCompletableGithubObject): """ This class represents PullRequestParts as returned for example by http://developer.github.com/v3/todo """ @property def label(self): """ :type: string """ return self._label.value @property def ref(self): """ :type: string """ return self._ref.value @property def repo(self): """ :type: :class:`github.Repository.Repository` """ return self._repo.value @property def sha(self): """ :type: string """ return self._sha.value @property def user(self): """ :type: :class:`github.NamedUser.NamedUser` """ return self._user.value def _initAttributes(self): self._label = github.GithubObject.NotSet self._ref = github.GithubObject.NotSet self._repo = github.GithubObject.NotSet self._sha = github.GithubObject.NotSet self._user = github.GithubObject.NotSet def _useAttributes(self, attributes): if "label" in attributes: # pragma no branch self._label = self._makeStringAttribute(attributes["label"]) if "ref" in attributes: # pragma no branch self._ref = self._makeStringAttribute(attributes["ref"]) if "repo" in attributes: # pragma no branch self._repo = self._makeClassAttribute(github.Repository.Repository, attributes["repo"]) if "sha" in attributes: # pragma no branch self._sha = self._makeStringAttribute(attributes["sha"]) if "user" in attributes: # pragma no branch self._user = self._makeClassAttribute(github.NamedUser.NamedUser, attributes["user"])
gpl-3.0
cxxgtxy/tensorflow
tensorflow/contrib/slim/python/slim/model_analyzer.py
166
3509
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tools for analyzing the operations and variables in a TensorFlow graph.

To analyze the operations in a graph:

  images, labels = LoadData(...)
  predictions = MyModel(images)

  slim.model_analyzer.analyze_ops(tf.get_default_graph(), print_info=True)

To analyze the model variables in a graph:

  variables = tf.model_variables()
  slim.model_analyzer.analyze_vars(variables, print_info=False)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function


def tensor_description(var):
  """Returns a compact and informative string about a tensor.

  Args:
    var: A tensor variable.

  Returns:
    a string with type and size, e.g.: (float32 1x8x8x1024).
  """
  # Join the dimensions with 'x', e.g. "1x8x8x1024" (empty for rank 0).
  dims_text = 'x'.join(str(dim) for dim in var.get_shape())
  return '(' + str(var.dtype.name) + ' ' + dims_text + ')'


def analyze_ops(graph, print_info=False):
  """Compute the estimated size of the ops.outputs in the graph.

  Args:
    graph: the graph containing the operations.
    print_info: Optional, if true print ops and their outputs.

  Returns:
    total size of the ops.outputs
  """
  if print_info:
    print('---------')
    print('Operations: name -> (type shapes) [size]')
    print('---------')
  grand_total = 0
  for op in graph.get_operations():
    op_total = 0
    descriptions = []
    for out in op.outputs:
      # An unknown element count (None) is treated as a zero-sized output.
      op_total += out.get_shape().num_elements() or 0
      if out.get_shape():
        descriptions.append(tensor_description(out))
    if print_info:
      print(op.name, '\t->', ', '.join(descriptions),
            '[' + str(op_total) + ']')
    grand_total += op_total
  return grand_total


def analyze_vars(variables, print_info=False):
  """Prints the names and shapes of the variables.

  Args:
    variables: list of variables, for example tf.global_variables().
    print_info: Optional, if true print variables and their shape.

  Returns:
    (total size of the variables, total bytes of the variables)
  """
  if print_info:
    print('---------')
    print('Variables: name (type shape) [size]')
    print('---------')
  size_total = 0
  byte_total = 0
  for variable in variables:
    # An unknown element count (None) is treated as a zero-sized variable.
    num_elements = variable.get_shape().num_elements() or 0
    num_bytes = num_elements * variable.dtype.size
    size_total += num_elements
    byte_total += num_bytes
    if print_info:
      print(variable.name, tensor_description(variable),
            '[%d, bytes: %d]' % (num_elements, num_bytes))
  if print_info:
    print('Total size of variables: %d' % size_total)
    print('Total bytes of variables: %d' % byte_total)
  return size_total, byte_total
apache-2.0
MartinHjelmare/home-assistant
homeassistant/components/mqtt/camera.py
7
4190
"""Camera that loads a picture from an MQTT topic.""" import asyncio import logging import voluptuous as vol from homeassistant.components import camera, mqtt from homeassistant.components.camera import PLATFORM_SCHEMA, Camera from homeassistant.const import CONF_NAME from homeassistant.core import callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.typing import ConfigType, HomeAssistantType from . import ( ATTR_DISCOVERY_HASH, CONF_UNIQUE_ID, MqttDiscoveryUpdate, subscription) from .discovery import MQTT_DISCOVERY_NEW, clear_discovery_hash _LOGGER = logging.getLogger(__name__) CONF_TOPIC = 'topic' DEFAULT_NAME = 'MQTT Camera' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Required(CONF_TOPIC): mqtt.valid_subscribe_topic, vol.Optional(CONF_UNIQUE_ID): cv.string, }) async def async_setup_platform(hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None): """Set up MQTT camera through configuration.yaml.""" await _async_setup_entity(config, async_add_entities) async def async_setup_entry(hass, config_entry, async_add_entities): """Set up MQTT camera dynamically through MQTT discovery.""" async def async_discover(discovery_payload): """Discover and add a MQTT camera.""" try: discovery_hash = discovery_payload.pop(ATTR_DISCOVERY_HASH) config = PLATFORM_SCHEMA(discovery_payload) await _async_setup_entity(config, async_add_entities, discovery_hash) except Exception: if discovery_hash: clear_discovery_hash(hass, discovery_hash) raise async_dispatcher_connect( hass, MQTT_DISCOVERY_NEW.format(camera.DOMAIN, 'mqtt'), async_discover) async def _async_setup_entity(config, async_add_entities, discovery_hash=None): """Set up the MQTT Camera.""" async_add_entities([MqttCamera(config, discovery_hash)]) class MqttCamera(MqttDiscoveryUpdate, Camera): """representation of a MQTT 
camera.""" def __init__(self, config, discovery_hash): """Initialize the MQTT Camera.""" self._config = config self._unique_id = config.get(CONF_UNIQUE_ID) self._sub_state = None self._qos = 0 self._last_image = None Camera.__init__(self) MqttDiscoveryUpdate.__init__(self, discovery_hash, self.discovery_update) async def async_added_to_hass(self): """Subscribe MQTT events.""" await super().async_added_to_hass() await self._subscribe_topics() async def discovery_update(self, discovery_payload): """Handle updated discovery message.""" config = PLATFORM_SCHEMA(discovery_payload) self._config = config await self._subscribe_topics() self.async_write_ha_state() async def _subscribe_topics(self): """(Re)Subscribe to topics.""" @callback def message_received(msg): """Handle new MQTT messages.""" self._last_image = msg.payload self._sub_state = await subscription.async_subscribe_topics( self.hass, self._sub_state, {'state_topic': {'topic': self._config[CONF_TOPIC], 'msg_callback': message_received, 'qos': self._qos, 'encoding': None}}) async def async_will_remove_from_hass(self): """Unsubscribe when removed.""" self._sub_state = await subscription.async_unsubscribe_topics( self.hass, self._sub_state) @asyncio.coroutine def async_camera_image(self): """Return image response.""" return self._last_image @property def name(self): """Return the name of this camera.""" return self._config[CONF_NAME] @property def unique_id(self): """Return a unique ID.""" return self._unique_id
apache-2.0
gsehub/edx-platform
common/djangoapps/third_party_auth/api/tests/test_views.py
9
12482
# pylint: disable=no-member
"""
Tests for the Third Party Auth REST API
"""
import unittest

import ddt
from django.urls import reverse
from django.http import QueryDict
from mock import patch
from provider.constants import CONFIDENTIAL
from provider.oauth2.models import Client, AccessToken
from openedx.core.lib.api.permissions import ApiKeyHeaderPermission
from rest_framework.test import APITestCase
from django.conf import settings
from django.test.utils import override_settings
from social_django.models import UserSocialAuth
from student.tests.factories import UserFactory
from third_party_auth.api.permissions import ThirdPartyAuthProviderApiPermission
from third_party_auth.models import ProviderApiPermissions
from third_party_auth.tests.testutil import ThirdPartyAuthTestMixin

# API key accepted by the server-to-server (X-EDX-API-KEY) auth path:
VALID_API_KEY = "i am a key"
IDP_SLUG_TESTSHIB = 'testshib'
PROVIDER_ID_TESTSHIB = 'saml-' + IDP_SLUG_TESTSHIB

ALICE_USERNAME = "alice"
CARL_USERNAME = "carl"
STAFF_USERNAME = "staff"
ADMIN_USERNAME = "admin"
# These users will be created and linked to third party accounts:
LINKED_USERS = (ALICE_USERNAME, STAFF_USERNAME, ADMIN_USERNAME)
PASSWORD = "edx"


def get_mapping_data_by_usernames(usernames):
    """ Generate mapping data used in response """
    return [{'username': username, 'remote_id': 'remote_' + username} for username in usernames]


class TpaAPITestCase(ThirdPartyAuthTestMixin, APITestCase):
    """ Base test class """

    def setUp(self):
        """ Create users for use in the tests """
        super(TpaAPITestCase, self).setUp()

        # Enable a mix of OAuth2 and SAML providers (LinkedIn deliberately disabled,
        # so tests can verify only enabled providers appear in responses):
        google = self.configure_google_provider(enabled=True)
        self.configure_facebook_provider(enabled=True)
        self.configure_linkedin_provider(enabled=False)
        self.enable_saml()
        testshib = self.configure_saml_provider(
            name='TestShib',
            enabled=True,
            slug=IDP_SLUG_TESTSHIB
        )

        # Create several users and link each user to Google and TestShib
        for username in LINKED_USERS:
            make_superuser = (username == ADMIN_USERNAME)
            # The staff user gets is_staff; the admin user gets both flags:
            make_staff = (username == STAFF_USERNAME) or make_superuser
            user = UserFactory.create(
                username=username,
                password=PASSWORD,
                is_staff=make_staff,
                is_superuser=make_superuser
            )
            UserSocialAuth.objects.create(
                user=user,
                provider=google.backend_name,
                uid='{}@gmail.com'.format(username),
            )
            # SAML uids are stored with a "<slug>:" prefix:
            UserSocialAuth.objects.create(
                user=user,
                provider=testshib.backend_name,
                uid='{}:remote_{}'.format(testshib.slug, username),
            )
        # Create another user not linked to any providers:
        UserFactory.create(username=CARL_USERNAME, password=PASSWORD)


@override_settings(EDX_API_KEY=VALID_API_KEY)
@ddt.ddt
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class UserViewAPITests(TpaAPITestCase):
    """ Test the Third Party Auth User REST API """

    def expected_active(self, username):
        """ The JSON active providers list response expected for the given user """
        if username not in LINKED_USERS:
            return []
        return [
            {
                "provider_id": "oa2-google-oauth2",
                "name": "Google",
                "remote_id": "{}@gmail.com".format(username),
            },
            {
                "provider_id": PROVIDER_ID_TESTSHIB,
                "name": "TestShib",
                # The "testshib:" prefix is stored in the UserSocialAuth.uid field but should
                # not be present in the 'remote_id', since that's an implementation detail:
                "remote_id": 'remote_' + username,
            },
        ]

    @ddt.data(
        # Any user can query their own list of providers
        (ALICE_USERNAME, ALICE_USERNAME, 200),
        (CARL_USERNAME, CARL_USERNAME, 200),
        # A regular user cannot query another user nor deduce the existence of users based on the status code
        (ALICE_USERNAME, STAFF_USERNAME, 403),
        (ALICE_USERNAME, "nonexistent_user", 403),
        # Even Staff cannot query other users
        (STAFF_USERNAME, ALICE_USERNAME, 403),
        # But admins can
        (ADMIN_USERNAME, ALICE_USERNAME, 200),
        (ADMIN_USERNAME, CARL_USERNAME, 200),
        (ADMIN_USERNAME, "invalid_username", 404),
    )
    @ddt.unpack
    def test_list_connected_providers(self, request_user, target_user, expect_result):
        """ Session-authenticated access control for the user's provider list. """
        self.client.login(username=request_user, password=PASSWORD)
        url = reverse('third_party_auth_users_api', kwargs={'username': target_user})

        response = self.client.get(url)
        self.assertEqual(response.status_code, expect_result)
        if expect_result == 200:
            self.assertIn("active", response.data)
            # Order of providers in the "active" list is not significant:
            self.assertItemsEqual(response.data["active"], self.expected_active(target_user))

    @ddt.data(
        # A server with a valid API key can query any user's list of providers
        (VALID_API_KEY, ALICE_USERNAME, 200),
        (VALID_API_KEY, "invalid_username", 404),
        ("i am an invalid key", ALICE_USERNAME, 403),
        (None, ALICE_USERNAME, 403),
    )
    @ddt.unpack
    def test_list_connected_providers__withapi_key(self, api_key, target_user, expect_result):
        """ API-key (server-to-server) access control for the user's provider list. """
        url = reverse('third_party_auth_users_api', kwargs={'username': target_user})
        response = self.client.get(url, HTTP_X_EDX_API_KEY=api_key)
        self.assertEqual(response.status_code, expect_result)
        if expect_result == 200:
            self.assertIn("active", response.data)
            self.assertItemsEqual(response.data["active"], self.expected_active(target_user))


@override_settings(EDX_API_KEY=VALID_API_KEY)
@ddt.ddt
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class UserMappingViewAPITests(TpaAPITestCase):
    """ Test the Third Party Auth User Mapping REST API """

    @ddt.data(
        (VALID_API_KEY, PROVIDER_ID_TESTSHIB, 200, get_mapping_data_by_usernames(LINKED_USERS)),
        ("i am an invalid key", PROVIDER_ID_TESTSHIB, 403, None),
        (None, PROVIDER_ID_TESTSHIB, 403, None),
        (VALID_API_KEY, 'non-existing-id', 404, []),
    )
    @ddt.unpack
    def test_list_all_user_mappings_withapi_key(self, api_key, provider_id, expect_code, expect_data):
        """ API-key access to the full username<->remote_id mapping for a provider. """
        url = reverse('third_party_auth_user_mapping_api', kwargs={'provider_id': provider_id})
        response = self.client.get(url, HTTP_X_EDX_API_KEY=api_key)
        self._verify_response(response, expect_code, expect_data)

    @ddt.data(
        (PROVIDER_ID_TESTSHIB, 'valid-token', 200, get_mapping_data_by_usernames(LINKED_USERS)),
        ('non-existing-id', 'valid-token', 404, []),
        (PROVIDER_ID_TESTSHIB, 'invalid-token', 401, []),
    )
    @ddt.unpack
    def test_list_all_user_mappings_oauth2(self, provider_id, access_token, expect_code, expect_data):
        """ OAuth2 bearer-token access to the user mapping endpoint. """
        url = reverse('third_party_auth_user_mapping_api', kwargs={'provider_id': provider_id})
        # create oauth2 auth data
        user = UserFactory.create(username='api_user')
        client = Client.objects.create(name='oauth2_client', client_type=CONFIDENTIAL)
        token = AccessToken.objects.create(user=user, client=client)
        # Grant this client permission for the provider under test:
        ProviderApiPermissions.objects.create(client=client, provider_id=provider_id)

        # 'valid-token' is a placeholder in the ddt data; swap in the real token value:
        if access_token == 'valid-token':
            access_token = token.token

        response = self.client.get(url, HTTP_AUTHORIZATION='Bearer {}'.format(access_token))
        self._verify_response(response, expect_code, expect_data)

    @ddt.data(
        ({'username': [ALICE_USERNAME, STAFF_USERNAME]}, 200,
         get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),
        # Carl is not linked to TestShib, so filtering on his remote_id/username returns nothing for him:
        ({'remote_id': ['remote_' + ALICE_USERNAME, 'remote_' + STAFF_USERNAME, 'remote_' + CARL_USERNAME]}, 200,
         get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),
        ({'username': [ALICE_USERNAME, CARL_USERNAME, STAFF_USERNAME]}, 200,
         get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),
        # username and remote_id filters may be combined; results are unioned:
        ({'username': [ALICE_USERNAME], 'remote_id': ['remote_' + STAFF_USERNAME]}, 200,
         get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),
    )
    @ddt.unpack
    def test_user_mappings_with_query_params_comma_separated(self, query_params, expect_code, expect_data):
        """ test queries like username=user1,user2,... """
        base_url = reverse(
            'third_party_auth_user_mapping_api',
            kwargs={'provider_id': PROVIDER_ID_TESTSHIB}
        )
        params = []
        for attr in ['username', 'remote_id']:
            if attr in query_params:
                # Join multiple values into a single comma-separated parameter:
                params.append('{}={}'.format(attr, ','.join(query_params[attr])))
        url = "{}?{}".format(base_url, '&'.join(params))
        response = self.client.get(url, HTTP_X_EDX_API_KEY=VALID_API_KEY)
        self._verify_response(response, expect_code, expect_data)

    @ddt.data(
        ({'username': [ALICE_USERNAME, STAFF_USERNAME]}, 200,
         get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),
        ({'remote_id': ['remote_' + ALICE_USERNAME, 'remote_' + STAFF_USERNAME, 'remote_' + CARL_USERNAME]}, 200,
         get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),
        ({'username': [ALICE_USERNAME, CARL_USERNAME, STAFF_USERNAME]}, 200,
         get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),
        ({'username': [ALICE_USERNAME], 'remote_id': ['remote_' + STAFF_USERNAME]}, 200,
         get_mapping_data_by_usernames([ALICE_USERNAME, STAFF_USERNAME])),
    )
    @ddt.unpack
    def test_user_mappings_with_query_params_multi_value_key(self, query_params, expect_code, expect_data):
        """ test queries like username=user1&username=user2&... """
        base_url = reverse(
            'third_party_auth_user_mapping_api',
            kwargs={'provider_id': PROVIDER_ID_TESTSHIB}
        )
        # QueryDict.setlist produces repeated-key query strings when urlencoded:
        params = QueryDict('', mutable=True)
        for attr in ['username', 'remote_id']:
            if attr in query_params:
                params.setlist(attr, query_params[attr])
        url = "{}?{}".format(base_url, params.urlencode())
        response = self.client.get(url, HTTP_X_EDX_API_KEY=VALID_API_KEY)
        self._verify_response(response, expect_code, expect_data)

    def test_user_mappings_only_return_requested_idp_mapping_by_provider_id(self):
        """ Mappings for a second SAML provider must not leak into results for the first. """
        testshib2 = self.configure_saml_provider(name='TestShib2', enabled=True, slug='testshib2')
        username = 'testshib2user'
        user = UserFactory.create(
            username=username,
            password=PASSWORD,
            is_staff=False,
            is_superuser=False
        )
        UserSocialAuth.objects.create(
            user=user,
            provider=testshib2.backend_name,
            uid='{}:{}'.format(testshib2.slug, username),
        )

        # Query the original TestShib provider; the testshib2 user must be absent:
        url = reverse('third_party_auth_user_mapping_api', kwargs={'provider_id': PROVIDER_ID_TESTSHIB})
        response = self.client.get(url, HTTP_X_EDX_API_KEY=VALID_API_KEY)
        self.assertEqual(response.status_code, 200)
        self._verify_response(response, 200, get_mapping_data_by_usernames(LINKED_USERS))

    @ddt.data(
        (True, True, 200),
        (False, True, 200),
        (True, False, 200),
        (False, False, 403)
    )
    @ddt.unpack
    def test_user_mapping_permission_logic(self, api_key_permission, token_permission, expect):
        """ Access is granted if EITHER the API-key or the OAuth2-token permission passes. """
        url = reverse('third_party_auth_user_mapping_api', kwargs={'provider_id': PROVIDER_ID_TESTSHIB})
        with patch.object(ApiKeyHeaderPermission, 'has_permission', return_value=api_key_permission):
            with patch.object(ThirdPartyAuthProviderApiPermission, 'has_permission', return_value=token_permission):
                response = self.client.get(url)
                self.assertEqual(response.status_code, expect)

    def _verify_response(self, response, expect_code, expect_result):
        """ verify the items in data_list exists in response and data_results matches results in response """
        self.assertEqual(response.status_code, expect_code)
        if expect_code == 200:
            # Response is paginated; results order is not significant:
            for item in ['results', 'count', 'num_pages']:
                self.assertIn(item, response.data)
            self.assertItemsEqual(response.data['results'], expect_result)
agpl-3.0
JCBarahona/edX
common/djangoapps/third_party_auth/tests/specs/test_twitter.py
86
1311
""" Separate integration test for Twitter which is an OAuth1 provider. """ from mock import patch from third_party_auth.tests.specs import base class TwitterIntegrationTest(base.Oauth2IntegrationTest): """Integration tests for Twitter backend.""" def setUp(self): super(TwitterIntegrationTest, self).setUp() self.provider = self.configure_twitter_provider( enabled=True, key='twitter_oauth1_key', secret='twitter_oauth1_secret', ) # To test an OAuth1 provider, we need to patch an additional method: patcher = patch( 'social.backends.twitter.TwitterOAuth.unauthorized_token', create=True, return_value="unauth_token" ) patcher.start() self.addCleanup(patcher.stop) TOKEN_RESPONSE_DATA = { 'access_token': 'access_token_value', 'token_type': 'bearer', } USER_RESPONSE_DATA = { 'id': 10101010, 'name': 'Bob Loblaw', 'description': 'A Twitter User', 'screen_name': 'bobloblaw', 'location': 'Twitterverse', 'followers_count': 77, 'verified': False, } def get_username(self): response_data = self.get_response_data() return response_data.get('screen_name')
agpl-3.0
ztemt/N939Sc_5.1_kenel
tools/perf/scripts/python/futex-contention.py
11261
1486
# futex contention # (c) 2010, Arnaldo Carvalho de Melo <[email protected]> # Licensed under the terms of the GNU GPL License version 2 # # Translation of: # # http://sourceware.org/systemtap/wiki/WSFutexContention # # to perf python scripting. # # Measures futex contention import os, sys sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from Util import * process_names = {} thread_thislock = {} thread_blocktime = {} lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time process_names = {} # long-lived pid-to-execname mapping def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm, nr, uaddr, op, val, utime, uaddr2, val3): cmd = op & FUTEX_CMD_MASK if cmd != FUTEX_WAIT: return # we don't care about originators of WAKE events process_names[tid] = comm thread_thislock[tid] = uaddr thread_blocktime[tid] = nsecs(s, ns) def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm, nr, ret): if thread_blocktime.has_key(tid): elapsed = nsecs(s, ns) - thread_blocktime[tid] add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed) del thread_blocktime[tid] del thread_thislock[tid] def trace_begin(): print "Press control+C to stop and show the summary" def trace_end(): for (tid, lock) in lock_waits: min, max, avg, count = lock_waits[tid, lock] print "%s[%d] lock %x contended %d times, %d avg ns" % \ (process_names[tid], tid, lock, count, avg)
gpl-2.0
Grumpy-Mike/Mikes-Pi-Bakery
Physical Sequencer/videoSequencer.py
1
5316
#!/usr/bin/env python # Video sequencer with track bars # and multi pixel sampling import time, pygame, pickle import os, sys, math, copy import cv2 pygame.init() # initialise pygame pygame.mixer.quit() pygame.mixer.init(frequency=22050, size=-16, channels=4, buffer=512) pygame.event.set_allowed(None) samples =["clap","closedhat","cowbell","crash","hitom","lotom"] colours = ["Red","Orange","Yellow","Green","Blue","Magenta"] maxCol = 6 seqSound = [ pygame.mixer.Sound("sounds/"+samples[sound]+".wav") for sound in range(0,maxCol)] cv2.namedWindow("Video Sequencer", cv2.CV_WINDOW_AUTOSIZE) camera_index = 0 capture = cv2.VideoCapture(camera_index) if not capture.isOpened(): capture.open(camera_index) hList = [ 0.0 for h in range (0,64)] cList = [ 0.0 for c in range (0,64)] nextNote = time.time() cromaThresh = 0.23 # threshold for a colour def track_mouse(event,x,y,flags,param): pass cv2.setMouseCallback("Video Sequencer",track_mouse) def main(): global capture,samplePoint f = open('grid16x4.txt','r') # change to use another points file samplePoint = pickle.load(f) f.close() print"type Esc to quit - d to save colours to a CSV file and quit" for c in range(0,maxCol): print colours[c],"for",samples[c] cv2.createTrackbar('BPM',"Video Sequencer",120,200,nothing) switch = '0 : Stop \n1 : Run' cv2.createTrackbar(switch,"Video Sequencer",0,1,nothing) startTime = time.time() + 3 while startTime > time.time(): # let web cam's AGC settle ret, frame = capture.read() cv2.imshow("Video Sequencer", frame) c = cv2.waitKey(1) while True: ret = True for i in range (0,5) : #read off 5 frames ret, frame = capture.read() points(frame) cv2.imshow("Video Sequencer", frame) getKey() if cv2.getTrackbarPos(switch,"Video Sequencer") == 1 : soundOut(frame) def getKey(): k = cv2.waitKey(1)& 0xFF if k == 27: terminate(False) if k == ord('d'): terminate(True) def soundOut(frame): global nextNote bpm = 60.0 / float(cv2.getTrackbarPos('BPM',"Video Sequencer")) if bpm > 2.0: bpm = 2.0 for i in 
range(0,16): temp = cv2.copyMakeBorder(frame,0,0,0,0,1) cv2.line(temp,samplePoint[i],samplePoint[i+16],(0,255,0),3) cv2.line(temp,samplePoint[i+16],samplePoint[i+32],(0,255,0),3) cv2.line(temp,samplePoint[i+32],samplePoint[i+48],(0,255,0),3) while nextNote > time.time() : pass nextNote = time.time() + bpm cv2.imshow("Video Sequencer", temp) getKey() for j in range(0,4): index = i + (j*16) if cList[index] > cromaThresh : seqSound[getColour(hList[index])].play() def nothing(x): #dummy call back function for track bars pass def output(): # CSV format print"Saving colours to file - colours.csv" with open('colours.csv','w') as f: f.write("Hole, Hue, Croma, Colour \n") for c in range(0,16): f.write("\n") for r in range(0,4): i=(r*16) + c entry = str(i)+ ", "+"%.2f" % hList[i] f.write(entry+", ") entry = "%.2f" % cList[i] f.write(entry+", ") if cList[i] > cromaThresh : f.write(describeColour(hList[i])+"\n") else : f.write("neutral \n") f.close() def getColour(h): colour = -1 if h < 1 or h > 340: colour = 0 if h>1 and h< 30: colour = 1 elif h>30 and h< 90 : colour = 2 elif h > 90 and h < 190 : colour = 3 elif h> 190 and h< 300 : colour = 4 elif h> 300 and h < 340 : colour = 5 return colour def describeColour(h): colourNumber = getColour(h) if colourNumber == -1: colour = str(h)+" is unknown" else: colour = colours[colourNumber] return colour def points(frame): # outline sample area and get the colours for point in range(0,64): surround(samplePoint[point][0],samplePoint[point][1] ,(0,0,0),frame,point) def surround(x, y, col, frame, place): getCol(x,y, frame, place) frame[y, x-2] = col frame[y+2,x-2] = col frame[y-2,x-2] = col frame[y+2,x,] = col frame[y-2,x] = col frame[y,x+2] = col frame[y+2,x+2] = col frame[y-2,x+2] = col def getCol(x,y, frame,place): global hList,cList bt = rt = gt = 0 m = 255.0 * 9.0 for ox in range(-1,2): for oy in range(-1,2): blue, green, red = frame[y+oy,x+ox] bt += blue gt += green rt += red r = float(rt) / m # normalise colours g = float(gt) / m b 
= float(bt) / m alp = 0.5*(2*r - g - b) bet = 0.866*(g - b) hList[place] = math.degrees(math.atan2(bet,alp)) if hList[place] <0 : hList[place] = 360 + hList[place] cList[place] = math.sqrt(alp * alp + bet * bet) def terminate(debug): # close down the program if debug : output() # colours to a csv file print ("Closing down please wait") pygame.quit() # close pygame capture.release() cv2.destroyAllWindows() cv2.waitKey(1) os._exit(1) if __name__ == '__main__': main()
gpl-2.0